diff --git a/.bazelrc b/.bazelrc
index 55bf613e10f..af0b2fd5985 100644
--- a/.bazelrc
+++ b/.bazelrc
@@ -15,6 +15,18 @@
 #
 # TFLM Bazel configuration file.
 
+# The semver-format version label embedded in build outputs when and where
+# stamping is used. Note TFLM does not currently publish semver-versioned
+# releases; however, this value is used where a version label is required, such
+# as in the Python distribution package.
+build --embed_label=0
+
+# Get stamp values from a script's output
+build --workspace_status_command=./tools/workspace_status.sh
+
+# TODO(b/315853820): Needed for Bazel 7.0, until migrated to bzlmod
+build --noenable_bzlmod
+
 # Use the following C++ standard
 build --cxxopt -std=c++17
diff --git a/.bazelversion b/.bazelversion
new file mode 100644
index 00000000000..66ce77b7ead
--- /dev/null
+++ b/.bazelversion
@@ -0,0 +1 @@
+7.0.0
diff --git a/.github/mergify.yml b/.github/mergify.yml
index 79b9fc9a182..4d3b6adae6d 100644
--- a/.github/mergify.yml
+++ b/.github/mergify.yml
@@ -1,6 +1,7 @@
 queue_rules:
   - name: default
     checks_timeout: 2 h
+    branch_protection_injection_mode: queue
     conditions:
       - base=main
       - label=ci:ready_to_merge
@@ -14,7 +15,6 @@ pull_request_rules:
     actions:
       queue:
         name: default
-        require_branch_protection: true
         method: squash
         commit_message_template: |
           {{ title }} (#{{ number }})
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 87286751b46..1386751a7d5 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -35,7 +35,6 @@ jobs:
         run: |
           sudo ci/install_bazelisk.sh
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -56,7 +55,6 @@ jobs:
         run: |
           sudo ci/install_bazelisk.sh
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -77,8 +75,12 @@ jobs:
         run: |
           sudo ci/install_bazelisk.sh
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
+      - name: Fix kernel mmap rnd bits
+        # ASan in LLVM 14, as provided in Ubuntu 22.04, is incompatible with
+        # the high-entropy ASLR of the much newer kernels that GitHub runners
+        # use, leading to random crashes: https://reviews.llvm.org/D148280
+        run: sudo sysctl vm.mmap_rnd_bits=28
       - name: Test
         run: |
           tensorflow/lite/micro/tools/ci_build/test_bazel_msan.sh
@@ -98,8 +100,12 @@ jobs:
         run: |
           sudo ci/install_bazelisk.sh
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
+      - name: Fix kernel mmap rnd bits
+        # ASan in LLVM 14, as provided in Ubuntu 22.04, is incompatible with
+        # the high-entropy ASLR of the much newer kernels that GitHub runners
+        # use, leading to random crashes: https://reviews.llvm.org/D148280
+        run: sudo sysctl vm.mmap_rnd_bits=28
       - name: Test
         run: |
           tensorflow/lite/micro/tools/ci_build/test_bazel_asan.sh
@@ -118,7 +124,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -139,7 +144,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -160,7 +164,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -213,7 +216,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -234,7 +236,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -256,7 +257,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -278,7 +278,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -300,7 +299,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
diff --git a/.github/workflows/cortex_m.yml b/.github/workflows/cortex_m.yml
index 26fe77d8fb7..a1e8168fd98 100644
--- a/.github/workflows/cortex_m.yml
+++ b/.github/workflows/cortex_m.yml
@@ -14,24 +14,40 @@ on:
   # Allow manually triggering of the workflow.
   workflow_dispatch: {}
 
+  pull_request_target:
+    types:
+      - closed
+      - labeled
+
+  workflow_call:
+    inputs:
+      trigger-sha:
+        required: true
+        type: string
+    secrets:
+      tflm-bot-token:
+        required: true
+
 jobs:
   cortex_m_generic:
     runs-on: ubuntu-latest
 
     if: |
      github.event_name == 'workflow_dispatch' ||
-      (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro')
+      (github.event_name == 'schedule' &&
+      github.repository == 'tensorflow/tflite-micro') ||
+      (github.event.action == 'labeled' &&
+      github.event.label.name == 'ci:run_full')
 
     name: Cortex-M Generic
     steps:
       - uses: actions/setup-python@v4
-        with: 
+        with:
           python-version: '3.10'
       - uses: actions/checkout@v2
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
@@ -42,18 +58,20 @@ jobs:
     if: |
       github.event_name == 'workflow_dispatch' ||
-      (github.event_name == 'schedule' && github.repository == 'tensorflow/tflite-micro')
+      (github.event_name == 'schedule' &&
+      github.repository == 'tensorflow/tflite-micro') ||
+      (github.event.action == 'labeled' &&
+      github.event.label.name == 'ci:run_full')
 
     name: Cortex-M Corstone 300 (FVP)
     steps:
       - uses: actions/setup-python@v4
-        with: 
+        with:
           python-version: '3.10'
       - uses: actions/checkout@v2
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
diff --git a/.github/workflows/generate_integration_tests.yml b/.github/workflows/generate_integration_tests.yml
index 91c8f1889d4..74ed9448e76 100644
--- a/.github/workflows/generate_integration_tests.yml
+++ b/.github/workflows/generate_integration_tests.yml
@@ -30,7 +30,6 @@ jobs:
       - name: Install dependencies
         run: |
           pip3 install Pillow
-          pip3 install Wave
           pip3 install numpy
       - name: Test
         run: |
diff --git a/.github/workflows/pypi_build.yml b/.github/workflows/pypi_build.yml
new file mode 100644
index 00000000000..52a8075f05e
--- /dev/null
+++ b/.github/workflows/pypi_build.yml
@@ -0,0 +1,54 @@
+# YAML schema for GitHub Actions:
+# https://help.github.com/en/actions/automating-your-workflow-with-github-actions/workflow-syntax-for-github-actions
+#
+# Helpful YAML parser to clarify YAML syntax:
+# https://yaml-online-parser.appspot.com/
+#
+
+name: PyPI Build
+
+on:
+  schedule:
+    # 1pm UTC is 6am or 7am PT depending on daylight saving time.
+    - cron: '0 13 * * *'
+
+  workflow_dispatch:
+    inputs:
+      upload-type:
+        description: 'Upload type'
+        required: true
+        default: 'pypi'
+        type: choice
+        options:
+          - 'pypi'
+          - 'no upload'
+env:
+  TWINE_PASSWORD: ${{ secrets.PYPI_API_KEY }}
+
+jobs:
+  pypi-build:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          token: ${{ secrets.TFLM_BOT_REPO_TOKEN }}
+      - name: Build Wheel 3.10
+        run: |
+          python/tflite_micro/pypi_build.sh cp310
+      - name: Build Wheel 3.11
+        run: |
+          python/tflite_micro/pypi_build.sh cp311
+      - name: Check Directory Output
+        run: |
+          ls -l bazel-pypi-out
+      - name: Install Twine
+        run: |
+          python -m pip install --upgrade pip setuptools wheel
+          pip install twine
+      - name: Upload to PyPI
+        if: |
+          github.event.inputs.upload-type == 'pypi' ||
+          github.event_name == 'schedule'
+        run: |
+          python/tflite_micro/pypi_upload.sh \
+            bazel-pypi-out/tflite_micro-*.whl
\ No newline at end of file
diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml
index d505a9d44b5..0c5434e4599 100644
--- a/.github/workflows/sync.yml
+++ b/.github/workflows/sync.yml
@@ -62,5 +62,5 @@ jobs:
           author: TFLM-bot
           body: "BUG=automated sync from upstream\nNO_CHECK_TFLITE_FILES=automated sync from upstream"
           labels: bot:sync-tf, ci:run
-          reviewers: advaitjain
+          reviewers: rascani
 
diff --git a/.github/workflows/tests_entry.yml b/.github/workflows/tests_entry.yml
index 07cf61757d7..1ed4de16210 100644
--- a/.github/workflows/tests_entry.yml
+++ b/.github/workflows/tests_entry.yml
@@ -28,9 +28,10 @@ jobs:
     runs-on: ubuntu-latest
     steps:
      - name: fail-without-labels
-        if: ${{ !(contains(github.event.pull_request.labels.*.name, 'ci:run') ||
-          contains(github.event.pull_request.labels.*.name, 'ci:ready_to_merge') ||
-          contains(github.event.pull_request.labels.*.name, 'ci:run_full')) }}
+        if: github.event.action == 'labeled' &&
+          !(github.event.label.name == 'ci:run' ||
+          github.event.label.name == 'ci:ready_to_merge' ||
+          github.event.label.name == 'ci:run_full')
         run: exit 1
 
   ci-ready-to-merge:
@@ -60,7 +61,8 @@ jobs:
     needs: ci-ready-to-merge
     steps:
       - name: remove-cirun
-        if: ${{ contains(github.event.pull_request.labels.*.name, 'ci:run') }}
+        if: github.event.action == 'labeled' &&
+          github.event.label.name == 'ci:run'
         uses: actions/github-script@v5
         with:
           github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }}
@@ -78,7 +80,8 @@ jobs:
     needs: ci-run
     steps:
       - name: remove-cirun-full
-        if: ${{ contains(github.event.pull_request.labels.*.name, 'ci:run_full') }}
+        if: github.event.action == 'labeled' &&
+          github.event.label.name == 'ci:run_full'
         uses: actions/github-script@v5
         with:
           github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }}
@@ -102,7 +105,7 @@ jobs:
           echo "PR description requires a BUG= line with issue number."
echo "See https://testing.googleblog.com/2017/09/code-health-providing-context-with.html for additional context" exit 1 - + call-ci: needs: ci-run uses: ./.github/workflows/ci.yml @@ -128,7 +131,7 @@ jobs: call-check-tflite-files: needs: ci-run uses: ./.github/workflows/check_tflite_files.yml - with: + with: trigger-sha: ${{ github.event.pull_request.head.sha }} pr-number: ${{ github.event.pull_request.number }} pr-body: ${{ github.event.pull_request.body }} diff --git a/.github/workflows/tests_post.yml b/.github/workflows/tests_post.yml index 4919d1dcd54..d15b0921840 100644 --- a/.github/workflows/tests_post.yml +++ b/.github/workflows/tests_post.yml @@ -11,9 +11,29 @@ on: - labeled jobs: + ci_run_full: + runs-on: ubuntu-latest + steps: + - name: remove-cirun-full + if: github.event.action == 'labeled' && + github.event.label.name == 'ci:run_full' + uses: actions/github-script@v5 + with: + github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} + script: | + github.rest.issues.removeLabel({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + name: 'ci:run_full' + }) + continue-on-error: true + riscv_postmerge: - if: ${{ github.event.pull_request.merged == true || - contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + needs: [ci_run_full] + if: always() && github.event.pull_request.merged == true || + (github.event.action == 'labeled' && + github.event.label.name == 'ci:run_full') uses: ./.github/workflows/riscv_postmerge.yml with: trigger-sha: ${{ github.event.pull_request.head.sha }} @@ -21,18 +41,28 @@ jobs: tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} xtensa_postmerge: - if: ${{ github.event.pull_request.merged == true || - contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + needs: [ci_run_full] + if: always() && github.event.pull_request.merged == true || + (github.event.action == 'labeled' && + github.event.label.name == 'ci:run_full') uses: ./.github/workflows/xtensa_postmerge.yml with: trigger-sha: ${{ github.event.pull_request.head.sha }} secrets: tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + cortex_m_ci_full: + needs: [ci_run_full] + uses: ./.github/workflows/cortex_m.yml + with: + trigger-sha: ${{ github.event.pull_request.head.sha }} + secrets: + tflm-bot-token: ${{ secrets.TFLM_BOT_PACKAGE_READ_TOKEN }} + issue_on_error: - needs: [xtensa_postmerge] + needs: [riscv_postmerge, xtensa_postmerge, cortex_m_ci_full] if: ${{ always() && contains(needs.*.result, 'failure') && - !contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} + github.event.pull_request.merged == true }} uses: ./.github/workflows/issue_on_error.yml with: repo: ${{ github.repository}} @@ -44,22 +74,3 @@ jobs: pr_link: ${{ github.event.pull_request._links.html.href }} secrets: token: ${{ secrets.GITHUB_TOKEN }} - - ci_run_full: - needs: [issue_on_error] - runs-on: ubuntu-latest - steps: - - name: remove-cirun-full - if: ${{ contains(github.event.pull_request.labels.*.name, 'ci:run_full') }} - uses: actions/github-script@v5 - with: - github-token: ${{ secrets.TFLM_BOT_REPO_TOKEN }} - script: | - github.rest.issues.removeLabel({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - name: 'ci:run_full' - }) - continue-on-error: true - diff --git a/.github/workflows/xtensa_postmerge.yml b/.github/workflows/xtensa_postmerge.yml index 8e1188e7f11..d91cc945200 100644 --- a/.github/workflows/xtensa_postmerge.yml +++ b/.github/workflows/xtensa_postmerge.yml @@ -31,7 
+31,7 @@ jobs: - run: | rm -rf .git echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin - docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.1 \ + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.3 \ /bin/bash -c \ "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh EXTERNAL tflite-micro/" @@ -46,7 +46,7 @@ jobs: - run: | rm -rf .git echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin - docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.1 \ + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.3 \ /bin/bash -c \ "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh RUN_TESTS tflite-micro/" @@ -61,6 +61,6 @@ jobs: - run: | rm -rf .git echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin - docker run --env XTENSA_TOOLS_VERSION=RI-2019.2-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_11:0.1 \ + docker run --env XTENSA_TOOLS_VERSION=RI-2019.2-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_11:0.2 \ /bin/bash -c \ "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifimini.sh tflite-micro/" diff --git a/.github/workflows/xtensa_presubmit.yml b/.github/workflows/xtensa_presubmit.yml index 58c42581b8a..519aff9994d 100644 --- a/.github/workflows/xtensa_presubmit.yml +++ b/.github/workflows/xtensa_presubmit.yml @@ -32,7 +32,7 @@ jobs: - run: | rm -rf .git echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin - docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.1 \ + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.3 \ /bin/bash -c \ "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh RUN_NO_TESTS tflite-micro/" @@ -47,7 +47,7 @@ jobs: - run: | rm -rf .git echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin - docker run --env XTENSA_TOOLS_VERSION=RI-2022.9-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_hifi5:0.1 \ + docker run --env XTENSA_TOOLS_VERSION=RI-2022.9-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_hifi5:0.2 \ /bin/bash -c \ "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh tflite-micro/" @@ -62,6 +62,6 @@ jobs: - run: | rm -rf .git echo ${{ secrets.tflm-bot-token }} | docker login ghcr.io -u tflm-bot --password-stdin - docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.1 \ + docker run --env XTENSA_TOOLS_VERSION=RI-2020.4-linux --rm -v `pwd`:/opt/tflite-micro ghcr.io/tflm-bot/xtensa_xplorer_13:0.3 \ /bin/bash -c \ "cd /opt && tflite-micro/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh EXTERNAL tflite-micro/" diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a44257092c4..e1410af4f91 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,25 +4,27 @@ https://github.com/ekalinin/github-markdown-toc#auto-insert-and-update-toc --> - * [How to Contribute](#how-to-contribute) 
- * [Contributor License Agreement](#contributor-license-agreement)
- * [Community Guidelines](#community-guidelines)
- * [Code Contribution Guidelines](#code-contribution-guidelines)
- * [General Pull Request Guidelines](#general-pull-request-guidelines)
- * [Guidelines for Specific Contribution Categories](#guidelines-for-specific-contribution-categories)
- * [Bug Fixes](#bug-fixes)
- * [Reference Kernel Implementations](#reference-kernel-implementations)
- * [Optimized Kernel Implementations](#optimized-kernel-implementations)
- * [New Target / Platform / IDE / Examples](#new-target--platform--ide--examples)
- * [Development Workflow Notes](#development-workflow-notes)
- * [Initial Setup](#initial-setup)
- * [Before submitting your PR](#before-submitting-your-pr)
- * [During the PR review](#during-the-pr-review)
- * [Reviewer notes](#reviewer-notes)
- * [Python notes](#python-notes)
- * [Continuous Integration System](#continuous-integration-system)
-
-
+* [How to Contribute](#how-to-contribute)
+  * [Contributor License Agreement](#contributor-license-agreement)
+  * [Community Guidelines](#community-guidelines)
+* [Code Contribution Guidelines](#code-contribution-guidelines)
+  * [General Pull Request Guidelines](#general-pull-request-guidelines)
+  * [Guidelines for Specific Contribution Categories](#guidelines-for-specific-contribution-categories)
+    * [Bug Fixes](#bug-fixes)
+    * [Reference Kernel Implementations](#reference-kernel-implementations)
+    * [Optimized Kernel Implementations](#optimized-kernel-implementations)
+    * [New Target / Platform / IDE / Examples](#new-target--platform--ide--examples)
+* [Development Environment](#development-environment)
+  * [Prerequisites](#prerequisites)
+  * [Recommendations](#recommendations)
+* [Development Workflow Notes](#development-workflow-notes)
+  * [Before submitting your PR](#before-submitting-your-pr)
+  * [During the PR review](#during-the-pr-review)
+  * [Reviewer notes](#reviewer-notes)
+  * [Python notes](#python-notes)
+* [Continuous Integration System](#continuous-integration-system)
+
+
@@ -143,10 +145,20 @@ Please see the [optimized kernel implementations guide](tensorflow/lite/micro/do
 
 Please see the [new platform support guide](tensorflow/lite/micro/docs/new_platform_support.md)
 for documentation on how to add TFLM support for your particular platform.
 
+# Development Environment
 
-# Development Workflow Notes
+We support amd64-architecture development and testing on Ubuntu 22.04, although
+other OSes may work.
+
+## Prerequisites
+
+TFLM's primary build system is [Bazel](https://bazel.build). Add
+[Bazelisk](https://github.com/bazelbuild/bazelisk) as the `bazel` executable in
+your PATH ([e.g., copy it to `/usr/local/bin/bazel`](ci/install_bazelisk.sh)) to
+automatically download and run the correct Bazel version as specified in
+`//.bazelversion`.
 
-## Initial Setup
+## Recommendations
 
 Below are some tips that might be useful and improve the development experience.
 
@@ -156,7 +168,9 @@ Below are some tips that might be useful and improve the development experience.
 
 * Code search the [TfLite Micro codebase](https://sourcegraph.com/github.com/tensorflow/tflite-micro@main)
   on Sourcegraph. And optionally install the [plugin that enables GitHub
   integration](https://docs.sourcegraph.com/integration/github#github-integration-with-sourcegraph).
 
-* Install [bazel](ci/install_bazelisk.sh) and [buildifier](ci/install_buildifier.sh).
+* Install
+  [Buildifier](https://github.com/bazelbuild/buildtools/blob/master/buildifier/README.md)
+  ([e.g.](ci/install_buildifier.sh)) to format Bazel BUILD and .bzl files.
 
 * Install the latest clang and clang-format. For example,
   [here](ci/Dockerfile.micro) is the what we do for the TFLM continuous
   integration Docker container.
 
@@ -164,8 +178,8 @@ Below are some tips that might be useful and improve the development experience.
 * Get a copy of [cpplint](https://github.com/google/styleguide/tree/gh-pages/cpplint)
   or install it:
 
-* Install Pillow and Wave. For example, [here](ci/Dockerfile.micro) is what we
-  do for the TFLM continuous integration Docker container.
+* Install Pillow. For example, [here](ci/Dockerfile.micro) is what we do for
+  the TFLM continuous integration Docker container.
 
 ```
 pip install cpplint
 ```
 
@@ -184,6 +198,8 @@ Below are some tips that might be useful and improve the development experience.
   cp tensorflow/lite/micro/tools/dev_setup/pre-push.tflm .git/hooks/pre-push
   ```
 
+# Development Workflow Notes
+
 ## Before submitting your PR
 
 1. Run in-place clang-format on all the files that are modified in your git
diff --git a/WORKSPACE b/WORKSPACE
index f881df9f5be..7e76e1e3dae 100644
--- a/WORKSPACE
+++ b/WORKSPACE
@@ -15,54 +15,66 @@
 
 workspace(name = "tflite_micro")
 
+load("//tensorflow:workspace.bzl", "workspace")
+
+workspace()
+
 load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
 
 # compile_commands.json generator
 http_archive(
     name = "hedron_compile_commands",
-    url = "https://github.com/hedronvision/bazel-compile-commands-extractor/archive/1266d6a25314d165ca78d0061d3399e909b7920e.tar.gz",
-    strip_prefix = "bazel-compile-commands-extractor-1266d6a25314d165ca78d0061d3399e909b7920e",
     sha256 = "bacabfe758676fdc19e4bea7c4a3ac99c7e7378d259a9f1054d341c6a6b44ff6",
+    strip_prefix = "bazel-compile-commands-extractor-1266d6a25314d165ca78d0061d3399e909b7920e",
+    url = "https://github.com/hedronvision/bazel-compile-commands-extractor/archive/1266d6a25314d165ca78d0061d3399e909b7920e.tar.gz",
 )
+
 load("@hedron_compile_commands//:workspace_setup.bzl", "hedron_compile_commands_setup")
+
 hedron_compile_commands_setup()
 
 http_archive(
     name = "rules_python",
-    sha256 = "497ca47374f48c8b067d786b512ac10a276211810f4a580178ee9b9ad139323a",
-    strip_prefix = "rules_python-0.16.1",
-    url = "https://github.com/bazelbuild/rules_python/archive/refs/tags/0.16.1.tar.gz",
+    sha256 = "0a8003b044294d7840ac7d9d73eef05d6ceb682d7516781a4ec62eeb34702578",
+    strip_prefix = "rules_python-0.24.0",
+    url = "https://github.com/bazelbuild/rules_python/archive/refs/tags/0.24.0.tar.gz",
 )
 
+# Read the Python package dependencies of the build environment. To modify
+# them, see //third_party:python_requirements.in.
 load("@rules_python//python:pip.bzl", "pip_parse")
 
 pip_parse(
     name = "tflm_pip_deps",
     requirements_lock = "//third_party:python_requirements.txt",
 )
 
+# Create repositories for each Python package dependency.
load("@tflm_pip_deps//:requirements.bzl", "install_deps", "requirement") -install_deps() -load("//tensorflow:workspace.bzl", "workspace") -workspace() +install_deps() http_archive( - name = "pybind11_bazel", - strip_prefix = "pybind11_bazel-faf56fb3df11287f26dbc66fdedf60a2fc2c6631", - urls = ["https://github.com/pybind/pybind11_bazel/archive/faf56fb3df11287f26dbc66fdedf60a2fc2c6631.zip"], - sha256 = "a185aa68c93b9f62c80fcb3aadc3c83c763854750dc3f38be1dadcb7be223837", + name = "pybind11_bazel", + sha256 = "a185aa68c93b9f62c80fcb3aadc3c83c763854750dc3f38be1dadcb7be223837", + strip_prefix = "pybind11_bazel-faf56fb3df11287f26dbc66fdedf60a2fc2c6631", + urls = ["https://github.com/pybind/pybind11_bazel/archive/faf56fb3df11287f26dbc66fdedf60a2fc2c6631.zip"], ) http_archive( - name = "pybind11", - build_file = "@pybind11_bazel//:pybind11.BUILD", - strip_prefix = "pybind11-2.10.0", - urls = ["https://github.com/pybind/pybind11/archive/refs/tags/v2.10.0.tar.gz"], - sha256 = "eacf582fa8f696227988d08cfc46121770823839fe9e301a20fbce67e7cd70ec", + name = "pybind11", + build_file = "@pybind11_bazel//:pybind11.BUILD", + sha256 = "eacf582fa8f696227988d08cfc46121770823839fe9e301a20fbce67e7cd70ec", + strip_prefix = "pybind11-2.10.0", + urls = ["https://github.com/pybind/pybind11/archive/refs/tags/v2.10.0.tar.gz"], ) load("@pybind11_bazel//:python_configure.bzl", "python_configure") -python_configure(name = "local_config_python", python_version = "3") + +python_configure( + name = "local_config_python", + python_version = "3", +) load("//python:py_pkg_cc_deps.bzl", "py_pkg_cc_deps") @@ -76,5 +88,13 @@ py_pkg_cc_deps( name = "tensorflow_cc_deps", includes = ["tensorflow/include"], libs = ["tensorflow/libtensorflow_framework.so.2"], - pkg = requirement("tensorflow-cpu"), + pkg = requirement("tensorflow"), +) + +# Optimized kernel deps +http_archive( + name = "nnlib_hifi4", + build_file = "@tflite_micro//third_party/xtensa/nnlib_hifi4:nnlib_hifi4.BUILD", + strip_prefix = "nnlib-hifi4-34f5f995f28d298ae2b6e2ba6e76c32a5cb34989", + urls = ["https://github.com/foss-xtensa/nnlib-hifi4/archive/34f5f995f28d298ae2b6e2ba6e76c32a5cb34989.zip"], ) diff --git a/ci/Dockerfile.micro b/ci/Dockerfile.micro index 88d9cae9331..0515cca93a7 100644 --- a/ci/Dockerfile.micro +++ b/ci/Dockerfile.micro @@ -18,7 +18,11 @@ # CI container unnecessarily large. FROM python:3.10-bullseye AS qemu_builder RUN apt-get update + RUN apt-get install -y ninja-build wget xz-utils +RUN apt-get install -y libglib2.0-dev +RUN apt-get install -y build-essential libcairo2-dev libpango1.0-dev libjpeg-dev libgif-dev librsvg2-dev + COPY ci/install_qemu.sh ./ # Installs all built files into /qemu_install rather than /usr/local default. RUN ./install_qemu.sh /tmp/qemu_install @@ -55,7 +59,6 @@ RUN pip install yapf==0.32.0 # https://github.com/tensorflow/tflite-micro/pull/337 # https://github.com/tensorflow/tflite-micro/pull/410 RUN pip install Pillow -RUN pip install Wave # necessary bits for create_size_log scripts RUN pip install pandas diff --git a/ci/Dockerfile.xtensa_xplorer_11 b/ci/Dockerfile.xtensa_xplorer_11 new file mode 100644 index 00000000000..fa7f3168dec --- /dev/null +++ b/ci/Dockerfile.xtensa_xplorer_11 @@ -0,0 +1,49 @@ +FROM python:3.10-bullseye +ENV DEBIAN_FRONTEND noninterative + +RUN \ + apt update && \ + apt install -y \ + automake \ + build-essential \ + curl \ + git \ + unzip \ + wget + +WORKDIR /opt/xtensa + +COPY ./Xplorer-8.0.11-linux-x64-installer.bin . +COPY ./mini1m1m_RI_2019_2_linux_w_keys.tgz . 
+COPY ./XtensaTools_RI_2022_9_linux.tgz .
+COPY ci/install_cores_xplorer_11.sh .
+COPY ci/install_bazelisk.sh .
+
+RUN \
+  pip3 install Pillow
+
+RUN \
+  pip3 install numpy
+
+RUN \
+  chmod +x Xplorer-8.0.11-linux-x64-installer.bin && \
+  ./Xplorer-8.0.11-linux-x64-installer.bin --prefix /opt/xtensa --mode unattended
+
+ENV XTENSA_BASE "/opt/xtensa/XtDevTools/install/"
+
+RUN \
+  chmod +x install_cores_xplorer_11.sh && \
+  ./install_cores_xplorer_11.sh
+
+RUN ./install_bazelisk.sh
+
+RUN \
+  rm Xplorer-8.0.11-linux-x64-installer.bin && \
+  rm mini1m1m_RI_2019_2_linux_w_keys.tgz && \
+  rm XtensaTools_RI_2022_9_linux.tgz && \
+  rm -f install_cores_xplorer_11.sh
+
+ENV LIC_DIR "/opt/xtensa/licenses/RI-2020.4-linux"
+ENV LM_LICENSE_FILE "/opt/xtensa/licenses/RI-2019.2-linux/mini1m1m_RG/misc/license.dat"
+
+CMD /opt/xtensa/XtDevTools/install/tools/RI-2020.4-linux/XtensaTools/bin/xt-clang++ --xtensa-core=
diff --git a/ci/Dockerfile.xtensa_xplorer_13 b/ci/Dockerfile.xtensa_xplorer_13
new file mode 100644
index 00000000000..d50be6c3a35
--- /dev/null
+++ b/ci/Dockerfile.xtensa_xplorer_13
@@ -0,0 +1,51 @@
+FROM python:3.10-bullseye
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN \
+  apt update && \
+  apt install -y \
+    automake \
+    build-essential \
+    curl \
+    git \
+    unzip \
+    wget
+
+WORKDIR /opt/xtensa
+
+COPY ./Xplorer-8.0.13-linux-x64-installer.bin .
+COPY ./F1_190305_swupgrade_linux.tgz .
+COPY ./P6_200528_linux.tgz .
+COPY ./HIFI_190304_swupgrade_linux.tgz .
+COPY ci/install_cores_xplorer_13.sh .
+COPY ci/install_bazelisk.sh .
+
+RUN \
+  pip3 install Pillow
+
+RUN \
+  pip3 install numpy
+
+RUN \
+  chmod +x Xplorer-8.0.13-linux-x64-installer.bin && \
+  ./Xplorer-8.0.13-linux-x64-installer.bin --prefix /opt/xtensa --mode unattended
+
+ENV XTENSA_BASE "/opt/xtensa/XtDevTools/install/"
+
+RUN \
+  chmod +x install_cores_xplorer_13.sh && \
+  ./install_cores_xplorer_13.sh
+
+RUN ./install_bazelisk.sh
+
+RUN \
+  rm Xplorer-8.0.13-linux-x64-installer.bin && \
+  rm F1_190305_swupgrade_linux.tgz && \
+  rm P6_200528_linux.tgz && \
+  rm HIFI_190304_swupgrade_linux.tgz && \
+  rm -f install_cores_xplorer_13.sh
+
+ENV LIC_DIR "/opt/xtensa/licenses/RI-2020.4-linux"
+ENV LM_LICENSE_FILE "${LIC_DIR}/F1_190305_swupgrade/misc/license.dat:${LIC_DIR}/AE_HiFi5_LE5_AO_FP_XC/misc/license.dat:${LIC_DIR}/P6_200528/misc/license.dat:${LIC_DIR}/HIFI_190304_swupgrade/misc/license.dat"
+
+CMD /opt/xtensa/XtDevTools/install/tools/RI-2020.4-linux/XtensaTools/bin/xt-clang++ --xtensa-core=
diff --git a/ci/Dockerfile.xtensa_xplorer_solo b/ci/Dockerfile.xtensa_xplorer_solo
new file mode 100644
index 00000000000..e7fe20590d1
--- /dev/null
+++ b/ci/Dockerfile.xtensa_xplorer_solo
@@ -0,0 +1,49 @@
+FROM python:3.10-bullseye
+ENV DEBIAN_FRONTEND noninteractive
+
+RUN \
+  apt update && \
+  apt install -y \
+    automake \
+    build-essential \
+    curl \
+    git \
+    unzip \
+    wget
+
+WORKDIR /opt/xtensa
+
+COPY ./Xplorer-solo-9.0.19-linux-x64-installer.bin .
+COPY ./PRD_H5_RDO_07_01_2022_linux.tgz .
+COPY ./XtensaTools_RI_2022_9_linux.tgz .
+COPY ci/install_cores_xplorer_solo.sh .
+COPY ci/install_bazelisk.sh .
+
+RUN \
+  pip3 install Pillow
+
+RUN \
+  pip3 install numpy
+
+RUN \
+  chmod +x Xplorer-solo-9.0.19-linux-x64-installer.bin && \
+  ./Xplorer-solo-9.0.19-linux-x64-installer.bin --prefix /opt/xtensa --mode unattended
+
+ENV XTENSA_BASE "/opt/xtensa/XtDevTools/install/"
+
+RUN \
+  chmod +x install_cores_xplorer_solo.sh && \
+  ./install_cores_xplorer_solo.sh
+
+RUN ./install_bazelisk.sh
+
+RUN \
+  rm Xplorer-solo-9.0.19-linux-x64-installer.bin && \
+  rm PRD_H5_RDO_07_01_2022_linux.tgz && \
+  rm XtensaTools_RI_2022_9_linux.tgz && \
+  rm -f install_cores_xplorer_solo.sh
+
+ENV LIC_DIR "/opt/xtensa/licenses/RI-2020.4-linux"
+ENV LM_LICENSE_FILE "/opt/xtensa/licenses/RI-2022.9-linux/PRD_H5_RDO_07_01_2022/misc/license.dat"
+
+CMD /opt/xtensa/XtDevTools/install/tools/RI-2020.4-linux/XtensaTools/bin/xt-clang++ --xtensa-core=
diff --git a/ci/flatbuffers_for_tf_sync/BUILD b/ci/flatbuffers_for_tf_sync/BUILD
deleted file mode 100644
index 82bab3ffd96..00000000000
--- a/ci/flatbuffers_for_tf_sync/BUILD
+++ /dev/null
@@ -1 +0,0 @@
-# This empty BUILD file is required to make Bazel treat this directory as a package.
diff --git a/ci/flatbuffers_for_tf_sync/BUILD.system b/ci/flatbuffers_for_tf_sync/BUILD.system
deleted file mode 100644
index 8fe4d7a5907..00000000000
--- a/ci/flatbuffers_for_tf_sync/BUILD.system
+++ /dev/null
@@ -1,43 +0,0 @@
-licenses(["notice"])  # Apache 2.0
-
-filegroup(
-    name = "LICENSE.txt",
-    visibility = ["//visibility:public"],
-)
-
-# Public flatc library to compile flatbuffer files at runtime.
-cc_library(
-    name = "flatbuffers",
-    linkopts = ["-lflatbuffers"],
-    visibility = ["//visibility:public"],
-)
-
-# Public flatc compiler library.
-cc_library(
-    name = "flatc_library",
-    linkopts = ["-lflatbuffers"],
-    visibility = ["//visibility:public"],
-)
-
-genrule(
-    name = "lnflatc",
-    outs = ["flatc.bin"],
-    cmd = "ln -s $$(which flatc) $@",
-)
-
-# Public flatc compiler.
-sh_binary(
-    name = "flatc",
-    srcs = ["flatc.bin"],
-    visibility = ["//visibility:public"],
-)
-
-cc_library(
-    name = "runtime_cc",
-    visibility = ["//visibility:public"],
-)
-
-py_library(
-    name = "runtime_py",
-    visibility = ["//visibility:public"],
-)
diff --git a/ci/flatbuffers_for_tf_sync/build_defs.bzl b/ci/flatbuffers_for_tf_sync/build_defs.bzl
deleted file mode 100644
index 94516de2873..00000000000
--- a/ci/flatbuffers_for_tf_sync/build_defs.bzl
+++ /dev/null
@@ -1,639 +0,0 @@
-"""BUILD rules for generating flatbuffer files."""
-
-load("@build_bazel_rules_android//android:rules.bzl", "android_library")
-
-flatc_path = "@flatbuffers//:flatc"
-zip_files = "//tensorflow/lite/tools:zip_files"
-
-DEFAULT_INCLUDE_PATHS = [
-    "./",
-    "$(GENDIR)",
-    "$(BINDIR)",
-]
-
-DEFAULT_FLATC_ARGS = [
-    "--no-union-value-namespacing",
-    "--gen-object-api",
-]
-
-def flatbuffer_library_public(
-        name,
-        srcs,
-        outs,
-        language_flag,
-        out_prefix = "",
-        includes = [],
-        include_paths = [],
-        compatible_with = [],
-        flatc_args = DEFAULT_FLATC_ARGS,
-        reflection_name = "",
-        reflection_visibility = None,
-        output_to_bindir = False):
-    """Generates code files for reading/writing the given flatbuffers in the requested language using the public compiler.
-
-    Outs:
-      filegroup(name): all generated source files.
-      Fileset([reflection_name]): (Optional) all generated reflection binaries.
-
-    Args:
-      name: Rule name.
-      srcs: Source .fbs files. Sent in order to the compiler.
-      outs: Output files from flatc.
-      language_flag: Target language flag. One of [-c, -j, -js].
-      out_prefix: Prepend this path to the front of all generated files except on
-        single source targets. Usually is a directory name.
-      includes: Optional, list of filegroups of schemas that the srcs depend on.
-      include_paths: Optional, list of paths the includes files can be found in.
-      compatible_with: Optional, passed to genrule for environments this rule
-        can be built for.
-      flatc_args: Optional, list of additional arguments to pass to flatc.
-      reflection_name: Optional, if set this will generate the flatbuffer
-        reflection binaries for the schemas.
-      reflection_visibility: The visibility of the generated reflection Fileset.
-      output_to_bindir: Passed to genrule for output to bin directory.
-    """
-    include_paths_cmd = ["-I %s" % (s) for s in include_paths]
-
-    # '$(@D)' when given a single source target will give the appropriate
-    # directory. Appending 'out_prefix' is only necessary when given a build
-    # target with multiple sources.
-    output_directory = (
-        ("-o $(@D)/%s" % (out_prefix)) if len(srcs) > 1 else ("-o $(@D)")
-    )
-    genrule_cmd = " ".join([
-        "for f in $(SRCS); do",
-        "$(location %s)" % (flatc_path),
-        " ".join(flatc_args),
-        " ".join(include_paths_cmd),
-        language_flag,
-        output_directory,
-        "$$f;",
-        "done",
-    ])
-    native.genrule(
-        name = name,
-        srcs = srcs,
-        outs = outs,
-        output_to_bindir = output_to_bindir,
-        compatible_with = compatible_with,
-        tools = includes + [flatc_path],
-        cmd = genrule_cmd,
-        message = "Generating flatbuffer files for %s:" % (name),
-    )
-    if reflection_name:
-        reflection_genrule_cmd = " ".join([
-            "for f in $(SRCS); do",
-            "$(location %s)" % (flatc_path),
-            "-b --schema",
-            " ".join(flatc_args),
-            " ".join(include_paths_cmd),
-            language_flag,
-            output_directory,
-            "$$f;",
-            "done",
-        ])
-        reflection_outs = [
-            (out_prefix + "%s.bfbs") % (s.replace(".fbs", "").split("/")[-1])
-            for s in srcs
-        ]
-        native.genrule(
-            name = "%s_srcs" % reflection_name,
-            srcs = srcs,
-            outs = reflection_outs,
-            output_to_bindir = output_to_bindir,
-            compatible_with = compatible_with,
-            tools = includes + [flatc_path],
-            cmd = reflection_genrule_cmd,
-            message = "Generating flatbuffer reflection binary for %s:" % (name),
-        )
-        # TODO(b/114456773): Make bazel rules proper and supported by flatbuffer
-        # Have to comment this since FilesetEntry is not supported in bazel
-        # skylark.
-        # native.Fileset(
-        #     name = reflection_name,
-        #     out = "%s_out" % reflection_name,
-        #     entries = [
-        #         native.FilesetEntry(files = reflection_outs),
-        #     ],
-        #     visibility = reflection_visibility,
-        #     compatible_with = compatible_with,
-        # )
-
-def flatbuffer_cc_library(
-        name,
-        srcs,
-        srcs_filegroup_name = "",
-        out_prefix = "",
-        includes = [],
-        include_paths = [],
-        compatible_with = [],
-        flatc_args = DEFAULT_FLATC_ARGS,
-        visibility = None,
-        srcs_filegroup_visibility = None,
-        gen_reflections = False):
-    '''A cc_library with the generated reader/writers for the given flatbuffer definitions.
-
-    Outs:
-      filegroup([name]_srcs): all generated .h files.
-      filegroup(srcs_filegroup_name if specified, or [name]_includes if not):
-        Other flatbuffer_cc_library's can pass this in for their `includes`
-        parameter, if they depend on the schemas in this library.
-      Fileset([name]_reflection): (Optional) all generated reflection binaries.
-      cc_library([name]): library with sources and flatbuffers deps.
-
-    Remarks:
-      ** Because the genrule used to call flatc does not have any trivial way of
-      computing the output list of files transitively generated by includes and
-      --gen-includes (the default) being defined for flatc, the --gen-includes
-      flag will not work as expected. The way around this is to add a dependency
-      to the flatbuffer_cc_library defined alongside the flatc included Fileset.
-      For example you might define:
-
-      flatbuffer_cc_library(
-          name = "my_fbs",
-          srcs = [ "schemas/foo.fbs" ],
-          includes = [ "//third_party/bazz:bazz_fbs_includes" ],
-      )
-
-      In which foo.fbs includes a few files from the Fileset defined at
-      //third_party/bazz:bazz_fbs_includes. When compiling the library that
-      includes foo_generated.h, and therefore has my_fbs as a dependency, it
-      will fail to find any of the bazz *_generated.h files unless you also
-      add bazz's flatbuffer_cc_library to your own dependency list, e.g.:
-
-      cc_library(
-          name = "my_lib",
-          deps = [
-              ":my_fbs",
-              "//third_party/bazz:bazz_fbs"
-          ],
-      )
-
-      Happy dependent Flatbuffering!
-
-    Args:
-      name: Rule name.
-      srcs: Source .fbs files. Sent in order to the compiler.
-      srcs_filegroup_name: Name of the output filegroup that holds srcs. Pass this
-        filegroup into the `includes` parameter of any other
-        flatbuffer_cc_library that depends on this one's schemas.
-      out_prefix: Prepend this path to the front of all generated files. Usually
-        is a directory name.
-      includes: Optional, list of filegroups of schemas that the srcs depend on.
-        ** SEE REMARKS BELOW **
-      include_paths: Optional, list of paths the includes files can be found in.
-      compatible_with: Optional, passed to genrule for environments this rule
-        can be built for
-      flatc_args: Optional list of additional arguments to pass to flatc
-        (e.g. --gen-mutable).
-      visibility: The visibility of the generated cc_library. By default, use the
-        default visibility of the project.
-      srcs_filegroup_visibility: The visibility of the generated srcs filegroup.
-        By default, use the value of the visibility parameter above.
-      gen_reflections: Optional, if true this will generate the flatbuffer
-        reflection binaries for the schemas.
-    '''
-    output_headers = [
-        (out_prefix + "%s_generated.h") % (s.replace(".fbs", "").split("/")[-1])
-        for s in srcs
-    ]
-    reflection_name = "%s_reflection" % name if gen_reflections else ""
-
-    flatbuffer_library_public(
-        name = "%s_srcs" % (name),
-        srcs = srcs,
-        outs = output_headers,
-        language_flag = "-c",
-        out_prefix = out_prefix,
-        includes = includes,
-        include_paths = include_paths,
-        compatible_with = compatible_with,
-        flatc_args = flatc_args,
-        reflection_name = reflection_name,
-        reflection_visibility = visibility,
-    )
-    native.cc_library(
-        name = name,
-        hdrs = output_headers,
-        srcs = output_headers,
-        features = [
-            "-parse_headers",
-        ],
-        deps = [
-            "@flatbuffers//:runtime_cc",
-        ],
-        includes = ["."],
-        linkstatic = 1,
-        visibility = visibility,
-        compatible_with = compatible_with,
-    )
-
-    # A filegroup for the `srcs`. That is, all the schema files for this
-    # Flatbuffer set.
-    native.filegroup(
-        name = srcs_filegroup_name if srcs_filegroup_name else "%s_includes" % (name),
-        srcs = srcs,
-        visibility = srcs_filegroup_visibility if srcs_filegroup_visibility != None else visibility,
-        compatible_with = compatible_with,
-    )
-
-# Custom provider to track dependencies transitively.
-FlatbufferInfo = provider(
-    fields = {
-        "transitive_srcs": "flatbuffer schema definitions.",
-    },
-)
-
-def _flatbuffer_schemas_aspect_impl(target, ctx):
-    _ignore = [target]
-    transitive_srcs = depset()
-    if hasattr(ctx.rule.attr, "deps"):
-        for dep in ctx.rule.attr.deps:
-            if FlatbufferInfo in dep:
-                transitive_srcs = depset(dep[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs])
-    if hasattr(ctx.rule.attr, "srcs"):
-        for src in ctx.rule.attr.srcs:
-            if FlatbufferInfo in src:
-                transitive_srcs = depset(src[FlatbufferInfo].transitive_srcs, transitive = [transitive_srcs])
-            for f in src.files:
-                if f.extension == "fbs":
-                    transitive_srcs = depset([f], transitive = [transitive_srcs])
-    return [FlatbufferInfo(transitive_srcs = transitive_srcs)]
-
-# An aspect that runs over all dependencies and transitively collects
-# flatbuffer schema files.
-_flatbuffer_schemas_aspect = aspect(
-    attr_aspects = [
-        "deps",
-        "srcs",
-    ],
-    implementation = _flatbuffer_schemas_aspect_impl,
-)
-
-# Rule to invoke the flatbuffer compiler.
-def _gen_flatbuffer_srcs_impl(ctx):
-    outputs = ctx.attr.outputs
-    include_paths = ctx.attr.include_paths
-    if ctx.attr.no_includes:
-        no_includes_statement = ["--no-includes"]
-    else:
-        no_includes_statement = []
-
-    # Need to generate all files in a directory.
-    if not outputs:
-        outputs = [ctx.actions.declare_directory("{}_all".format(ctx.attr.name))]
-        output_directory = outputs[0].path
-    else:
-        outputs = [ctx.actions.declare_file(output) for output in outputs]
-        output_directory = outputs[0].dirname
-
-    deps = depset(ctx.files.srcs + ctx.files.deps, transitive = [
-        dep[FlatbufferInfo].transitive_srcs
-        for dep in ctx.attr.deps
-        if FlatbufferInfo in dep
-    ])
-
-    include_paths_cmd_line = []
-    for s in include_paths:
-        include_paths_cmd_line.extend(["-I", s])
-
-    for src in ctx.files.srcs:
-        ctx.actions.run(
-            inputs = deps,
-            outputs = outputs,
-            executable = ctx.executable._flatc,
-            arguments = [
-                ctx.attr.language_flag,
-                "-o",
-                output_directory,
-                # Allow for absolute imports and referencing of generated files.
-                "-I",
-                "./",
-                "-I",
-                ctx.genfiles_dir.path,
-                "-I",
-                ctx.bin_dir.path,
-            ] + no_includes_statement +
-            include_paths_cmd_line + [
-                "--no-union-value-namespacing",
-                "--gen-object-api",
-                src.path,
-            ],
-            progress_message = "Generating flatbuffer files for {}:".format(src),
-            use_default_shell_env = True,
-        )
-    return [
-        DefaultInfo(files = depset(outputs)),
-    ]
-
-_gen_flatbuffer_srcs = rule(
-    _gen_flatbuffer_srcs_impl,
-    attrs = {
-        "srcs": attr.label_list(
-            allow_files = [".fbs"],
-            mandatory = True,
-        ),
-        "outputs": attr.string_list(
-            default = [],
-            mandatory = False,
-        ),
-        "deps": attr.label_list(
-            default = [],
-            mandatory = False,
-            aspects = [_flatbuffer_schemas_aspect],
-        ),
-        "include_paths": attr.string_list(
-            default = [],
-            mandatory = False,
-        ),
-        "language_flag": attr.string(
-            mandatory = True,
-        ),
-        "no_includes": attr.bool(
-            default = False,
-            mandatory = False,
-        ),
-        "_flatc": attr.label(
-            default = Label("@flatbuffers//:flatc"),
-            executable = True,
-            cfg = "exec",
-        ),
-    },
-    output_to_genfiles = True,
-)
-
-def flatbuffer_py_strip_prefix_srcs(name, srcs = [], strip_prefix = ""):
-    """Strips path prefix.
-
-    Args:
-      name: Rule name. (required)
-      srcs: Source .py files. (required)
-      strip_prefix: Path that needs to be stripped from the srcs filepaths.
-        (required)
-    """
-    for src in srcs:
-        native.genrule(
-            name = name + "_" + src.replace(".", "_").replace("/", "_"),
-            srcs = [src],
-            outs = [src.replace(strip_prefix, "")],
-            cmd = "cp $< $@",
-        )
-
-def _concat_flatbuffer_py_srcs_impl(ctx):
-    # Merge all generated python files. The files are concatenated and import
-    # statements are removed. Finally we import the flatbuffer runtime library.
-    # IMPORTANT: Our Windows shell does not support "find ... -exec" properly.
-    # If you're changing the commandline below, please build wheels and run smoke
-    # tests on all the three operating systems.
-    command = "echo 'import flatbuffers\n' > %s; "
-    command += "for f in $(find %s -name '*.py' | sort); do cat $f | sed '/import flatbuffers/d' >> %s; done "
-    ctx.actions.run_shell(
-        inputs = ctx.attr.deps[0].files,
-        outputs = [ctx.outputs.out],
-        command = command % (
-            ctx.outputs.out.path,
-            ctx.attr.deps[0].files.to_list()[0].path,
-            ctx.outputs.out.path,
-        ),
-        use_default_shell_env = True,
-    )
-
-_concat_flatbuffer_py_srcs = rule(
-    _concat_flatbuffer_py_srcs_impl,
-    attrs = {
-        "deps": attr.label_list(mandatory = True),
-    },
-    output_to_genfiles = True,
-    outputs = {"out": "%{name}.py"},
-)
-
-def flatbuffer_py_library(
-        name,
-        srcs,
-        deps = [],
-        include_paths = []):
-    """A py_library with the generated reader/writers for the given schema.
-
-    This rule assumes that the schema files define non-conflicting names, so that
-    they can be merged in a single file. This is e.g. the case if only a single
-    namespace is used.
-    The rule call the flatbuffer compiler for all schema files and merges the
-    generated python files into a single file that is wrapped in a py_library.
-
-    Args:
-      name: Rule name. (required)
-      srcs: List of source .fbs files. (required)
-      deps: List of dependencies.
-      include_paths: Optional, list of paths the includes files can be found in.
-    """
-    all_srcs = "{}_srcs".format(name)
-    _gen_flatbuffer_srcs(
-        name = all_srcs,
-        srcs = srcs,
-        language_flag = "--python",
-        deps = deps,
-        include_paths = include_paths,
-    )
-    all_srcs_no_include = "{}_srcs_no_include".format(name)
-    _gen_flatbuffer_srcs(
-        name = all_srcs_no_include,
-        srcs = srcs,
-        language_flag = "--python",
-        deps = deps,
-        no_includes = True,
-        include_paths = include_paths,
-    )
-    concat_py_srcs = "{}_generated".format(name)
-    _concat_flatbuffer_py_srcs(
-        name = concat_py_srcs,
-        deps = [
-            ":{}".format(all_srcs_no_include),
-        ],
-    )
-    native.py_library(
-        name = name,
-        srcs = [
-            ":{}".format(concat_py_srcs),
-        ],
-        srcs_version = "PY3",
-        deps = deps + [
-            "@flatbuffers//:runtime_py",
-        ],
-    )
-
-def flatbuffer_java_library(
-        name,
-        srcs,
-        custom_package = "",
-        package_prefix = "",
-        include_paths = DEFAULT_INCLUDE_PATHS,
-        flatc_args = DEFAULT_FLATC_ARGS,
-        visibility = None):
-    """A java library with the generated reader/writers for the given flatbuffer definitions.
-
-    Args:
-      name: Rule name. (required)
-      srcs: List of source .fbs files including all includes. (required)
-      custom_package: Package name of generated Java files. If not specified
-        namespace in the schema files will be used. (optional)
-      package_prefix: like custom_package, but prefixes to the existing
-        namespace. (optional)
-      include_paths: List of paths that includes files can be found in. (optional)
-      flatc_args: List of additional arguments to pass to flatc. (optional)
-      visibility: Visibility setting for the java_library rule.
-        (optional)
-    """
-    out_srcjar = "java_%s_all.srcjar" % name
-    flatbuffer_java_srcjar(
-        name = "%s_srcjar" % name,
-        srcs = srcs,
-        out = out_srcjar,
-        custom_package = custom_package,
-        flatc_args = flatc_args,
-        include_paths = include_paths,
-        package_prefix = package_prefix,
-    )
-
-    native.filegroup(
-        name = "%s.srcjar" % name,
-        srcs = [out_srcjar],
-    )
-
-    native.java_library(
-        name = name,
-        srcs = [out_srcjar],
-        javacopts = ["-source 7 -target 7"],
-        deps = [
-            "@flatbuffers//:runtime_java",
-        ],
-        visibility = visibility,
-    )
-
-def flatbuffer_java_srcjar(
-        name,
-        srcs,
-        out,
-        custom_package = "",
-        package_prefix = "",
-        include_paths = DEFAULT_INCLUDE_PATHS,
-        flatc_args = DEFAULT_FLATC_ARGS):
-    """Generate flatbuffer Java source files.
-
-    Args:
-      name: Rule name. (required)
-      srcs: List of source .fbs files including all includes. (required)
-      out: Output file name. (required)
-      custom_package: Package name of generated Java files. If not specified
-        namespace in the schema files will be used. (optional)
-      package_prefix: like custom_package, but prefixes to the existing
-        namespace. (optional)
-      include_paths: List of paths that includes files can be found in. (optional)
-      flatc_args: List of additional arguments to pass to flatc. (optional)
-    """
-    command_fmt = """set -e
-      tmpdir=$(@D)
-      schemas=$$tmpdir/schemas
-      java_root=$$tmpdir/java
-      rm -rf $$schemas
-      rm -rf $$java_root
-      mkdir -p $$schemas
-      mkdir -p $$java_root
-
-      for src in $(SRCS); do
-        dest=$$schemas/$$src
-        rm -rf $$(dirname $$dest)
-        mkdir -p $$(dirname $$dest)
-        if [ -z "{custom_package}" ] && [ -z "{package_prefix}" ]; then
-          cp -f $$src $$dest
-        else
-          if [ -z "{package_prefix}" ]; then
-            sed -e "s/namespace\\s.*/namespace {custom_package};/" $$src > $$dest
-          else
-            sed -e "s/namespace \\([^;]\\+\\);/namespace {package_prefix}.\\1;/" $$src > $$dest
-          fi
-        fi
-      done
-
-      flatc_arg_I="-I $$tmpdir/schemas"
-      for include_path in {include_paths}; do
-        flatc_arg_I="$$flatc_arg_I -I $$schemas/$$include_path"
-      done
-
-      flatc_additional_args=
-      for arg in {flatc_args}; do
-        flatc_additional_args="$$flatc_additional_args $$arg"
-      done
-
-      for src in $(SRCS); do
-        $(location {flatc_path}) $$flatc_arg_I --java $$flatc_additional_args -o $$java_root $$schemas/$$src
-      done
-
-      $(location {zip_files}) -export_zip_path=$@ -file_directory=$$java_root
-      """
-    genrule_cmd = command_fmt.format(
-        package_name = native.package_name(),
-        custom_package = custom_package,
-        package_prefix = package_prefix,
-        flatc_path = flatc_path,
-        zip_files = zip_files,
-        include_paths = " ".join(include_paths),
-        flatc_args = " ".join(flatc_args),
-    )
-
-    native.genrule(
-        name = name,
-        srcs = srcs,
-        outs = [out],
-        tools = [flatc_path, zip_files],
-        cmd = genrule_cmd,
-    )
-
-def flatbuffer_android_library(
-        name,
-        srcs,
-        custom_package = "",
-        package_prefix = "",
-        include_paths = DEFAULT_INCLUDE_PATHS,
-        flatc_args = DEFAULT_FLATC_ARGS,
-        visibility = None):
-    """An android_library with the generated reader/writers for the given flatbuffer definitions.
-
-    Args:
-      name: Rule name. (required)
-      srcs: List of source .fbs files including all includes. (required)
-      custom_package: Package name of generated Java files. If not specified
-        namespace in the schema files will be used. (optional)
-      package_prefix: like custom_package, but prefixes to the existing
-        namespace. (optional)
-      include_paths: List of paths that includes files can be found in. (optional)
-      flatc_args: List of additional arguments to pass to flatc.
-        (optional)
-      visibility: Visibility setting for the android_library rule. (optional)
-    """
-    out_srcjar = "android_%s_all.srcjar" % name
-    flatbuffer_java_srcjar(
-        name = "%s_srcjar" % name,
-        srcs = srcs,
-        out = out_srcjar,
-        custom_package = custom_package,
-        flatc_args = flatc_args,
-        include_paths = include_paths,
-        package_prefix = package_prefix,
-    )
-
-    native.filegroup(
-        name = "%s.srcjar" % name,
-        srcs = [out_srcjar],
-    )
-
-    # To support org.checkerframework.dataflow.qual.Pure.
-    checkerframework_annotations = [
-        "@org_checkerframework_qual",
-    ] if "--java-checkerframework" in flatc_args else []
-
-    android_library(
-        name = name,
-        srcs = [out_srcjar],
-        javacopts = ["-source 7 -target 7"],
-        visibility = visibility,
-        deps = [
-            "@flatbuffers//:runtime_android",
-        ] + checkerframework_annotations,
-    )
diff --git a/ci/flatbuffers_for_tf_sync/flatbuffers.BUILD b/ci/flatbuffers_for_tf_sync/flatbuffers.BUILD
deleted file mode 100644
index 108c0cd8e3b..00000000000
--- a/ci/flatbuffers_for_tf_sync/flatbuffers.BUILD
+++ /dev/null
@@ -1,156 +0,0 @@
-load("@build_bazel_rules_android//android:rules.bzl", "android_library")
-load(":build_defs.bzl", "flatbuffer_py_strip_prefix_srcs")
-
-package(default_visibility = ["//visibility:public"])
-
-licenses(["notice"])  # Apache 2.0
-
-exports_files(["LICENSE.txt"])
-
-licenses(["notice"])
-
-config_setting(
-    name = "freebsd",
-    values = {"cpu": "freebsd"},
-)
-
-config_setting(
-    name = "windows",
-    values = {"cpu": "x64_windows"},
-)
-
-load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library")
-
-# Public flatc library to compile flatbuffer files at runtime.
-cc_library(
-    name = "flatbuffers",
-    hdrs = ["//:public_headers"],
-    linkstatic = 1,
-    strip_include_prefix = "/include",
-    visibility = ["//visibility:public"],
-    deps = ["//src:flatbuffers"],
-)
-
-# Public C++ headers for the Flatbuffers library.
-filegroup(
-    name = "public_headers",
-    srcs = [
-        "include/flatbuffers/base.h",
-        "include/flatbuffers/code_generators.h",
-        "include/flatbuffers/flatbuffers.h",
-        "include/flatbuffers/flexbuffers.h",
-        "include/flatbuffers/hash.h",
-        "include/flatbuffers/idl.h",
-        "include/flatbuffers/minireflect.h",
-        "include/flatbuffers/reflection.h",
-        "include/flatbuffers/reflection_generated.h",
-        "include/flatbuffers/registry.h",
-        "include/flatbuffers/stl_emulation.h",
-        "include/flatbuffers/util.h",
-    ],
-    visibility = ["//:__subpackages__"],
-)
-
-# Public flatc compiler library.
-cc_library(
-    name = "flatc_library",
-    linkstatic = 1,
-    visibility = ["//visibility:public"],
-    deps = [
-        "@flatbuffers//src:flatc_library",
-    ],
-)
-
-# Public flatc compiler.
-cc_binary(
-    name = "flatc",
-    linkopts = select({
-        ":freebsd": [
-            "-lm",
-        ],
-        ":windows": [],
-        "//conditions:default": [
-            "-lm",
-            "-ldl",
-        ],
-    }),
-    visibility = ["//visibility:public"],
-    deps = [
-        "@flatbuffers//src:flatc",
-    ],
-)
-
-filegroup(
-    name = "flatc_headers",
-    srcs = [
-        "include/flatbuffers/flatc.h",
-    ],
-    visibility = ["//:__subpackages__"],
-)
-
-# Library used by flatbuffer_cc_library rules.
-cc_library(
-    name = "runtime_cc",
-    hdrs = [
-        "include/flatbuffers/base.h",
-        "include/flatbuffers/flatbuffers.h",
-        "include/flatbuffers/flexbuffers.h",
-        "include/flatbuffers/stl_emulation.h",
-        "include/flatbuffers/util.h",
-    ],
-    linkstatic = 1,
-    strip_include_prefix = "/include",
-    visibility = ["//visibility:public"],
-)
-
-flatbuffer_py_strip_prefix_srcs(
-    name = "flatbuffer_py_strip_prefix",
-    srcs = [
-        "python/flatbuffers/__init__.py",
-        "python/flatbuffers/builder.py",
-        "python/flatbuffers/compat.py",
-        "python/flatbuffers/encode.py",
-        "python/flatbuffers/number_types.py",
-        "python/flatbuffers/packer.py",
-        "python/flatbuffers/table.py",
-        "python/flatbuffers/util.py",
-    ],
-    strip_prefix = "python/flatbuffers/",
-)
-
-filegroup(
-    name = "runtime_py_srcs",
-    srcs = [
-        "__init__.py",
-        "builder.py",
-        "compat.py",
-        "encode.py",
-        "number_types.py",
-        "packer.py",
-        "table.py",
-        "util.py",
-    ],
-)
-
-py_library(
-    name = "runtime_py",
-    srcs = [":runtime_py_srcs"],
-    visibility = ["//visibility:public"],
-)
-
-filegroup(
-    name = "runtime_java_srcs",
-    srcs = glob(["java/com/google/flatbuffers/**/*.java"]),
-)
-
-java_library(
-    name = "runtime_java",
-    srcs = [":runtime_java_srcs"],
-    visibility = ["//visibility:public"],
-)
-
-android_library(
-    name = "runtime_android",
-    srcs = [":runtime_java_srcs"],
-    visibility = ["//visibility:public"],
-)
diff --git a/ci/flatbuffers_for_tf_sync/workspace.bzl b/ci/flatbuffers_for_tf_sync/workspace.bzl
deleted file mode 100644
index 59c1fd9ea43..00000000000
--- a/ci/flatbuffers_for_tf_sync/workspace.bzl
+++ /dev/null
@@ -1,16 +0,0 @@
-"""Loads the Flatbuffers library, used by TF Lite."""
-
-load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
-
-def repo():
-    tf_http_archive(
-        name = "flatbuffers",
-        strip_prefix = "flatbuffers-1.12.0",
-        sha256 = "62f2223fb9181d1d6338451375628975775f7522185266cd5296571ac152bc45",
-        urls = tf_mirror_urls("https://github.com/google/flatbuffers/archive/v1.12.0.tar.gz"),
-        build_file = "//third_party/flatbuffers:flatbuffers.BUILD",
-        system_build_file = "//third_party/flatbuffers:BUILD.system",
-        link_files = {
-            "//third_party/flatbuffers:build_defs.bzl": "build_defs.bzl",
-        },
-    )
diff --git a/ci/install_bazelisk.sh b/ci/install_bazelisk.sh
index d2f8a13d99b..db385e6c42b 100755
--- a/ci/install_bazelisk.sh
+++ b/ci/install_bazelisk.sh
@@ -18,5 +18,5 @@ set -e
 wget https://github.com/bazelbuild/bazelisk/releases/download/v1.16.0/bazelisk-linux-amd64
 mv bazelisk-linux-amd64 bazel
 chmod +x bazel
-sudo mv bazel /usr/local/bin
+mv bazel /usr/local/bin
 
diff --git a/ci/install_cores_xplorer_11.sh b/ci/install_cores_xplorer_11.sh
new file mode 100755
index 00000000000..1df00cb822c
--- /dev/null
+++ b/ci/install_cores_xplorer_11.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+mkdir /opt/xtensa/licenses
+
+mkdir -p /opt/xtensa/XtDevTools/install/tools/
+tar xvzf XtensaTools_RI_2022_9_linux.tgz --dir /opt/xtensa/XtDevTools/install/tools/
+
+
+###########
+# Hifimini
+###########
+cd /opt/xtensa/
+tar xvzf mini1m1m_RI_2019_2_linux_w_keys.tgz --dir /opt/xtensa/licenses/
+cd /opt/xtensa/licenses/RI-2019.2-linux/mini1m1m_RG/
+
+./install --xtensa-tools \
+  /opt/xtensa/XtDevTools/install/tools/RI-2019.2-linux/XtensaTools/ \
+  --no-default \
+  --no-replace
diff --git a/ci/install_cores_xplorer_13.sh b/ci/install_cores_xplorer_13.sh
new file mode 100755
index 00000000000..a24b1f3f794
--- /dev/null
+++ b/ci/install_cores_xplorer_13.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+mkdir /opt/xtensa/licenses
+
+##############
+# Fusion F1
+##############
+cd /opt/xtensa/
+tar xvzf F1_190305_swupgrade_linux.tgz --dir /opt/xtensa/licenses/
+cd /opt/xtensa/licenses/RI-2020.4-linux/F1_190305_swupgrade/
+
+./install --xtensa-tools \
+  /opt/xtensa/XtDevTools/install/tools/RI-2020.4-linux/XtensaTools/ \
+  --no-default \
+  --no-replace
+
+##############
+# Vision P6
+##############
+cd /opt/xtensa/
+tar xvzf P6_200528_linux.tgz --dir /opt/xtensa/licenses/
+cd /opt/xtensa/licenses/RI-2020.4-linux/P6_200528/
+
+./install --xtensa-tools \
+  /opt/xtensa/XtDevTools/install/tools/RI-2020.4-linux/XtensaTools/ \
+  --no-default \
+  --no-replace
+
+##############
+# Hifi3Z
+##############
+cd /opt/xtensa/
+tar xvzf HIFI_190304_swupgrade_linux.tgz --dir /opt/xtensa/licenses/
+cd /opt/xtensa/licenses/RI-2020.4-linux/HIFI_190304_swupgrade/
+
+./install --xtensa-tools \
+  /opt/xtensa/XtDevTools/install/tools/RI-2020.4-linux/XtensaTools/ \
+  --no-default \
+  --no-replace
diff --git a/ci/install_cores_xplorer_solo.sh b/ci/install_cores_xplorer_solo.sh
new file mode 100755
index 00000000000..559c67a672a
--- /dev/null
+++ b/ci/install_cores_xplorer_solo.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+mkdir /opt/xtensa/licenses
+
+mkdir -p /opt/xtensa/XtDevTools/install/tools/
+tar xvzf XtensaTools_RI_2022_9_linux.tgz --dir /opt/xtensa/XtDevTools/install/tools/
+
+
+###########
+# Hifi5
+###########
+cd /opt/xtensa/
+tar xvzf PRD_H5_RDO_07_01_2022_linux.tgz --dir /opt/xtensa/licenses/
+cd /opt/xtensa/licenses/RI-2022.9-linux/PRD_H5_RDO_07_01_2022/
+
+./install --xtensa-tools \
+  /opt/xtensa/XtDevTools/install/tools/RI-2022.9-linux/XtensaTools/ \
+  --no-default \
+  --no-replace
diff --git a/ci/install_qemu.sh b/ci/install_qemu.sh
index 5259978d8db..1888db208fd 100755
--- a/ci/install_qemu.sh
+++ b/ci/install_qemu.sh
@@ -13,6 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ==============================================================================
+#
+# We are installing qemu from source because (copied from
+# https://github.com/tensorflow/tflite-micro/pull/1892#discussion_r1164316226):
+# QEMU 7.1 and above has a blocking bug
+# (https://gitlab.com/qemu-project/qemu/-/issues/1528) and that's the only
+# version available on our apt repo.
+#
+# TODO: b/285348856 - Update the version to allow for command line arguments.
+#
 # Parameters:
 #   ${1} Optional. Path to install QEMU.
LINUX_PORTABLE_URL="https://download.qemu.org/qemu-6.2.0.tar.xz" diff --git a/ci/sync_from_upstream_tf.sh b/ci/sync_from_upstream_tf.sh index 094df6546fe..46eee4e21f8 100755 --- a/ci/sync_from_upstream_tf.sh +++ b/ci/sync_from_upstream_tf.sh @@ -47,6 +47,7 @@ done git checkout tensorflow/lite/kernels/internal/optimized/neon_check.h # http://b/149862813 git checkout tensorflow/lite/kernels/internal/runtime_shape.h +git checkout tensorflow/lite/kernels/internal/runtime_shape.cc # http://b/187728891 git checkout tensorflow/lite/kernels/op_macros.h # http://b/242077843 diff --git a/ci/tflite_files.txt b/ci/tflite_files.txt index 7fa53c4914b..51381ce7447 100644 --- a/ci/tflite_files.txt +++ b/ci/tflite_files.txt @@ -5,7 +5,6 @@ tensorflow/lite/builtin_ops.h tensorflow/lite/context_util.h tensorflow/lite/core/api/error_reporter.cc tensorflow/lite/core/api/flatbuffer_conversions.cc -tensorflow/lite/core/api/op_resolver.cc tensorflow/lite/core/api/tensor_utils.cc tensorflow/lite/core/c/common.cc tensorflow/lite/kernels/internal/common.cc @@ -19,7 +18,6 @@ tensorflow/lite/c/c_api_types.h tensorflow/lite/c/common.h tensorflow/lite/core/api/error_reporter.h tensorflow/lite/core/api/flatbuffer_conversions.h -tensorflow/lite/core/api/op_resolver.h tensorflow/lite/core/api/tensor_utils.h tensorflow/lite/core/c/builtin_op_data.h tensorflow/lite/core/c/c_api_types.h diff --git a/codegen/BUILD b/codegen/BUILD new file mode 100644 index 00000000000..ae62c04c759 --- /dev/null +++ b/codegen/BUILD @@ -0,0 +1,71 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") +load("@tflm_pip_deps//:requirements.bzl", "requirement") + +package( + default_visibility = ["//:__subpackages__"], + licenses = ["notice"], +) + +py_library( + name = "graph", + srcs = [ + "graph.py", + ], + deps = [ + ":tensor", + ":utils", + "//codegen/operators:factory", + "//codegen/operators:operator", + "//tensorflow/lite/python:schema_py", + "//tensorflow/lite/tools:visualize", + ], +) + +py_library( + name = "inference_generator", + srcs = [ + "inference_generator.py", + ], + data = [ + "templates/inference.cc.mako", + "templates/inference.h.mako", + ], + deps = [ + ":graph", + requirement("mako"), + ], +) + +py_library( + name = "tensor", + srcs = [ + "tensor.py", + ], + deps = [ + ":utils", + "//tensorflow/lite/python:schema_py", + ], +) + +py_library( + name = "utils", + srcs = [ + "utils.py", + ], +) + +py_binary( + name = "code_generator", + srcs = [ + "code_generator.py", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":graph", + ":inference_generator", + "//tensorflow/lite/tools:flatbuffer_utils", + "@absl_py//absl:app", + "@absl_py//absl/flags", + ], +) diff --git a/codegen/README.md b/codegen/README.md new file mode 100644 index 00000000000..ff7e9d22070 --- /dev/null +++ b/codegen/README.md @@ -0,0 +1,3 @@ +# TFLM Code Generator + +This is a work in progress experiment. It is not ready for use. diff --git a/codegen/build_def.bzl b/codegen/build_def.bzl new file mode 100644 index 00000000000..28b6232b339 --- /dev/null +++ b/codegen/build_def.bzl @@ -0,0 +1,44 @@ +""" Build rule for generating ML inference code from TFLite model. """ + +load("//tensorflow/lite/micro:build_def.bzl", "micro_copts") + +def tflm_inference_library( + name, + tflite_model, + visibility = None): + """Creates a C++ library capable of performing ML inference of the provided + model. + + Args: + name: Target name. + tflite_model: TFLite Model to generate inference from. + visibility: Visibility for the C++ library. 
+ """ + generated_target = name + "_gen" + native.genrule( + name = generated_target, + srcs = [tflite_model], + outs = [name + ".h", name + ".cc"], + tools = ["//codegen:code_generator"], + cmd = "$(location //codegen:code_generator) " + + "--model=$< --output_dir=$(RULEDIR) --output_name=%s" % name, + visibility = ["//visibility:private"], + ) + + native.cc_library( + name = name, + hdrs = [name + ".h"], + srcs = [name + ".cc"], + deps = [ + generated_target, + "//codegen/runtime:micro_codegen_context", + "//tensorflow/lite/c:common", + "//tensorflow/lite/c:c_api_types", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro/kernels:micro_ops", + "//tensorflow/lite/micro:micro_common", + "//tensorflow/lite/micro:micro_context", + ], + copts = micro_copts(), + visibility = visibility, + ) diff --git a/codegen/code_generator.py b/codegen/code_generator.py new file mode 100644 index 00000000000..91cab732f71 --- /dev/null +++ b/codegen/code_generator.py @@ -0,0 +1,66 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +""" Generates C/C++ source code capable of performing inference for a model. """ + +import os + +from absl import app +from absl import flags +from collections.abc import Sequence + +from tflite_micro.codegen import inference_generator +from tflite_micro.codegen import graph +from tflite_micro.tensorflow.lite.tools import flatbuffer_utils + +# Usage information: +# Default: +# `bazel run codegen:code_generator -- \ +# --model= +# Output will be located at: /path/to/my_model.h|cc + +_MODEL_PATH = flags.DEFINE_string(name="model", + default=None, + help="Path to the TFLite model file.", + required=True) + +_OUTPUT_DIR = flags.DEFINE_string( + name="output_dir", + default=None, + help="Path to write generated source to. Leave blank to use 'model' path.", + required=False) + +_OUTPUT_NAME = flags.DEFINE_string( + name="output_name", + default=None, + help=("The output basename for the generated .h/.cc. 
+          "'model' basename."),
+    required=False)
+
+
+def main(argv: Sequence[str]) -> None:
+  output_dir = _OUTPUT_DIR.value or os.path.dirname(_MODEL_PATH.value)
+  output_name = _OUTPUT_NAME.value or os.path.splitext(
+      os.path.basename(_MODEL_PATH.value))[0]
+
+  model = flatbuffer_utils.read_model(_MODEL_PATH.value)
+
+  print("Generating inference code for model: {}".format(_MODEL_PATH.value))
+
+  inference_generator.generate(output_dir, output_name,
+                               graph.OpCodeTable([model]), graph.Graph(model))
+
+
+if __name__ == "__main__":
+  app.run(main)
diff --git a/codegen/examples/hello_world/BUILD b/codegen/examples/hello_world/BUILD
new file mode 100644
index 00000000000..04425cbd697
--- /dev/null
+++ b/codegen/examples/hello_world/BUILD
@@ -0,0 +1,17 @@
+load("//codegen:build_def.bzl", "tflm_inference_library")
+
+package(default_visibility = ["//visibility:public"])
+
+tflm_inference_library(
+    name = "hello_world_model",
+    tflite_model = "//tensorflow/lite/micro/examples/hello_world/models:hello_world_int8.tflite",
+)
+
+cc_binary(
+    name = "hello_world",
+    srcs = ["hello_world.cc"],
+    deps = [
+        ":hello_world_model",
+        "//tensorflow/lite/c:c_api_types",
+    ],
+)
diff --git a/codegen/examples/hello_world/README.md b/codegen/examples/hello_world/README.md
new file mode 100644
index 00000000000..62afee5f20e
--- /dev/null
+++ b/codegen/examples/hello_world/README.md
@@ -0,0 +1,27 @@
+# Codegen Hello World Example
+
+This is a code-generated example of the hello world model. The generated source
+is checked in for now so that it can be reviewed during the prototyping stage.
+
+## Building the example executable
+Use Bazel to build the example executable:
+
+```
+bazel build //codegen/examples/hello_world:hello_world
+```
+
+## Running the example
+
+TODO(rjascani): The command works, but it'll just crash as we don't have all of
+the data structures fully populated yet.
+
+```
+bazel run //codegen/examples/hello_world:hello_world
+```
+
+## Updating the generated sources
+To update the checked-in generated sources, run this script:
+
+```
+./codegen/examples/hello_world/update_example_source.sh
+```
diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc b/codegen/examples/hello_world/hello_world.cc
similarity index 58%
rename from tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc
rename to codegen/examples/hello_world/hello_world.cc
index aff02429a21..70d665bc8cd 100644
--- a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc
+++ b/codegen/examples/hello_world/hello_world.cc
@@ -1,4 +1,4 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -13,11 +13,16 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
-// See the header for documentation on the meaning of this data.
+#include "codegen/examples/hello_world/hello_world_model.h" +#include "tensorflow/lite/c/c_api_types.h" -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h" +int main(int argc, char** argv) { + hello_world_model::Model hello_world{}; -const uint8_t g_no_power_spectrum_data[g_no_power_spectrum_data_size] = { - 255, 7, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -}; + TfLiteStatus status = hello_world.Invoke(); + if (status != kTfLiteOk) { + return -1; + } + + return 0; +} diff --git a/codegen/examples/hello_world/hello_world_model.cc b/codegen/examples/hello_world/hello_world_model.cc new file mode 100644 index 00000000000..7d8290c3794 --- /dev/null +++ b/codegen/examples/hello_world/hello_world_model.cc @@ -0,0 +1,316 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +/* AUTOMATICALLY GENERATED DO NOT MODIFY */ + +#include "hello_world_model.h" + +#include "codegen/runtime/micro_codegen_context.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/kernels/micro_ops.h" +#include "tensorflow/lite/micro/micro_common.h" +#include "tensorflow/lite/micro/micro_context.h" + +namespace hello_world_model { +namespace { +// TODO(rjascani): We should probably split out the OpTable to a separate file +// once we start generating for multiple models. 
+enum OpCode { kFullyConnected, kCount }; + +TFLMInferenceRegistration op_table[OpCode::kCount] = { + tflite::RegisterInference_FULLY_CONNECTED(), +}; + +// buffer_1 is located in the arena + +alignas(16) uint8_t buffer_2[4] = { + 0xAD, + 0x01, + 0x00, + 0x00, +}; + +alignas(16) uint8_t buffer_3[16] = { + 0xD9, 0x3B, 0x27, 0x15, 0x1C, 0xE0, 0xDE, 0xDD, + 0x0F, 0x1B, 0xC5, 0xD7, 0x12, 0xDD, 0xF9, 0x7F, +}; + +alignas(16) uint8_t buffer_4[64] = { + 0x27, 0xFD, 0xFF, 0xFF, 0xA2, 0x07, 0x00, 0x00, 0x62, 0x02, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0xF1, 0x00, 0x00, 0x00, 0x29, 0xFE, + 0xFF, 0xFF, 0xDD, 0xFF, 0xFF, 0xFF, 0x9D, 0xFC, 0xFF, 0xFF, 0x3B, + 0x02, 0x00, 0x00, 0x45, 0x02, 0x00, 0x00, 0xA4, 0x10, 0x00, 0x00, + 0x67, 0x0F, 0x00, 0x00, 0x4F, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x87, 0xFC, 0xFF, 0xFF, 0x11, 0xEC, 0xFF, 0xFF, +}; + +alignas(16) uint8_t buffer_5[256] = { + 0xF4, 0x1A, 0xED, 0x09, 0x19, 0x21, 0xF4, 0x24, 0xE0, 0x21, 0xEF, 0xBC, + 0xF7, 0xF5, 0xFA, 0x19, 0x03, 0xDC, 0xD2, 0x02, 0x06, 0xF9, 0xF4, 0x02, + 0xFF, 0xFA, 0xEF, 0xF1, 0xEF, 0xD3, 0x27, 0xE1, 0xFB, 0x27, 0xDD, 0xEB, + 0xDB, 0xE4, 0x05, 0x1A, 0x17, 0xFC, 0x24, 0x12, 0x15, 0xEF, 0x1E, 0xE4, + 0x10, 0xFE, 0x14, 0xDA, 0x1C, 0xF8, 0xF3, 0xF1, 0xEF, 0xE2, 0xF3, 0x09, + 0xE3, 0xE9, 0xED, 0xE3, 0xE4, 0x15, 0x07, 0x0B, 0x04, 0x1B, 0x1A, 0xFE, + 0xEB, 0x01, 0xDE, 0x21, 0xE6, 0x0B, 0xEC, 0x03, 0x23, 0x0A, 0x22, 0x24, + 0x1E, 0x27, 0x03, 0xE6, 0x03, 0x24, 0xFF, 0xC0, 0x11, 0xF8, 0xFC, 0xF1, + 0x11, 0x0C, 0xF5, 0xE0, 0xF3, 0x07, 0x17, 0xE5, 0xE8, 0xED, 0xFA, 0xDC, + 0xE8, 0x23, 0xFB, 0x07, 0xDD, 0xFB, 0xFD, 0x00, 0x14, 0x26, 0x11, 0x17, + 0xE7, 0xF1, 0x11, 0xEA, 0x02, 0x26, 0x04, 0x04, 0x25, 0x21, 0x1D, 0x0A, + 0xDB, 0x1D, 0xDC, 0x20, 0x01, 0xFA, 0xE3, 0x37, 0x0B, 0xF1, 0x1A, 0x16, + 0xEF, 0x1C, 0xE7, 0x03, 0xE0, 0x16, 0x02, 0x03, 0x21, 0x18, 0x09, 0x2E, + 0xD9, 0xE5, 0x14, 0x0B, 0xEA, 0x1A, 0xFC, 0xD8, 0x13, 0x00, 0xC4, 0xD8, + 0xEC, 0xD9, 0xFE, 0x0D, 0x19, 0x20, 0xD8, 0xD6, 0xE2, 0x1F, 0xE9, 0xD7, + 0xCA, 0xE2, 0xDD, 0xC6, 0x13, 0xE7, 0x04, 0x3E, 0x00, 0x01, 0x14, 0xC7, + 0xDB, 0xE7, 0x15, 0x15, 0xF5, 0x06, 0xD6, 0x1A, 0xDC, 0x09, 0x22, 0xFE, + 0x08, 0x02, 0x13, 0xEF, 0x19, 0x1E, 0xE2, 0x09, 0xFD, 0xF3, 0x14, 0xDD, + 0xDA, 0x20, 0xD9, 0x0F, 0xE3, 0xF9, 0xF7, 0xEE, 0xE9, 0x24, 0xE6, 0x29, + 0x00, 0x07, 0x16, 0xE2, 0x1E, 0x0D, 0x23, 0xD3, 0xDD, 0xF7, 0x14, 0xFA, + 0x08, 0x22, 0x26, 0x21, 0x09, 0x08, 0x0F, 0x0B, 0xE0, 0x12, 0xF4, 0x7F, + 0xDC, 0x58, 0xE5, 0x26, +}; + +alignas(16) uint8_t buffer_6[64] = { + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xC2, 0xEA, 0xFF, + 0xFF, 0x75, 0xEA, 0xFF, 0xFF, 0xB8, 0xFA, 0xFF, 0xFF, 0x24, 0xFA, + 0xFF, 0xFF, 0xC8, 0xEF, 0xFF, 0xFF, 0xAC, 0xFF, 0xFF, 0xFF, 0x44, + 0x0D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xBD, 0x07, 0x00, 0x00, + 0x33, 0xEA, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0xCC, 0xE4, 0xFF, + 0xFF, 0x4F, 0x0D, 0x00, 0x00, 0xCF, 0xE3, 0xFF, 0xFF, +}; + +alignas(16) uint8_t buffer_7[16] = { + 0xF7, 0xCA, 0x39, 0x47, 0x68, 0x73, 0x62, 0x63, + 0x40, 0xE6, 0x7F, 0x19, 0xAE, 0x44, 0x5F, 0x56, +}; + +// buffer_8 is located in the arena + +// buffer_9 is located in the arena + +// buffer_10 is located in the arena + +constexpr size_t kSubgraph0Inputs[1] = {0}; + +constexpr size_t kSubgraph0Outputs[1] = {9}; + +struct Node0_0 { + struct Inputs { + int size = 3; + int data[3] = {0, 6, 5}; + } inputs; + struct Outputs { + int size = 1; + int data[1] = {7}; + } outputs; + // No intermediates + TfLiteFullyConnectedParams builtin_data = { + .activation = kTfLiteActRelu, + .weights_format = 
kTfLiteFullyConnectedWeightsFormatDefault,
+      .keep_num_dims = false,
+      .asymmetric_quantize_inputs = false,
+      .quantized_bias_type = kTfLiteNoType};
+} node_0_0;
+
+struct Node0_1 {
+  struct Inputs {
+    int size = 3;
+    int data[3] = {7, 4, 3};
+  } inputs;
+  struct Outputs {
+    int size = 1;
+    int data[1] = {8};
+  } outputs;
+  // No intermediates
+  TfLiteFullyConnectedParams builtin_data = {
+      .activation = kTfLiteActRelu,
+      .weights_format = kTfLiteFullyConnectedWeightsFormatDefault,
+      .keep_num_dims = false,
+      .asymmetric_quantize_inputs = false,
+      .quantized_bias_type = kTfLiteNoType};
+} node_0_1;
+
+struct Node0_2 {
+  struct Inputs {
+    int size = 3;
+    int data[3] = {8, 2, 1};
+  } inputs;
+  struct Outputs {
+    int size = 1;
+    int data[1] = {9};
+  } outputs;
+  // No intermediates
+  TfLiteFullyConnectedParams builtin_data = {
+      .activation = kTfLiteActNone,
+      .weights_format = kTfLiteFullyConnectedWeightsFormatDefault,
+      .keep_num_dims = false,
+      .asymmetric_quantize_inputs = false,
+      .quantized_bias_type = kTfLiteNoType};
+} node_0_2;
+
+struct Tensor0_0Dims {
+  int size = 2;
+  int data[2] = {1, 1};
+} tensor0_0_dims;
+
+struct Tensor0_1Dims {
+  int size = 1;
+  int data[1] = {1};
+} tensor0_1_dims;
+
+struct Tensor0_2Dims {
+  int size = 2;
+  int data[2] = {1, 16};
+} tensor0_2_dims;
+
+struct Tensor0_3Dims {
+  int size = 1;
+  int data[1] = {16};
+} tensor0_3_dims;
+
+struct Tensor0_4Dims {
+  int size = 2;
+  int data[2] = {16, 16};
+} tensor0_4_dims;
+
+struct Tensor0_5Dims {
+  int size = 1;
+  int data[1] = {16};
+} tensor0_5_dims;
+
+struct Tensor0_6Dims {
+  int size = 2;
+  int data[2] = {16, 1};
+} tensor0_6_dims;
+
+struct Tensor0_7Dims {
+  int size = 2;
+  int data[2] = {1, 16};
+} tensor0_7_dims;
+
+struct Tensor0_8Dims {
+  int size = 2;
+  int data[2] = {1, 16};
+} tensor0_8_dims;
+
+struct Tensor0_9Dims {
+  int size = 2;
+  int data[2] = {1, 1};
+} tensor0_9_dims;
+
+TfLiteStatus InvokeSubgraph0(TfLiteContext* context,
+                             tflite::Span<TfLiteNode> nodes) {
+  TFLITE_DCHECK(nodes.size() == 3);
+  TF_LITE_ENSURE_OK(
+      context, op_table[OpCode::kFullyConnected].invoke(context, &nodes[0]));
+  TF_LITE_ENSURE_OK(
+      context, op_table[OpCode::kFullyConnected].invoke(context, &nodes[1]));
+  TF_LITE_ENSURE_OK(
+      context, op_table[OpCode::kFullyConnected].invoke(context, &nodes[2]));
+
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+Model::Model()
+    : subgraphs_{
+          {.inputs = {&kSubgraph0Inputs[0], 1},
+           .outputs = {&kSubgraph0Outputs[0], 1},
+           .nodes = {&subgraph0_nodes_[0], 3},
+           .tensors = {&subgraph0_tensors_[0], 10},
+           .invoke = &InvokeSubgraph0},
+      },
+      micro_context_{&context_, {&subgraphs_[0], 1}} {
+  context_.impl_ = static_cast<void*>(&micro_context_);
+  context_.ReportError = nullptr;
+  context_.GetTensor = nullptr;
+  context_.GetEvalTensor = tflite::MicroContextGetEvalTensor;
+  context_.profiler = nullptr;
+  context_.GetExternalContext = nullptr;
+  context_.GetScratchBuffer = nullptr;
+
+  subgraph0_nodes_[0] = TfLiteNode{
+      .inputs = reinterpret_cast<TfLiteIntArray*>(&node_0_0.inputs),
+      .outputs = reinterpret_cast<TfLiteIntArray*>(&node_0_0.outputs),
+      .intermediates = nullptr,
+      .user_data = nullptr,
+      .builtin_data = static_cast<void*>(&node_0_0.builtin_data),
+      .custom_initial_data = nullptr,
+      .custom_initial_data_size = 0};
+  subgraph0_nodes_[1] = TfLiteNode{
+      .inputs = reinterpret_cast<TfLiteIntArray*>(&node_0_1.inputs),
+      .outputs = reinterpret_cast<TfLiteIntArray*>(&node_0_1.outputs),
+      .intermediates = nullptr,
+      .user_data = nullptr,
+      .builtin_data = static_cast<void*>(&node_0_1.builtin_data),
+      .custom_initial_data = nullptr,
+      .custom_initial_data_size = 0};
+  subgraph0_nodes_[2] = TfLiteNode{
+      .inputs = reinterpret_cast<TfLiteIntArray*>(&node_0_2.inputs),
+      .outputs = reinterpret_cast<TfLiteIntArray*>(&node_0_2.outputs),
+      .intermediates = nullptr,
+      .user_data = nullptr,
+      .builtin_data = static_cast<void*>(&node_0_2.builtin_data),
+      .custom_initial_data = nullptr,
+      .custom_initial_data_size = 0};
+
+  subgraph0_tensors_[0] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(nullptr /* buffer_1 */)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_0_dims),
+      .type = kTfLiteInt8};
+  subgraph0_tensors_[1] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(&buffer_2)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_1_dims),
+      .type = kTfLiteInt32};
+  subgraph0_tensors_[2] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(&buffer_3)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_2_dims),
+      .type = kTfLiteInt8};
+  subgraph0_tensors_[3] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(&buffer_4)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_3_dims),
+      .type = kTfLiteInt32};
+  subgraph0_tensors_[4] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(&buffer_5)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_4_dims),
+      .type = kTfLiteInt8};
+  subgraph0_tensors_[5] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(&buffer_6)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_5_dims),
+      .type = kTfLiteInt32};
+  subgraph0_tensors_[6] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(&buffer_7)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_6_dims),
+      .type = kTfLiteInt8};
+  subgraph0_tensors_[7] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(nullptr /* buffer_8 */)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_7_dims),
+      .type = kTfLiteInt8};
+  subgraph0_tensors_[8] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(nullptr /* buffer_9 */)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_8_dims),
+      .type = kTfLiteInt8};
+  subgraph0_tensors_[9] = TfLiteEvalTensor{
+      .data = {.data = static_cast<void*>(nullptr /* buffer_10 */)},
+      .dims = reinterpret_cast<TfLiteIntArray*>(&tensor0_9_dims),
+      .type = kTfLiteInt8};
+}
+
+TfLiteStatus Model::Invoke() { return micro_context_.InvokeSubgraph(0); }
+
+}  // namespace hello_world_model
diff --git a/codegen/examples/hello_world/hello_world_model.h b/codegen/examples/hello_world/hello_world_model.h
new file mode 100644
index 00000000000..80cfe2c3221
--- /dev/null
+++ b/codegen/examples/hello_world/hello_world_model.h
@@ -0,0 +1,40 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +/* AUTOMATICALLY GENERATED DO NOT MODIFY */ + +#pragma once + +#include "codegen/runtime/micro_codegen_context.h" +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/c/common.h" + +namespace hello_world_model { + +class Model { + public: + Model(); + + TfLiteStatus Invoke(); + + private: + TfLiteContext context_ = {}; + tflite::Subgraph subgraphs_[1]; + tflite::MicroCodegenContext micro_context_; + TfLiteNode subgraph0_nodes_[3] = {}; + TfLiteEvalTensor subgraph0_tensors_[10] = {}; +}; + +} // namespace hello_world_model diff --git a/codegen/examples/hello_world/update_example_source.sh b/codegen/examples/hello_world/update_example_source.sh new file mode 100755 index 00000000000..a381fed58f1 --- /dev/null +++ b/codegen/examples/hello_world/update_example_source.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# +# Syncs the generated example source code in the repository. +# + +set -e + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +ROOT_DIR=${SCRIPT_DIR}/../../.. +cd "${ROOT_DIR}" + +bazel build //codegen/examples/hello_world:hello_world_model +cp ./bazel-bin/codegen/examples/hello_world/hello_world_model.h ${SCRIPT_DIR} +cp ./bazel-bin/codegen/examples/hello_world/hello_world_model.cc ${SCRIPT_DIR} +clang-format --style=google -i \ + ${SCRIPT_DIR}/hello_world_model.h \ + ${SCRIPT_DIR}/hello_world_model.cc diff --git a/codegen/graph.py b/codegen/graph.py new file mode 100644 index 00000000000..ad5a700c696 --- /dev/null +++ b/codegen/graph.py @@ -0,0 +1,262 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +""" Provides object representation for the model that is conducive to code + generation using templates. 
""" + +from typing import Dict, List, Optional, Sequence +import string +import textwrap + +from tflite_micro.codegen.operators import factory +from tflite_micro.codegen.operators import operator +from tflite_micro.codegen import tensor +from tflite_micro.codegen import utils +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb +from tflite_micro.tensorflow.lite.tools import visualize + + +class OpCode(object): + + def __init__(self, op_code: schema_fb.OperatorCodeT): + self._op_code: schema_fb.OperatorCodeT = op_code + + @property + def name(self) -> str: + if self._op_code.customCode: + return self._op_code.customCode + return visualize.BuiltinCodeToName(self._op_code.builtinCode) + + @property + def register_function(self) -> str: + return "tflite::RegisterInference_{}".format(self.name) + + @property + def enum_name(self) -> str: + return "k{}".format(utils.to_pascal_case(self.name)) + + @property + def full_enum_name(self) -> str: + return "OpCode::" + self.enum_name + + +class Subgraph(object): + + def __init__(self, model: schema_fb.ModelT, buffers: Sequence[tensor.Buffer], + subgraph_idx: int, subgraph: schema_fb.SubGraphT): + self._subgraph_idx: int = subgraph_idx + self._subgraph: schema_fb.SubGraphT = subgraph + self._op_codes: List[OpCode] = [ + OpCode(op_code) for op_code in model.operatorCodes + ] + self._tensors: List[Tensor] = [] + for t in subgraph.tensors: + self._tensors.append(tensor.Tensor(buffers[t.buffer], t)) + + self._operators: List[operator.Operator] = [] + for op in subgraph.operators: + op_code = model.operatorCodes[op.opcodeIndex] + self._operators.append(factory.create_operator(op_code, op)) + + @property + def index(self) -> int: + return self._subgraph_idx + + @property + def inputs(self) -> Sequence[int]: + return self._subgraph.inputs + + @property + def outputs(self) -> Sequence[int]: + return self._subgraph.outputs + + @property + def operators(self) -> Sequence[operator.Operator]: + return self._operators + + @property + def tensors(self) -> Sequence[tensor.Tensor]: + return self._tensors + + @property + def needs_zero_length_int_array(self) -> bool: + return any(t.needs_zero_length_int_array for t in self.tensors) + + @property + def invoke_fn_name(self) -> str: + return f"InvokeSubgraph{self.index}" + + @property + def inputs_array_name(self) -> str: + return f"kSubgraph{self.index}Inputs" + + @property + def outputs_array_name(self) -> str: + return f"kSubgraph{self.index}Outputs" + + @property + def nodes_array(self) -> str: + return f"subgraph{self.index}_nodes_" + + def nodes_element(self, operator_idx: int) -> str: + return self.nodes_array + f"[{operator_idx}]" + + def node_data_type(self, operator_idx: int) -> str: + return f"Node{self.index}_{operator_idx}" + + def node_data_name(self, operator_idx: int) -> str: + return f"node_{self.index}_{operator_idx}" + + def generate_c_node_data(self, indent: str) -> str: + node_data_strs: List[str] = [] + for op_idx, op in enumerate(self.operators): + type_name = self.node_data_type(op_idx) + node_name = self.node_data_name(op_idx) + node_data_strs.append(op.generate_c_node_data(type_name, node_name)) + return textwrap.indent("\n\n".join(node_data_strs), indent) + + def generate_c_node_init(self, indent: str) -> str: + node_init_strs: List[str] = [] + for op_idx, op in enumerate(self.operators): + tflite_node_name = self.nodes_element(op_idx) + node_data_name = self.node_data_name(op_idx) + node_init_strs.append( + op.generate_c_node_init(tflite_node_name, node_data_name)) 
+    return textwrap.indent("\n".join(node_init_strs), indent)
+
+  def generate_c_invoke(self, indent: str) -> str:
+    function_template = string.Template(
+        "TfLiteStatus ${function_name}(TfLiteContext* context,\n"
+        "                              tflite::Span<TfLiteNode> nodes) {\n"
+        "  TFLITE_DCHECK(nodes.size() == ${num_nodes});\n"
+        "${body}\n"
+        "  return kTfLiteOk;\n"
+        "}")
+
+    body_template = string.Template(
+        "  TF_LITE_ENSURE_OK(\n"
+        "      context, op_table[${op_code}].invoke(context, &${node}));\n")
+    invoke_strs: List[str] = []
+    for op_idx, op in enumerate(self.operators):
+      invoke_strs.append(
+          body_template.substitute(
+              op_code=self._op_codes[op.op_code_index].full_enum_name,
+              node=f"nodes[{op_idx}]"))
+
+    invoke = function_template.substitute(function_name=self.invoke_fn_name,
+                                          num_nodes=len(self.operators),
+                                          body="".join(invoke_strs))
+    return textwrap.indent(invoke, indent)
+
+  def generate_c_input_array(self, indent: str) -> str:
+    return utils.generate_c_int_array(indent, "size_t", self.inputs_array_name,
+                                      self.inputs)
+
+  def generate_c_output_array(self, indent: str) -> str:
+    return utils.generate_c_int_array(indent, "size_t",
+                                      self.outputs_array_name, self.outputs)
+
+  def generate_c_subgraph_init(self, indent: str) -> str:
+    init_template = string.Template(
+        "{.inputs = {&${input_array}[0], ${input_size}},\n"
+        " .outputs = {&${output_array}[0], ${output_size}},\n"
+        " .nodes = {&${node_array}[0], ${node_size}},\n"
+        " .tensors = {&${tensor_array}[0], ${tensor_size}},\n"
+        " .invoke = &${invoke}},")
+    return textwrap.indent(
+        init_template.substitute(input_array=self.inputs_array_name,
+                                 input_size=len(self.inputs),
+                                 output_array=self.outputs_array_name,
+                                 output_size=len(self.outputs),
+                                 node_array=self.nodes_array,
+                                 node_size=len(self.operators),
+                                 tensor_array=self.tensors_array,
+                                 tensor_size=len(self.tensors),
+                                 invoke=self.invoke_fn_name), indent)
+
+  @property
+  def tensors_array(self) -> str:
+    return f"subgraph{self.index}_tensors_"
+
+  def tensors_element(self, tensor_idx: int) -> str:
+    return self.tensors_array + f"[{tensor_idx}]"
+
+  def tensor_data_type(self, tensor_idx: int) -> str:
+    return f"Tensor{self.index}_{tensor_idx}"
+
+  def tensor_data_name(self, tensor_idx: int) -> str:
+    return f"tensor{self.index}_{tensor_idx}"
+
+  def generate_c_tensor_data(self, indent: str) -> str:
+    tensor_dims_strs: List[str] = []
+    for tensor_idx, tensor in enumerate(self.tensors):
+      type_name = self.tensor_data_type(tensor_idx)
+      tensor_name = self.tensor_data_name(tensor_idx)
+      tensor_dims_strs.append(
+          tensor.generate_c_tensor_dims(type_name, tensor_name))
+    return textwrap.indent("\n\n".join(tensor_dims_strs), indent)
+
+  def generate_c_tensor_init(self, indent: str) -> str:
+    tensor_init_strs: List[str] = []
+    for tensor_idx, tensor in enumerate(self.tensors):
+      tflite_tensor_name = self.tensors_element(tensor_idx)
+      tensor_data_name = self.tensor_data_name(tensor_idx)
+      tensor_init_strs.append(
+          tensor.generate_c_tensor_init(tflite_tensor_name, tensor_data_name))
+    return textwrap.indent("\n".join(tensor_init_strs), indent)
+
+
+class Graph(object):
+
+  def __init__(self, model: schema_fb.ModelT):
+    buffers: List[tensor.Buffer] = [
+        tensor.Buffer("buffer_{}".format(idx), buffer)
+        for idx, buffer in enumerate(model.buffers)
+    ]
+    self._subgraphs: List[Subgraph] = [
+        Subgraph(model, buffers, idx, subgraph)
+        for idx, subgraph in enumerate(model.subgraphs)
+    ]
+
+  @property
+  def subgraphs(self) -> Sequence[Subgraph]:
+    return self._subgraphs
+
+  @property
+  def buffers(self) ->
Sequence[tensor.Buffer]: + buffers: List[tensor.Buffer] = [] + for subgraph in self.subgraphs: + for t in subgraph.tensors: + buffers.append(t.buffer) + return buffers + + @property + def needs_zero_length_int_array(self) -> bool: + return any(subgraph.needs_zero_length_int_array + for subgraph in self.subgraphs) + + +class OpCodeTable(object): + + def __init__(self, models: Sequence[schema_fb.ModelT]): + op_codes = [] + for model in models: + for op_code in model.operatorCodes: + op_codes.append(OpCode(op_code)) + + self._op_codes: List([OpCode]) = list(set(op_codes)) + + @property + def op_codes(self) -> Sequence[OpCode]: + return self._op_codes diff --git a/codegen/inference_generator.py b/codegen/inference_generator.py new file mode 100644 index 00000000000..fe351f36550 --- /dev/null +++ b/codegen/inference_generator.py @@ -0,0 +1,68 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +""" Generates C/C++ inference source code. """ + +import pathlib + +from mako import template +from typing import TypedDict + +from tflite_micro.codegen import graph + +_TEMPLATE_DIR = pathlib.Path(__file__).parent / 'templates' +_HEADER_TEMPLATE = _TEMPLATE_DIR / 'inference.h.mako' +_SOURCE_TEMPLATE = _TEMPLATE_DIR / 'inference.cc.mako' + + +class ModelData(TypedDict): + header_file: str + model_name: str + op_code_table: graph.OpCodeTable + graph: graph.Graph + + +def _render(output_file: pathlib.Path, template_file: pathlib.Path, + model_data: ModelData) -> None: + print("Generating {}".format(output_file)) + t = template.Template(filename=str(template_file)) + with output_file.open('w+') as file: + file.write(t.render(**model_data)) + + +def _generate_header(header_path: pathlib.Path, model_data: ModelData) -> None: + _render(header_path, _HEADER_TEMPLATE, model_data) + + +def _generate_source(source_path: pathlib.Path, model_data: ModelData) -> None: + _render(source_path, _SOURCE_TEMPLATE, model_data) + + +def generate(output_dir: str, output_name: str, + op_code_table: graph.OpCodeTable, graph: graph.Graph) -> None: + """ Generate C/C++ inference code. 
""" + header_file = f"{output_name}.h" + model_data: ModelData = { + 'header_file': header_file, + 'model_name': output_name, + 'op_code_table': op_code_table, + 'graph': graph, + } + + # Ensure output directory exists + output_path = pathlib.Path(output_dir) + output_path.mkdir(parents=True, exist_ok=True) + + _generate_header(output_path / header_file, model_data) + _generate_source(output_path / f"{output_name}.cc", model_data) diff --git a/codegen/operators/BUILD b/codegen/operators/BUILD new file mode 100644 index 00000000000..3a7ae29c225 --- /dev/null +++ b/codegen/operators/BUILD @@ -0,0 +1,52 @@ +load("@rules_python//python:defs.bzl", "py_library") + +package( + default_visibility = ["//:__subpackages__"], + licenses = ["notice"], +) + +py_library( + name = "constants", + srcs = [ + "constants.py", + ], + deps = [ + "//tensorflow/lite/python:schema_py", + ], +) + +py_library( + name = "factory", + srcs = [ + "factory.py", + ], + deps = [ + ":fully_connected", + ":operator", + "//tensorflow/lite/python:schema_py", + ], +) + +py_library( + name = "fully_connected", + srcs = [ + "fully_connected.py", + ], + deps = [ + ":constants", + ":operator", + "//codegen:utils", + "//tensorflow/lite/python:schema_py", + ], +) + +py_library( + name = "operator", + srcs = [ + "operator.py", + ], + deps = [ + "//codegen:utils", + "//tensorflow/lite/python:schema_py", + ], +) diff --git a/codegen/operators/constants.py b/codegen/operators/constants.py new file mode 100644 index 00000000000..b9ff17a1209 --- /dev/null +++ b/codegen/operators/constants.py @@ -0,0 +1,50 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +""" Operator Constants """ + +from typing import Dict + +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb + +ACTIVATION_FUNCS: Dict[int, str] = { + schema_fb.ActivationFunctionType.NONE: "kTfLiteActNone", + schema_fb.ActivationFunctionType.RELU: "kTfLiteActRelu", + schema_fb.ActivationFunctionType.RELU_N1_TO_1: "kTfLiteActReluN1To1", + schema_fb.ActivationFunctionType.RELU6: "kTfLiteActRelu6", + schema_fb.ActivationFunctionType.TANH: "kTfLiteActTanh", + schema_fb.ActivationFunctionType.SIGN_BIT: "kTfLiteActSignBit", +} + +TFLITE_TYPE: Dict[int, str] = { + 0: "kTfLiteNoType", + 1: "kTfLiteFloat32", + 2: "kTfLiteInt32", + 3: "kTfLiteUInt8", + 4: "kTfLiteInt64", + 5: "kTfLiteString", + 6: "kTfLiteBool", + 7: "kTfLiteInt16", + 8: "kTfLiteComplex64", + 9: "kTfLiteInt8", + 10: "kTfLiteFloat16", + 11: "kTfLiteFloat64", + 12: "kTfLiteComplex128", + 13: "kTfLiteUInt64", + 14: "kTfLiteResource", + 15: "kTfLiteVariant", + 16: "kTfLiteUInt32", + 17: "kTfLiteUInt16", + 18: "kTfLiteInt4", +} diff --git a/codegen/operators/factory.py b/codegen/operators/factory.py new file mode 100644 index 00000000000..f62cacb94ed --- /dev/null +++ b/codegen/operators/factory.py @@ -0,0 +1,28 @@ +# Copyright 2023 The TensorFlow Authors. 
All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +""" A factory function for creating operators """ + +from tflite_micro.codegen.operators import fully_connected +from tflite_micro.codegen.operators import operator +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb + +_BUILTIN_OPERATORS: dict[int, operator.Operator.__class__] = { + schema_fb.BuiltinOperator.FULLY_CONNECTED: fully_connected.FullyConnected, +} + + +def create_operator(op_code: schema_fb.OperatorCodeT, op: schema_fb.Operator): + operator_class = _BUILTIN_OPERATORS[op_code.builtinCode] + return operator_class(op) diff --git a/codegen/operators/fully_connected.py b/codegen/operators/fully_connected.py new file mode 100644 index 00000000000..f756bef1e1b --- /dev/null +++ b/codegen/operators/fully_connected.py @@ -0,0 +1,56 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +""" FullyConnected operator """ + +from typing import Dict +import string + +from tflite_micro.codegen.operators import constants +from tflite_micro.codegen.operators import operator +from tflite_micro.codegen import utils +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb + +_WEIGHTS_FORMATS: Dict[int, str] = { + schema_fb.FullyConnectedOptionsWeightsFormat.DEFAULT: + "kTfLiteFullyConnectedWeightsFormatDefault", + schema_fb.FullyConnectedOptionsWeightsFormat.SHUFFLED4x16INT8: + "kTfLiteFullyConnectedWeightsFormatShuffled4x16Int8", +} + + +class FullyConnected(operator.Operator): + + def __init__(self, op: schema_fb.OperatorT): + assert op.builtinOptionsType == schema_fb.BuiltinOptions.FullyConnectedOptions + super(FullyConnected, self).__init__(op) + self._builtin_options: schema_fb.FullyConnectedOptionsT = op.builtinOptions + + def generate_c_builtin_data(self) -> str: + builtin_template = string.Template( + "TfLiteFullyConnectedParams builtin_data = {\n" + " .activation = ${activation},\n" + " .weights_format = ${weights_format},\n" + " .keep_num_dims = ${keep_num_dims},\n" + " .asymmetric_quantize_inputs = ${asymmetric_quantize_inputs},\n" + " .quantized_bias_type = ${quantized_bias_type}};") + return builtin_template.substitute( + activation=constants.ACTIVATION_FUNCS[ + self._builtin_options.fusedActivationFunction], + weights_format=_WEIGHTS_FORMATS[self._builtin_options.weightsFormat], + keep_num_dims=utils.bool_to_c_str(self._builtin_options.keepNumDims), + asymmetric_quantize_inputs=utils.bool_to_c_str( + self._builtin_options.asymmetricQuantizeInputs), + quantized_bias_type=constants.TFLITE_TYPE[ + self._builtin_options.quantizedBiasType]) diff --git a/codegen/operators/operator.py b/codegen/operators/operator.py new file mode 100644 index 00000000000..2879d371664 --- /dev/null +++ b/codegen/operators/operator.py @@ -0,0 +1,92 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +""" Provides object representation for the model that is conducive to code + generation using templates. 
""" + +import abc +from typing import Optional +import string +import textwrap + +from tflite_micro.codegen import utils +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb + + +class Operator(abc.ABC): + + def __init__(self, operator: schema_fb.OperatorT): + self._operator: schema_fb.OperatorT = operator + self._inputs: utils.IntArray = utils.IntArray(self._operator.inputs) + self._outputs: utils.IntArray = utils.IntArray(self._operator.outputs) + self._intermediates: Optional[utils.IntArray] = utils.IntArray( + self._operator.intermediates) if self._operator.intermediates else None + + def generate_c_node_data(self, type_name: str, node_name: str) -> str: + struct_template = string.Template("struct ${type_name} {\n" + "${body}" + "} ${node_name};") + body_template = string.Template("${inputs}\n" + "${outputs}\n" + "${intermediates}\n" + "${builtin_data}\n") + if self._intermediates: + intermediates = self._intermediates.generate_c_struct( + "Intermediates", "intermediates") + else: + intermediates = "// No intermediates" + + body = body_template.substitute( + inputs=self._inputs.generate_c_struct("Inputs", "inputs"), + outputs=self._outputs.generate_c_struct("Outputs", "outputs"), + intermediates=intermediates, + builtin_data=self.generate_c_builtin_data()) + + return struct_template.substitute(type_name=type_name, + node_name=node_name, + body=textwrap.indent(body, " ")) + + def generate_c_node_init(self, tflite_node_name: str, + node_data_name: str) -> str: + init_template = string.Template( + "${tflite_node_name} = TfLiteNode{\n" + " .inputs =" + " reinterpret_cast(&${node_data_name}.inputs),\n" + " .outputs =" + " reinterpret_cast(&${node_data_name}.outputs),\n" + " .intermediates = ${intermediates},\n" + " .user_data = nullptr,\n" + " .builtin_data =" + " static_cast(&${node_data_name}.builtin_data),\n" + " .custom_initial_data = nullptr,\n" + " .custom_initial_data_size = 0};") + + if self._intermediates: + intermediates = ( + "reinterpret_cast(&{}.intermediates)".format( + self._intermediates)) + else: + intermediates = "nullptr" + + return init_template.substitute(tflite_node_name=tflite_node_name, + node_data_name=node_data_name, + intermediates=intermediates) + + @property + def op_code_index(self) -> int: + return self._operator.opcodeIndex + + @abc.abstractmethod + def generate_c_builtin_data(self) -> str: + raise NotImplementedError(f"Generating builtin data in {self.__name__}") diff --git a/codegen/runtime/BUILD b/codegen/runtime/BUILD new file mode 100644 index 00000000000..a1cb6c160bf --- /dev/null +++ b/codegen/runtime/BUILD @@ -0,0 +1,18 @@ +load("//tensorflow/lite/micro:build_def.bzl", "micro_copts") + +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "micro_codegen_context", + srcs = ["micro_codegen_context.cc"], + hdrs = ["micro_codegen_context.h"], + copts = micro_copts(), + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels:op_macros", + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro:micro_context", + "//tensorflow/lite/micro:micro_graph", + "//tensorflow/lite/micro:micro_log", + ], +) diff --git a/codegen/runtime/micro_codegen_context.cc b/codegen/runtime/micro_codegen_context.cc new file mode 100644 index 00000000000..858c823c1f3 --- /dev/null +++ b/codegen/runtime/micro_codegen_context.cc @@ -0,0 +1,139 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "codegen/runtime/micro_codegen_context.h"
+
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/compatibility.h"
+#include "tensorflow/lite/kernels/op_macros.h"
+#include "tensorflow/lite/micro/micro_log.h"
+
+namespace tflite {
+
+MicroCodegenContext::MicroCodegenContext(TfLiteContext* context,
+                                         Span<Subgraph> subgraphs)
+    : context_(context), subgraphs_(subgraphs) {}
+
+void* MicroCodegenContext::GetScratchBuffer(int buffer_idx) {
+  // TODO(rjascani): Implement scratch buffers
+  return nullptr;
+}
+
+TfLiteEvalTensor* MicroCodegenContext::GetEvalTensor(int tensor_idx) {
+  TFLITE_DCHECK(static_cast<size_t>(tensor_idx) <
+                subgraphs_[current_subgraph_idx_].tensors.size());
+  return &subgraphs_[current_subgraph_idx_].tensors[tensor_idx];
+}
+
+TfLiteStatus MicroCodegenContext::set_external_context(
+    void* external_context_payload) {
+  if (external_context_payload == nullptr ||
+      external_context_payload_ != nullptr) {
+    MicroPrintf(
+        "Attempting to set external context to %x but it was %x already",
+        external_context_payload, external_context_payload_);
+    return kTfLiteError;
+  }
+
+  external_context_payload_ = external_context_payload;
+  return kTfLiteOk;
+}
+
+void* MicroCodegenContext::external_context() {
+  return external_context_payload_;
+}
+
+MicroGraph& MicroCodegenContext::graph() { return *this; }
+
+void* MicroCodegenContext::AllocatePersistentBuffer(size_t) {
+  // Not allowed at Eval
+  TFLITE_ABORT;
+  return nullptr;
+}
+
+TfLiteStatus MicroCodegenContext::RequestScratchBufferInArena(size_t, int*) {
+  // Not allowed at Eval
+  TFLITE_ABORT;
+  return kTfLiteError;
+}
+
+TfLiteTensor* MicroCodegenContext::AllocateTempTfLiteTensor(int) {
+  // Not allowed at Eval
+  TFLITE_ABORT;
+  return nullptr;
+}
+
+void MicroCodegenContext::DeallocateTempTfLiteTensor(TfLiteTensor*) {
+  // Not allowed at Eval
+  TFLITE_ABORT;
+}
+
+uint8_t* MicroCodegenContext::AllocateTempBuffer(size_t, size_t) {
+  // Not allowed at Eval
+  TFLITE_ABORT;
+  return nullptr;
+}
+
+void MicroCodegenContext::DeallocateTempBuffer(uint8_t*) {
+  // Not allowed at Eval
+  TFLITE_ABORT;
+}
+
+TfLiteStatus MicroCodegenContext::InvokeSubgraph(int subgraph_idx) {
+  TF_LITE_ENSURE(context_,
+                 static_cast<size_t>(subgraph_idx) < subgraphs_.size());
+  size_t previous_subgraph_idx = current_subgraph_idx_;
+  current_subgraph_idx_ = subgraph_idx;
+  TfLiteStatus status =
+      subgraphs_[subgraph_idx].invoke(context_, subgraphs_[subgraph_idx].nodes);
+  current_subgraph_idx_ = previous_subgraph_idx;
+  return status;
+}
+
+size_t MicroCodegenContext::NumSubgraphInputs(int subgraph_idx) {
+  TFLITE_DCHECK(static_cast<size_t>(subgraph_idx) < subgraphs_.size());
+  return subgraphs_[subgraph_idx].inputs.size();
+}
+
+TfLiteEvalTensor* MicroCodegenContext::GetSubgraphInput(int subgraph_idx,
+                                                        int input_idx) {
+  TFLITE_DCHECK(static_cast<size_t>(subgraph_idx) < subgraphs_.size());
+  TFLITE_DCHECK(static_cast<size_t>(input_idx) <
+                subgraphs_[subgraph_idx].inputs.size());
+  const size_t tensor_idx = subgraphs_[subgraph_idx].inputs[input_idx];
+  return &subgraphs_[subgraph_idx].tensors[tensor_idx];
+}
+
+size_t MicroCodegenContext::NumSubgraphOutputs(int subgraph_idx) {
+  TFLITE_DCHECK(static_cast<size_t>(subgraph_idx) < subgraphs_.size());
+  return subgraphs_[subgraph_idx].outputs.size();
+}
+
+TfLiteEvalTensor* MicroCodegenContext::GetSubgraphOutput(int subgraph_idx,
+                                                         int output_idx) {
+  TFLITE_DCHECK(static_cast<size_t>(subgraph_idx) < subgraphs_.size());
+  TFLITE_DCHECK(static_cast<size_t>(output_idx) <
+                subgraphs_[subgraph_idx].outputs.size());
+  const size_t tensor_idx = subgraphs_[subgraph_idx].outputs[output_idx];
+  return &subgraphs_[subgraph_idx].tensors[tensor_idx];
+}
+
+int MicroCodegenContext::NumSubgraphs() { return subgraphs_.size(); }
+
+MicroResourceVariables* MicroCodegenContext::GetResourceVariables() {
+  return nullptr;
+}
+
+}  // namespace tflite
diff --git a/codegen/runtime/micro_codegen_context.h b/codegen/runtime/micro_codegen_context.h
new file mode 100644
index 00000000000..ca01a63bce4
--- /dev/null
+++ b/codegen/runtime/micro_codegen_context.h
@@ -0,0 +1,90 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef CODEGEN_RUNTIME_MICRO_CODEGEN_CONTEXT_H_
+#define CODEGEN_RUNTIME_MICRO_CODEGEN_CONTEXT_H_
+
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_graph.h"
+
+namespace tflite {
+
+// A poor man's std::span, we should consider using the Pigweed span instead.
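+// Span is a minimal, non-owning view over a contiguous array: it stores only
+// a pointer and an element count, which lets the generated model reference
+// its statically allocated node, tensor, and buffer arrays without copies.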
+template <typename T>
+class Span {
+ public:
+  constexpr Span(T* data, size_t size) noexcept : data_(data), size_(size) {}
+
+  constexpr T& operator[](size_t idx) const noexcept { return *(data_ + idx); }
+
+  constexpr T* data() const noexcept { return data_; }
+  constexpr size_t size() const noexcept { return size_; }
+
+ private:
+  T* data_;
+  size_t size_;
+};
+
+struct Subgraph {
+  Span<const size_t> inputs;
+  Span<const size_t> outputs;
+  Span<TfLiteNode> nodes;
+  Span<TfLiteEvalTensor> tensors;
+  TfLiteStatus (*invoke)(TfLiteContext*, Span<TfLiteNode>);
+};
+
+class MicroCodegenContext : public MicroContext, MicroGraph {
+ public:
+  MicroCodegenContext(TfLiteContext* context, Span<Subgraph> subgraphs);
+
+  ~MicroCodegenContext() = default;
+
+  // MicroContext API
+  void* AllocatePersistentBuffer(size_t bytes) override;
+  TfLiteStatus RequestScratchBufferInArena(size_t bytes,
+                                           int* buffer_idx) override;
+  void* GetScratchBuffer(int buffer_idx) override;
+  TfLiteTensor* AllocateTempTfLiteTensor(int tensor_idx) override;
+  void DeallocateTempTfLiteTensor(TfLiteTensor* tensor) override;
+  uint8_t* AllocateTempBuffer(size_t size, size_t alignment) override;
+  void DeallocateTempBuffer(uint8_t* buffer) override;
+  TfLiteEvalTensor* GetEvalTensor(int tensor_idx) override;
+  TfLiteStatus set_external_context(void* external_context_payload) override;
+  void* external_context() override;
+  MicroGraph& graph() override;
+
+  // MicroGraph API
+  TfLiteStatus InvokeSubgraph(int subgraph_idx) override;
+  size_t NumSubgraphInputs(int subgraph_idx) override;
+  TfLiteEvalTensor* GetSubgraphInput(int subgraph_idx, int input_idx) override;
+  size_t NumSubgraphOutputs(int subgraph_idx) override;
+  TfLiteEvalTensor* GetSubgraphOutput(int subgraph_idx,
+                                      int output_idx) override;
+  int NumSubgraphs() override;
+  MicroResourceVariables* GetResourceVariables() override;
+
+ private:
+  TfLiteContext* context_;
+  Span<Subgraph> subgraphs_;
+  size_t current_subgraph_idx_ = 0;
+  void* external_context_payload_ = nullptr;
+
+  TF_LITE_REMOVE_VIRTUAL_DELETE
+};
+
+}  // namespace tflite
+
+#endif  // CODEGEN_RUNTIME_MICRO_CODEGEN_CONTEXT_H_
diff --git a/codegen/templates/inference.cc.mako b/codegen/templates/inference.cc.mako
new file mode 100644
index 00000000000..cb6e59ad2d2
--- /dev/null
+++ b/codegen/templates/inference.cc.mako
@@ -0,0 +1,93 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+/* AUTOMATICALLY GENERATED DO NOT MODIFY */
+
+#include "${header_file}"
+
+#include "codegen/runtime/micro_codegen_context.h"
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/c_api_types.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/compatibility.h"
+#include "tensorflow/lite/micro/kernels/micro_ops.h"
+#include "tensorflow/lite/micro/micro_common.h"
+#include "tensorflow/lite/micro/micro_context.h"
+
+namespace ${model_name} {
+namespace {
+// TODO(rjascani): We should probably split out the OpTable to a separate file
+// once we start generating for multiple models.
+enum OpCode {
+% for op_code in op_code_table.op_codes:
+  ${op_code.enum_name},
+% endfor
+  kCount
+};
+
+TFLMInferenceRegistration op_table[OpCode::kCount] = {
+% for op_code in op_code_table.op_codes:
+    ${op_code.register_function}(),
+% endfor
+};
+
+% for buffer in graph.buffers:
+${buffer.generate_c_buffer_array("")}
+% endfor
+% for subgraph in graph.subgraphs:
+${subgraph.generate_c_input_array("")}
+
+${subgraph.generate_c_output_array("")}
+
+${subgraph.generate_c_node_data("")}
+
+${subgraph.generate_c_tensor_data("")}
+% endfor
+% if graph.needs_zero_length_int_array:
+
+TfLiteIntArray zero_length_int_array = {};
+% endif
+
+% for subgraph in graph.subgraphs:
+${subgraph.generate_c_invoke("")}
+% endfor
+
+}  // namespace
+
+Model::Model()
+    : subgraphs_{
+%for subgraph in graph.subgraphs:
+${subgraph.generate_c_subgraph_init("          ")}
+%endfor
+      },
+      micro_context_{&context_, {&subgraphs_[0], ${len(graph.subgraphs)}}} {
+  context_.impl_ = static_cast<void*>(&micro_context_);
+  context_.ReportError = nullptr;
+  context_.GetTensor = nullptr;
+  context_.GetEvalTensor = tflite::MicroContextGetEvalTensor;
+  context_.profiler = nullptr;
+  context_.GetExternalContext = nullptr;
+  context_.GetScratchBuffer = nullptr;
+
+% for subgraph in graph.subgraphs:
+${subgraph.generate_c_node_init("  ")}
+
+${subgraph.generate_c_tensor_init("  ")}
+% endfor
+}
+
+TfLiteStatus Model::Invoke() { return micro_context_.InvokeSubgraph(0); }
+
+}  // namespace ${model_name}
diff --git a/codegen/templates/inference.h.mako b/codegen/templates/inference.h.mako
new file mode 100644
index 00000000000..5ab64e108c1
--- /dev/null
+++ b/codegen/templates/inference.h.mako
@@ -0,0 +1,44 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +/* AUTOMATICALLY GENERATED DO NOT MODIFY */ + +#pragma once + +#include "codegen/runtime/micro_codegen_context.h" +#include "tensorflow/lite/c/c_api_types.h" +#include "tensorflow/lite/c/common.h" + +namespace ${model_name} { + +class Model { + public: + Model(); + + TfLiteStatus Invoke(); + + private: + TfLiteContext context_ = {}; + tflite::Subgraph subgraphs_[${len(graph.subgraphs)}]; + tflite::MicroCodegenContext micro_context_; +% for subgraph in graph.subgraphs: + TfLiteNode ${subgraph.nodes_array}[${len(subgraph.operators)}] = {}; +% endfor +% for subgraph in graph.subgraphs: + TfLiteEvalTensor ${subgraph.tensors_array}[${len(subgraph.tensors)}] = {}; +% endfor +}; + +} // namespace ${model_name} diff --git a/codegen/tensor.py b/codegen/tensor.py new file mode 100644 index 00000000000..83870fc0e24 --- /dev/null +++ b/codegen/tensor.py @@ -0,0 +1,127 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +""" Tensor class """ + +from typing import Dict, Optional +import string +import textwrap + +from tflite_micro.codegen import utils +from tflite_micro.tensorflow.lite.python import schema_py_generated as schema_fb + +_TENSOR_TYPES: Dict[int, str] = { + schema_fb.TensorType.FLOAT16: "kTfLiteFloat16", + schema_fb.TensorType.FLOAT32: "kTfLiteFloat32", + schema_fb.TensorType.FLOAT64: "kTfLiteFloat64", + schema_fb.TensorType.INT16: "kTfLiteInt16", + schema_fb.TensorType.UINT16: "kTfLiteUInt16", + schema_fb.TensorType.INT32: "kTfLiteInt32", + schema_fb.TensorType.UINT32: "kTfLiteUInt32", + schema_fb.TensorType.UINT8: "kTfLiteUInt8", + schema_fb.TensorType.INT8: "kTfLiteInt8", + schema_fb.TensorType.INT64: "kTfLiteInt64", + schema_fb.TensorType.UINT64: "kTfLiteUInt64", + schema_fb.TensorType.STRING: "kTfLiteString", + schema_fb.TensorType.BOOL: "kTfLiteBool", + schema_fb.TensorType.COMPLEX64: "kTfLiteComplex64", + schema_fb.TensorType.COMPLEX128: "kTfLiteComplex128", + schema_fb.TensorType.RESOURCE: "kTfLiteResource", + schema_fb.TensorType.VARIANT: "kTfLiteVariant", + schema_fb.TensorType.INT4: "kTfLiteInt4", +} + + +class Buffer(object): + """ This buffer could be either a static array or a pointer into the arena """ + + def __init__(self, buffer_name: str, buffer: schema_fb.BufferT): + # TODO(rjascani): Get arena allocation offsets from preprocessor + self._buffer_name = buffer_name + self._buffer = buffer + + @property + def address(self) -> str: + if self._buffer is None or self._buffer.data is None: + # TODO(rjascani): This needs to point into the arena + return f"nullptr /* {self._buffer_name} */" + return f"&{self._buffer_name}" + + def generate_c_buffer_array(self, indent: str) -> str: + if self._buffer is None or self._buffer.data is None: + return f"// {self._buffer_name} is located in the arena\n" + + buffer_template = string.Template( + "alignas(16) uint8_t 
${buffer_name}[${size}] = {\n"
+        "${body}\n"
+        "};\n")
+
+    byte_strs = ['0x{:02X}'.format(b) for b in self._buffer.data]
+
+    lines = []
+    for byte_strs_for_line in utils.split_into_chunks(byte_strs, 12):
+      bytes_segment = ', '.join(byte_strs_for_line)
+      lines.append(f'    {bytes_segment},')
+
+    return textwrap.indent(
+        buffer_template.substitute(buffer_name=self._buffer_name,
+                                   size=len(self._buffer.data),
+                                   body='\n'.join(lines)), indent)
+
+
+class Tensor(object):
+
+  def __init__(self, buffer: Buffer, tensor: schema_fb.TensorT):
+    self._buffer = buffer
+    self._tensor: schema_fb.TensorT = tensor
+
+  @property
+  def buffer_index(self) -> int:
+    return self._tensor.buffer
+
+  @property
+  def buffer(self) -> Buffer:
+    return self._buffer
+
+  @property
+  def has_shape(self) -> bool:
+    return self._tensor.shape is not None
+
+  @property
+  def needs_zero_length_int_array(self) -> bool:
+    return not self.has_shape
+
+  def generate_c_tensor_dims(self, type_name: str, tensor_name: str) -> str:
+    if not self.has_shape:
+      return f"// No data dims necessary for {tensor_name}"
+    return utils.IntArray(self._tensor.shape).generate_c_struct(
+        type_name + "Dims", tensor_name + "_dims")
+
+  def generate_c_tensor_init(self, tflite_tensor_name: str,
+                             tensor_name: str) -> str:
+    init_template = string.Template(
+        "${tflite_tensor_name} = TfLiteEvalTensor{\n"
+        "    .data = {.data = static_cast<void*>(${data})},\n"
+        "    .dims = ${dims},\n"
+        "    .type = ${tflite_type}};")
+    dims = "reinterpret_cast<TfLiteIntArray*>(&{})".format(
+        f"{tensor_name}_dims" if self._tensor.
+        shape is not None else "zero_length_int_array")
+
+    return init_template.substitute(
+        tflite_tensor_name=tflite_tensor_name,
+        tensor_name=tensor_name,
+        data=self._buffer.address,
+        dims=dims,
+        tflite_type=_TENSOR_TYPES[self._tensor.type])
diff --git a/codegen/utils.py b/codegen/utils.py
new file mode 100644
index 00000000000..c6c31c8ada2
--- /dev/null
+++ b/codegen/utils.py
@@ -0,0 +1,101 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+""" Utility functions and classes for code generation. """
+
+from typing import Any, Generator, Iterable, List, Optional, Sequence, Tuple
+import string
+import textwrap
+import itertools
+
+
+def to_pascal_case(s: str) -> str:
+  """ Basic function for converting snake_case to PascalCase. """
+  # This isn't perfect, as there might be some cases where we want underscores
+  # to remain if they are used as number separators.
+  return s.title().replace('_', '')
+
+
+def bool_to_c_str(b: bool) -> str:
+  """ Convert a python bool value to a C bool string.
I.e., False -> 'false' """
+  return str(b).lower()
+
+
+def split_into_chunks(
+    data: Iterable[Any],
+    chunk_size: int) -> Generator[Tuple[Any, ...], None, None]:
+  """Splits an iterable into chunks of a given size."""
+  data_iterator = iter(data)
+  while True:
+    chunk = tuple(itertools.islice(data_iterator, chunk_size))
+    if not chunk:
+      break
+    yield chunk
+
+
+def generate_c_int_array(indent: str, int_type: str, name: str,
+                         ints: Sequence[int]) -> str:
+  int_strs = ['{}'.format(i) for i in ints]
+
+  # Try to do it on a single line first
+  single_line_array_template = string.Template(
+      "constexpr ${int_type} ${name}[${size}] = {${data}};")
+  single_line = textwrap.indent(
+      single_line_array_template.substitute(int_type=int_type,
+                                            name=name,
+                                            size=len(int_strs),
+                                            data=', '.join(int_strs)), indent)
+
+  if len(single_line) < 81:
+    return single_line
+
+  # Couldn't fit, so split it across multiple lines
+  multi_line_array_template = string.Template(
+      "constexpr ${int_type} ${name}[${size}] = {\n"
+      "${body}\n"
+      "};\n")
+
+  lines = []
+  for int_strs_for_line in split_into_chunks(int_strs, 12):
+    ints_segment = ', '.join(int_strs_for_line)
+    lines.append(f'    {ints_segment},')
+
+  return textwrap.indent(
+      multi_line_array_template.substitute(int_type=int_type,
+                                           name=name,
+                                           size=len(ints),
+                                           body='\n'.join(lines)), indent)
+
+
+class IntArray(object):
+  """ A helper class for generating int arrays that can be used to provide the
+  backing storage for a TfLiteIntArray. """
+
+  def __init__(self, data: List[int]):
+    self._data = data
+
+  def generate_c_struct(self, type_name: str,
+                        variable_name: Optional[str]) -> str:
+    struct_template = string.Template("struct ${type_name} {\n"
+                                      "  int size = ${size};\n"
+                                      "  int data[${size}] = {${data}};\n"
+                                      "}")
+    # TODO(rjascani): Make this pretty print in multi-line chunks
+    int_strs = ['{}'.format(i) for i in self._data]
+    c_struct_str = struct_template.substitute(type_name=type_name,
+                                              size=len(int_strs),
+                                              data=', '.join(int_strs))
+    if variable_name:
+      return c_struct_str + " {};".format(variable_name)
+    return c_struct_str + ";"
diff --git a/debugging_output.md b/debugging_output.md
new file mode 100644
index 00000000000..59aa14e0f90
--- /dev/null
+++ b/debugging_output.md
@@ -0,0 +1,76 @@
+# How to debug invalid output
+
+The TFLM debugging output tools let users debug their models by comparing the
+intermediate values (the output of each op/kernel) produced by TFLM and TfLite
+after a model is invoked. They also provide a way to compare intermediate
+values between the TFLM x86 reference implementations and optimized
+implementations.
+
+## How to debug TFLM Interpreter output on embedded targets
+
+First, run a C++ binary that takes a TfLite model and writes out a file
+containing random inputs and the corresponding output values for each layer of
+that model.
+
+Second, provide the TfLite model and the file produced by the C++ binary to a
+Python script. The script runs TFLM x86 inference and compares the results
+against the expected output.
+
+## How to debug TFLM Python Interpreter output
+
+When only a TfLite model is provided as input, the Python script mentioned in
+the section above generates random inputs and compares TFLM vs. TfLite
+inference outputs for each layer of the model.
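+
+Conceptually, the per-layer check these tools perform amounts to the following
+minimal sketch (illustrative only: the helper name `compare_layers` and the
+`np.allclose` tolerance are assumptions, not the scripts' actual
+implementation):
+
+```
+import numpy as np
+
+def compare_layers(tflm_outputs, tflite_outputs, atol=1e-5):
+  """Return the indices of layers whose TFLM and TfLite outputs disagree."""
+  mismatches = []
+  for idx, (tflm_out, tflite_out) in enumerate(zip(tflm_outputs,
+                                                   tflite_outputs)):
+    if not np.allclose(tflm_out, tflite_out, atol=atol):
+      mismatches.append(idx)
+  return mismatches
+```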
+
+## C++ Expected Layer by Layer Output Tool on TFLite Micro
+
+This C++ binary takes a TfLite model and produces a flatbuffer file with the
+random inputs and their corresponding golden output values appended to it. That
+file can then be passed to the Python debugging tool, which compares the golden
+values against the x86 TFLM reference kernel implementations.
+
+The C++ binary writes the debugging file to the path provided in the 2nd
+argument, using the TfLite model provided in the 1st argument.
+
+##### Bazel/Blaze command:
+
+```
+bazel run tensorflow/lite/micro/tools:layer_cc -- \
+    <path to input TfLite file> \
+    <path to output debug file>
+```
+
+##### How to build using the Makefile:
+
+```
+make -f tensorflow/lite/micro/tools/make/Makefile layer_by_layer_output_tool -j24
+```
+
+## Python Layer by Layer Debugging Tool
+
+The Python tool/script can first be used to compare TFLM vs. TfLite outputs for
+random inputs by providing only a TfLite file.
+
+#### TfLite vs. TFLM command:
+```
+bazel run tensorflow/lite/micro/tools:layer_by_layer_debugger -- \
+    --input_tflite_file=<path to TfLite file>
+```
+
+The Python tool/script can also be used to compare TFLM's Python x86 output
+against the expected output produced by the C++ binary.
+
+#### TFLM vs. Expected command:
+```
+bazel run tensorflow/lite/micro/tools:layer_by_layer_debugger -- \
+    --input_tflite_file=<path to TfLite file> \
+    --layer_by_layer_data_file=<path to debug file>
+```
+
+#### Optional Flags:
+
+`--print_dump`
+When this flag is set, the TFLM output of each compared layer is printed.
+
+`--rng`
+Integer seed for the random number generator used to generate input data for
+comparisons against TFLite. (Default: 42)
diff --git a/python/py_namespace.bzl b/python/py_namespace.bzl
new file mode 100644
index 00000000000..13a7a0f2d59
--- /dev/null
+++ b/python/py_namespace.bzl
@@ -0,0 +1,141 @@
+"""Rule py_namespace(), augmenting @rules_python, for relocating
+py_library() and py_package() files underneath a given Python namespace.
+
+The stock @rules_python py_library() -> py_package() -> py_wheel() BUILD file
+workflow packages files at Python package paths set to the source paths of the
+files relative to the workspace root. This has several problems. Firstly, it
+implies that files must be located underneath a source directory with the same
+name as the desired Python namespace package. (py_wheel.strip_path_prefixes
+can remove path components, but cannot add them.) This is not always feasible
+or desirable.
+
+Secondly, this path naming is incompatible with the PYTHONPATH set by
+@rules_python when executing Python programs in the source tree via
+py_binary(). PYTHONPATH is set such that imports should begin with the
+WORKSPACE name, followed by the path from the workspace root. py_wheel(),
+however, packages files such that imports use only the path from the workspace
+root.
+
+For example, the source file:
+    example/hello.py
+is imported by a py_binary() running in the source tree as:
+    `import workspace_name.example.hello`
+but must be imported from within the package created by py_wheel() as:
+    `import example.hello`
+
+The end result is that code cannot be written to work both in the source tree
+and installed in a Python environment via a package.
+
+py_namespace() fixes these problems by providing the means to package files
+within a Python package namespace without adding a corresponding directory in
+the source tree. The BUILD workflow changes to py_library() -> py_package() ->
+**py_namespace()** -> py_wheel().
For example:
+
+```
+# in example/BUILD
+
+py_library(
+    name = "library",
+    srcs = ["hello.py"],
+    deps = ...,
+)
+
+py_package(
+    name = "package",
+    deps = [":library"],
+)
+
+py_namespace(
+    name = "namespace",
+    deps = [":package"],
+    namespace = "foo",
+)
+
+py_wheel(
+    ....
+    deps = [":namespace"],
+)
+```
+
+In this case, the source file:
+    example/hello.py
+which is imported by a py_binary() running in the source tree as:
+    `import workspace_name.example.hello`
+is imported from the package created by py_wheel() as:
+    `import foo.example.hello`
+
+If the namespace and the WORKSPACE name match, the import paths used when
+running in the source tree will match the import paths used when installed in
+the Python environment.
+
+Furthermore, the Python package can be given an __init__.py file via the
+attribute `init`. The given file is relocated directly under the namespace as
+__init__.py, regardless of its path in the source tree. This __init__.py can be
+used for, among other things, providing a user-friendly public API: providing
+aliases for modules otherwise deeply nested in subpackages due to their
+location in the source tree.
+"""
+
+def _relocate_init(ctx):
+    # Copy the init file directly underneath the namespace directory.
+    outfile = ctx.actions.declare_file(ctx.attr.namespace + "/__init__.py")
+    ctx.actions.run_shell(
+        inputs = [ctx.file.init],
+        outputs = [outfile],
+        arguments = [ctx.file.init.path, outfile.path],
+        command = "cp $1 $2",
+    )
+    return outfile
+
+def _relocate_deps(ctx):
+    # Copy all transitive deps underneath the namespace directory. E.g.,
+    #     example/hello.py
+    # becomes:
+    #     namespace/example/hello.py
+    outfiles = []
+    inputs = depset(transitive = [dep[DefaultInfo].files for dep in ctx.attr.deps])
+
+    for infile in sorted(inputs.to_list()):
+        outfile = ctx.actions.declare_file(ctx.attr.namespace + "/" + infile.short_path)
+        ctx.actions.run_shell(
+            inputs = [infile],
+            outputs = [outfile],
+            arguments = [infile.path, outfile.path],
+            command = "cp $1 $2",
+        )
+        outfiles.append(outfile)
+
+    return outfiles
+
+def _py_namespace(ctx):
+    # Copy all input files underneath the namespace directory and return a
+    # Provider with the new file locations.
+
+    outfiles = []
+
+    if ctx.file.init:
+        outfiles.append(_relocate_init(ctx))
+
+    outfiles.extend(_relocate_deps(ctx))
+
+    return [
+        DefaultInfo(files = depset(outfiles)),
+    ]
+
+py_namespace = rule(
+    implementation = _py_namespace,
+    attrs = {
+        "init": attr.label(
+            doc = "optional file for __init__.py",
+            allow_single_file = [".py"],
+            mandatory = False,
+        ),
+        "namespace": attr.string(
+            doc = "name for Python namespace",
+            mandatory = True,
+        ),
+        "deps": attr.label_list(
+            doc = "list of py_library() and py_package()s to include",
+            mandatory = True,
+        ),
+    },
+)
diff --git a/python/tflite_micro/BUILD b/python/tflite_micro/BUILD
index 24efc9f7d97..282e9db7be5 100644
--- a/python/tflite_micro/BUILD
+++ b/python/tflite_micro/BUILD
@@ -1,4 +1,9 @@
+load("@bazel_skylib//rules:common_settings.bzl", "string_flag")
+load("@rules_python//python:defs.bzl", "py_library", "py_test")
 load("@pybind11_bazel//:build_defs.bzl", "pybind_extension")
+load("//python:py_namespace.bzl", "py_namespace")
+load("//tools:expand_stamp_vars.bzl", "expand_stamp_vars")
+load("@rules_python//python:packaging.bzl", "py_package", "py_wheel")
 load("@tflm_pip_deps//:requirements.bzl", "requirement")
 load(
     "//tensorflow/lite/micro:build_def.bzl",
@@ -68,9 +73,7 @@ py_library(
     srcs = [
         "runtime.py",
     ],
-    data = [
-        ":_runtime.so",
-    ],
+    data = [":_runtime.so"],
     srcs_version = "PY3",
     visibility = ["//visibility:public"],
     deps = [
@@ -89,9 +92,164 @@ py_test(
         "noubsan",
     ],
     deps = [
-        requirement("numpy"),
-        requirement("tensorflow-cpu"),
         ":runtime",
+        requirement("numpy"),
+        requirement("tensorflow"),
+        "//tensorflow/lite/micro/examples/recipes:add_four_numbers",
         "//tensorflow/lite/micro/testing:generate_test_models_lib",
     ],
 )
+
+py_library(
+    name = "postinstall_check",
+    srcs = [
+        "postinstall_check.py",
+    ],
+    data = [
+        "sine_float.tflite",
+    ],
+)
+
+# Generate a version attribute, imported as tflite_micro.__version__, using
+# stamp (a.k.a. workspace status) variables.
+expand_stamp_vars(
+    name = "version",
+    out = "_version.py",
+    template = "_version.py.in",
+)
+
+# Collect the `deps` and their transitive dependencies together into a set of
+# files to package. The files retain their full path relative to the workspace
+# root, which determines the subpackage path at which they're located within
+# the Python distribution package.
+py_package(
+    name = "files_to_package",
+
+    # Only Python subpackage paths matching the following prefixes are included
+    # in the files to package. This avoids packaging, e.g., numpy, which is a
+    # transitive dependency of the tflm runtime target. This list may require
+    # modification when adding, directly or indirectly, `deps` from other paths
+    # in the tflm tree.
+    packages = [
+        "python.tflite_micro",
+        "tensorflow.lite.python",
+        "tensorflow.lite.tools.flatbuffer_utils",
+    ],
+    deps = [
+        ":postinstall_check",
+        ":runtime",
+        ":version",
+    ],
+)
+
+# Relocate `deps` underneath the given Python package namespace, otherwise
+# maintaining their full paths relative to the workspace root.
+#
+# For example:
+#     ${workspace_root}/example/hello.py
+# becomes:
+#     namespace.example.hello
+#
+# Place `init` at the root of the namespace, regardless of `init`'s path in the
+# source tree.
+py_namespace( + name = "namespace", + init = "__init__.py", + namespace = "tflite_micro", + deps = [ + ":files_to_package", + ], +) + +expand_stamp_vars( + name = "description_file", + out = "README.pypi.md", + template = "README.pypi.md.in", +) + +# Building the :whl or its descendants requires the following build setting to +# supply the Python compatibility tags for the wheel metadata. +string_flag( + name = "compatibility_tag", + build_setting_default = "local", + values = [ + "cp310_cp310_manylinux_2_28_x86_64", + "cp311_cp311_manylinux_2_28_x86_64", + "local", + ], +) + +config_setting( + name = "cp310_cp310_manylinux_2_28_x86_64", + flag_values = { + ":compatibility_tag": "cp310_cp310_manylinux_2_28_x86_64", + }, +) + +config_setting( + name = "cp311_cp311_manylinux_2_28_x86_64", + flag_values = { + ":compatibility_tag": "cp311_cp311_manylinux_2_28_x86_64", + }, +) + +config_setting( + name = "local", + flag_values = { + ":compatibility_tag": "local", + }, +) + +py_wheel( + name = "whl", + # This macro yields additional targets: + # + # - whl.dist: build a properly named file under whl_dist/ + # + abi = select({ + ":cp310_cp310_manylinux_2_28_x86_64": "cp310", + ":cp311_cp311_manylinux_2_28_x86_64": "cp311", + ":local": "none", + }), + description_file = ":description_file", + distribution = "tflite_micro", + platform = select({ + ":cp310_cp310_manylinux_2_28_x86_64": "manylinux_2_28_x86_64", + ":cp311_cp311_manylinux_2_28_x86_64": "manylinux_2_28_x86_64", + ":local": "any", + }), + python_tag = select({ + ":cp310_cp310_manylinux_2_28_x86_64": "cp310", + ":cp311_cp311_manylinux_2_28_x86_64": "cp311", + ":local": "py3", + }), + requires = [ + "flatbuffers", + "numpy", + "tensorflow", + ], + stamp = 1, # 1 == always stamp + strip_path_prefixes = [package_name()], + summary = "TensorFlow Lite for Microcontrollers", + twine = "@tflm_pip_deps_twine//:pkg", + version = "{BUILD_EMBED_LABEL}.dev{STABLE_GIT_COMMIT_TIME}", + deps = [ + ":namespace", + ], +) + +sh_test( + name = "whl_test", + srcs = [ + "whl_test.sh", + ], + args = [ + "$(location :whl)", + ], + data = [ + ":whl", + ], + tags = [ + "notap", # See http://b/294278650#comment4 for more details. + ], +) diff --git a/python/tflite_micro/README.md b/python/tflite_micro/README.md index cf5f638dd15..927705f42a7 100644 --- a/python/tflite_micro/README.md +++ b/python/tflite_micro/README.md @@ -1,25 +1,127 @@ -# TFLM Python Interpreter +# The `tflite_micro` Python Package -The TFLM interpreter can be invoked from Python by using the Python interpreter -wrapper in this directory. +This directory contains the `tflite_micro` Python package. The following is +mainly documentation for its developers. -## Usage +The `tflite_micro` package contains a complete TFLM interpreter built as a +CPython extension module. The build of simple Python packages may be driven by +standard Python package builders such as `build`, `setuptools`, and `flit`; +however, as TFLM is first and foremost a large C/C++ project, `tflite_micro`'s +build is instead driven by its C/C++ build system Bazel. -There are two ways to import the Python wrapper, either by using Bazel/Blaze, or -in near future by installing a PyPi package. +## Building and installing locally -### Bazel +### Building -#### Build +The Bazel target `//python/tflite_micro:whl.dist` builds a `tflite_micro` +Python *.whl* under the output directory `bazel-bin/python/tflite_micro/whl_dist`. For example: +``` +% bazel build //python/tflite_micro:whl.dist +.... 
+Target //python/tflite_micro:whl.dist up-to-date:
+  bazel-bin/python/tflite_micro/whl_dist
+
+% tree bazel-bin/python/tflite_micro/whl_dist
+bazel-bin/python/tflite_micro/whl_dist
+└── tflite_micro-0.dev20230920161638-py3-none-any.whl
+```
+
+### Installing
+
+Install the resulting *.whl* via pip. For example, in a Python virtual
+environment:
+```
+% python3 -m venv ~/tmp/venv
+% source ~/tmp/venv/bin/activate
+(venv) $ pip install bazel-bin/python/tflite_micro/whl_dist/tflite_micro-0.dev20230920161638-py3-none-any.whl
+Processing ./bazel-bin/python/tflite_micro/whl_dist/tflite_micro-0.dev20230920161638-py3-none-any.whl
+....
+Installing collected packages: [....]
+```
+
+The package should now be importable and usable. For example:
+```
+(venv) $ python
+Python 3.10.12 (main, Jun 11 2023, 05:26:28) [GCC 11.4.0] on linux
+Type "help", "copyright", "credits" or "license" for more information.
+>>> import tflite_micro
+>>> tflite_micro.postinstall_check.passed()
+True
+>>> i = tflite_micro.runtime.Interpreter.from_file("foo.tflite")
+>>> # etc.
+```
+
+## Building and uploading to PyPI
+
+The *.whl* generated above is unsuitable for distribution to the wider world
+via PyPI. The extension module is inevitably compiled against a particular
+Python implementation and platform C library. The resulting package is only
+binary-compatible with a system running the same Python implementation and a
+compatible (typically the same or newer) C library.
+
+The solution is to distribute multiple *.whl*s, one built for each Python
+implementation and platform combination. TFLM accomplishes this by running
+Bazel builds from within multiple, uniquely configured Docker containers. The
+images used are based on standards-conforming images published by the Python
+Packaging Authority (PyPA) for exactly such use.
+
+Python *.whl*s contain metadata used by installers such as `pip` to determine
+which distributions (*.whl*s) are compatible with the target platform. See the
+PyPA specification for [platform compatibility
+tags](https://packaging.python.org/en/latest/specifications/platform-compatibility-tags/).
+
+### Building
+
+In an environment with a working Docker installation, run the script
+`python/tflite_micro/pypi_build.sh <python-tag>` once for each tag. The
+script's online help (`--help`) lists the available tags. The script builds an
+appropriate Docker container and invokes a Bazel build and test within it.
+For example:
+```
+% python/tflite_micro/pypi_build.sh cp310
+[+] Building 2.6s (7/7) FINISHED
+=> writing image sha256:900704dad7fa27938dcc1c5057c0e760fb4ab0dff676415182455ae66546bbd4
+bazel build //python/tflite_micro:whl.dist \
+    --//python/tflite_micro:compatibility_tag=cp310_cp310_manylinux_2_28_x86_64
+bazel test //python/tflite_micro:whl_test \
+    --//python/tflite_micro:compatibility_tag=cp310_cp310_manylinux_2_28_x86_64
+//python/tflite_micro:whl_test
+Executed 1 out of 1 test: 1 test passes.
+Output:
+bazel-pypi-out/tflite_micro-0.dev20230920031310-cp310-cp310-manylinux_2_28_x86_64.whl
+```
+
+By default, *.whl*s are generated under the output directory `bazel-pypi-out/`.
+
+### Uploading to PyPI
+
+Upload the generated *.whl*s to PyPI with the script
+`python/tflite_micro/pypi_upload.sh`. This script lightly wraps the standard
+upload tool `twine`. A PyPI authentication token must be assigned to
+`TWINE_PASSWORD` in the environment. For example:
+```
+% export TWINE_PASSWORD=pypi-AgENdGV[....]
+% ./python/tflite_micro/pypi_upload.sh --test-pypi bazel-pypi-out/tflite_micro-*.whl +Uploading distributions to https://test.pypi.org/legacy/ +Uploading tflite_micro-0.dev20230920031310-cp310-cp310-manylinux_2_28_x86_64.whl +Uploading tflite_micro-0.dev20230920031310-cp311-cp311-manylinux_2_28_x86_64.whl +View at: +https://test.pypi.org/project/tflite-micro/0.dev20230920031310/ +``` + +See the script's online help (`--help`) for more. + +## Using `tflite_micro` from within the TFLM source tree + +:construction: +*The remainder of this document is under construction and may contain some +obsolete information.* +:construction: The only package that needs to be included in the `BUILD` file is `//python/tflite_micro:runtime`. It contains all the correct dependencies to build the Python interpreter. -### PyPi - -Work in progress. - ### Examples Depending on the workflow, the package import path may be slightly different. @@ -55,7 +157,7 @@ print(tflm_interpreter.get_input_details(0)) print(tflm_interpreter.get_output_details(0)) ``` -## Technical Details +### Technical Details The Python interpreter uses [pybind11](https://github.com/pybind/pybind11) to expose an evolving set of C++ APIs. The Bazel build leverages the @@ -64,7 +166,7 @@ expose an evolving set of C++ APIs. The Bazel build leverages the The most updated Python APIs can be found in `python/tflite_micro/runtime.py`. -## Custom Ops +### Custom Ops The Python interpreter works with models with [custom ops](https://www.tensorflow.org/lite/guide/ops_custom) but special steps @@ -126,7 +228,7 @@ The interpreter will then perform a dynamic lookup for the symbol called properly included in TFLM's op resolver. This approach is very similar to TFLite's custom op support. -## Print Allocations +### Print Allocations The Python interpreter can also be used to print memory arena allocations. This is very helpful to figure out actual memory arena usage. diff --git a/python/tflite_micro/README.pypi.md.in b/python/tflite_micro/README.pypi.md.in new file mode 100644 index 00000000000..a04356e94b2 --- /dev/null +++ b/python/tflite_micro/README.pypi.md.in @@ -0,0 +1,5 @@ +# TensorFlow Lite for Microcontrollers + +This package is built from commit +[{STABLE_GIT_HASH}](https://github.com/tensorflow/tflite-micro/blob/{STABLE_GIT_HASH}/python/tflite_micro) +of [github.com/tensorflow/tflite-micro](https://github.com/tensorflow/tflite-micro). diff --git a/python/tflite_micro/__init__.py b/python/tflite_micro/__init__.py new file mode 100644 index 00000000000..9f15213249b --- /dev/null +++ b/python/tflite_micro/__init__.py @@ -0,0 +1,28 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ---- + +# Define a public API for the package by providing aliases for modules which +# are otherwise deeply nested in subpackages determined by their location in +# the tflm source tree. 
Directly using modules and subpackages not explicitly
+# made part of the public API in code outside of the tflm source tree is
+# unsupported.
+
+from tflite_micro.python.tflite_micro import runtime
+
+# Unambiguously identify the source used to build the package.
+from tflite_micro.python.tflite_micro._version import __version__
+
+# Ordered after `runtime` to avoid a circular dependency
+from tflite_micro.python.tflite_micro import postinstall_check
diff --git a/python/tflite_micro/_runtime.cc b/python/tflite_micro/_runtime.cc
index 824b3b4553f..246545fd016 100644
--- a/python/tflite_micro/_runtime.cc
+++ b/python/tflite_micro/_runtime.cc
@@ -24,14 +24,19 @@ using tflite::InterpreterWrapper;
 
 PYBIND11_MODULE(_runtime, m) {
   m.doc() = "TFLite Micro Runtime Extension";
-
+  py::enum_<tflite::InterpreterConfig>(m, "PythonInterpreterConfig")
+      .value("kAllocationRecording",
+             tflite::InterpreterConfig::kAllocationRecording)
+      .value("kPreserveAllTensors",
+             tflite::InterpreterConfig::kPreserveAllTensors);
   py::class_<InterpreterWrapper>(m, "InterpreterWrapper")
       .def(py::init([](const py::bytes& data,
                        const std::vector<std::string>& registerers_by_name,
-                       size_t arena_size, int num_resource_variables) {
+                       size_t arena_size, int num_resource_variables,
+                       tflite::InterpreterConfig config) {
         return std::unique_ptr<InterpreterWrapper>(
             new InterpreterWrapper(data.ptr(), registerers_by_name, arena_size,
-                                   num_resource_variables));
+                                   num_resource_variables, config));
       }))
       .def("PrintAllocations", &InterpreterWrapper::PrintAllocations)
       .def("Invoke", &InterpreterWrapper::Invoke)
@@ -54,6 +59,14 @@ PYBIND11_MODULE(_runtime, m) {
             return tflite::PyoOrThrow(self.GetInputTensorDetails(index));
           },
           py::arg("index"))
+      .def(
+          "GetTensor",
+          [](InterpreterWrapper& self, size_t tensor_index,
+             size_t subgraph_index = 0) {
+            return tflite::PyoOrThrow(
+                self.GetTensor(tensor_index, subgraph_index));
+          },
+          py::arg("tensor_index"), py::arg("subgraph_index"))
       .def(
           "GetOutputTensorDetails",
           [](InterpreterWrapper& self, size_t index) {
diff --git a/python/tflite_micro/_version.py.in b/python/tflite_micro/_version.py.in
new file mode 100644
index 00000000000..77768f70dab
--- /dev/null
+++ b/python/tflite_micro/_version.py.in
@@ -0,0 +1 @@
+__version__ = "{BUILD_EMBED_LABEL}.dev{STABLE_GIT_COMMIT_TIME}-g{STABLE_GIT_HASH}"
diff --git a/python/tflite_micro/interpreter_wrapper.cc b/python/tflite_micro/interpreter_wrapper.cc
index 41c4f7a3b87..53efe8e311b 100644
--- a/python/tflite_micro/interpreter_wrapper.cc
+++ b/python/tflite_micro/interpreter_wrapper.cc
@@ -15,6 +15,11 @@ limitations under the License.
 
 #include "python/tflite_micro/interpreter_wrapper.h"
 
+#include <cstring>
+
+#include "tensorflow/lite/micro/micro_allocator.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
 // Disallow Numpy 1.7 deprecated symbols.
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION // See https://numpy.org/doc/1.16/reference/c-api.array.html#importing-the-api @@ -187,6 +192,39 @@ PyObject* GetTensorDetails(const TfLiteTensor* tensor) { return result; } +PyObject* GetEvalTensorDetails(const TfLiteEvalTensor* eval_tensor) { + PyObject* tensor_type = + PyArray_TypeObjectFromType(TfLiteTypeToPyArrayType(eval_tensor->type)); + PyObject* np_size_array = + PyArrayFromIntVector(eval_tensor->dims->data, eval_tensor->dims->size); + PyObject* tensor_size = + PyArray_Return(reinterpret_cast(np_size_array)); + + size_t eval_tensor_bytes = tflite::EvalTensorBytes(eval_tensor); + void* data = malloc(eval_tensor_bytes); + memcpy(data, eval_tensor->data.data, eval_tensor_bytes); + + std::vector dims(eval_tensor->dims->data, + eval_tensor->dims->data + eval_tensor->dims->size); + int py_type_num = TfLiteTypeToPyArrayType(eval_tensor->type); + PyObject* np_array = + PyArray_SimpleNewFromData(dims.size(), dims.data(), py_type_num, data); + + // Transfer ownership to Python so that there's Python will take care of + // releasing this buffer + PyArray_ENABLEFLAGS(reinterpret_cast(np_array), + NPY_ARRAY_OWNDATA); + + PyObject* result = PyDict_New(); + PyDict_SetItemString(result, "dtype", tensor_type); + PyDict_SetItemString(result, "shape", tensor_size); + PyDict_SetItemString( + result, "tensor_data", + PyArray_Return(reinterpret_cast(np_array))); + + return result; +} + } // namespace InterpreterWrapper::~InterpreterWrapper() { @@ -204,7 +242,7 @@ InterpreterWrapper::~InterpreterWrapper() { InterpreterWrapper::InterpreterWrapper( PyObject* model_data, const std::vector& registerers_by_name, - size_t arena_size, int num_resource_variables) { + size_t arena_size, int num_resource_variables, InterpreterConfig config) { interpreter_ = nullptr; // `model_data` is used as a raw pointer beyond the scope of this @@ -223,12 +261,6 @@ InterpreterWrapper::InterpreterWrapper( const Model* model = GetModel(buf); model_ = model_data; memory_arena_ = std::unique_ptr(new uint8_t[arena_size]); - allocator_ = RecordingMicroAllocator::Create(memory_arena_.get(), arena_size); - MicroResourceVariables* resource_variables_ = nullptr; - if (num_resource_variables > 0) - resource_variables_ = - MicroResourceVariables::Create(allocator_, num_resource_variables); - for (const std::string& registerer : registerers_by_name) { if (!AddCustomOpRegistererByName(registerer.c_str(), &python_ops_resolver_)) { @@ -237,6 +269,24 @@ InterpreterWrapper::InterpreterWrapper( } } + switch (config) { + case InterpreterConfig::kAllocationRecording: { + recording_allocator_ = + RecordingMicroAllocator::Create(memory_arena_.get(), arena_size); + allocator_ = recording_allocator_; + break; + } + case InterpreterConfig::kPreserveAllTensors: { + allocator_ = MicroAllocator::Create(memory_arena_.get(), arena_size, + MemoryPlannerType::kLinear); + break; + } + } + MicroResourceVariables* resource_variables_ = nullptr; + if (num_resource_variables > 0) + resource_variables_ = + MicroResourceVariables::Create(allocator_, num_resource_variables); + interpreter_ = new MicroInterpreter(model, python_ops_resolver_, allocator_, resource_variables_); @@ -250,7 +300,13 @@ InterpreterWrapper::InterpreterWrapper( ImportNumpy(); } -void InterpreterWrapper::PrintAllocations() { allocator_->PrintAllocations(); } +void InterpreterWrapper::PrintAllocations() { + if (!recording_allocator_) { + ThrowValueError("Cannot print allocations as they were not recorded"); + return; + } + return 
recording_allocator_->PrintAllocations();
+}
 
 int InterpreterWrapper::Invoke() {
   TfLiteStatus status = interpreter_->Invoke();
@@ -358,6 +414,18 @@ PyObject* InterpreterWrapper::GetOutputTensor(size_t index) const {
   return PyArray_Return(reinterpret_cast<PyArrayObject*>(np_array));
 }
 
+PyObject* InterpreterWrapper::GetTensor(size_t tensor_index,
+                                        size_t subgraph_index) {
+  if (!interpreter_->preserve_all_tensors()) {
+    ThrowRuntimeError(
+        "TFLM only supports GetTensor() when using a python interpreter with "
+        "the InterpreterConfig.kPreserveAllTensors interpreter_config");
+    return nullptr;
+  }
+  return GetEvalTensorDetails(
+      interpreter_->GetTensor(tensor_index, subgraph_index));
+}
+
 PyObject* InterpreterWrapper::GetInputTensorDetails(size_t index) const {
   return GetTensorDetails(interpreter_->input(index));
 }
diff --git a/python/tflite_micro/interpreter_wrapper.h b/python/tflite_micro/interpreter_wrapper.h
index 1ead5afc85e..9bb31b067fe 100644
--- a/python/tflite_micro/interpreter_wrapper.h
+++ b/python/tflite_micro/interpreter_wrapper.h
@@ -18,16 +18,29 @@ limitations under the License.
 
 #include <Python.h>
 
 #include "python/tflite_micro/python_ops_resolver.h"
+#include "tensorflow/lite/micro/micro_allocator.h"
 #include "tensorflow/lite/micro/micro_interpreter.h"
 #include "tensorflow/lite/micro/recording_micro_allocator.h"
 
 namespace tflite {
 
+// Allocation recording and the PreserveAllTensors debugging feature are
+// mutually exclusive: PreserveAllTensors uses the LinearMemoryPlanner, so
+// allocations recorded by the RecordingMicroAllocator in that mode would not
+// reflect the arena usage of the GreedyMemoryPlanner used in normal builds.
+// This enum selects which of the two possible modes/configs the python
+// interpreter runs in.
+enum InterpreterConfig {
+  kAllocationRecording = 0,
+  kPreserveAllTensors = 1,
+};
+
 class InterpreterWrapper {
  public:
-  InterpreterWrapper(PyObject* model_data,
-                     const std::vector<std::string>& registerers_by_name,
-                     size_t arena_size, int num_resource_variables);
+  InterpreterWrapper(
+      PyObject* model_data, const std::vector<std::string>& registerers_by_name,
+      size_t arena_size, int num_resource_variables,
+      InterpreterConfig config = InterpreterConfig::kAllocationRecording);
   ~InterpreterWrapper();
 
   void PrintAllocations();
@@ -37,9 +50,11 @@ class InterpreterWrapper {
   PyObject* GetOutputTensor(size_t index) const;
   PyObject* GetInputTensorDetails(size_t index) const;
   PyObject* GetOutputTensorDetails(size_t index) const;
+  PyObject* GetTensor(size_t tensor_index, size_t subgraph_index = 0);
 
  private:
-  tflite::RecordingMicroAllocator* allocator_;
+  tflite::MicroAllocator* allocator_ = nullptr;
+  tflite::RecordingMicroAllocator* recording_allocator_ = nullptr;
   const PyObject* model_;
   std::unique_ptr<uint8_t[]> memory_arena_;
   tflite::PythonOpsResolver python_ops_resolver_;
diff --git a/python/tflite_micro/numpy_utils.cc b/python/tflite_micro/numpy_utils.cc
index 4a4aad8f27f..20f43c984dd 100644
--- a/python/tflite_micro/numpy_utils.cc
+++ b/python/tflite_micro/numpy_utils.cc
@@ -40,6 +40,9 @@ int TfLiteTypeToPyArrayType(TfLiteType tf_lite_type) {
       return NPY_FLOAT32;
     case kTfLiteFloat16:
       return NPY_FLOAT16;
+    case kTfLiteBFloat16:
+      // TODO(b/329491949): NPY_BFLOAT16 currently doesn't exist
+      return NPY_FLOAT16;
     case kTfLiteFloat64:
       return NPY_FLOAT64;
     case kTfLiteInt32:
diff --git a/python/tflite_micro/postinstall_check.py b/python/tflite_micro/postinstall_check.py
new file mode 100644
index 00000000000..89e98551696
--- /dev/null
+++ b/python/tflite_micro/postinstall_check.py
@@ -0,0 +1,53 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A simple test to check whether the tflite_micro package works after it is
+# installed.
+
+# To test from the perspective of a package user, use import paths to locations
+# in the Python installation environment rather than to locations in the tflm
+# source tree.
+from tflite_micro import runtime
+
+import numpy as np
+import pkg_resources
+import sys
+
+
+def passed():
+  # Create an interpreter with a sine model
+  model = pkg_resources.resource_filename(__name__, "sine_float.tflite")
+  interpreter = runtime.Interpreter.from_file(model)
+  OUTPUT_INDEX = 0
+  INPUT_INDEX = 0
+  input_shape = interpreter.get_input_details(INPUT_INDEX).get("shape")
+
+  # The interpreter infers sin(x)
+  def infer(x):
+    tensor = np.array(x, np.float32).reshape(input_shape)
+    interpreter.set_input(tensor, INPUT_INDEX)
+    interpreter.invoke()
+    return interpreter.get_output(OUTPUT_INDEX).squeeze()
+
+  # Check a few inferred values against a numerical computation
+  PI = 3.14
+  inputs = (0.0, PI / 2, PI, 3 * PI / 2, 2 * PI)
+  outputs = [infer(x) for x in inputs]
+  goldens = np.sin(inputs)
+
+  return np.allclose(outputs, goldens, atol=0.05)
+
+
+if __name__ == "__main__":
+  sys.exit(0 if passed() else 1)
diff --git a/python/tflite_micro/pypi_build.dockerfile b/python/tflite_micro/pypi_build.dockerfile
new file mode 100644
index 00000000000..a2ac3200007
--- /dev/null
+++ b/python/tflite_micro/pypi_build.dockerfile
@@ -0,0 +1,16 @@
+# Use the Python Packaging Authority's reference build environment
+# for binary extensions. Binary extensions are typically built and distributed
+# for each target Python version and OS platform. The reference build
+# environment contains Python installations for each version, and a C/C++
+# toolchain specified for maximum compatibility among x86_64 Linux platforms.
+FROM quay.io/pypa/manylinux_2_28_x86_64
+
+# Install bazel (via bazelisk)
+ENV BAZELISK=https://github.com/bazelbuild/bazelisk/releases/download/v1.18.0/bazelisk-linux-amd64
+ENV BAZEL=/usr/local/bin/bazel
+RUN curl --output $BAZEL --location $BAZELISK && chmod 755 $BAZEL
+
+# Append the location of the C/C++ toolchain to the default PATH, where
+# bazel expects to find it. The reference environment provides the location
+# (typically somewhere under /opt) in DEVTOOLSET_ROOTPATH.
+RUN echo "PATH="${PATH}:/${DEVTOOLSET_ROOTPATH}"" >>/etc/environment
diff --git a/python/tflite_micro/pypi_build.sh b/python/tflite_micro/pypi_build.sh
new file mode 100755
index 00000000000..05812257721
--- /dev/null
+++ b/python/tflite_micro/pypi_build.sh
@@ -0,0 +1,119 @@
+#!/bin/sh
+
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+OUT_DIR_DEFAULT=bazel-pypi-out
+
+USAGE="$(basename $0) <python-tag> [<output-dir>]
+
+Build a Python wheel for public release to PyPI using a special Docker build
+container. Uses bazel, but does not pollute the WORKSPACE's default cache.
+
+<python-tag> must be one of the supported interpreters:
+    cp310
+    cp311
+
+<output-dir> defaults to $OUT_DIR_DEFAULT.
+"
+
+case "$1" in
+  cp310|cp311)
+    PY_TAG=$1
+    OUTDIR=$(realpath ${2:-$OUT_DIR_DEFAULT})
+    mkdir -p $OUTDIR
+    break
+    ;;
+  *)
+    echo usage: "$USAGE" >&2
+    exit 1
+esac
+
+SRCDIR=$(realpath .)
+if ! test -f $SRCDIR/WORKSPACE; then
+  echo "error: must run from the top of the source tree" >&2
+  exit 1
+fi
+
+# Remove Bazel's workspace symlinks so they'll be rewritten below, pointing into
+# OUTDIR.
+find . -maxdepth 1 -type l -name bazel-\* | xargs rm -f
+
+# Build the Docker image from its source file. Don't pollute the public list of
+# images by tagging; just use the image's ID.
+DOCKERFILE=python/tflite_micro/pypi_build.dockerfile
+IMAGE_ID_FILE=$OUTDIR/image-id
+docker build - --iidfile $IMAGE_ID_FILE <$DOCKERFILE
+IMAGE_ID=$(cat $IMAGE_ID_FILE)
+
+# Build the Python package within an ephemeral container.
+docker run \
+    --rm \
+    --interactive \
+    --mount type=bind,source=$SRCDIR,destination=$SRCDIR \
+    --mount type=bind,source=$OUTDIR,destination=$OUTDIR \
+    --workdir $SRCDIR \
+    --env USER=$(id -un) \
+    $IMAGE_ID \
+    /bin/bash -s -e -x -u \
+<<EOF
+# Build and test the wheel with the requested compatibility tag, keeping
+# Bazel's cache under OUTDIR rather than the default location.
+bazel --output_user_root=$OUTDIR/bazel build //python/tflite_micro:whl.dist \
+    --//python/tflite_micro:compatibility_tag=${PY_TAG}_${PY_TAG}_manylinux_2_28_x86_64
+bazel --output_user_root=$OUTDIR/bazel test //python/tflite_micro:whl_test \
+    --//python/tflite_micro:compatibility_tag=${PY_TAG}_${PY_TAG}_manylinux_2_28_x86_64
+# Copy the finished wheel to the output directory.
+cp bazel-bin/python/tflite_micro/whl_dist/*.whl $OUTDIR
+EOF
diff --git a/python/tflite_micro/pypi_upload.sh b/python/tflite_micro/pypi_upload.sh
new file mode 100755
--- /dev/null
+++ b/python/tflite_micro/pypi_upload.sh
+#!/bin/sh
+
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -e
+
+USAGE="$(basename $0) [--test-pypi] <wheel>...
+
+Upload wheels to PyPI, or to TestPyPI when --test-pypi is given, using the
+standard upload tool twine. A PyPI authentication token must be assigned to
+TWINE_PASSWORD in the environment.
+"
+
+die() {
+  echo "$*" >&2
+  exit 1
+}
+
+case "$1" in
+  --test-pypi)
+    export TWINE_REPOSITORY=testpypi
+    shift
+    ;;
+  -h|--help)
+    echo "$USAGE"
+    exit
+esac
+
+if [ ! "$#" -ge 1 ]; then
+  die "$USAGE"
+fi
+
+if [ ! -x $(command -v twine) ]; then
+  die "error: twine not found. On Debian and derivatives, try \`apt install twine\`."
+fi
+
+if [ ! 
"$TWINE_PASSWORD" ]; then + die "error: TWINE_PASSWORD is not set" +fi + +: ${TWINE_USERNAME:="__token__"} + +export TWINE_PASSWORD +export TWINE_USERNAME +twine upload "$@" diff --git a/python/tflite_micro/python_ops_resolver.cc b/python/tflite_micro/python_ops_resolver.cc index 587adf7a383..f5d6e636c16 100644 --- a/python/tflite_micro/python_ops_resolver.cc +++ b/python/tflite_micro/python_ops_resolver.cc @@ -28,6 +28,7 @@ PythonOpsResolver::PythonOpsResolver() { AddArgMin(); AddAssignVariable(); AddAveragePool2D(); + AddBatchMatMul(); AddBatchToSpaceNd(); AddBroadcastArgs(); AddBroadcastTo(); @@ -39,20 +40,29 @@ PythonOpsResolver::PythonOpsResolver() { AddConv2D(); AddCos(); AddCumSum(); + AddDelay(); AddDepthToSpace(); AddDepthwiseConv2D(); AddDequantize(); AddDetectionPostprocess(); AddDiv(); AddElu(); + AddEmbeddingLookup(); + AddEnergy(); AddEqual(); AddEthosU(); AddExp(); AddExpandDims(); + AddFftAutoScale(); AddFill(); + AddFilterBank(); + AddFilterBankLog(); + AddFilterBankSpectralSubtraction(); + AddFilterBankSquareRoot(); AddFloor(); AddFloorDiv(); AddFloorMod(); + AddFramer(); AddFullyConnected(); AddGather(); AddGatherNd(); @@ -60,17 +70,18 @@ PythonOpsResolver::PythonOpsResolver() { AddGreaterEqual(); AddHardSwish(); AddIf(); + AddIrfft(); AddL2Normalization(); AddL2Pool2D(); AddLeakyRelu(); AddLess(); AddLessEqual(); AddLog(); + AddLogSoftmax(); AddLogicalAnd(); AddLogicalNot(); AddLogicalOr(); AddLogistic(); - AddLogSoftmax(); AddMaxPool2D(); AddMaximum(); AddMean(); @@ -79,6 +90,8 @@ PythonOpsResolver::PythonOpsResolver() { AddMul(); AddNeg(); AddNotEqual(); + AddOverlapAdd(); + AddPCAN(); AddPack(); AddPad(); AddPadV2(); @@ -107,6 +120,7 @@ PythonOpsResolver::PythonOpsResolver() { AddSquare(); AddSquaredDifference(); AddSqueeze(); + AddStacker(); AddStridedSlice(); AddSub(); AddSum(); diff --git a/python/tflite_micro/runtime.py b/python/tflite_micro/runtime.py index 06a62b01227..fbf2f205a50 100644 --- a/python/tflite_micro/runtime.py +++ b/python/tflite_micro/runtime.py @@ -14,15 +14,64 @@ # ============================================================================== """Python package for TFLM Python Interpreter""" +import enum import os - -from tflite_micro.python.tflite_micro import _runtime from tflite_micro.tensorflow.lite.tools import flatbuffer_utils +from tflite_micro.python.tflite_micro import _runtime + + +class InterpreterConfig(enum.Enum): + """There are two mutually exclusive types of way you could use the TFLM python + + interpreter, this enum is made so that users can clearly choose between the + two + different usage method for the interpreter. + + The first default way is kRecordingAllocation where all memory usage by the + interpreter is recorded on inference. When using this config the GetTensor() + api is disabled by the interpreter since this interpreter configuration + doesn’t + guarantee that the valid data for all tensors is available post inference. + + The second way is kPreserveAllTensors where the GetTensor() api is disabled by + the interpreter since this interpreter configuration doesn’t guarantee that + the + valid data for all tensors is available post inference. But the memory usage + by + the interpreter won’t be recorded on inference. 
+
+  Usage:
+
+  default_interpreter = Interpreter(…
+      interpreter_config=InterpreterConfig.kAllocationRecording)
+
+  preserve_interpreter = Interpreter(…
+      interpreter_config=InterpreterConfig.kPreserveAllTensors)
+  """
+
+  kAllocationRecording = 0
+  kPreserveAllTensors = 1
+
+
+# TODO(b/297118768): Once the Kokoro Docker container for Ubuntu x86 has
+# immutabledict added to it, this should be turned into an immutabledict.
+_ENUM_TRANSLATOR = {
+    InterpreterConfig.kAllocationRecording:
+        (_runtime.PythonInterpreterConfig.kAllocationRecording),
+    InterpreterConfig.kPreserveAllTensors:
+        (_runtime.PythonInterpreterConfig.kPreserveAllTensors),
+}
 
 
 class Interpreter(object):
 
-  def __init__(self, model_data, custom_op_registerers, arena_size):
+  def __init__(
+      self,
+      model_data,
+      custom_op_registerers,
+      arena_size,
+      interpreter_config=InterpreterConfig.kAllocationRecording,
+  ):
     if model_data is None:
       raise ValueError("Model must not be None")
 
@@ -33,20 +82,28 @@ def __init__(self, model_data, custom_op_registerers, arena_size):
     # This is a heuristic to ensure that the arena is sufficiently sized.
     if arena_size is None:
       arena_size = len(model_data) * 10
-
     # Some models make use of resource variables ops, get the count here
     num_resource_variables = flatbuffer_utils.count_resource_variables(
         model_data)
     print("Number of resource variables the model uses = ",
          num_resource_variables)
 
-    self._interpreter = _runtime.InterpreterWrapper(model_data,
-                                                    custom_op_registerers,
-                                                    arena_size,
-                                                    num_resource_variables)
+    self._interpreter = _runtime.InterpreterWrapper(
+        model_data,
+        custom_op_registerers,
+        arena_size,
+        num_resource_variables,
+        _ENUM_TRANSLATOR[interpreter_config],
+    )
 
   @classmethod
-  def from_file(self, model_path, custom_op_registerers=[], arena_size=None):
+  def from_file(
+      self,
+      model_path,
+      custom_op_registerers=[],
+      arena_size=None,
+      interpreter_config=InterpreterConfig.kAllocationRecording,
+  ):
     """Instantiates a TFLM interpreter from a model .tflite filepath.
 
     Args:
@@ -65,10 +122,21 @@ def from_file(self, model_path, custom_op_registerers=[], arena_size=None):
     with open(model_path, "rb") as f:
       model_data = f.read()
 
-    return Interpreter(model_data, custom_op_registerers, arena_size)
+    return Interpreter(
+        model_data,
+        custom_op_registerers,
+        arena_size,
+        interpreter_config,
+    )
 
   @classmethod
-  def from_bytes(self, model_data, custom_op_registerers=[], arena_size=None):
+  def from_bytes(
+      self,
+      model_data,
+      custom_op_registerers=[],
+      arena_size=None,
+      interpreter_config=InterpreterConfig.kAllocationRecording,
+  ):
     """Instantiates a TFLM interpreter from a model in byte array.
 
     Args:
@@ -82,7 +150,12 @@ def from_bytes(self, model_data, custom_op_registerers=[], arena_size=None):
 
       An Interpreter instance
     """
-    return Interpreter(model_data, custom_op_registerers, arena_size)
+    return Interpreter(
+        model_data,
+        custom_op_registerers,
+        arena_size,
+        interpreter_config,
+    )
 
   def print_allocations(self):
     """Invoke the RecordingMicroAllocator to print the arena usage.
@@ -157,6 +230,9 @@ def get_output(self, index):
 
     return self._interpreter.GetOutputTensor(index)
 
+  def GetTensor(self, tensor_index, subgraph_index):
+    return self._interpreter.GetTensor(tensor_index, subgraph_index)
+
   def get_input_details(self, index):
     """Get input tensor information
 
diff --git a/python/tflite_micro/runtime_test.py b/python/tflite_micro/runtime_test.py
index 6a127fc924c..2a9003c6b2c 100644
--- a/python/tflite_micro/runtime_test.py
+++ b/python/tflite_micro/runtime_test.py
@@ -21,13 +21,78 @@
 
 import gc
 import weakref
+
 import numpy as np
 import tensorflow as tf
 from tensorflow.python.framework import test_util
 from tensorflow.python.platform import test
-from tflite_micro.tensorflow.lite.micro.testing import generate_test_models
 from tflite_micro.python.tflite_micro import runtime
+from tflite_micro.tensorflow.lite.micro.examples.recipes import add_four_numbers
+from tflite_micro.tensorflow.lite.micro.testing import generate_test_models
+
+
+class PreserveAllTensorsTest(test_util.TensorFlowTestCase):
+
+  def AddFourNumbersTestInterpreterMaker(self, inputs):
+    """Returns a TFLM interpreter for a simple model that adds four numbers,
+
+    with the four given inputs loaded into the model.
+    """
+    model_data = add_four_numbers.generate_model(write_file=False)
+    tflm_interpreter = runtime.Interpreter.from_bytes(
+        model_data,
+        interpreter_config=runtime.InterpreterConfig.kPreserveAllTensors,
+    )
+    self.assertEqual(len(inputs), 4)
+    tflm_interpreter.set_input(inputs[0], 0)
+    tflm_interpreter.set_input(inputs[1], 1)
+    tflm_interpreter.set_input(inputs[2], 2)
+    tflm_interpreter.set_input(inputs[3], 3)
+    return tflm_interpreter
+
+  def testGetTensorAccuratelyGetsAllTensors(self):
+    """Checks that GetTensor() returns accurate values for each tensor in the
+
+    model, given inputs of 1, 2, 3, 4 to the AddFourNumbers TfLite model.
+    """
+    tflm_interpreter = self.AddFourNumbersTestInterpreterMaker(
+        [[np.float32(1)], [np.float32(2)], [np.float32(3)], [np.float32(4)]])
+
+    tflm_interpreter.invoke()
+
+    tflm_output = tflm_interpreter.get_output(0)
+    self.assertEqual(tflm_output[0].astype("float32"), 10.0)
+    self.assertEqual(tflm_interpreter.GetTensor(0, 0)["tensor_data"][0], 1.0)
+    self.assertEqual(tflm_interpreter.GetTensor(1, 0)["tensor_data"][0], 2.0)
+    self.assertEqual(tflm_interpreter.GetTensor(2, 0)["tensor_data"][0], 3.0)
+    self.assertEqual(tflm_interpreter.GetTensor(3, 0)["tensor_data"][0], 4.0)
+    self.assertEqual(tflm_interpreter.GetTensor(4, 0)["tensor_data"][0], 7.0)
+    self.assertEqual(tflm_interpreter.GetTensor(5, 0)["tensor_data"][0], 9.0)
+    self.assertEqual(tflm_interpreter.GetTensor(6, 0)["tensor_data"][0], 10.0)
+
+  def testGetTensorAllUniqueTensors(self):
+    """Checks that GetTensor() returns all the tensors in the model.
+
+    Due to the values used as inputs, all the tensors have unique data values,
+    and this test confirms that this is the case.
+ """ + tflm_interpreter = self.AddFourNumbersTestInterpreterMaker( + [[np.float32(1)], [np.float32(2)], [np.float32(3)], [np.float32(4)]]) + + tflm_interpreter.invoke() + tensors = [ + tflm_interpreter.GetTensor(0, 0)["tensor_data"][0], + tflm_interpreter.GetTensor(1, 0)["tensor_data"][0], + tflm_interpreter.GetTensor(2, 0)["tensor_data"][0], + tflm_interpreter.GetTensor(3, 0)["tensor_data"][0], + tflm_interpreter.GetTensor(4, 0)["tensor_data"][0], + tflm_interpreter.GetTensor(5, 0)["tensor_data"][0], + tflm_interpreter.GetTensor(6, 0)["tensor_data"][0], + ] + + # Check that all tensors are unique + self.assertEqual(len(set(tensors)), 7) class ConvModelTests(test_util.TensorFlowTestCase): diff --git a/python/tflite_micro/signal/BUILD b/python/tflite_micro/signal/BUILD index 3f17a7fcf1e..0fce2668fb4 100644 --- a/python/tflite_micro/signal/BUILD +++ b/python/tflite_micro/signal/BUILD @@ -1,8 +1,10 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") load("//python/tflite_micro/signal:tflm_signal.bzl", "py_tflm_signal_library") load("//tensorflow:extra_rules.bzl", "tflm_signal_friends") load("@tflm_pip_deps//:requirements.bzl", "requirement") package( + default_visibility = [":__subpackages__"], licenses = ["notice"], ) @@ -15,7 +17,14 @@ cc_library( name = "ops_lib", visibility = [":signal_friends"], deps = [ + ":delay_op_cc", + ":energy_op_cc", ":fft_ops_cc", + ":filter_bank_ops_cc", + ":framer_op_cc", + ":overlap_add_op_cc", + ":pcan_op_cc", + ":stacker_op_cc", ":window_op_cc", ], ) @@ -27,13 +36,73 @@ py_library( "ops/__init__.py", ], srcs_version = "PY3", - visibility = ["//python/tflite_micro/signal/utils:__subpackages__"], + visibility = ["//visibility:public"], deps = [ + ":delay_op", + ":energy_op", ":fft_ops", + ":filter_bank_ops", + ":framer_op", + ":overlap_add_op", + ":pcan_op", + ":stacker_op", ":window_op", ], ) +py_tflm_signal_library( + name = "delay_op", + srcs = ["ops/delay_op.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:delay_op"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:delay_kernel", + ], + deps = [ + "//python/tflite_micro/signal/utils:util", + ], +) + +py_test( + name = "delay_op_test", + size = "small", + srcs = ["ops/delay_op_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":delay_op", + requirement("numpy"), + requirement("tensorflow"), + ], +) + +py_tflm_signal_library( + name = "energy_op", + srcs = ["ops/energy_op.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:energy_op"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:energy_kernel", + ], + deps = [ + "//python/tflite_micro/signal/utils:util", + ], +) + +py_test( + name = "energy_op_test", + size = "small", + srcs = ["ops/energy_op_test.py"], + data = [ + "//python/tflite_micro/signal/ops/testdata:energy_test1.txt", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":energy_op", + requirement("numpy"), + requirement("tensorflow"), + ], +) + py_tflm_signal_library( name = "fft_ops", srcs = ["ops/fft_ops.py"], @@ -41,18 +110,173 @@ py_tflm_signal_library( cc_op_kernels = [ "//signal/tensorflow_core/kernels:fft_kernel", ], + deps = [ + "//python/tflite_micro/signal/utils:util", + ], ) py_test( name = "fft_ops_test", srcs = ["ops/fft_ops_test.py"], + data = [ + "//python/tflite_micro/signal/ops/testdata:fft_auto_scale_test1.txt", + "//python/tflite_micro/signal/ops/testdata:rfft_test1.txt", + ], python_version = "PY3", srcs_version = "PY3", deps = [ ":fft_ops", + requirement("numpy"), + requirement("tensorflow"), + ], +) 
+ +py_tflm_signal_library( + name = "filter_bank_ops", + srcs = ["ops/filter_bank_ops.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:filter_bank_ops"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:filter_bank_kernels", + ], + deps = [ + "//python/tflite_micro/signal/utils:freq_to_mel", "//python/tflite_micro/signal/utils:util", + ], +) + +py_test( + name = "filter_bank_ops_test", + size = "small", + srcs = ["ops/filter_bank_ops_test.py"], + data = [ + "//python/tflite_micro/signal/ops/testdata:filter_bank_accumulation_16k.txt", + "//python/tflite_micro/signal/ops/testdata:filter_bank_accumulation_44k.txt", + "//python/tflite_micro/signal/ops/testdata:filter_bank_accumulation_8k.txt", + "//python/tflite_micro/signal/ops/testdata:filter_bank_spectral_subtraction_test1.txt", + "//python/tflite_micro/signal/ops/testdata:filter_bank_square_root_test1.txt", + "//python/tflite_micro/signal/ops/testdata:filter_bank_test1.txt", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":filter_bank_ops", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), + ], +) + +py_tflm_signal_library( + name = "framer_op", + srcs = ["ops/framer_op.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:framer_op"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:framer_kernel", + ], + deps = [ + "//python/tflite_micro/signal/utils:util", + ], +) + +py_test( + name = "framer_op_test", + size = "small", + srcs = ["ops/framer_op_test.py"], + data = [ + "//python/tflite_micro/signal/ops/testdata:framer_test1.txt", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":framer_op", + requirement("numpy"), + requirement("tensorflow"), + ], +) + +py_tflm_signal_library( + name = "overlap_add_op", + srcs = ["ops/overlap_add_op.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:overlap_add_op"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:overlap_add_kernel", + ], + deps = [ + "//python/tflite_micro/signal/utils:util", + ], +) + +py_test( + name = "overlap_add_op_test", + size = "small", + srcs = ["ops/overlap_add_op_test.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":overlap_add_op", + "@absl_py//absl/testing:parameterized", + requirement("numpy"), + requirement("tensorflow"), + ], +) + +py_tflm_signal_library( + name = "pcan_op", + srcs = ["ops/pcan_op.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:pcan_op"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:pcan_kernel", + ], + deps = [ + "//python/tflite_micro/signal/utils:util", + "//python/tflite_micro/signal/utils:wide_dynamic_func_lut", + ], +) + +py_test( + name = "pcan_op_test", + srcs = ["ops/pcan_op_test.py"], + data = [ + "//python/tflite_micro/signal/ops/testdata:pcan_op_test1.txt", + ], + python_version = "PY3", + srcs_version = "PY3", + tags = [ + "noasan", + "nomsan", + "noubsan", + ], + deps = [ + ":pcan_op", + requirement("numpy"), + requirement("tensorflow"), + ], +) + +py_tflm_signal_library( + name = "stacker_op", + srcs = ["ops/stacker_op.py"], + cc_op_defs = ["//signal/tensorflow_core/ops:stacker_op"], + cc_op_kernels = [ + "//signal/tensorflow_core/kernels:stacker_kernel", + ], + deps = [ + "//python/tflite_micro/signal/utils:util", + ], +) + +py_test( + name = "stacker_op_test", + size = "small", + srcs = ["ops/stacker_op_test.py"], + data = [ + "//python/tflite_micro/signal/ops/testdata:stacker_test1.txt", + ], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":stacker_op", + requirement("numpy"), + 
requirement("tensorflow"), ], ) @@ -63,6 +287,9 @@ py_tflm_signal_library( cc_op_kernels = [ "//signal/tensorflow_core/kernels:window_kernel", ], + deps = [ + "//python/tflite_micro/signal/utils:util", + ], ) py_test( @@ -75,8 +302,7 @@ py_test( srcs_version = "PY3", deps = [ ":window_op", - "//python/tflite_micro/signal/utils:util", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) diff --git a/python/tflite_micro/signal/ops/delay_op.py b/python/tflite_micro/signal/ops/delay_op.py new file mode 100644 index 00000000000..c7508049ab3 --- /dev/null +++ b/python/tflite_micro/signal/ops/delay_op.py @@ -0,0 +1,37 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Use overlap add op in python.""" + +import tensorflow as tf +from tflite_micro.python.tflite_micro.signal.utils import util + +gen_delay_op = util.load_custom_op('delay_op.so') + + +def _delay_wrapper(delay_fn, default_name): + """Wrapper around gen_delay_op.delay*.""" + + def _delay(input_tensor, delay_length, name=default_name): + with tf.name_scope(name) as name: + input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.int16) + return delay_fn(input_tensor, delay_length=delay_length, name=name) + + return _delay + + +# TODO(b/286250473): change back name after name clash resolved +delay = _delay_wrapper(gen_delay_op.signal_delay, "signal_delay") + +tf.no_gradient("signal_delay") diff --git a/python/tflite_micro/signal/ops/delay_op_test.py b/python/tflite_micro/signal/ops/delay_op_test.py new file mode 100644 index 00000000000..66b033fc977 --- /dev/null +++ b/python/tflite_micro/signal/ops/delay_op_test.py @@ -0,0 +1,85 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for delay op.""" + +import numpy as np +import tensorflow as tf + +from tflite_micro.python.tflite_micro.signal.ops import delay_op +from tflite_micro.python.tflite_micro.signal.utils import util + + +class DelayOpTest(tf.test.TestCase): + + def TestHelper(self, input_signal, delay_length, frame_size): + inner_dim_size = input_signal.shape[-1] + input_signal_rank = len(input_signal.shape) + frame_num = int(np.ceil((inner_dim_size + delay_length) / frame_size)) + # We need to continue feeding the op with zeros until the delay line is + # flushed. Pad the input signal to a multiple of frame_size. + padded_size = frame_num * frame_size + pad_size = int(padded_size - inner_dim_size) + # Axes to pass to np.pad. All axes have no padding except the innermost one. + pad_outer_axes = np.zeros([input_signal_rank - 1, 2], dtype=int) + pad_input_signal = np.vstack([pad_outer_axes, [0, pad_size]]) + input_signal_padded = np.pad(input_signal, pad_input_signal) + delay_exp_signal = np.vstack( + [pad_outer_axes, [delay_length, pad_size - delay_length]]) + delay_exp = np.pad(input_signal, delay_exp_signal) + delay_out = np.zeros(input_signal_padded.shape) + + in_frame_shape = input_signal.shape[:-1] + (frame_size, ) + func = tf.function(delay_op.delay) + concrete_function = func.get_concrete_function(tf.TensorSpec( + in_frame_shape, dtype=tf.int16), + delay_length=delay_length) + interpreter = util.get_tflm_interpreter(concrete_function, func) + + for i in range(frame_num): + in_frame = input_signal_padded[..., i * frame_size:(i + 1) * frame_size] + # TFLM + interpreter.set_input(in_frame, 0) + interpreter.invoke() + out_frame_tflm = interpreter.get_output(0) + # TF + out_frame = self.evaluate( + delay_op.delay(in_frame, delay_length=delay_length)) + delay_out[..., i * frame_size:(i + 1) * frame_size] = out_frame + self.assertAllEqual(out_frame, out_frame_tflm) + self.assertAllEqual(delay_out, delay_exp) + + def testFrameLargerThanDelay(self): + self.TestHelper(np.arange(0, 30, dtype=np.int16), 7, 10) + + def testFrameSmallerThanDelay(self): + self.TestHelper(np.arange(0, 70, dtype=np.int16), 21, 3) + + def testZeroDelay(self): + self.TestHelper(np.arange(0, 20, dtype=np.int16), 0, 3) + + def testNegativeDelay(self): + with self.assertRaises((tf.errors.InvalidArgumentError, ValueError)): + self.TestHelper(np.arange(1, 20, dtype=np.int16), -21, 3) + + def testMultiDimensionalDelay(self): + input_signal = np.reshape(np.arange(0, 120, dtype=np.int16), [2, 3, 20]) + self.TestHelper(input_signal, 4, 6) + input_signal = np.reshape(np.arange(0, 72, dtype=np.int16), + [2, 2, 3, 3, 2]) + self.TestHelper(input_signal, 7, 3) + + +if __name__ == '__main__': + tf.test.main() diff --git a/python/tflite_micro/signal/ops/energy_op.py b/python/tflite_micro/signal/ops/energy_op.py new file mode 100644 index 00000000000..1fd37e851f5 --- /dev/null +++ b/python/tflite_micro/signal/ops/energy_op.py @@ -0,0 +1,45 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Use energy op in python.""" + +import tensorflow as tf +from tflite_micro.python.tflite_micro.signal.utils import util + +gen_energy_op = util.load_custom_op('energy_op.so') + + +def _energy_wrapper(energy_fn, default_name): + """Wrapper around gen_energy_op.energy*.""" + + def _energy(input_tensor, start_index=0, end_index=-1, name=default_name): + with tf.name_scope(name) as name: + input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.int16) + dim_list = input_tensor.shape.as_list() + if len(dim_list) != 1: + raise ValueError("Input tensor must have a rank of 1") + if end_index == -1: + end_index = dim_list[0] - 1 + return energy_fn(input_tensor, + start_index=start_index, + end_index=end_index, + name=name) + + return _energy + + +# TODO(b/286250473): change back name after name clash resolved +energy = _energy_wrapper(gen_energy_op.signal_energy, "signal_energy") + +tf.no_gradient("signal_energy") diff --git a/python/tflite_micro/signal/ops/energy_op_test.py b/python/tflite_micro/signal/ops/energy_op_test.py new file mode 100644 index 00000000000..8df9514df21 --- /dev/null +++ b/python/tflite_micro/signal/ops/energy_op_test.py @@ -0,0 +1,137 @@ +# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for energy op.""" +import os + +import numpy as np +import tensorflow as tf + +from tensorflow.python.platform import resource_loader +from tflite_micro.python.tflite_micro.signal.ops import energy_op +from tflite_micro.python.tflite_micro.signal.utils import util + + +class EnergyOpTest(tf.test.TestCase): + + _PREFIX_PATH = resource_loader.get_path_to_datafile('') + + def GetResource(self, filepath): + full_path = os.path.join(self._PREFIX_PATH, filepath) + with open(full_path, 'rt') as f: + file_text = f.read() + return file_text + + def SingleEnergyTest(self, filename): + lines = self.GetResource(filename).splitlines() + args = lines[0].split() + start_index = int(args[0]) + end_index = int(args[1]) + + func = tf.function(energy_op.energy) + input_size = len(lines[1].split()) + concrete_function = func.get_concrete_function(tf.TensorSpec( + input_size, dtype=tf.int16), + start_index=start_index, + end_index=end_index) + interpreter = util.get_tflm_interpreter(concrete_function, func) + # Skip line 0, which contains the configuration params. 
+ # Read lines in pairs + i = 1 + while i < len(lines): + in_frame = np.array([int(j) for j in lines[i].split()], dtype='int16') + out_frame_exp = [int(j) for j in lines[i + 1].split()] + # TFLM + interpreter.set_input(in_frame, 0) + interpreter.invoke() + out_frame = interpreter.get_output(0) + for j in range(start_index, end_index): + self.assertEqual(out_frame_exp[j], out_frame[j]) + # TF + out_frame = self.evaluate( + energy_op.energy(in_frame, + start_index=start_index, + end_index=end_index)) + for j in range(start_index, end_index): + self.assertEqual(out_frame_exp[j], out_frame[j]) + i += 2 + + def testSingleFrame(self): + start_index = 5 + end_index = 250 + energy_in = [ + -56, 0, 26, 49, 144, -183, -621, 16, 544, 605, 11, -581, -26, 245, + -210, -273, 200, 541, 268, -319, -43, -544, -747, 356, 415, 356, 174, + -133, 4, -278, -487, 104, 449, 560, 223, -691, -451, 130, 132, 202, 86, + -91, 170, -85, -454, -123, 330, 125, -434, 104, 422, 89, -14, -113, + -123, -63, 125, 142, 40, -218, -183, -10, 3, 154, 95, -64, -108, -55, + 55, 216, 47, -358, -297, 391, 437, 5, -59, -252, -102, -25, -60, 76, + -46, 6, 128, 113, -4, -101, 20, -75, -154, 88, 144, -50, -163, 58, 112, + 38, 31, 2, -38, -80, 77, 63, -136, -83, 83, 89, 32, 27, 6, -237, -247, + 250, 292, -13, -55, 4, 58, -182, -120, 63, -33, -40, -88, 152, 246, 41, + -99, -178, -11, 68, -10, 3, 14, 39, 30, -94, -29, 79, -6, -84, -65, 55, + 138, 71, -141, -151, 150, 149, -159, -106, 203, 55, -207, -153, -37, + 231, 187, -6, 54, -66, -85, -258, -244, 271, 157, 24, 117, 144, 144, + -202, -66, -320, -478, 340, 510, 46, -152, -185, -199, -19, 139, 282, + -15, -140, 129, 45, -124, -26, 145, -36, -79, -17, -85, -29, 104, 82, + -84, -7, 127, -96, -210, 60, 114, 67, 40, -3, -1, -101, -76, 77, 55, + -5, 19, 13, 13, -36, -40, -34, 20, 63, 7, -66, -44, -6, -22, 66, 40, + -20, 13, 21, -15, -45, 6, 38, 19, -40, -46, -3, 2, 41, 41, -17, -37, + -11, 15, 13, -4, -5, 0, 1, 2, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0, 0, 1, + 1, -1, -2, -1, 0, -1, 0, 0, -1, 0, -1, 0, 0, 1, 0, -1, 0, 1, -1, -1, 0, + 0, 0, -1, 0, -1, 0, 1, 0, 0, -1, -1, 1, -1, -1, 0, 0, 0, 0, -2, -1, -1, + 0, 0, -1, -1, 0, 0, -1, -1, -1, 1, 0, -1, 0, 0, 0, -1, 0, 0, 0, 1, -1, + -1, 0, 1, 0, -1, -1, 0, -2, 0, 0, 0, 0, -1, -3, 1, 2, 0, 0, 1, 2, -1, + -1, -1, -1, -1, -1, 0, 0, 1, 0, -1, -1, 1, 0, 0, 1, -1, 0, 0, 0, 0, 0, + -1, 0, -1, 0, 0, -1, -1, 0, 0, 0, 0, -1, -1, 0, 1, -1, -1, 0, -2, 0, 1, + 0, 0, 0, 0, 1, -1, -1, 1, 0, -1, 0, 0, -1, 0, 2, 1, -2, -1, 1, 0, 0, + -2, 0, 0, -1, -1, 0, 0, 0, 0, -1, -2, -1, 1, 1, 0, 0, 0, 0, -1, -1, 0, + 1, 1, 0, 0, 0, 0, -1, 0, 0, 0, 0, -1, 0, -1, 0, -1, -1, 0, 0, 0, 1, -1, + 0, 1, 0, -1, 0, -1, 0, 0, 0, 0, 0, -1, -2, 0, 1, 0, 1, 1, -1, -1, -1, + 0, 0, 0, 0, -1, -1, -1, 0, 1, 0, -2, -1, 1, 1, 1, -1, -3, -1, 1, -1, + -2, 0, -1, -2, -1, 0, 0, 0, -3, -1, 0, 0, -1, 0, 0, -2, 0 + ] + energy_exp = [ + 0, 0, 0, 0, 0, 337682, 60701, 118629, 332681, 173585, 297785, 684745, + 298961, 47965, 77300, 247985, 515201, 527210, 220301, 58228, 15677, + 36125, 221245, 124525, 199172, 186005, 12965, 19098, 35789, 49124, + 33589, 23725, 13121, 14689, 49681, 130373, 241090, 190994, 66985, + 11029, 9376, 2152, 29153, 10217, 6025, 31460, 23236, 29933, 13988, 965, + 7844, 9898, 25385, 14810, 1753, 56205, 123509, 85433, 3041, 36488, + 18369, 2689, 30848, 62197, 41485, 4745, 109, 1717, 9736, 7082, 7092, + 7250, 24085, 42682, 44701, 36517, 44234, 66258, 54730, 35005, 7272, + 73789, 132977, 25225, 34425, 61540, 106756, 344084, 262216, 57329, + 39962, 98845, 19825, 18666, 16052, 22321, 6530, 8066, 
17540, 7105, + 25345, 47700, 17485, 1609, 10202, 11705, 3050, 530, 1465, 2756, 4369, + 4405, 1972, 4840, 2000, 610, 2250, 1480, 1961, 2125, 1685, 1970, 1490, + 394, 41, 1, 4, 0, 0, 1, 0, 0, 1, 2, 5, 1, 0, 1, 1, 1, 1, 1, 2, 0, 1, 1, + 1, 0, 2, 2, 1, 0, 4, 2, 0, 2, 0, 2, 2, 1, 0, 1, 0, 1, 2, 1, 1, 1, 4, 0, + 1, 10, 4, 1, 5, 2, 2, 1, 1, 1, 2, 0, 2, 0, 0, 1, 1, 0, 2, 0, 0, 2, 1, + 2, 4, 1, 0, 0, 2, 2, 1, 0, 1, 5, 5, 1, 4, 0, 2, 0, 0, 5, 2, 1, 0, 1, 1, + 2, 0, 0, 1, 0, 1, 1, 1, 1, 0, 2, 1, 1, 1, 0, 0, 1, 4, 1, 2, 2, 1, 0, 1, + 2, 1, 4, 2, 2, 10, 2, 5, 1, 0, 0, 0, 0, 0, 0, 0 + ] + energy_out = energy_op.energy(energy_in, + start_index=start_index, + end_index=end_index) + + for j in range(start_index, end_index): + self.assertEqual(energy_exp[j], energy_out[j]) + + def testEnergy(self): + self.SingleEnergyTest('testdata/energy_test1.txt') + + +if __name__ == '__main__': + tf.test.main() diff --git a/python/tflite_micro/signal/ops/fft_ops.py b/python/tflite_micro/signal/ops/fft_ops.py index 1628b6806ef..446b8789cce 100644 --- a/python/tflite_micro/signal/ops/fft_ops.py +++ b/python/tflite_micro/signal/ops/fft_ops.py @@ -65,5 +65,24 @@ def _fft(input_tensor, fft_length, name=default_name): return _fft +def _fft_auto_scale_wrapper(fft_auto_scale_fn, default_name): + """Wrapper around gen_fft_ops.fft_auto_scale*.""" + + def _fft_auto_scale(input_tensor, name=default_name): + with tf.name_scope(name) as name: + input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.int16) + dim_list = input_tensor.shape.as_list() + if len(dim_list) != 1: + raise ValueError("Input tensor must have a rank of 1") + return fft_auto_scale_fn(input_tensor, name=name) + + return _fft_auto_scale + + rfft = _fft_wrapper(gen_fft_ops.signal_rfft, "signal_rfft") +irfft = _fft_wrapper(gen_fft_ops.signal_irfft, "signal_irfft") +fft_auto_scale = _fft_auto_scale_wrapper(gen_fft_ops.signal_fft_auto_scale, + "signal_fft_auto_scale") tf.no_gradient("signal_rfft") +tf.no_gradient("signal_irfft") +tf.no_gradient("signal_fft_auto_scale") diff --git a/python/tflite_micro/signal/ops/fft_ops_test.py b/python/tflite_micro/signal/ops/fft_ops_test.py index 9cada6d8dbb..63de82404d3 100644 --- a/python/tflite_micro/signal/ops/fft_ops_test.py +++ b/python/tflite_micro/signal/ops/fft_ops_test.py @@ -33,6 +33,31 @@ def GetResource(self, filepath): file_text = f.read() return file_text + def SingleFftAutoScaleTest(self, filename): + lines = self.GetResource(filename).splitlines() + func = tf.function(fft_ops.fft_auto_scale) + input_size = len(lines[0].split()) + concrete_function = func.get_concrete_function( + tf.TensorSpec(input_size, dtype=tf.int16)) + interpreter = util.get_tflm_interpreter(concrete_function, func) + i = 0 + while i < len(lines): + in_frame = np.array([int(j) for j in lines[i].split()], dtype=np.int16) + out_frame_exp = [int(j) for j in lines[i + 1].split()] + scale_exp = [int(j) for j in lines[i + 2].split()] + # TFLM + interpreter.set_input(in_frame, 0) + interpreter.invoke() + out_frame = interpreter.get_output(0) + scale = interpreter.get_output(1) + self.assertAllEqual(out_frame_exp, out_frame) + self.assertEqual(scale_exp, scale) + # TF + out_frame, scale = self.evaluate(fft_ops.fft_auto_scale(in_frame)) + self.assertAllEqual(out_frame_exp, out_frame) + self.assertEqual(scale_exp, scale) + i += 3 + def SingleRfftTest(self, filename): lines = self.GetResource(filename).splitlines() args = lines[0].split() @@ -43,8 +68,6 @@ def SingleRfftTest(self, filename): tf.TensorSpec(input_size, dtype=tf.int16), fft_length) # 
TODO(b/286252893): make test more robust (vs scipy) interpreter = util.get_tflm_interpreter(concrete_function, func) - input_details = interpreter.get_input_details() - output_details = interpreter.get_output_details() # Skip line 0, which contains the configuration params. # Read lines in pairs i = 1 @@ -53,9 +76,9 @@ def SingleRfftTest(self, filename): out_frame_exp = [int(j) for j in lines[i + 1].split()] # Compare TFLM inference against the expected golden values # TODO(b/286252893): validate usage of testing vs interpreter here - interpreter.set_tensor(input_details[0]['index'], in_frame) + interpreter.set_input(in_frame, 0) interpreter.invoke() - out_frame = interpreter.get_tensor(output_details[0]['index']) + out_frame = interpreter.get_output(0) self.assertAllEqual(out_frame_exp, out_frame) # TF out_frame = self.evaluate(fft_ops.rfft(in_frame, fft_length)) @@ -83,11 +106,9 @@ def MultiDimRfftTest(self, filename): concrete_function = func.get_concrete_function( tf.TensorSpec(np.shape(in_frames), dtype=tf.int16), fft_length) interpreter = util.get_tflm_interpreter(concrete_function, func) - input_details = interpreter.get_input_details() - output_details = interpreter.get_output_details() - interpreter.set_tensor(input_details[0]['index'], in_frames) + interpreter.set_input(in_frames, 0) interpreter.invoke() - out_frame = interpreter.get_tensor(output_details[0]['index']) + out_frame = interpreter.get_output(0) self.assertAllEqual(out_frames_exp, out_frame) # TF out_frames = self.evaluate(fft_ops.rfft(in_frames, fft_length)) @@ -204,6 +225,12 @@ def testRfftSineTest(self): delta=1) fft_length = 2 * fft_length + def testRfft(self): + self.SingleRfftTest('testdata/rfft_test1.txt') + + def testRfftLargeOuterDimension(self): + self.MultiDimRfftTest('testdata/rfft_test1.txt') + def testFftTooLarge(self): for dtype in [np.int16, np.int32, np.float32]: fft_input = np.zeros(round(fft_ops._MAX_FFT_LENGTH * 2), dtype=dtype) @@ -224,6 +251,64 @@ def testFftLengthNoEven(self): with self.assertRaises((tf.errors.InvalidArgumentError, ValueError)): self.evaluate(fft_ops.rfft(fft_input, 127)) + def testIrfftTest(self): + for dtype in [np.int16, np.int32, np.float32]: + fft_length = fft_ops._MIN_FFT_LENGTH + while fft_length <= fft_ops._MAX_FFT_LENGTH: + if dtype == np.float32: + # Random input in the range [-1, 1) + fft_input = np.random.random(fft_length).astype(dtype) * 2 - 1 + else: + fft_input = np.random.randint( + np.iinfo(np.int16).min, + np.iinfo(np.int16).max + 1, fft_length).astype(dtype) + fft_output = self.evaluate(fft_ops.rfft(fft_input, fft_length)) + self.assertEqual(fft_output.shape[0], (fft_length / 2 + 1) * 2) + ifft_output = self.evaluate(fft_ops.irfft(fft_output, fft_length)) + self.assertEqual(ifft_output.shape[0], fft_length) + # Output of integer RFFT and IRFFT is scaled by 1/fft_length + if dtype == np.int16: + self.assertArrayNear(fft_input, + ifft_output.astype(np.int32) * fft_length, 6500) + elif dtype == np.int32: + self.assertArrayNear(fft_input, + ifft_output.astype(np.int32) * fft_length, 7875) + else: + self.assertArrayNear(fft_input, ifft_output, 5e-7) + fft_length = 2 * fft_length + + def testIrfftLargeOuterDimension(self): + for dtype in [np.int16, np.int32, np.float32]: + fft_length = fft_ops._MIN_FFT_LENGTH + while fft_length <= fft_ops._MAX_FFT_LENGTH: + if dtype == np.float32: + # Random input in the range [-1, 1) + fft_input = np.random.random([2, 5, fft_length + ]).astype(dtype) * 2 - 1 + else: + fft_input = np.random.randint( + np.iinfo(np.int16).min, + 
np.iinfo(np.int16).max + 1, [2, 5, fft_length]).astype(dtype) + fft_output = self.evaluate(fft_ops.rfft(fft_input, fft_length)) + self.assertEqual(fft_output.shape[-1], (fft_length / 2 + 1) * 2) + ifft_output = self.evaluate(fft_ops.irfft(fft_output, fft_length)) + self.assertEqual(ifft_output.shape[-1], fft_length) + # Output of integer RFFT and IRFFT is scaled by 1/fft_length + if dtype == np.int16: + self.assertAllClose(fft_input, + ifft_output.astype(np.int32) * fft_length, + atol=7875) + elif dtype == np.int32: + self.assertAllClose(fft_input, + ifft_output.astype(np.int32) * fft_length, + atol=7875) + else: + self.assertAllClose(fft_input, ifft_output, rtol=5e-7, atol=5e-7) + fft_length = 2 * fft_length + + def testAutoScale(self): + self.SingleFftAutoScaleTest('testdata/fft_auto_scale_test1.txt') + def testPow2FftLengthTest(self): fft_length, fft_bits = fft_ops.get_pow2_fft_length(131) self.assertEqual(fft_length, 256) diff --git a/python/tflite_micro/signal/ops/filter_bank_ops.py b/python/tflite_micro/signal/ops/filter_bank_ops.py new file mode 100644 index 00000000000..d89ae991f5c --- /dev/null +++ b/python/tflite_micro/signal/ops/filter_bank_ops.py @@ -0,0 +1,351 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Use filter bank ops in python.""" + +import numpy as np +import tensorflow as tf + +from tflite_micro.python.tflite_micro.signal.utils import util +from tflite_micro.python.tflite_micro.signal.utils.freq_to_mel_wrapper import freq_to_mel + +gen_filter_bank_ops = util.load_custom_op('filter_bank_ops.so') + +# A note about precision: +# The code to calculate center frequencies and weights uses floating point +# extensively. The original speech micro code is written in C and uses +# 32-bit 'float' C types. Python's floating point type is 64-bit by default, +# which resulted in slight differences that made verification harder. +# In order to establish parity with speech micro, and recognizing the slight +# loss in precision, numpy.float32 was used throughout this code instead of +# the default Python 'float' type. For the same reason, the function freq_to_mel +# wraps the same FreqToMel() C function used by Speech Micro. 
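+#
+# As an illustration only (this snippet is not part of the module's logic),
+# the drift is visible even in a single conversion. Assuming the HTK-style
+# formula mel = 1127 * ln(1 + f / 700), which FreqToMel is understood to
+# follow:
+#
+#   import numpy as np
+#   m64 = 1127.0 * np.log1p(3800.0 / 700.0)  # 64-bit Python/numpy floats
+#   m32 = np.float32(1127.0) * np.log1p(
+#       np.float32(3800.0) / np.float32(700.0))  # 32-bit, speech-micro parity
+#   # m64 and m32 agree to roughly 7 significant digits; once a weight
+#   # derived from them is scaled by 1 << FILTER_BANK_WEIGHT_SCALING_BITS and
+#   # truncated to int, the residual difference can shift the quantized
+#   # weight by one step.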
+
+FILTER_BANK_ALIGNMENT = 1
+FILTER_BANK_CHANNEL_BLOCK_SIZE = 1
+FILTER_BANK_WEIGHT_SCALING_BITS = 12
+
+
+def _calc_center_freq(channel_num, lower_freq_limit, upper_freq_limit):
+  """Calculate the center frequencies of mel spectrum filter banks."""
+  if lower_freq_limit < 0:
+    raise ValueError("Lower frequency limit must be non-negative")
+  if lower_freq_limit > upper_freq_limit:
+    raise ValueError("Lower frequency limit can't be larger than upper limit")
+  mel_lower = np.float32(freq_to_mel(lower_freq_limit))
+  mel_upper = np.float32(freq_to_mel(upper_freq_limit))
+  mel_span = mel_upper - mel_lower
+  mel_spacing = mel_span / np.float32(channel_num)
+  channels = np.arange(1, channel_num + 1, dtype=np.float32)
+  return mel_lower + (mel_spacing * channels)
+
+
+def _quantize_filterbank_weight(float_weight, scale_bits):
+  """Scale a float filterbank weight and return the integer weight and unweight."""
+  weight = int(float_weight * (1 << scale_bits))
+  unweight = int((1 - float_weight) * (1 << scale_bits))
+  return weight, unweight
+
+
+def _init_filter_bank_weights(spectrum_size, sample_rate, alignment,
+                              channel_block_size, num_channels,
+                              lower_band_limit, upper_band_limit):
+  """Initialize mel-spectrum filter bank weights."""
+  # How should we align things to index counts given the byte alignment?
+  item_size = np.dtype("int16").itemsize
+  if alignment < item_size:
+    index_alignment = 1
+  else:
+    index_alignment = int(alignment / item_size)
+
+  channel_frequency_starts = np.zeros(num_channels + 1, dtype=np.int16)
+  channel_weight_starts = np.zeros(num_channels + 1, dtype=np.int16)
+  channel_widths = np.zeros(num_channels + 1, dtype=np.int16)
+
+  actual_channel_starts = np.zeros(num_channels + 1, dtype=np.int16)
+  actual_channel_widths = np.zeros(num_channels + 1, dtype=np.int16)
+
+  center_mel_freqs = _calc_center_freq(num_channels + 1, lower_band_limit,
+                                       upper_band_limit)
+
+  # (spectrum_size - 1) to exclude DC. Emulate Hidden Markov Model Toolkit (HTK).
+  hz_per_sbin = (sample_rate / 2) / (spectrum_size - 1)
+  # (1 + ...) to exclude DC.
+  start_index = round(1 + (lower_band_limit / hz_per_sbin))
+
+  # For each channel, we need to figure out what frequencies belong to it, and
+  # how much padding we need to add so that we can efficiently multiply the
+  # weights and unweights for accumulation. To simplify the multiplication
+  # logic, all channels will have some multiplication to do (even if there are
+  # no frequencies that accumulate to that channel) - they will be directed to
+  # a set of zero weights.
+  chan_freq_index_start = start_index
+  weight_index_start = 0
+  needs_zeros = 0
+
+  for chan in range(num_channels + 1):
+    # Keep jumping frequencies until we overshoot the bound on this channel.
+    freq_index = chan_freq_index_start
+    while freq_to_mel(freq_index * hz_per_sbin) <= center_mel_freqs[chan]:
+      freq_index += 1
+
+    width = freq_index - chan_freq_index_start
+    actual_channel_starts[chan] = chan_freq_index_start
+    actual_channel_widths[chan] = width
+
+    if width == 0:
+      # This channel doesn't actually get anything from the frequencies; it's
+      # always zero. We then need to insert some 'zero' weights into the
+      # output, and just redirect this channel to do a single multiplication at
+      # this point. For simplicity, the zeros are placed at the beginning of
+      # the weights arrays, so we have to go and update all the other
+      # weight_starts to reflect this shift (but only once).
+ channel_frequency_starts[chan] = 0 + channel_weight_starts[chan] = 0 + channel_widths[chan] = channel_block_size + if needs_zeros == 0: + needs_zeros = 1 + for j in range(chan): + channel_weight_starts[j] += channel_block_size + weight_index_start += channel_block_size + else: + # How far back do we need to go to ensure that we have the proper + # alignment? + aligned_start = int( + chan_freq_index_start / index_alignment) * index_alignment + aligned_width = (chan_freq_index_start - aligned_start + width) + padded_width = (int( + (aligned_width - 1) / channel_block_size) + 1) * channel_block_size + + channel_frequency_starts[chan] = aligned_start + channel_weight_starts[chan] = weight_index_start + channel_widths[chan] = padded_width + weight_index_start += padded_width + chan_freq_index_start = freq_index + + # Allocate the two arrays to store the weights - weight_index_start contains + # the index of what would be the next set of weights that we would need to + # add, so that's how many weights we need to allocate. + num_weights = weight_index_start + weights = np.zeros(num_weights, dtype=np.int16) + unweights = np.zeros(num_weights, dtype=np.int16) + + # Next pass, compute all the weights. Since everything has been memset to + # zero, we only need to fill in the weights that correspond to some frequency + # for a channel. + end_index = 0 + mel_low = freq_to_mel(lower_band_limit) + for chan in range(num_channels + 1): + frequency = actual_channel_starts[chan] + num_frequencies = actual_channel_widths[chan] + frequency_offset = frequency - channel_frequency_starts[chan] + weight_start = channel_weight_starts[chan] + if chan == 0: + denom_val = mel_low + else: + denom_val = center_mel_freqs[chan - 1] + for j in range(num_frequencies): + num = np.float32(center_mel_freqs[chan] - + freq_to_mel(frequency * hz_per_sbin)) + den = np.float32(center_mel_freqs[chan] - denom_val) + weight = num / den + # Make the float into an integer for the weights (and unweights). + weight_index = weight_start + frequency_offset + j + weights[weight_index], unweights[ + weight_index] = _quantize_filterbank_weight( + weight, FILTER_BANK_WEIGHT_SCALING_BITS) + frequency += 1 + if frequency > end_index: + end_index = frequency + + if end_index >= spectrum_size: + raise ValueError("Lower frequency limit can't be larger than upper limit") + + return (start_index, end_index, weights, unweights, channel_frequency_starts, + channel_weight_starts, channel_widths) + + +def calc_start_end_indices(fft_length, sample_rate, num_channels, + lower_band_limit, upper_band_limit): + """Returns the range of FFT indices needed by mel-spectrum filter bank. + + The caller can use the indices to avoid calculating the energy of FFT bins + that won't be used. + + Args: + fft_length: Length of FFT, in bins. + sample_rate: Sample rate, in Hz. + num_channels: Number of mel-spectrum filter bank channels. + lower_band_limit: lower limit of mel-spectrum filterbank, in Hz. + upper_band_limit: upper limit of mel-spectrum filterbank, in Hz. 
+
+  Returns:
+    A pair: start and end indices, in the range [0, fft_length)
+
+  Raises:
+    ValueError: If fft_length is not an even number.
+  """
+  if fft_length % 2 != 0:
+    raise ValueError("FFT length must be an even number")
+  spectrum_size = fft_length // 2 + 1
+  (start_index, end_index, _, _, _, _,
+   _) = _init_filter_bank_weights(spectrum_size, sample_rate,
+                                  FILTER_BANK_ALIGNMENT,
+                                  FILTER_BANK_CHANNEL_BLOCK_SIZE, num_channels,
+                                  lower_band_limit, upper_band_limit)
+  return start_index, end_index
+
+
+def _filter_bank_wrapper(filter_bank_fn, default_name):
+  """Wrapper around gen_filter_bank_ops.filter_bank*."""
+
+  def _filter_bank(input_tensor,
+                   sample_rate,
+                   num_channels,
+                   lower_band_limit,
+                   upper_band_limit,
+                   name=default_name):
+    with tf.name_scope(name) as name:
+      input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.uint32)
+      dim_list = input_tensor.shape.as_list()
+      if len(dim_list) != 1:
+        raise ValueError("Input tensor must have a rank of 1")
+      spectrum_size = dim_list[0]
+
+      (_, _, weights, unweights, channel_frequency_starts,
+       channel_weight_starts, channel_widths) = _init_filter_bank_weights(
+           spectrum_size, sample_rate, FILTER_BANK_ALIGNMENT,
+           FILTER_BANK_CHANNEL_BLOCK_SIZE, num_channels, lower_band_limit,
+           upper_band_limit)
+      weights_tensor = tf.convert_to_tensor(weights, dtype=tf.int16)
+      unweights_tensor = tf.convert_to_tensor(unweights, dtype=tf.int16)
+      channel_frequency_starts_tensor = tf.convert_to_tensor(
+          channel_frequency_starts, dtype=tf.int16)
+      channel_weight_starts_tensor = tf.convert_to_tensor(
+          channel_weight_starts, dtype=tf.int16)
+      channel_widths_tensor = tf.convert_to_tensor(channel_widths,
+                                                   dtype=tf.int16)
+
+      return filter_bank_fn(input_tensor,
+                            weights_tensor,
+                            unweights_tensor,
+                            channel_frequency_starts_tensor,
+                            channel_weight_starts_tensor,
+                            channel_widths_tensor,
+                            num_channels=num_channels,
+                            name=name)
+
+  return _filter_bank
+
+
+def _filter_bank_square_root_wrapper(filter_bank_square_root_fn, default_name):
+  """Wrapper around gen_filter_bank_ops.filter_bank_square_root*."""
+
+  def _filter_bank_square_root(input_tensor, scale_bits, name=default_name):
+    with tf.name_scope(name) as name:
+      input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.uint64)
+      dim_list = input_tensor.shape.as_list()
+      if len(dim_list) != 1:
+        raise ValueError("Input tensor must have a rank of 1")
+      scale_bits_tensor = tf.convert_to_tensor(scale_bits, dtype=tf.int32)
+      return filter_bank_square_root_fn(input_tensor,
+                                        scale_bits_tensor,
+                                        name=name)
+
+  return _filter_bank_square_root
+
+
+def _filter_bank_spectral_subtraction_wrapper(
+    filter_bank_spectral_subtraction_fn, default_name):
+  """Wrapper around gen_filter_bank_ops.filter_bank_spectral_subtraction*."""
+
+  def _filter_bank_spectral_subtraction(input_tensor,
+                                        num_channels,
+                                        smoothing,
+                                        alternate_smoothing,
+                                        smoothing_bits,
+                                        min_signal_remaining,
+                                        clamping,
+                                        spectral_subtraction_bits=14,
+                                        name=default_name):
+    with tf.name_scope(name) as name:
+      input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.uint32)
+      dim_list = input_tensor.shape.as_list()
+      if len(dim_list) != 1:
+        raise ValueError("Input tensor must have a rank of 1")
+
+      min_signal_remaining = int(min_signal_remaining *
+                                 (1 << spectral_subtraction_bits))
+      # Alternate smoothing may be disabled.
+      if alternate_smoothing == 0:
+        alternate_smoothing = smoothing
+
+      smoothing = int(smoothing * (1 << spectral_subtraction_bits))
+      one_minus_smoothing = int((1 << spectral_subtraction_bits) - smoothing)
+
alternate_smoothing = int(alternate_smoothing * + (1 << spectral_subtraction_bits)) + alternate_one_minus_smoothing = int((1 << spectral_subtraction_bits) - + alternate_smoothing) + return filter_bank_spectral_subtraction_fn( + input_tensor, + num_channels=num_channels, + smoothing=smoothing, + one_minus_smoothing=one_minus_smoothing, + alternate_smoothing=alternate_smoothing, + alternate_one_minus_smoothing=alternate_one_minus_smoothing, + smoothing_bits=smoothing_bits, + min_signal_remaining=min_signal_remaining, + clamping=clamping, + spectral_subtraction_bits=spectral_subtraction_bits, + name=name) + + return _filter_bank_spectral_subtraction + + +def _filter_bank_log_wrapper(filter_bank_log_fn, default_name): + """Wrapper around gen_filter_bank_ops.filter_bank_log*.""" + + def _filter_bank_log(input_tensor, + output_scale, + input_correction_bits, + name=default_name): + with tf.name_scope(name) as name: + input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.uint32) + dim_list = input_tensor.shape.as_list() + if len(dim_list) != 1: + raise ValueError("Input tensor must have a rank of 1") + + return filter_bank_log_fn(input_tensor, + output_scale=output_scale, + input_correction_bits=input_correction_bits, + name=name) + + return _filter_bank_log + + +filter_bank = _filter_bank_wrapper(gen_filter_bank_ops.signal_filter_bank, + "signal_filter_bank") +filter_bank_square_root = _filter_bank_square_root_wrapper( + gen_filter_bank_ops.signal_filter_bank_square_root, + "signal_filter_bank_square_root") +filter_bank_spectral_subtraction = _filter_bank_spectral_subtraction_wrapper( + gen_filter_bank_ops.signal_filter_bank_spectral_subtraction, + "signal_filter_bank_spectral_subtraction") +filter_bank_log = _filter_bank_log_wrapper( + gen_filter_bank_ops.signal_filter_bank_log, "signal_filter_bank_log") + +tf.no_gradient("signal_filter_bank") +tf.no_gradient("signal_filter_bank_square_root") +tf.no_gradient("signal_filter_bank_spectral_subtraction") +tf.no_gradient("signal_filter_bank_log") diff --git a/python/tflite_micro/signal/ops/filter_bank_ops_test.py b/python/tflite_micro/signal/ops/filter_bank_ops_test.py new file mode 100644 index 00000000000..3d7a4338e3e --- /dev/null +++ b/python/tflite_micro/signal/ops/filter_bank_ops_test.py @@ -0,0 +1,1194 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for filter bank ops.""" +import os + +import numpy as np +import tensorflow as tf + +from tensorflow.python.platform import resource_loader +from tflite_micro.python.tflite_micro.signal.ops import filter_bank_ops +from tflite_micro.python.tflite_micro.signal.utils import util + + +class FilterBankOpTest(tf.test.TestCase): + + _PREFIX_PATH = resource_loader.get_path_to_datafile('') + + def GetResource(self, filepath): + full_path = os.path.join(self._PREFIX_PATH, filepath) + with open(full_path, 'rt') as f: + file_text = f.read() + return file_text + + def testFilterBankCenterFreq(self): + center_freq = filter_bank_ops._calc_center_freq(41, 125, 7800) + center_freq_exp = [ + 249.2831420898437500, 313.3967285156250000, 377.5103454589843750, + 441.6239624023437500, 505.7375488281250000, 569.8511352539062500, + 633.9647827148437500, 698.0783691406250000, 762.1919555664062500, + 826.3055419921875000, 890.4191894531250000, 954.5327758789062500, + 1018.6463623046875000, 1082.7600097656250000, 1146.8735351562500000, + 1210.9871826171875000, 1275.1008300781250000, 1339.2143554687500000, + 1403.3280029296875000, 1467.4415283203125000, 1531.5551757812500000, + 1595.6688232421875000, 1659.7823486328125000, 1723.8959960937500000, + 1788.0096435546875000, 1852.1231689453125000, 1916.2368164062500000, + 1980.3504638671875000, 2044.4639892578125000, 2108.5776367187500000, + 2172.6911621093750000, 2236.8046875000000000, 2300.9182128906250000, + 2365.0319824218750000, 2429.1455078125000000, 2493.2590332031250000, + 2557.3728027343750000, 2621.4863281250000000, 2685.5998535156250000, + 2749.7133789062500000, 2813.8271484375000000 + ] + self.assertAllEqual(center_freq_exp, center_freq) + + center_freq = filter_bank_ops._calc_center_freq(33, 125, 3800) + center_freq_exp = [ + 243.1058502197265625, 301.0421752929687500, 358.9784851074218750, + 416.9147949218750000, 474.8511352539062500, 532.7874145507812500, + 590.7237548828125000, 648.6600341796875000, 706.5963745117187500, + 764.5327148437500000, 822.4689941406250000, 880.4053344726562500, + 938.3416137695312500, 996.2779541015625000, 1054.2142333984375000, + 1112.1505126953125000, 1170.0869140625000000, 1228.0231933593750000, + 1285.9594726562500000, 1343.8958740234375000, 1401.8321533203125000, + 1459.7684326171875000, 1517.7047119140625000, 1575.6411132812500000, + 1633.5773925781250000, 1691.5136718750000000, 1749.4500732421875000, + 1807.3863525390625000, 1865.3226318359375000, 1923.2589111328125000, + 1981.1953125000000000, 2039.1315917968750000, 2097.0678710937500000 + ] + self.assertAllEqual(center_freq_exp, center_freq) + + center_freq = filter_bank_ops._calc_center_freq(41, 100, 7500) + center_freq_exp = [ + 214.4616394042968750, 278.4334106445312500, 342.4051513671875000, + 406.3768920898437500, 470.3486328125000000, 534.3204345703125000, + 598.2921752929687500, 662.2639160156250000, 726.2356567382812500, + 790.2073974609375000, 854.1791992187500000, 918.1509399414062500, + 982.1226806640625000, 1046.0944824218750000, 1110.0662841796875000, + 1174.0379638671875000, 1238.0097656250000000, 1301.9814453125000000, + 1365.9532470703125000, 1429.9249267578125000, 1493.8967285156250000, + 1557.8685302734375000, 1621.8402099609375000, 1685.8120117187500000, + 1749.7838134765625000, 1813.7554931640625000, 1877.7272949218750000, + 1941.6990966796875000, 2005.6707763671875000, 2069.6425781250000000, + 2133.6142578125000000, 2197.5861816406250000, 2261.5578613281250000, 
+ 2325.5297851562500000, 2389.5014648437500000, 2453.4731445312500000, + 2517.4450683593750000, 2581.4167480468750000, 2645.3884277343750000, + 2709.3601074218750000, 2773.3320312500000000 + ] + self.assertAllLess(abs(center_freq_exp - center_freq), 0.00025) + + def testFilterBankStartEndIndices(self): + start_index, end_index = filter_bank_ops.calc_start_end_indices( + 512, 16000, 32, 125, 3800) + self.assertEqual(start_index, 5) + self.assertEqual(end_index, 122) + + start_index, end_index = filter_bank_ops.calc_start_end_indices( + 2048, 44000, 25, 125, 3800) + self.assertEqual(start_index, 7) + self.assertEqual(end_index, 177) + + start_index, end_index = filter_bank_ops.calc_start_end_indices( + 512, 16000, 40, 100, 7500) + self.assertEqual(start_index, 4) + self.assertEqual(end_index, 241) + + def testFilterBankInitWeight(self): + (start_index, end_index, weights, unweights, channel_frequency_starts, + channel_weight_starts, + channel_widths) = filter_bank_ops._init_filter_bank_weights( + 257, 16000, 1, 1, 32, 125, 3800) + weights_exp = [ + 1133, 2373, 3712, 1047, 2564, 66, 1740, 3486, 1202, 3079, 919, 2913, + 865, 2964, 1015, 3210, 1352, 3633, 1859, 123, 2520, 856, 3323, 1726, + 161, 2722, 1215, 3833, 2382, 956, 3652, 2276, 923, 3689, 2380, 1093, + 3922, 2676, 1448, 239, 3144, 1970, 814, 3770, 2646, 1538, 445, 3463, + 2399, 1349, 313, 3386, 2376, 1379, 394, 3517, 2556, 1607, 668, 3837, + 2920, 2013, 1117, 231, 3450, 2583, 1725, 877, 37, 3302, 2480, 1666, + 861, 63, 3369, 2588, 1813, 1046, 287, 3630, 2885, 2147, 1415, 690, + 4067, 3355, 2650, 1950, 1257, 569, 3984, 3308, 2638, 1973, 1314, 661, + 12, 3465, 2827, 2194, 1566, 944, 325, 3808, 3199, 2595, 1996, 1401, + 810, 224, 3738, 3160, 2586, 2017, 1451, 890, 332 + ] + unweights_exp = [ + 2962, + 1722, + 383, + 3048, + 1531, + 4029, + 2355, + 609, + 2893, + 1016, + 3176, + 1182, + 3230, + 1131, + 3080, + 885, + 2743, + 462, + 2236, + 3972, + 1575, + 3239, + 772, + 2369, + 3934, + 1373, + 2880, + 262, + 1713, + 3139, + 443, + 1819, + 3172, + 406, + 1715, + 3002, + 173, + 1419, + 2647, + 3856, + 951, + 2125, + 3281, + 325, + 1449, + 2557, + 3650, + 632, + 1696, + 2746, + 3782, + 709, + 1719, + 2716, + 3701, + 578, + 1539, + 2488, + 3427, + 258, + 1175, + 2082, + 2978, + 3864, + 645, + 1512, + 2370, + 3218, + 4058, + 793, + 1615, + 2429, + 3234, + 4032, + 726, + 1507, + 2282, + 3049, + 3808, + 465, + 1210, + 1948, + 2680, + 3405, + 28, + 740, + 1445, + 2145, + 2838, + 3526, + 111, + 787, + 1457, + 2122, + 2781, + 3434, + 4083, + 630, + 1268, + 1901, + 2529, + 3151, + 3770, + 287, + 896, + 1500, + 2099, + 2694, + 3285, + 3871, + 357, + 935, + 1509, + 2078, + 2644, + 3205, + 3763, + ] + channel_frequency_starts_exp = [ + 5, 6, 7, 9, 11, 12, 14, 16, 18, 20, 22, 25, 27, 30, 32, 35, 38, 41, 45, + 48, 52, 56, 60, 64, 69, 74, 79, 84, 89, 95, 102, 108, 115 + ] + channel_weight_starts_exp = [ + 0, 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 20, 22, 25, 27, 30, 33, 36, 40, + 43, 47, 51, 55, 59, 64, 69, 74, 79, 84, 90, 97, 103, 110 + ] + channel_widths_exp = [ + 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 3, 3, 4, 3, 4, 4, 4, 4, 5, + 5, 5, 5, 5, 6, 7, 6, 7, 7 + ] + self.assertEqual(start_index, 5) + self.assertEqual(end_index, 122) + self.assertEqual(weights.size, 117) + self.assertAllEqual(weights, weights_exp) + self.assertAllEqual(unweights, unweights_exp) + self.assertAllEqual(channel_frequency_starts, channel_frequency_starts_exp) + self.assertAllEqual(channel_weight_starts, channel_weight_starts_exp) + self.assertAllEqual(channel_widths, 
channel_widths_exp) + ################################## + (start_index, end_index, weights, unweights, channel_frequency_starts, + channel_weight_starts, + channel_widths) = filter_bank_ops._init_filter_bank_weights( + 257, 16000, 1, 1, 40, 125, 7800) + weights_exp = [ + 1419, 2934, 442, 2130, 3896, 1638, 3546, 1422, 3454, 1449, 3593, 1693, + 3938, 2134, 373, 2751, 1073, 3528, 1925, 356, 2917, 1414, 4037, 2594, + 1180, 3888, 2527, 1191, 3976, 2688, 1422, 179, 3052, 1850, 668, 3601, + 2456, 1329, 220, 3223, 2147, 1087, 42, 3108, 2092, 1091, 103, 3225, + 2263, 1315, 378, 3550, 2638, 1736, 846, 4063, 3195, 2337, 1489, 650, + 3918, 3099, 2289, 1488, 696, 4008, 3233, 2466, 1708, 957, 214, 3575, + 2846, 2126, 1412, 706, 6, 3409, 2723, 2043, 1369, 702, 41, 3482, 2832, + 2189, 1551, 919, 292, 3767, 3151, 2540, 1935, 1334, 739, 148, 3658, + 3077, 2501, 1929, 1362, 799, 240, 3782, 3232, 2686, 2144, 1606, 1073, + 543, 17, 3591, 3072, 2558, 2047, 1539, 1035, 535, 38, 3641, 3151, 2664, + 2180, 1700, 1223, 749, 278, 3906, 3441, 2979, 2520, 2064, 1611, 1161, + 714, 269, 3923, 3484, 3047, 2613, 2182, 1753, 1326, 903, 481, 62, 3742, + 3328, 2916, 2507, 2100, 1695, 1293, 893, 495, 99, 3801, 3410, 3020, + 2633, 2248, 1864, 1483, 1104, 727, 352, 4075, 3703, 3334, 2966, 2601, + 2237, 1875, 1515, 1156, 800, 445, 92, 3836, 3487, 3139, 2792, 2448, + 2105, 1763, 1423, 1085, 749, 414, 80, 3844, 3514, 3185, 2857, 2531, + 2207, 1884, 1562, 1242, 923, 606, 290, 4072, 3758, 3447, 3136, 2827, + 2519, 2213, 1907, 1604, 1301, 999, 699, 400, 103, 3902, 3607, 3313, + 3020, 2729, 2438, 2149, 1861, 1574, 1288, 1003, 720, 437, 156, 3972, + 3693, 3415, 3137, 2862, 2587, 2313, 2040, 1768, 1497, 1228, 959, 691, + 424, 158 + ] + unweights_exp = [ + 2676, 1161, 3653, 1965, 199, 2457, 549, 2673, 641, 2646, 502, 2402, + 157, 1961, 3722, 1344, 3022, 567, 2170, 3739, 1178, 2681, 58, 1501, + 2915, 207, 1568, 2904, 119, 1407, 2673, 3916, 1043, 2245, 3427, 494, + 1639, 2766, 3875, 872, 1948, 3008, 4053, 987, 2003, 3004, 3992, 870, + 1832, 2780, 3717, 545, 1457, 2359, 3249, 32, 900, 1758, 2606, 3445, + 177, 996, 1806, 2607, 3399, 87, 862, 1629, 2387, 3138, 3881, 520, 1249, + 1969, 2683, 3389, 4089, 686, 1372, 2052, 2726, 3393, 4054, 613, 1263, + 1906, 2544, 3176, 3803, 328, 944, 1555, 2160, 2761, 3356, 3947, 437, + 1018, 1594, 2166, 2733, 3296, 3855, 313, 863, 1409, 1951, 2489, 3022, + 3552, 4078, 504, 1023, 1537, 2048, 2556, 3060, 3560, 4057, 454, 944, + 1431, 1915, 2395, 2872, 3346, 3817, 189, 654, 1116, 1575, 2031, 2484, + 2934, 3381, 3826, 172, 611, 1048, 1482, 1913, 2342, 2769, 3192, 3614, + 4033, 353, 767, 1179, 1588, 1995, 2400, 2802, 3202, 3600, 3996, 294, + 685, 1075, 1462, 1847, 2231, 2612, 2991, 3368, 3743, 20, 392, 761, + 1129, 1494, 1858, 2220, 2580, 2939, 3295, 3650, 4003, 259, 608, 956, + 1303, 1647, 1990, 2332, 2672, 3010, 3346, 3681, 4015, 251, 581, 910, + 1238, 1564, 1888, 2211, 2533, 2853, 3172, 3489, 3805, 23, 337, 648, + 959, 1268, 1576, 1882, 2188, 2491, 2794, 3096, 3396, 3695, 3992, 193, + 488, 782, 1075, 1366, 1657, 1946, 2234, 2521, 2807, 3092, 3375, 3658, + 3939, 123, 402, 680, 958, 1233, 1508, 1782, 2055, 2327, 2598, 2867, + 3136, 3404, 3671, 3937 + ] + channel_frequency_starts_exp = [ + 5, 6, 8, 9, 11, 13, 15, 17, 20, 22, 25, 27, 30, 33, 37, 40, 44, 48, 52, + 56, 60, 65, 70, 76, 82, 88, 94, 101, 108, 116, 124, 132, 141, 151, 161, + 171, 183, 195, 207, 221, 235 + ] + channel_weight_starts_exp = [ + 0, 1, 3, 4, 6, 8, 10, 12, 15, 17, 20, 22, 25, 28, 32, 35, 39, 43, 47, + 51, 55, 60, 65, 71, 77, 83, 89, 96, 103, 
111, 119, 127, 136, 146, 156, + 166, 178, 190, 202, 216, 230 + ] + channel_widths_exp = [ + 1, 2, 1, 2, 2, 2, 2, 3, 2, 3, 2, 3, 3, 4, 3, 4, 4, 4, 4, 4, 5, 5, 6, 6, + 6, 6, 7, 7, 8, 8, 8, 9, 10, 10, 10, 12, 12, 12, 14, 14, 15 + ] + self.assertEqual(start_index, 5) + self.assertEqual(end_index, 250) + self.assertEqual(weights.size, 245) + self.assertAllEqual(weights, weights_exp) + self.assertAllEqual(unweights, unweights_exp) + self.assertAllEqual(channel_frequency_starts, channel_frequency_starts_exp) + self.assertAllEqual(channel_weight_starts, channel_weight_starts_exp) + self.assertAllEqual(channel_widths, channel_widths_exp) + ################################## + (start_index, end_index, weights, unweights, channel_frequency_starts, + channel_weight_starts, + channel_widths) = filter_bank_ops._init_filter_bank_weights( + 129, 8000, 1, 1, 25, 125, 3800) + weights_exp = [ + 1762, 3607, 1435, 3431, 1399, 3527, 1619, 3863, 2064, 316, 2710, 1054, + 3536, 1963, 428, 3025, 1562, 132, 2830, 1462, 123, 2909, 1625, 367, + 3230, 2020, 833, 3765, 2621, 1498, 395, 3407, 2341, 1293, 262, 3344, + 2346, 1363, 396, 3539, 2601, 1676, 765, 3963, 3078, 2205, 1343, 494, + 3752, 2925, 2108, 1303, 507, 3817, 3041, 2275, 1517, 769, 30, 3395, + 2673, 1958, 1252, 554, 3959, 3276, 2601, 1932, 1270, 616, 4064, 3423, + 2788, 2160, 1538, 921, 311, 3803, 3205, 2612, 2025, 1443, 866, 295, + 3825, 3264, 2708, 2157, 1611, 1069, 532, 0, 3568, 3044, 2525, 2010, + 1499, 992, 490, 4087, 3592, 3102, 2615, 2131, 1652, 1176, 703, 235, + 3865, 3403, 2945, 2490, 2038, 1589, 1143, 701, 262 + ] + unweights_exp = [ + 2333, 488, 2660, 664, 2696, 568, 2476, 232, 2031, 3779, 1385, 3041, + 559, 2132, 3667, 1070, 2533, 3963, 1265, 2633, 3972, 1186, 2470, 3728, + 865, 2075, 3262, 330, 1474, 2597, 3700, 688, 1754, 2802, 3833, 751, + 1749, 2732, 3699, 556, 1494, 2419, 3330, 132, 1017, 1890, 2752, 3601, + 343, 1170, 1987, 2792, 3588, 278, 1054, 1820, 2578, 3326, 4065, 700, + 1422, 2137, 2843, 3541, 136, 819, 1494, 2163, 2825, 3479, 31, 672, + 1307, 1935, 2557, 3174, 3784, 292, 890, 1483, 2070, 2652, 3229, 3800, + 270, 831, 1387, 1938, 2484, 3026, 3563, 4095, 527, 1051, 1570, 2085, + 2596, 3103, 3605, 8, 503, 993, 1480, 1964, 2443, 2919, 3392, 3860, 230, + 692, 1150, 1605, 2057, 2506, 2952, 3394, 3833 + ] + channel_frequency_starts_exp = [ + 5, 6, 8, 10, 12, 15, 17, 20, 23, 26, 29, 32, 36, 40, 44, 48, 53, 58, + 64, 69, 75, 82, 89, 97, 104, 113 + ] + channel_weight_starts_exp = [ + 0, 1, 3, 5, 7, 10, 12, 15, 18, 21, 24, 27, 31, 35, 39, 43, 48, 53, 59, + 64, 70, 77, 84, 92, 99, 108 + ] + channel_widths_exp = [ + 1, 2, 2, 2, 3, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 6, 5, 6, 7, 7, 8, 7, + 9, 9 + ] + self.assertEqual(start_index, 5) + self.assertEqual(end_index, 122) + self.assertEqual(weights.size, 117) + self.assertAllEqual(weights, weights_exp) + self.assertAllEqual(unweights, unweights_exp) + self.assertAllEqual(channel_frequency_starts, channel_frequency_starts_exp) + self.assertAllEqual(channel_weight_starts, channel_weight_starts_exp) + self.assertAllEqual(channel_widths, channel_widths_exp) + ################################## + (start_index, end_index, weights, unweights, channel_frequency_starts, + channel_weight_starts, + channel_widths) = filter_bank_ops._init_filter_bank_weights( + 257, 16000, 2, 1, 25, 125, 3800) + weights_exp = [ + 1762, 3607, 1435, 3431, 1399, 3527, 1619, 3863, 2064, 316, 2710, 1054, + 3536, 1963, 428, 3025, 1562, 132, 2830, 1462, 123, 2909, 1625, 367, + 3230, 2020, 833, 3765, 2621, 1498, 395, 3407, 2341, 1293, 
262, 3344, + 2346, 1363, 396, 3539, 2601, 1676, 765, 3963, 3078, 2205, 1343, 494, + 3752, 2925, 2108, 1303, 507, 3817, 3041, 2275, 1517, 769, 30, 3395, + 2673, 1958, 1252, 554, 3959, 3276, 2601, 1932, 1270, 616, 4064, 3423, + 2788, 2160, 1538, 921, 311, 3803, 3205, 2612, 2025, 1443, 866, 295, + 3825, 3264, 2708, 2157, 1611, 1069, 532, 0, 3568, 3044, 2525, 2010, + 1499, 992, 490, 4087, 3592, 3102, 2615, 2131, 1652, 1176, 703, 235, + 3865, 3403, 2945, 2490, 2038, 1589, 1143, 701, 262 + ] + unweights_exp = [ + 2333, 488, 2660, 664, 2696, 568, 2476, 232, 2031, 3779, 1385, 3041, + 559, 2132, 3667, 1070, 2533, 3963, 1265, 2633, 3972, 1186, 2470, 3728, + 865, 2075, 3262, 330, 1474, 2597, 3700, 688, 1754, 2802, 3833, 751, + 1749, 2732, 3699, 556, 1494, 2419, 3330, 132, 1017, 1890, 2752, 3601, + 343, 1170, 1987, 2792, 3588, 278, 1054, 1820, 2578, 3326, 4065, 700, + 1422, 2137, 2843, 3541, 136, 819, 1494, 2163, 2825, 3479, 31, 672, + 1307, 1935, 2557, 3174, 3784, 292, 890, 1483, 2070, 2652, 3229, 3800, + 270, 831, 1387, 1938, 2484, 3026, 3563, 4095, 527, 1051, 1570, 2085, + 2596, 3103, 3605, 8, 503, 993, 1480, 1964, 2443, 2919, 3392, 3860, 230, + 692, 1150, 1605, 2057, 2506, 2952, 3394, 3833 + ] + channel_frequency_starts_exp = [ + 5, 6, 8, 10, 12, 15, 17, 20, 23, 26, 29, 32, 36, 40, 44, 48, 53, 58, + 64, 69, 75, 82, 89, 97, 104, 113 + ] + channel_weight_starts_exp = [ + 0, 1, 3, 5, 7, 10, 12, 15, 18, 21, 24, 27, 31, 35, 39, 43, 48, 53, 59, + 64, 70, 77, 84, 92, 99, 108 + ] + channel_widths_exp = [ + 1, 2, 2, 2, 3, 2, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 6, 5, 6, 7, 7, 8, 7, + 9, 9 + ] + self.assertEqual(start_index, 5) + self.assertEqual(end_index, 122) + self.assertEqual(weights.size, 117) + self.assertAllEqual(weights, weights_exp) + self.assertAllEqual(unweights, unweights_exp) + self.assertAllEqual(channel_frequency_starts, channel_frequency_starts_exp) + self.assertAllEqual(channel_weight_starts, channel_weight_starts_exp) + self.assertAllEqual(channel_widths, channel_widths_exp) + ################################## + (start_index, end_index, weights, unweights, channel_frequency_starts, + channel_weight_starts, + channel_widths) = filter_bank_ops._init_filter_bank_weights( + 257, 16000, 1, 1, 40, 100, 7500) + weights_exp = [ + 1875, + 3288, + 702, + 2300, + 3983, + 1647, + 3481, + 1288, + 3255, + 1187, + 3273, + 1317, + 3509, + 1654, + 3941, + 2177, + 455, + 2869, + 1225, + 3714, + 2142, + 603, + 3192, + 1717, + 271, + 2949, + 1558, + 194, + 2951, + 1637, + 346, + 3174, + 1928, + 702, + 3594, + 2409, + 1243, + 96, + 3063, + 1951, + 856, + 3873, + 2810, + 1763, + 731, + 3809, + 2805, + 1815, + 839, + 3971, + 3021, + 2082, + 1156, + 241, + 3434, + 2542, + 1661, + 791, + 4027, + 3177, + 2337, + 1506, + 685, + 3970, + 3167, + 2373, + 1588, + 811, + 43, + 3378, + 2626, + 1881, + 1144, + 414, + 3788, + 3073, + 2365, + 1663, + 969, + 281, + 3696, + 3021, + 2352, + 1689, + 1033, + 382, + 3833, + 3194, + 2560, + 1932, + 1310, + 692, + 80, + 3570, + 2968, + 2371, + 1779, + 1192, + 609, + 32, + 3554, + 2986, + 2422, + 1862, + 1306, + 755, + 208, + 3761, + 3222, + 2687, + 2156, + 1629, + 1106, + 586, + 70, + 3654, + 3146, + 2641, + 2139, + 1641, + 1147, + 656, + 168, + 3779, + 3298, + 2819, + 2344, + 1872, + 1404, + 938, + 475, + 15, + 3654, + 3200, + 2749, + 2300, + 1854, + 1411, + 971, + 534, + 99, + 3762, + 3332, + 2905, + 2480, + 2058, + 1638, + 1221, + 806, + 394, + 4079, + 3671, + 3266, + 2862, + 2461, + 2062, + 1666, + 1271, + 879, + 489, + 100, + 3810, + 3426, + 3044, + 2664, + 2286, + 1910, + 1536, 
+ 1164, + 794, + 425, + 59, + 3790, + 3428, + 3067, + 2707, + 2350, + 1994, + 1640, + 1288, + 938, + 589, + 242, + 3992, + 3649, + 3307, + 2966, + 2627, + 2290, + 1954, + 1620, + 1287, + 956, + 626, + 298, + 4067, + 3742, + 3418, + 3096, + 2775, + 2456, + 2138, + 1821, + 1506, + 1192, + 880, + 568, + 259, + 4046, + 3739, + 3433, + 3128, + 2825, + 2523, + 2222, + 1923, + 1624, + 1327, + 1031, + 737, + 443, + 151, + 3956, + 3666, + 3377, + 3090, + 2803, + 2518, + 2234, + 1951, + 1669, + 1388, + 1108, + 829, + 552, + 275, + 0, + ] + unweights_exp = [ + 2220, + 807, + 3393, + 1795, + 112, + 2448, + 614, + 2807, + 840, + 2908, + 822, + 2778, + 586, + 2441, + 154, + 1918, + 3640, + 1226, + 2870, + 381, + 1953, + 3492, + 903, + 2378, + 3824, + 1146, + 2537, + 3901, + 1144, + 2458, + 3749, + 921, + 2167, + 3393, + 501, + 1686, + 2852, + 3999, + 1032, + 2144, + 3239, + 222, + 1285, + 2332, + 3364, + 286, + 1290, + 2280, + 3256, + 124, + 1074, + 2013, + 2939, + 3854, + 661, + 1553, + 2434, + 3304, + 68, + 918, + 1758, + 2589, + 3410, + 125, + 928, + 1722, + 2507, + 3284, + 4052, + 717, + 1469, + 2214, + 2951, + 3681, + 307, + 1022, + 1730, + 2432, + 3126, + 3814, + 399, + 1074, + 1743, + 2406, + 3062, + 3713, + 262, + 901, + 1535, + 2163, + 2785, + 3403, + 4015, + 525, + 1127, + 1724, + 2316, + 2903, + 3486, + 4063, + 541, + 1109, + 1673, + 2233, + 2789, + 3340, + 3887, + 334, + 873, + 1408, + 1939, + 2466, + 2989, + 3509, + 4025, + 441, + 949, + 1454, + 1956, + 2454, + 2948, + 3439, + 3927, + 316, + 797, + 1276, + 1751, + 2223, + 2691, + 3157, + 3620, + 4080, + 441, + 895, + 1346, + 1795, + 2241, + 2684, + 3124, + 3561, + 3996, + 333, + 763, + 1190, + 1615, + 2037, + 2457, + 2874, + 3289, + 3701, + 16, + 424, + 829, + 1233, + 1634, + 2033, + 2429, + 2824, + 3216, + 3606, + 3995, + 285, + 669, + 1051, + 1431, + 1809, + 2185, + 2559, + 2931, + 3301, + 3670, + 4036, + 305, + 667, + 1028, + 1388, + 1745, + 2101, + 2455, + 2807, + 3157, + 3506, + 3853, + 103, + 446, + 788, + 1129, + 1468, + 1805, + 2141, + 2475, + 2808, + 3139, + 3469, + 3797, + 28, + 353, + 677, + 999, + 1320, + 1639, + 1957, + 2274, + 2589, + 2903, + 3215, + 3527, + 3836, + 49, + 356, + 662, + 967, + 1270, + 1572, + 1873, + 2172, + 2471, + 2768, + 3064, + 3358, + 3652, + 3944, + 139, + 429, + 718, + 1005, + 1292, + 1577, + 1861, + 2144, + 2426, + 2707, + 2987, + 3266, + 3543, + 3820, + 4096, + ] + channel_frequency_starts_exp = [ + 4, 5, 7, 8, 10, 12, 14, 16, 18, 21, 23, 26, 29, 32, 35, 38, 42, 45, 49, + 53, 58, 62, 67, 73, 78, 84, 90, 97, 104, 111, 119, 127, 136, 145, 154, + 165, 176, 187, 199, 212, 226 + ] + channel_weight_starts_exp = [ + 0, + 1, + 3, + 4, + 6, + 8, + 10, + 12, + 14, + 17, + 19, + 22, + 25, + 28, + 31, + 34, + 38, + 41, + 45, + 49, + 54, + 58, + 63, + 69, + 74, + 80, + 86, + 93, + 100, + 107, + 115, + 123, + 132, + 141, + 150, + 161, + 172, + 183, + 195, + 208, + 222, + ] + channel_widths_exp = [ + 1, + 2, + 1, + 2, + 2, + 2, + 2, + 2, + 3, + 2, + 3, + 3, + 3, + 3, + 3, + 4, + 3, + 4, + 4, + 5, + 4, + 5, + 6, + 5, + 6, + 6, + 7, + 7, + 7, + 8, + 8, + 9, + 9, + 9, + 11, + 11, + 11, + 12, + 13, + 14, + 15, + ] + self.assertEqual(start_index, 4) + self.assertEqual(end_index, 241) + self.assertEqual(weights.size, 237) + self.assertAllLessEqual(abs(weights - weights_exp), 1) + self.assertAllLessEqual(abs(unweights - unweights_exp), 1) + self.assertAllEqual(channel_frequency_starts, channel_frequency_starts_exp) + self.assertAllEqual(channel_weight_starts, channel_weight_starts_exp) + self.assertAllEqual(channel_widths, 
channel_widths_exp) + + def SingleFilterBankSpectralSubtractionVectorTest(self, filename): + lines = self.GetResource(filename).splitlines() + args = lines[0].split() + num_channels = int(args[0]) + smoothing = float(args[1]) + alternate_smoothing = float(args[2]) + smoothing_bits = int(args[3]) + min_signal_remaining = float(args[4]) + clamping = bool(int(args[5])) + + func = tf.function(filter_bank_ops.filter_bank_spectral_subtraction) + input_size = len(lines[1].split()) + concrete_function = func.get_concrete_function( + tf.TensorSpec(input_size, dtype=tf.uint32), num_channels, smoothing, + alternate_smoothing, smoothing_bits, min_signal_remaining, clamping) + interpreter = util.get_tflm_interpreter(concrete_function, func) + # Skip line 0, which contains the configuration params. + # Read lines in triplets + i = 1 + while i < len(lines): + in_frame = np.array([int(j) for j in lines[i].split()], dtype=np.uint32) + out_frame_exp = [int(j) for j in lines[i + 1].split()] + noise_estimate_exp = [int(j) for j in lines[i + 2].split()] + # TFLM + interpreter.set_input(in_frame, 0) + interpreter.invoke() + out_frame = interpreter.get_output(0) + noise_estimate = interpreter.get_output(1) + self.assertAllEqual(out_frame, out_frame_exp) + self.assertAllEqual(noise_estimate, noise_estimate_exp) + # TF + [out_frame, noise_estimate] = self.evaluate( + filter_bank_ops.filter_bank_spectral_subtraction( + in_frame, num_channels, smoothing, alternate_smoothing, + smoothing_bits, min_signal_remaining, clamping)) + self.assertAllEqual(out_frame, out_frame_exp) + self.assertAllEqual(noise_estimate, noise_estimate_exp) + i += 3 + + def SingleFilterBankSquareRootVectorTest(self, filename): + lines = self.GetResource(filename).splitlines() + func = tf.function(filter_bank_ops.filter_bank_square_root) + input_size = len(lines[0].split()) + concrete_function = func.get_concrete_function( + tf.TensorSpec(input_size, dtype=tf.uint64), + tf.TensorSpec([], dtype=tf.int32)) + interpreter = util.get_tflm_interpreter(concrete_function, func) + # Read lines in triplets + i = 0 + while i < len(lines): + in_frame = np.array([int(j) for j in lines[i].split()], dtype=np.uint64) + scale_bits = np.array(int(lines[i + 1]), dtype=np.int32) + out_frame_exp = [int(j) for j in lines[i + 2].split()] + # TFLM + interpreter.set_input(in_frame, 0) + interpreter.set_input(scale_bits, 1) + interpreter.invoke() + out_frame = interpreter.get_output(0) + self.assertAllEqual(out_frame, out_frame_exp) + # TF + out_frame = self.evaluate( + filter_bank_ops.filter_bank_square_root(in_frame, scale_bits)) + self.assertAllEqual(out_frame, out_frame_exp) + i += 3 + + def SingleFilterBankVectorTest(self, filename): + lines = self.GetResource(filename).splitlines() + args = lines[0].split() + sample_rate = int(args[0]) + num_channels = int(args[1]) + lower_band_limit = float(args[2]) + upper_band_limit = float(args[3]) + func = tf.function(filter_bank_ops.filter_bank) + input_size = len(lines[1].split()) + concrete_function = func.get_concrete_function( + tf.TensorSpec(input_size, dtype=tf.uint32), sample_rate, num_channels, + lower_band_limit, upper_band_limit) + interpreter = util.get_tflm_interpreter(concrete_function, func) + # Skip line 0, which contains the configuration params. 
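+    # For reference, the vector file layout this loop assumes (inferred from
+    # the argument parsing above; nothing here is normative beyond that code):
+    #   line 0:            sample_rate num_channels lower_band_limit upper_band_limit
+    #   lines 1, 3, 5...:  one input frame, one uint32 value per FFT bin
+    #   lines 2, 4, 6...:  the expected filter bank output for that frame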
+ # Read lines in pairs + i = 1 + while i < len(lines): + in_frame = np.array([int(j) for j in lines[i].split()], dtype=np.uint32) + out_frame_exp = [int(j) for j in lines[i + 1].split()] + # TFLM + interpreter.set_input(in_frame, 0) + interpreter.invoke() + out_frame = interpreter.get_output(0) + self.assertAllEqual(out_frame_exp, out_frame) + # TF + out_frame = self.evaluate( + filter_bank_ops.filter_bank(in_frame, sample_rate, num_channels, + lower_band_limit, upper_band_limit)) + self.assertAllEqual(out_frame_exp, out_frame) + i += 2 + + def SingleFilterBankTest(self, filename): + lines = self.GetResource(filename).splitlines() + args = lines[0].split() + input_tensor = np.arange(int(args[0]), dtype=np.uint32) + output_exp = [int(i) for i in lines[1:]] + output_tensor = self.evaluate( + filter_bank_ops.filter_bank(input_tensor, int(args[1]), int(args[4]), + float(args[2]), float(args[3]))) + self.assertAllLessEqual(abs(output_exp - output_tensor), 144) + + def testFilterBank(self): + self.SingleFilterBankTest('testdata/filter_bank_accumulation_8k.txt') + self.SingleFilterBankTest('testdata/filter_bank_accumulation_16k.txt') + self.SingleFilterBankTest('testdata/filter_bank_accumulation_44k.txt') + self.SingleFilterBankVectorTest('testdata/filter_bank_test1.txt') + + def testFilterBankSpectralSubtractionVector(self): + self.SingleFilterBankSpectralSubtractionVectorTest( + 'testdata/filter_bank_spectral_subtraction_test1.txt') + + def testFilterBankSquareRootVector(self): + self.SingleFilterBankSquareRootVectorTest( + 'testdata/filter_bank_square_root_test1.txt') + + def testFilterBankSquareRoot(self): + fft_scale_bits = 7 + input_array = [ + 632803382, 3322331443, 7096652410, 7915374281, 1173754459, 305980674, + 2000536077, 1168558488, 5076475823, 15976754090, 3805664731, 613998164, + 1697378269, 2775934843, 3579468406, 2317762617, 2025182819, 3166301049, + 1937595023, 1774351019, 2085308695, 3187965791, 2871034131, 4396421345, + 8203017514, 4506083115, 3159809690, 750384531, 243621165, 61552427, + 794881, 285365, 324568, 209218, 212215, 311565, 183541, 223754, 201098, + 385031 + ] + output_exp = [ + 196, 450, 658, 695, 267, 136, 349, 267, 556, 987, 481, 193, 321, 411, + 467, 376, 351, 439, 343, 329, 356, 441, 418, 518, 707, 524, 439, 214, + 121, 61, 6, 4, 4, 3, 3, 4, 3, 3, 3, 4 + ] + output_array = self.evaluate( + filter_bank_ops.filter_bank_square_root(input_array, fft_scale_bits)) + self.assertAllEqual(output_array, output_exp) + + fft_scale_bits = 2 + input_array = [ + 1384809583, 3253852150, 7271882261, 4247132793, 165951197, 106924444, + 334793989, 1186792065, 683710887, 328783218, 1777824058, 859450346, + 384515125, 118491239, 29264336, 324188526, 1925807083, 2591551091, + 1170412774, 393317159, 1003847215, 1375415668, 1272433002, 5102945913, + 5527301760, 3564304855, 4171837220, 4252817101, 2886468276, 1293586339, + 867722874, 137636997 + ] + output_exp = [ + 9303, 14260, 21318, 16292, 3220, 2585, 4574, 8612, 6537, 4533, 10541, + 7329, 4902, 2721, 1352, 4501, 10971, 12726, 8552, 4958, 7921, 9271, + 8917, 17858, 18586, 14925, 16147, 16303, 13431, 8991, 7364, 2933 + ] + output_array = self.evaluate( + filter_bank_ops.filter_bank_square_root(input_array, fft_scale_bits)) + self.assertAllEqual(output_array, output_exp) + + def testFilterBankLog(self): + output_scale = 1600 + correction_bits = 3 + input_array = [ + 29, 21, 29, 40, 19, 11, 13, 23, 13, 11, 25, 17, 5, 4, 46, 14, 17, 14, + 20, 14, 10, 10, 15, 11, 17, 12, 15, 16, 19, 18, 6, 2 + ] + output_exp = [ + 8715, 8198, 8715, 
9229, 8038, 7164, 7431, 8344, 7431, 7164, 8477, 7860,
+        5902, 5545, 9453, 7550, 7860, 7550, 8120, 7550, 7011, 7011, 7660, 7164,
+        7860, 7303, 7660, 7763, 8038, 7952, 6194, 4436
+    ]
+    output_array = self.evaluate(
+        filter_bank_ops.filter_bank_log(input_array, output_scale,
+                                        correction_bits))
+    self.assertAllEqual(output_array, output_exp)
+
+
+if __name__ == '__main__':
+  tf.test.main()
diff --git a/python/tflite_micro/signal/ops/framer_op.py b/python/tflite_micro/signal/ops/framer_op.py
new file mode 100644
index 00000000000..f3a471df035
--- /dev/null
+++ b/python/tflite_micro/signal/ops/framer_op.py
@@ -0,0 +1,52 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Use framer op in python."""
+
+import tensorflow as tf
+from tflite_micro.python.tflite_micro.signal.utils import util
+
+gen_framer_op = util.load_custom_op('framer_op.so')
+
+
+def _framer_wrapper(framer_fn, default_name):
+  """Wrapper around gen_framer_op.framer*."""
+
+  def _framer(input_tensor,
+              frame_size,
+              frame_step,
+              prefill=False,
+              name=default_name):
+    if frame_step > frame_size:
+      raise ValueError("frame_step must not be greater than frame_size.")
+    with tf.name_scope(name) as name:
+      input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.int16)
+      dim_list = input_tensor.shape.as_list()
+      if dim_list[-1] % frame_step != 0:
+        raise ValueError(
+            "Innermost input dimension size must be a multiple of %d elements" %
+            frame_step)
+      return framer_fn(input_tensor,
+                       frame_size=frame_size,
+                       frame_step=frame_step,
+                       prefill=prefill,
+                       name=name)
+
+  return _framer
+
+
+# TODO(b/286250473): change back name after name clash resolved
+framer = _framer_wrapper(gen_framer_op.signal_framer, "signal_framer")
+
+tf.no_gradient("signal_framer")
diff --git a/python/tflite_micro/signal/ops/framer_op_test.py b/python/tflite_micro/signal/ops/framer_op_test.py
new file mode 100644
index 00000000000..8f04ed1304e
--- /dev/null
+++ b/python/tflite_micro/signal/ops/framer_op_test.py
@@ -0,0 +1,142 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for framer op."""
+import os
+
+import numpy as np
+import tensorflow as tf
+
+from tensorflow.python.platform import resource_loader
+from tflite_micro.python.tflite_micro.signal.ops import framer_op
+from tflite_micro.python.tflite_micro.signal.utils import util
+
+
+class FramerOpTest(tf.test.TestCase):
+
+  _PREFIX_PATH = resource_loader.get_path_to_datafile('')
+
+  def GetResource(self, filepath):
+    full_path = os.path.join(self._PREFIX_PATH, filepath)
+    with open(full_path, 'rt') as f:
+      file_text = f.read()
+    return file_text
+
+  def SingleFramerTest(self, filename):
+    lines = self.GetResource(filename).splitlines()
+    args = lines[0].split()
+    frame_size = int(args[0])
+    frame_step = int(args[1])
+    prefill = bool(int(args[2]))
+    func = tf.function(framer_op.framer)
+    input_size = len(lines[1].split())
+    concrete_function = func.get_concrete_function(
+        tf.TensorSpec(input_size, dtype=tf.int16), frame_size, frame_step,
+        prefill)
+    interpreter = util.get_tflm_interpreter(concrete_function, func)
+    # Skip line 0, which contains the configuration params.
+    # Read lines in triplets
+    i = 1
+    while i < len(lines):
+      in_block = np.array([int(j) for j in lines[i].split()], dtype=np.int16)
+      out_frame_exp = [[int(j) for j in lines[i + 1].split()]]
+      out_valid_exp = [int(j) for j in lines[i + 2].split()]
+      # TFLM
+      interpreter.set_input(in_block, 0)
+      interpreter.invoke()
+      out_frame = interpreter.get_output(0)
+      out_valid = interpreter.get_output(1)
+      self.assertEqual(out_valid, out_valid_exp)
+      if out_valid:
+        self.assertAllEqual(out_frame, out_frame_exp)
+      # TF
+      out_frame, out_valid = self.evaluate(
+          framer_op.framer(in_block, frame_size, frame_step, prefill))
+      self.assertEqual(out_valid, out_valid_exp)
+      if out_valid:
+        self.assertAllEqual(out_frame, out_frame_exp)
+      i += 3
+
+  def MultiFrameRandomInputFramerTest(self, n_frames):
+    # Terminology: input is in blocks, output is in frames
+    frame_step = 160
+    frame_size = 400
+    prefill = True
+    block_num = 10
+    block_size = frame_step * n_frames
+
+    test_input = np.random.randint(np.iinfo('int16').min,
+                                   np.iinfo('int16').max,
+                                   block_size * block_num,
+                                   dtype=np.int16)
+    expected_output = np.concatenate((np.zeros(frame_size - frame_step,
+                                               dtype=np.int16), test_input))
+    func = tf.function(framer_op.framer)
+    concrete_function = func.get_concrete_function(
+        tf.TensorSpec(block_size, dtype=tf.int16), frame_size, frame_step,
+        prefill)
+    interpreter = util.get_tflm_interpreter(concrete_function, func)
+    block_index = 0
+    frame_index = 0
+    while block_index < block_num:
+      in_block = test_input[(block_index * block_size):((block_index + 1) *
+                                                        block_size)]
+      expected_valid = 1
+      expected_frame = [
+          expected_output[((frame_index + i) *
+                           frame_step):((frame_index + i) * frame_step +
+                                        frame_size)] for i in range(n_frames)
+      ]
+      # TFLM
+      interpreter.set_input(in_block, 0)
+      interpreter.invoke()
+      out_frame = interpreter.get_output(0)
+      out_valid = interpreter.get_output(1)
+      self.assertEqual(out_valid, expected_valid)
+      if out_valid:
+        self.assertAllEqual(out_frame, expected_frame)
+      # TF
+      out_frame, out_valid = self.evaluate(
+          framer_op.framer(in_block, frame_size, frame_step, prefill))
+      frame_index += n_frames
+      self.assertEqual(out_valid, expected_valid)
+      self.assertAllEqual(out_frame, expected_frame)
+      block_index += 1
+
+  def testFramerVectors(self):
+    self.SingleFramerTest('testdata/framer_test1.txt')
+
+  def testFramerRandomInput(self):
+
self.MultiFrameRandomInputFramerTest(1) + + def testFramerRandomInputNframes2(self): + self.MultiFrameRandomInputFramerTest(2) + + def testFramerRandomInputNframes4(self): + self.MultiFrameRandomInputFramerTest(4) + + def testStepSizeTooLarge(self): + framer_input = np.zeros(160, dtype=np.int16) + with self.assertRaises((tf.errors.InvalidArgumentError, ValueError)): + self.evaluate(framer_op.framer(framer_input, 128, 129)) + + def testStepSizeNotEqualInputSize(self): + framer_input = np.zeros(122, dtype=np.int16) + with self.assertRaises((tf.errors.InvalidArgumentError, ValueError)): + self.evaluate(framer_op.framer(framer_input, 321, 123)) + + +if __name__ == '__main__': + np.random.seed(0) + tf.test.main() diff --git a/python/tflite_micro/signal/ops/overlap_add_op.py b/python/tflite_micro/signal/ops/overlap_add_op.py new file mode 100644 index 00000000000..d6e70809c2f --- /dev/null +++ b/python/tflite_micro/signal/ops/overlap_add_op.py @@ -0,0 +1,42 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Use overlap add op in python.""" + +import tensorflow as tf +from tflite_micro.python.tflite_micro.signal.utils import util + +gen_overlap_add_op = util.load_custom_op('overlap_add_op.so') + + +def _overlap_add_wrapper(overlap_add_fn, default_name): + """Wrapper around gen_overlap_add_op.overlap_add*.""" + + def _overlap_add(input_tensor, frame_step, name=default_name): + with tf.name_scope(name) as name: + input_tensor = tf.convert_to_tensor(input_tensor) + dim_list = input_tensor.shape.as_list() + if frame_step > dim_list[-1]: + raise ValueError( + "Frame_step must not exceed innermost input dimension") + return overlap_add_fn(input_tensor, frame_step=frame_step, name=name) + + return _overlap_add + + +# TODO(b/286250473): change back name after name clash resolved +overlap_add = _overlap_add_wrapper(gen_overlap_add_op.signal_overlap_add, + "signal_overlap_add") + +tf.no_gradient("signal_overlap_add") diff --git a/python/tflite_micro/signal/ops/overlap_add_op_test.py b/python/tflite_micro/signal/ops/overlap_add_op_test.py new file mode 100644 index 00000000000..ffd2ae35705 --- /dev/null +++ b/python/tflite_micro/signal/ops/overlap_add_op_test.py @@ -0,0 +1,232 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for overlap add op.""" + +from absl.testing import parameterized +import numpy as np +import tensorflow as tf + +from tflite_micro.python.tflite_micro.signal.ops import overlap_add_op +from tflite_micro.python.tflite_micro.signal.utils import util + + +class OverlapAddOpTest(parameterized.TestCase, tf.test.TestCase): + + def RunOverlapAdd(self, interpreter, input_frames, frame_step, + expected_output_frames, dtype): + input_frames = tf.convert_to_tensor(input_frames, dtype=dtype) + # TFLM + interpreter.set_input(input_frames, 0) + interpreter.invoke() + output_frame = interpreter.get_output(0) + self.assertAllEqual(output_frame, expected_output_frames) + + # TF + output_frame = self.evaluate( + overlap_add_op.overlap_add(input_frames, frame_step)) + self.assertAllEqual(output_frame, expected_output_frames) + + @parameterized.named_parameters(('_FLOAT32InputOutput', tf.float32), + ('_INT16InputOutput', tf.int16)) + def testOverlapAddValidInput(self, dtype): + input_frames = np.array([[1, -5, 4, 2, 7], [4, 15, -44, 27, -16], + [66, -19, 79, 8, -12], [-122, 17, 65, 18, -101], + [3, 33, -66, -19, 55]]) + expected_output_frames_step_1 = np.array([[1], [-1], [85], [-183], [133]]) + expected_output_frames_step_2 = np.array([[1, -5], [8, 17], [29, 8], + [-59, 25], [56, 51]]) + expected_output_frames_step_3 = np.array([[1, -5, 4], [6, 22, -44], + [93, -35, 79], [-114, 5, 65], + [21, -68, -66]]) + expected_output_frames_step_4 = np.array([[1, -5, 4, 2], [11, 15, -44, 27], + [50, -19, 79, 8], + [-134, 17, 65, 18], + [-98, 33, -66, -19]]) + expected_output_frames_step_5 = np.array([[1, -5, 4, 2, 7], + [4, 15, -44, 27, -16], + [66, -19, 79, 8, -12], + [-122, 17, 65, 18, -101], + [3, 33, -66, -19, 55]]) + func = tf.function(overlap_add_op.overlap_add) + # Initialize an interpreter for each step size + # TODO(b/263020764): use a parameterized test instead + interpreters = [None] * 6 + for i in range(5): + interpreters[i] = util.get_tflm_interpreter( + func.get_concrete_function( + tf.TensorSpec(np.shape([input_frames[0]]), dtype=dtype), i + 1), + func) + + frame_num = input_frames.shape[0] + frame_index = 0 + while frame_index < frame_num: + self.RunOverlapAdd(interpreters[0], [input_frames[frame_index]], + 1, + expected_output_frames_step_1[frame_index], + dtype=dtype) + self.RunOverlapAdd(interpreters[1], [input_frames[frame_index]], + 2, + expected_output_frames_step_2[frame_index], + dtype=dtype) + self.RunOverlapAdd(interpreters[2], [input_frames[frame_index]], + 3, + expected_output_frames_step_3[frame_index], + dtype=dtype) + self.RunOverlapAdd(interpreters[3], [input_frames[frame_index]], + 4, + expected_output_frames_step_4[frame_index], + dtype=dtype) + self.RunOverlapAdd(interpreters[4], [input_frames[frame_index]], + 5, + expected_output_frames_step_5[frame_index], + dtype=dtype) + frame_index += 1 + + @parameterized.named_parameters(('_FLOAT32InputOutput', tf.float32), + ('_INT16InputOutput', tf.int16)) + def testOverlapAddNframes5(self, dtype): + input_frames = np.array([[1, -5, 4, 2, 7], [4, 15, -44, 27, -16], + [66, -19, 79, 8, -12], [-122, 17, 65, 18, -101], + [3, 33, -66, -19, 55]]) + expected_output_frames_step_1 = np.array([1, -1, 85, -183, 133]) + expected_output_frames_step_2 = np.array( + [1, -5, 8, 17, 29, 8, -59, 25, 56, 51]) + expected_output_frames_step_3 = np.array( + [1, -5, 4, 6, 22, -44, 93, -35, 79, -114, 5, 65, 21, -68, -66]) + expected_output_frames_step_4 = np.array([ + 1, -5, 4, 
2, 11, 15, -44, 27, 50, -19, 79, 8, -134, 17, 65, 18, -98,
+        33, -66, -19
+    ])
+    expected_output_frames_step_5 = np.array([
+        1, -5, 4, 2, 7, 4, 15, -44, 27, -16, 66, -19, 79, 8, -12, -122, 17, 65,
+        18, -101, 3, 33, -66, -19, 55
+    ])
+    func = tf.function(overlap_add_op.overlap_add)
+    # Initialize an interpreter for each step size
+    # TODO(b/263020764): use a parameterized test instead
+    interpreters = [None] * 6
+    for i in range(5):
+      interpreters[i] = util.get_tflm_interpreter(
+          func.get_concrete_function(
+              tf.TensorSpec(np.shape(input_frames), dtype=dtype), i + 1), func)
+    self.RunOverlapAdd(interpreters[0],
+                       input_frames,
+                       1,
+                       expected_output_frames_step_1,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[1],
+                       input_frames,
+                       2,
+                       expected_output_frames_step_2,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[2],
+                       input_frames,
+                       3,
+                       expected_output_frames_step_3,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[3],
+                       input_frames,
+                       4,
+                       expected_output_frames_step_4,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[4],
+                       input_frames,
+                       5,
+                       expected_output_frames_step_5,
+                       dtype=dtype)
+
+  @parameterized.named_parameters(('_FLOAT32InputOutput', tf.float32),
+                                  ('_INT16InputOutput', tf.int16))
+  def testOverlapAddNframes5Channels2(self, dtype):
+    input_frames = np.array([[[1, -5, 4, 2, 7], [4, 15, -44, 27, -16],
+                              [66, -19, 79, 8, -12], [-122, 17, 65, 18, -101],
+                              [3, 33, -66, -19, 55]],
+                             [[1, -5, 4, 2, 7], [4, 15, -44, 27, -16],
+                              [66, -19, 79, 8, -12], [-122, 17, 65, 18, -101],
+                              [3, 33, -66, -19, 55]]])
+    expected_output_frames_step_1 = np.array([[1, -1, 85, -183, 133],
+                                              [1, -1, 85, -183, 133]])
+    expected_output_frames_step_2 = np.array(
+        [[1, -5, 8, 17, 29, 8, -59, 25, 56, 51],
+         [1, -5, 8, 17, 29, 8, -59, 25, 56, 51]])
+    expected_output_frames_step_3 = np.array(
+        [[1, -5, 4, 6, 22, -44, 93, -35, 79, -114, 5, 65, 21, -68, -66],
+         [1, -5, 4, 6, 22, -44, 93, -35, 79, -114, 5, 65, 21, -68, -66]])
+    expected_output_frames_step_4 = np.array([[
+        1, -5, 4, 2, 11, 15, -44, 27, 50, -19, 79, 8, -134, 17, 65, 18, -98,
+        33, -66, -19
+    ],
+                                              [
+                                                  1, -5, 4, 2, 11, 15, -44, 27,
+                                                  50, -19, 79, 8, -134, 17, 65,
+                                                  18, -98, 33, -66, -19
+                                              ]])
+    expected_output_frames_step_5 = np.array([[
+        1, -5, 4, 2, 7, 4, 15, -44, 27, -16, 66, -19, 79, 8, -12, -122, 17, 65,
+        18, -101, 3, 33, -66, -19, 55
+    ],
+                                              [
+                                                  1, -5, 4, 2, 7, 4, 15, -44,
+                                                  27, -16, 66, -19, 79, 8, -12,
+                                                  -122, 17, 65, 18, -101, 3,
+                                                  33, -66, -19, 55
+                                              ]])
+    func = tf.function(overlap_add_op.overlap_add)
+    # Initialize an interpreter for each step size
+    # TODO(b/263020764): use a parameterized test instead
+    interpreters = [None] * 6
+    for i in range(5):
+      interpreters[i] = util.get_tflm_interpreter(
+          func.get_concrete_function(
+              tf.TensorSpec(np.shape(input_frames), dtype=dtype), i + 1), func)
+    self.RunOverlapAdd(interpreters[0],
+                       input_frames,
+                       1,
+                       expected_output_frames_step_1,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[1],
+                       input_frames,
+                       2,
+                       expected_output_frames_step_2,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[2],
+                       input_frames,
+                       3,
+                       expected_output_frames_step_3,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[3],
+                       input_frames,
+                       4,
+                       expected_output_frames_step_4,
+                       dtype=dtype)
+    self.RunOverlapAdd(interpreters[4],
+                       input_frames,
+                       5,
+                       expected_output_frames_step_5,
+                       dtype=dtype)
+
+  def testStepSizeTooLarge(self):
+    overlap_add_input = np.zeros(160, dtype=np.int16)
+    with self.assertRaises((tf.errors.InvalidArgumentError, ValueError)):
+      self.evaluate(overlap_add_op.overlap_add(overlap_add_input, 128, 129))
+
+  def testStepSizeNotEqualOutputSize(self):
+    overlap_add_input = np.zeros(122, dtype=np.int16)
+    with self.assertRaises((tf.errors.InvalidArgumentError, ValueError)):
+      self.evaluate(overlap_add_op.overlap_add(overlap_add_input, 321, 123))
+
+
+if __name__ == '__main__':
+  tf.test.main()
diff --git a/python/tflite_micro/signal/ops/pcan_op.py b/python/tflite_micro/signal/ops/pcan_op.py
new file mode 100644
index 00000000000..753e76b3cbf
--- /dev/null
+++ b/python/tflite_micro/signal/ops/pcan_op.py
@@ -0,0 +1,72 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+import tensorflow as tf
+from tflite_micro.python.tflite_micro.signal.utils import util
+from tflite_micro.python.tflite_micro.signal.utils import wide_dynamic_func_lut_wrapper
+
+gen_pcan_op = util.load_custom_op("pcan_op.so")
+
+PCAN_SNR_BITS = 12
+
+
+def _pcan_wrapper(pcan_fn, default_name):
+  """Wrapper around gen_pcan.pcan*."""
+
+  def _pcan(input_tensor,
+            noise_estimate,
+            strength,
+            offset,
+            gain_bits,
+            smoothing_bits,
+            input_correction_bits,
+            name=default_name):
+    with tf.name_scope(name) as scope:
+      input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.uint32)
+      noise_estimate = tf.convert_to_tensor(noise_estimate, dtype=tf.uint32)
+
+      input_bits = smoothing_bits - input_correction_bits
+      snr_shift = gain_bits - input_correction_bits - PCAN_SNR_BITS
+      if snr_shift < 1:
+        raise ValueError("SNR shift must be positive: %d" % snr_shift)
+
+      lut = wide_dynamic_func_lut_wrapper.wide_dynamic_func_lut(
+          strength, offset, input_bits, gain_bits)
+
+      lut_tensor = tf.convert_to_tensor(lut, dtype=tf.int16)
+
+      dim_list = input_tensor.shape.as_list()
+      if len(dim_list) != 1:
+        raise ValueError("Input tensor must have a rank of 1")
+      dim_list = noise_estimate.shape.as_list()
+      if len(dim_list) != 1:
+        raise ValueError("Noise estimate must have a rank of 1")
+
+      # Note: the op is currently invoked with a fixed shift of 6; the
+      # snr_shift computed above is only range-checked.
+      snr_shift = 6
+      return pcan_fn(input_tensor,
+                     noise_estimate,
+                     lut_tensor,
+                     snr_shift=snr_shift,
+                     name=scope)
+
+  return _pcan
+
+
+# TODO(b/286250473): change back name after name clash resolved
+pcan = _pcan_wrapper(gen_pcan_op.signal_pcan, "signal_pcan")
+
+tf.no_gradient("signal_pcan")
diff --git a/python/tflite_micro/signal/ops/pcan_op_test.py b/python/tflite_micro/signal/ops/pcan_op_test.py
new file mode 100644
index 00000000000..1400bf3407b
--- /dev/null
+++ b/python/tflite_micro/signal/ops/pcan_op_test.py
@@ -0,0 +1,83 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+import os
+
+import numpy as np
+import tensorflow as tf
+
+from tensorflow.python.platform import resource_loader
+from tflite_micro.python.tflite_micro.signal.ops import pcan_op
+from tflite_micro.python.tflite_micro.signal.utils import util
+
+
+class PcanOpTest(tf.test.TestCase):
+
+  _PREFIX_PATH = resource_loader.get_path_to_datafile('')
+
+  def GetResource(self, filepath):
+    full_path = os.path.join(self._PREFIX_PATH, filepath)
+    with open(full_path, 'rt') as f:
+      file_text = f.read()
+    return file_text
+
+  def SinglePcanOpTest(self, filename):
+    lines = self.GetResource(filename).splitlines()
+    args = lines[0].split()
+    strength = float(args[0])
+    offset = float(args[1])
+    gain_bits = int(args[2])
+    smoothing_bits = int(args[3])
+    input_correction_bits = int(args[4])
+
+    func = tf.function(pcan_op.pcan)
+    channel_num = len(lines[1].split())
+
+    concrete_function = func.get_concrete_function(
+        tf.TensorSpec(channel_num, dtype=tf.uint32),
+        tf.TensorSpec(channel_num, dtype=tf.uint32),
+        strength=strength,
+        offset=offset,
+        gain_bits=gain_bits,
+        smoothing_bits=smoothing_bits,
+        input_correction_bits=input_correction_bits)
+    interpreter = util.get_tflm_interpreter(concrete_function, func)
+
+    # Read lines in triplets; line 0 holds the configuration params.
+    for i in range(1, len(lines), 3):
+      in_frame = np.array([int(j) for j in lines[i + 0].split()],
+                          dtype='uint32')
+      noise_estimate = np.array([int(j) for j in lines[i + 1].split()],
+                                dtype='uint32')
+      output_expected = np.array([int(j) for j in lines[i + 2].split()],
+                                 dtype='uint32')
+      # TFLM
+      interpreter.set_input(in_frame, 0)
+      interpreter.set_input(noise_estimate, 1)
+      interpreter.invoke()
+      output = interpreter.get_output(0)
+      self.assertAllEqual(output_expected, output)
+      # TF
+      output = self.evaluate(
+          pcan_op.pcan(in_frame, noise_estimate, strength, offset, gain_bits,
+                       smoothing_bits, input_correction_bits))
+      self.assertAllEqual(output_expected, output)
+
+  def testPcanOp(self):
+    self.SinglePcanOpTest('testdata/pcan_op_test1.txt')
+
+
+if __name__ == '__main__':
+  tf.test.main()
diff --git a/python/tflite_micro/signal/ops/stacker_op.py b/python/tflite_micro/signal/ops/stacker_op.py
new file mode 100644
index 00000000000..3f87fc79736
--- /dev/null
+++ b/python/tflite_micro/signal/ops/stacker_op.py
@@ -0,0 +1,51 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================== +"""Use stacker op in python.""" + +import tensorflow as tf +from tflite_micro.python.tflite_micro.signal.utils import util + +gen_stacker_op = util.load_custom_op('stacker_op.so') + + +def _stacker_wrapper(stacker_fn, default_name): + """Wrapper around gen_stacker_op.stacker*.""" + + def _stacker(input_tensor, + num_channels, + stacker_left_context, + stacker_right_context, + stacker_step, + name=default_name): + with tf.name_scope(name) as name: + input_tensor = tf.convert_to_tensor(input_tensor, dtype=tf.int16) + dim_list = input_tensor.shape.as_list() + if len(dim_list) != 1: + raise ValueError("Input tensor must have a rank of 1") + + return stacker_fn(input_tensor, + num_channels=num_channels, + stacker_left_context=stacker_left_context, + stacker_right_context=stacker_right_context, + stacker_step=stacker_step, + name=name) + + return _stacker + + +# TODO(b/286250473): change back name after name clash resolved +stacker = _stacker_wrapper(gen_stacker_op.signal_stacker, "signal_stacker") + +tf.no_gradient("signal_stacker") diff --git a/python/tflite_micro/signal/ops/stacker_op_test.py b/python/tflite_micro/signal/ops/stacker_op_test.py new file mode 100644 index 00000000000..5943952b424 --- /dev/null +++ b/python/tflite_micro/signal/ops/stacker_op_test.py @@ -0,0 +1,80 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for stacker ops.""" +import os + +import numpy as np +import tensorflow as tf + +from tensorflow.python.platform import resource_loader +from tflite_micro.python.tflite_micro.signal.ops import stacker_op +from tflite_micro.python.tflite_micro.signal.utils import util + + +class StackerOpTest(tf.test.TestCase): + + _PREFIX_PATH = resource_loader.get_path_to_datafile('') + + def GetResource(self, filepath): + full_path = os.path.join(self._PREFIX_PATH, filepath) + with open(full_path, 'rt') as f: + file_text = f.read() + return file_text + + def SingleStackerTest(self, filename): + lines = self.GetResource(filename).splitlines() + args = lines[0].split() + num_channels = int(args[0]) + stacker_left_context = int(args[1]) + stacker_right_context = int(args[2]) + stacker_step = int(args[3]) + func = tf.function(stacker_op.stacker) + input_size = len(lines[1].split()) + concrete_function = func.get_concrete_function( + tf.TensorSpec(input_size, dtype=tf.int16), num_channels, + stacker_left_context, stacker_right_context, stacker_step) + interpreter = util.get_tflm_interpreter(concrete_function, func) + # Skip line 0, which contains the configuration params. 
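+    # For reference, the vector file layout this loop assumes (inferred from
+    # the argument parsing above):
+    #   line 0:            num_channels stacker_left_context stacker_right_context stacker_step
+    #   lines 1, 4, 7...:  one input feature frame (int16)
+    #   lines 2, 5, 8...:  the expected stacked output frame
+    #   lines 3, 6, 9...:  the expected output-valid flag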
+ # Read lines in triplets + i = 1 + while i < len(lines): + input_array = np.array([int(j) for j in lines[i].split()], + dtype=np.int16) + output_array_exp = [int(j) for j in lines[i + 1].split()] + output_valid_exp = [int(j) for j in lines[i + 2].split()] + # TFLM + interpreter.set_input(input_array, 0) + interpreter.invoke() + out_frame = interpreter.get_output(0) + out_valid = interpreter.get_output(1) + self.assertEqual(out_valid, output_valid_exp) + if out_valid: + self.assertAllEqual(out_frame, output_array_exp) + # TF + [out_frame, out_valid] = self.evaluate( + stacker_op.stacker(input_array, num_channels, stacker_left_context, + stacker_right_context, stacker_step)) + self.assertEqual(out_valid, output_valid_exp) + if out_valid: + self.assertAllEqual(out_frame, output_array_exp) + i += 3 + + def testStacker(self): + self.SingleStackerTest('testdata/stacker_test1.txt') + + +if __name__ == '__main__': + np.random.seed(0) + tf.test.main() diff --git a/python/tflite_micro/signal/ops/testdata/BUILD b/python/tflite_micro/signal/ops/testdata/BUILD index 375ef31c9f7..4c8d7bcc0bf 100644 --- a/python/tflite_micro/signal/ops/testdata/BUILD +++ b/python/tflite_micro/signal/ops/testdata/BUILD @@ -7,6 +7,17 @@ package( ) exports_files([ + "energy_test1.txt", + "fft_auto_scale_test1.txt", + "filter_bank_accumulation_8k.txt", + "filter_bank_accumulation_16k.txt", + "filter_bank_accumulation_44k.txt", + "filter_bank_test1.txt", + "filter_bank_square_root_test1.txt", + "filter_bank_spectral_subtraction_test1.txt", + "framer_test1.txt", + "pcan_op_test1.txt", "rfft_test1.txt", + "stacker_test1.txt", "window_test1.txt", ]) diff --git a/python/tflite_micro/signal/ops/testdata/energy_test1.txt b/python/tflite_micro/signal/ops/testdata/energy_test1.txt new file mode 100644 index 00000000000..e1a972c0d2b --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/energy_test1.txt @@ -0,0 +1,367 @@ +5 122 +-65 0 5 4 -86 148 379 -160 -630 -280 337 1003 324 -1169 -646 680 626 -208 -683 27 663 159 -235 -401 -402 421 617 62 52 -679 -1095 567 1255 284 -387 -687 -263 275 163 80 42 -9 -53 -72 34 107 78 -185 -219 106 101 70 128 -50 -130 -48 -10 -9 51 129 -1 -149 -55 98 94 -17 -47 -83 -100 97 158 20 -33 -54 -41 -62 -69 110 166 -21 -121 33 23 -142 196 182 -463 -240 426 264 -177 22 205 -364 -358 257 214 17 -7 -30 -8 -12 35 -63 -156 59 157 69 -17 -129 -108 103 138 -63 -68 74 39 -120 -82 51 31 79 78 -39 -38 -77 -36 59 2 -6 45 3 -66 35 140 -68 -180 7 115 32 -56 -3 38 -5 -6 -1 -20 -26 27 51 -42 -67 54 86 -36 -100 -11 57 3 37 72 -59 -77 -21 -13 24 15 67 52 -73 -12 30 -37 -84 -29 85 32 48 64 -97 -65 45 12 -39 -26 14 -11 82 131 -71 -143 -65 0 127 122 -30 -109 -64 78 78 -81 -107 39 202 88 -232 -136 142 78 -104 -72 132 115 -68 -28 -85 -92 189 92 -337 -42 578 11 -728 140 775 -249 -885 212 906 -147 -833 107 687 -89 -569 11 424 28 -238 -12 78 -38 5 40 -28 -26 35 20 -26 -16 4 1 5 5 0 0 1 -1 -4 3 -2 -6 2 3 -1 2 -3 -8 2 8 0 -4 -7 -3 3 -2 3 -2 -3 3 4 -3 -6 -3 5 -1 -2 -1 4 2 -2 -4 0 -3 -2 0 1 -5 0 -1 -1 0 4 -1 2 0 0 -4 -1 1 5 3 0 -5 0 3 -2 -1 0 -2 3 2 1 0 -1 0 0 2 -1 -1 -1 -3 -3 -3 4 3 -1 -6 0 3 3 -5 0 0 6 1 -2 2 4 1 -4 -1 -2 -4 -1 3 3 -3 1 2 0 -3 -1 2 2 0 3 1 -4 1 1 -3 -4 6 7 -3 -2 2 -4 5 -1 -1 -2 -2 -4 -2 -4 -1 5 0 1 0 -3 1 0 0 -3 -5 2 2 -1 1 -2 -2 0 -2 0 -1 -2 -1 3 3 -1 -3 1 2 -2 -2 1 -1 0 -2 -1 2 1 -2 -1 1 3 2 -1 -3 1 0 -4 2 1 -5 2 2 1 4 0 -5 2 2 -1 0 3 4 -1 2 -2 0 -3 -2 2 2 2 2 -2 -3 -3 3 4 0 -4 -4 -1 1 -3 -1 -1 -4 0 -1 1 2 1 -2 1 2 -1 -3 3 3 0 -1 -3 4 4 -3 -7 -4 3 3 2 2 -2 -2 -2 -1 -1 0 0 0 -3 -4 2 1 0 1 0 +65471 262149 
9764778 4284481915 4276682122 1119578 1471537 879716 435140 467218 464850 216026 338845 384533 463745 1520514 1655681 621738 144794 32969 1845 7993 12605 40309 59197 15101 18884 19204 181 19242 22202 12629 9125 9098 19409 25364 4005 5525 16861 27997 15730 20693 71540 271969 251172 31813 174521 194213 46085 949 208 5194 27817 29410 16930 22273 23013 10100 15921 9325 7202 7605 7373 4777 40 2034 5581 24224 32449 14249 3145 1469 37 1076 3330 6253 10312 11296 3370 1378 8665 6370 745 4714 8033 1044 8425 8066 3328 13505 6250 1665 872 6845 22202 24674 16129 15784 15977 12168 18010 42325 61568 38660 16900 22608 17849 8009 44185 122033 335848 530105 620225 845226 865780 715498 483418 331682 179897 57428 6228 1469 4293132328 2359270 4293263380 327664 327681 5 65536 4294770687 4294836227 196602 4294901763 4294770690 196600 8 4294574076 262141 262142 4294836222 262147 4294639613 393213 4294901759 327679 4294836226 65532 4294901757 65536 65531 4294967295 262144 196607 0 4294967292 327681 3 65531 4294836227 65535 262142 65538 4294901760 0 4294901762 4294967295 4294836221 327677 4294901763 65530 196611 65531 393216 4294836225 262146 4294705153 4294901759 4294967292 196611 131069 2 4294967293 131074 196608 4294705153 65537 4294770685 458758 4294901757 4294705154 4294901765 4294901759 4294770686 4294770686 393215 65536 4294770688 1 4294770688 196603 4294901762 4294836225 65534 65534 4294901759 262143 4294901763 131069 4294836226 131070 65535 4294967294 65538 4294967294 196609 4294901762 131069 4294705152 65538 196603 65538 4 196603 4294901762 196608 4294901764 4294836226 4294770688 196606 131074 4294836226 4294836221 262147 4294705152 4294967292 4294770689 4294967295 65532 131071 65538 131070 4294901762 262141 3 4294836223 262148 4294574077 262140 131075 4294836226 4294901758 4294967295 0 4294770688 196604 1 1 +-21 0 -44 232 -110 -560 590 1003 -1067 -927 1300 417 -561 -216 -731 131 1272 -420 -1345 726 1035 -258 -551 -302 270 524 -14 -420 -200 108 17 169 352 14 -249 -215 56 87 48 -20 -141 -46 104 89 -67 -65 13 84 95 -69 -114 -10 22 45 74 9 -76 -114 -17 141 56 -62 -32 16 33 -11 -30 -3 3 21 48 -16 -41 -24 -13 1 21 43 -52 -62 108 168 5 -348 -315 288 401 139 -23 -408 -254 161 59 72 119 29 -36 -93 -28 16 19 19 -17 7 63 -36 -91 -1 58 32 5 -48 -96 -6 77 116 18 -117 -35 51 8 -25 15 15 -46 -9 67 14 -47 -25 -6 -3 3 63 47 -43 -8 -23 -45 -15 -3 51 33 -25 -22 15 18 -28 -43 14 28 32 23 -13 -8 -40 -24 19 -3 25 37 -29 -27 9 3 -12 -17 5 17 37 26 -46 -30 3 -20 4 48 38 -16 -72 -33 26 23 42 10 -63 -17 36 -17 -31 15 53 4 -49 -15 22 -12 7 37 24 -6 -28 36 16 -5 -72 -68 13 8 39 3 15 56 -4 -62 -67 -42 137 224 5 -32 -298 -359 48 65 355 271 5 122 -220 -172 -192 -215 121 77 175 104 -17 38 -67 -71 -50 -11 81 46 -34 -22 -2 1 0 -2 6 3 -1 1 0 0 1 2 -3 -2 -1 0 1 0 -2 -2 1 -1 0 -1 1 0 0 0 -1 -1 1 -1 2 2 -2 1 -2 -3 0 -1 0 1 1 -1 -1 2 0 -1 -1 -2 3 3 -1 -2 -3 -1 3 2 -4 -1 0 1 0 0 -1 -1 -2 0 -1 1 2 1 -1 1 -2 -1 0 -1 -1 0 0 1 1 -1 -1 0 -3 1 2 -1 -1 0 0 2 -3 -1 1 -3 -1 1 -1 1 0 0 0 -1 1 3 -2 -1 0 -3 -4 1 2 -1 1 1 -1 -2 0 1 1 0 1 0 -3 -2 2 3 -2 -3 -1 0 2 2 1 -2 -2 1 0 -2 -1 2 -1 -2 2 3 -2 -2 0 0 0 0 -2 -2 -2 1 1 1 0 -3 -1 1 1 0 -2 1 -1 -1 0 0 -2 -2 -1 -1 2 0 0 1 -2 -1 -1 -1 2 1 -1 1 -2 -1 -3 1 0 -3 1 0 -1 1 1 0 -1 -3 2 2 0 2 -1 -2 1 -2 -1 0 -2 0 3 2 -3 0 0 -1 -2 1 1 -1 0 -1 -1 -2 -2 -2 2 0 0 -2 2 2 -1 1 1 0 -1 0 -2 -2 2 -1 -2 0 1 0 -1 -1 0 -2 0 1 0 -2 -1 1 3 0 -4 -1 2 1 0 +65515 15269844 4258332562 65733198 4234279893 1863889 361377 551522 1794384 2336101 1137789 394805 347476 176596 51664 28850 124100 108226 10705 2704 21997 18737 8714 7225 13786 
13096 2509 5557 18772 20170 6980 1280 1210 909 450 2560 2257 170 2290 6548 39888 121129 182169 180122 166993 90437 8665 15002 9945 1040 722 338 5265 8282 4388 2329 9252 19385 14013 3826 689 450 2197 4685 2834 45 3978 4058 593 2250 2610 1714 709 1108 2045 1808 698 1664 937 634 2210 810 153 314 1658 2792 909 416 3748 5440 1765 2293 4069 1585 1250 3034 2417 709 193 1945 820 1552 5209 4793 1585 234 3152 8333 20533 50201 89828 131185 130250 73466 63284 66448 60866 36554 11105 5933 7541 6682 4292739118 4294901738 1 458750 4294901763 1 65536 4294770690 4294967294 65536 4294836224 131070 65535 131071 0 4294901760 131071 196607 4294836226 4294836225 65533 65535 65537 4294967295 2 4294967295 262142 4294901763 4294836222 262143 4294705154 65535 1 4294901760 4294901759 4294901760 131073 4294901761 4294836225 65535 4294967295 0 65537 4294967295 4294770688 131073 4294967295 0 4294770690 131071 4294967293 4294901761 1 0 131071 4294836227 65535 4294770685 131073 131071 4294901761 65534 65537 65536 4294770688 196606 4294836227 4294967293 131072 65538 4294901758 1 4294967294 4294901762 196606 4294836227 65534 0 4294836224 4294901758 65537 1 4294967293 65537 4294836224 4294901761 65535 4294836224 4294967294 196607 0 4294836225 4294967295 196607 4294901761 4294836225 4294836223 1 131069 4294901760 65537 4294901760 196605 2 4294901762 131070 4294967294 4294836224 196608 4294770690 0 4294901759 65537 65535 4294967295 4294901758 196606 0 196606 4294901762 65537 4294901760 4294836224 196606 4294901759 65536 4294901760 65535 65534 1 4294967294 196609 4294705152 196607 1 +-12 0 54 -55 -114 -52 -255 288 1003 52 -1134 -942 520 1415 233 -1196 -731 339 550 587 -234 -814 227 739 -128 -570 15 280 56 -125 -149 34 143 71 -48 -97 -7 41 -3 -3 23 14 -12 -47 -28 47 43 -26 -45 2 26 18 -3 -12 7 -1 -17 -7 -1 15 12 0 -6 -6 4 7 2 -16 -14 13 18 -4 -11 -6 -4 4 3 5 11 14 11 -34 -16 3 -12 -7 5 21 -3 -25 -17 22 15 19 22 -34 -35 12 21 1 -4 -2 -10 -5 0 12 8 3 5 -11 -16 0 10 7 -3 4 13 -16 -12 5 -4 -9 -2 15 11 -1 1 -16 -16 6 7 7 5 -3 -3 -7 -6 0 5 7 -6 -9 6 12 1 -9 -2 -3 0 6 -3 -6 -2 8 9 -3 -12 -6 3 8 7 -1 -9 -14 -1 16 4 -8 -4 0 -3 4 4 -2 0 5 7 -14 -21 5 15 5 0 -5 -9 -7 -1 10 3 2 3 -3 1 -9 -8 8 2 -4 -2 7 10 -5 -7 -3 2 0 -1 0 -1 -2 -2 2 -4 1 5 2 -6 -3 4 2 0 0 -10 -1 15 10 -5 -9 6 -1 -9 3 7 -8 -8 5 1 -6 -3 6 1 -1 2 5 5 -4 -3 -2 1 1 -1 0 0 0 0 0 0 0 0 -1 0 2 1 0 0 -1 0 1 1 0 0 0 1 -1 -1 0 -1 1 0 -1 -1 0 -1 0 0 0 -1 -1 1 0 0 -1 0 0 0 0 -2 -1 0 0 0 -1 0 1 0 1 0 0 0 0 -1 0 -1 0 -1 0 0 0 -1 0 -1 0 -1 0 0 1 0 0 0 -1 0 1 0 -2 -1 1 0 1 1 0 -1 1 0 0 0 0 1 0 -1 0 0 -1 0 0 0 0 -1 -1 0 0 0 0 -1 1 -1 0 0 1 -1 -1 -1 1 0 0 0 -1 0 0 -1 0 0 0 0 -1 -1 -1 -1 0 0 0 0 -1 -1 0 0 -1 0 0 0 -1 0 0 0 -1 0 0 0 -2 0 0 0 0 0 0 0 1 1 -1 1 0 0 0 -1 -1 0 1 0 0 1 1 -1 0 -1 -1 1 -1 1 0 -1 0 0 0 0 0 -1 -2 -1 -1 0 0 0 -1 -1 0 1 0 0 -1 0 0 -1 0 0 -1 0 0 0 -1 -1 0 0 -1 0 1 0 -1 -1 0 1 -1 -1 -1 1 0 -1 -1 0 0 0 -1 0 0 0 1 -1 0 0 0 0 -1 0 1 0 0 1 0 -1 0 0 1 0 0 0 0 +65524 4291362870 4291624846 18939649 3408875 2173320 2272625 1484705 649282 647069 717352 597650 341284 78625 18761 23357 25490 11713 1730 18 725 2353 2993 2525 2029 1000 153 50 338 226 144 72 65 260 365 340 157 32 34 317 1277 265 193 466 634 773 586 1640 1369 442 20 125 144 73 146 256 149 25 425 169 97 229 122 257 292 98 34 58 36 74 117 180 82 13 36 45 68 90 180 73 50 277 257 80 16 25 20 25 245 466 250 25 130 101 13 18 82 128 20 53 125 58 4 1 5 8 17 29 45 20 0 101 325 106 37 90 113 89 37 45 2 29 4294705157 4294901757 65537 65535 0 0 0 4294901760 131072 1 4294901760 65536 1 0 4294901761 65535 131071 4294901760 65535 65535 
0 4294967295 1 4294901760 0 0 4294967294 0 4294901760 65536 65536 0 0 65535 65535 65535 0 65535 65535 65535 65536 0 4294901760 65536 4294836224 131071 65536 1 131071 0 0 1 65535 4294901760 0 0 4294967295 0 0 131071 65535 65536 4294967295 131071 0 4294901760 0 65535 0 4294901760 4294967295 65535 0 4294901760 65535 4294901760 0 4294901760 0 4294901760 0 4294836224 0 0 0 65536 4294901761 1 0 4294967295 65536 0 65537 65535 4294967295 4294901761 1 65535 0 0 4294901759 4294967295 0 4294901760 65535 1 4294901760 0 65535 4294901760 0 4294901760 65535 4294901760 65536 4294901760 65535 4294901761 4294967295 1 4294967295 0 4294901760 0 65536 65535 0 4294901760 65536 0 1 65535 65536 0 0 +5 0 -62 87 189 -102 -30 -292 -918 -156 -34 1470 1398 -495 -410 284 142 -1906 -705 3007 1854 -2752 -1740 429 -159 479 542 162 -143 -141 -11 51 128 111 6 -201 -38 95 2 -89 -42 57 25 -19 -47 45 56 -8 13 12 -26 -44 35 21 -63 -24 42 31 -17 -12 3 -3 1 10 -3 -10 -5 14 17 -3 1 -1 -2 -26 -15 21 18 9 -2 -63 -57 93 89 -53 -63 27 71 -36 -93 -2 55 39 -3 -20 -3 -31 -29 32 7 11 40 -2 -32 -32 -5 36 25 -24 -32 19 40 -22 -45 1 5 15 25 18 0 -44 -21 9 0 22 17 -23 -14 14 -4 -17 6 24 -1 -18 4 6 -17 4 41 -9 -45 -12 9 17 12 7 -1 -25 -17 19 12 -5 -1 7 -14 -12 26 23 -20 -35 -3 26 8 1 13 -11 -19 -11 -3 16 14 -3 -13 -3 13 -8 -25 10 14 -7 -8 31 31 -43 -41 18 24 8 -2 -16 -14 7 26 11 -18 -42 -5 47 4 -34 -5 22 11 3 -6 -25 10 25 -29 -21 47 10 -55 13 49 -53 -38 79 -3 -63 69 14 -110 33 73 -54 31 68 -95 -68 87 9 -63 67 37 -77 7 34 -37 -10 27 2 -10 6 3 -5 2 0 -1 -2 0 1 0 0 0 0 -1 -1 0 1 0 0 2 0 0 -1 -1 0 1 0 -1 -1 1 1 -2 -1 1 0 -3 0 1 0 0 0 1 0 0 0 0 -1 -2 1 0 -1 1 0 -1 -1 -1 -1 -1 0 1 2 -1 -2 0 1 0 -2 -1 0 1 0 0 -2 -1 1 0 1 1 0 0 -2 -2 1 0 0 0 -1 0 -1 -1 0 -1 0 1 0 1 1 -2 -1 0 -1 0 -1 -1 0 1 -2 -1 1 1 -1 -1 0 0 0 -1 0 2 0 0 -1 0 1 0 0 1 0 -1 0 1 -1 0 -1 0 0 -1 -1 1 0 0 0 0 -1 0 -1 -1 -1 -1 0 1 0 -1 0 1 0 -1 0 0 0 -1 1 1 -2 -1 -1 -1 1 0 -1 0 0 0 0 0 0 1 -1 -1 -2 -1 0 -1 0 1 2 -1 -2 -1 -1 0 -1 0 0 -2 -1 0 0 -1 0 0 0 0 -2 0 0 0 -1 -1 1 0 -1 -1 -1 1 2 0 -1 0 0 0 0 0 1 1 -1 0 -2 -1 1 1 1 -1 0 0 -1 -2 0 0 1 -1 0 -2 0 2 -1 0 -1 -1 0 0 -1 1 0 0 0 -2 -1 1 1 0 0 -2 1 1 0 -1 0 0 0 -1 0 +5 5767106 4288282813 4275896290 4284808298 2162056 2199429 248756 3653000 9539074 11010820 3211641 254722 320008 40330 2722 28705 40437 10469 7925 5013 986 4234 3200 313 2612 1666 4545 2725 433 18 101 109 221 298 2 680 666 405 3973 11898 10730 4698 6337 8653 4546 409 970 1865 170 1604 2048 1321 1201 1385 2084 2026 250 949 1936 522 484 818 392 305 612 325 52 305 1762 2169 370 193 626 650 169 50 340 1205 1625 685 65 290 482 265 205 178 233 725 245 1025 2810 2005 640 260 245 797 2088 2234 1172 509 130 661 725 1282 2309 3194 5210 7685 3978 4957 13189 8245 5585 13649 7650 8458 7298 1205 1469 733 136 4294639619 2 4294901759 65536 0 0 4294967295 65536 0 2 4294901760 65535 1 4294967295 65537 4294967294 1 65533 1 0 1 0 4294901760 131070 4294901760 1 4294967295 4294967295 65535 131073 4294901759 65536 4294836224 65535 1 4294836224 131071 65536 1 4294836224 131070 0 4294901760 4294901760 65535 65535 1 65537 4294967294 4294901760 4294901760 65535 4294836225 131071 4294901761 65535 0 65535 2 4294901760 65536 0 1 65535 4294901761 4294901760 0 4294967295 1 0 4294901760 4294901760 4294967295 65535 1 65535 1 65535 0 131071 4294836225 4294967295 131071 4294901760 0 0 0 4294901761 4294901759 65535 65535 131073 4294901759 4294967295 4294901760 0 4294967294 0 65535 0 4294836224 0 4294901760 131071 4294901760 4294967295 131073 4294901760 0 0 65536 4294901761 4294836224 131071 65537 65535 4294901760 
+... (several thousand lines of auto-generated integer test-data vectors added by this diff; omitted for brevity) ...
0 1 131071 1 0 4294901760 131071 4294901761 0 0 1 4294901760 65535 4294901760 0 0 0 1 4294901760 65537 131071 0 4294967295 0 0 0 4294901760 4294901760 4294901760 0 4294967295 65535 65536 0 65535 65535 4294901760 0 4294967295 0 0 0 0 0 0 0 0 0 0 131071 0 0 65536 0 0 4294967294 4294967295 0 +-5 0 -17 18 37 -44 -142 67 170 77 -35 -99 67 55 -43 -68 77 -102 -192 15 -33 144 104 -84 -33 257 355 -355 -653 83 760 -19 -999 -31 891 369 -395 -531 -54 436 283 -144 -170 -138 -35 155 117 -47 -79 -65 -40 60 61 19 -44 5 92 -48 -134 22 98 26 -33 -22 8 -8 -4 19 -4 -69 -67 126 125 -71 -79 19 67 14 -21 -78 -26 48 44 -1 -81 -160 -70 274 140 -117 10 -80 -337 135 505 178 -197 -355 -70 189 172 86 178 -251 -398 -349 -216 700 626 -405 -893 -4 646 810 267 -757 -197 67 -201 -210 -29 379 233 -185 -307 -54 226 296 -30 -341 -63 217 37 -96 -2 59 18 -38 -33 4 33 32 -16 -48 42 42 -84 -77 89 94 -81 -101 26 99 25 -55 -41 6 31 22 -10 -26 -7 7 6 15 8 -24 -17 14 11 0 2 -7 -12 3 10 11 11 -12 -21 -4 19 10 -19 -16 6 23 -11 -33 31 69 -50 -122 22 183 54 -202 -120 170 138 -109 -111 45 93 22 -57 -124 -43 186 157 -148 -206 18 127 73 -57 -42 68 19 -83 -58 62 128 18 -156 -67 90 34 -29 8 10 -24 -8 11 -17 -11 59 23 -63 -19 34 0 -12 6 6 -1 -3 -1 -1 0 1 0 0 0 0 0 0 -1 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 -1 0 0 0 0 -1 0 -1 0 -1 0 0 -1 -1 1 0 -1 -1 0 0 0 0 -1 1 0 0 -1 -1 0 -1 0 -1 -1 0 -1 -1 0 1 1 0 -1 -1 0 0 -1 -1 0 0 -2 0 0 1 0 0 -1 0 0 0 0 0 -1 0 0 1 1 -1 -1 0 0 -1 0 -1 -1 -1 0 -1 0 -1 0 -1 0 0 0 0 0 0 -1 -1 -1 0 0 -1 -1 0 0 -1 0 0 0 0 -1 0 -1 0 0 0 0 -1 0 -1 0 0 0 0 0 -1 -1 -1 0 -1 0 1 0 -1 0 1 0 0 0 -2 0 0 -1 0 0 0 0 0 0 0 0 -1 0 0 0 -1 -1 0 -1 -1 -1 0 0 -1 -1 0 -1 0 -1 0 -1 1 0 0 -1 0 0 -2 1 0 0 0 -1 -1 -1 0 0 -1 0 0 -1 0 0 0 0 0 0 -1 0 0 0 -1 0 0 1 0 -1 -1 -1 -1 -1 0 -1 0 -1 0 0 0 1 0 0 0 0 0 -1 -1 1 0 0 -1 0 0 0 0 1 0 0 -1 0 0 0 -1 1 0 +65531 1245167 4292083749 4456306 5046442 11026 7514 6473 16333 37089 21825 17872 67138 252050 433298 577961 998962 930042 437986 193012 100825 47944 25250 15898 10466 5200 4082 1961 10768 18440 10280 1573 128 377 4777 20365 20666 6602 4685 6525 2980 1937 32161 79976 33289 6500 131794 286709 164834 40621 36980 94685 280205 536656 555901 797465 1073416 644338 43298 84501 144482 88514 97165 138692 117181 51058 10585 3485 1768 1105 2113 2560 3528 12985 16757 16762 10477 3650 1717 1445 776 98 261 640 485 121 53 153 221 265 457 461 617 565 1210 5722 17384 33973 43720 43300 30925 14346 9133 18625 36445 46553 42760 21458 5013 4985 10253 20228 24660 12589 1997 164 640 410 3602 4498 1517 144 393222 4294836223 4294967295 65536 0 0 0 65535 65535 0 0 0 0 0 0 0 4294967295 65535 65535 0 4294901760 4294901760 4294901760 0 4294967295 1 4294967295 0 0 131071 0 4294967295 4294901760 4294901760 65535 4294967295 65536 1 4294967295 0 4294967295 0 65534 65536 0 65535 0 0 65535 65536 4294901761 65535 4294901760 4294901760 4294967295 4294901760 4294901760 4294901760 0 0 0 4294967295 65535 4294901760 65535 4294901760 0 0 65535 65535 0 4294901760 4294901760 0 0 4294901760 4294967295 4294901760 65536 4294901760 65536 0 4294836224 0 65535 0 0 0 4294901760 0 4294901760 65535 4294967295 65535 4294901760 65535 65535 65535 131071 0 65535 4294836224 1 0 4294967295 65535 4294901760 0 65535 0 0 4294901760 0 4294901760 0 1 4294967295 4294967295 65535 65535 65535 0 1 0 0 4294967295 1 4294901760 0 0 1 4294901760 0 4294901760 1 +15 0 43 4 2 -22 -240 -169 323 310 -404 -71 457 -57 -79 108 -211 -506 -114 896 735 -1031 -1477 820 1659 -147 -1359 75 1573 317 -255 -1260 -1282 -89 240 1106 246 -224 63 73 137 -57 -128 -93 89 108 -36 -161 11 
201 31 -279 -140 292 236 -145 -65 -38 -116 -102 -24 185 73 -29 35 -8 -42 -49 31 -40 -215 79 218 111 8 -106 -77 -26 79 113 -29 -218 -135 241 267 -5 -13 -242 -330 -73 63 566 354 -333 47 -73 -471 -293 169 499 -38 -201 -33 -5 20 433 292 -328 120 95 -111 -482 -76 166 -274 -345 -332 552 476 150 -109 -128 227 -57 -259 140 449 -127 -251 -216 -168 -2 6 233 28 -21 150 -69 -225 114 232 -162 -117 240 123 -328 -205 64 -32 258 177 -158 15 84 -83 -147 53 116 -16 -71 -31 -4 18 54 -9 -34 31 16 -31 -41 -11 54 36 -29 -30 1 20 17 -10 -28 19 35 -16 -73 -42 86 78 -61 -147 94 251 12 -10 -145 -214 -73 109 37 -332 183 439 38 -157 -86 36 -52 7 181 104 -226 -49 42 -75 -91 -74 35 -11 118 79 21 -45 -48 188 112 -120 -248 28 88 -160 -8 158 114 -10 -119 -93 -27 10 168 116 -78 -52 -73 -44 63 35 -15 -11 5 3 -4 -1 2 1 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 -2 -1 0 0 -1 0 -1 0 -1 -1 0 -1 0 -1 0 0 0 0 0 0 -1 -1 -1 -1 -1 0 0 0 0 0 1 0 -1 0 0 0 0 0 -1 0 0 1 0 0 0 0 -1 0 -1 -1 0 0 1 -1 0 0 0 0 -1 0 0 0 0 0 1 0 0 -1 -1 -1 -1 0 0 0 0 0 -1 -1 -1 -1 0 -1 -1 -1 -1 -1 -1 0 0 1 0 0 1 0 -2 0 0 0 0 -1 0 0 0 -1 0 0 0 -1 0 0 0 0 -1 0 -1 0 0 -2 0 0 0 -1 -1 0 -1 -1 -1 0 0 1 0 0 0 -1 0 -1 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 -1 -1 -1 0 0 0 -1 0 -1 -1 0 -1 0 0 0 0 0 0 0 0 0 -1 0 -1 0 -1 -1 -1 1 -1 0 -1 0 1 -1 0 -1 -1 0 0 1 0 -1 -1 0 0 0 0 1 -1 0 0 0 0 -1 0 0 1 0 -1 0 0 0 0 0 0 -1 0 0 -1 0 -1 -1 0 -1 0 0 -1 0 -1 0 -1 0 0 -1 -1 0 +15 262187 4293525506 4283957008 20316483 168257 212098 17905 300557 815812 1603186 2853929 2773890 1852506 2574818 1652625 1651445 1280836 110692 9298 22018 25033 19585 27217 40522 78802 104864 76721 5669 23860 34801 6170 1289 4165 2561 52466 59845 11300 6605 19010 48365 76306 71314 58733 114229 324325 236205 7538 307690 277562 41845 1114 187889 192848 23425 244645 33332 194101 414928 249076 28265 54778 86681 217730 109657 28228 54325 1225 27261 63621 80068 71289 122713 46121 67588 56293 7281 28498 16265 5297 977 3240 1237 1217 2642 3037 2137 901 689 884 1586 5585 9160 9805 30445 63145 21125 51125 13250 143713 194165 32045 4000 32810 61892 4165 13906 6701 14045 6682 4329 47888 75904 8528 25664 37960 14261 9378 28324 19540 8033 5905 4293984291 393205 4294705155 196607 1 65535 0 0 0 0 0 0 0 1 4294967294 0 65535 65535 4294967295 4294901760 4294901760 0 0 0 4294967295 4294967295 65535 0 0 1 65535 0 0 65535 65536 0 0 65535 4294967295 0 4294901761 0 0 65535 0 0 1 4294901760 4294967295 65535 0 0 4294967295 4294967295 4294901760 4294967295 4294967295 65535 65536 0 1 65534 0 4294901760 0 4294901760 0 4294901760 0 0 65535 65535 4294836224 0 4294901760 65535 4294967295 65535 65536 0 4294901760 4294901760 1 1 0 0 0 0 0 0 4294901760 4294901760 4294967295 0 4294901760 4294901760 65535 65535 0 0 0 0 65535 65535 4294967295 131071 65535 65535 4294901761 4294901760 65535 65536 4294901760 65535 0 65536 65535 0 4294901760 0 1 65535 0 0 4294901760 0 65535 4294967295 4294901760 0 65535 65535 65535 4294901760 65535 +-3 0 2 -3 27 29 -80 -21 130 -255 -308 833 525 -1300 -425 1375 -20 -1210 230 940 -74 -422 -36 276 386 -670 -494 695 -75 -588 423 536 -290 -299 2 45 121 57 -95 27 57 -107 -26 135 18 -126 7 117 11 -110 -13 35 -39 -20 9 43 28 -2 -1 -19 -14 -10 -1 18 1 -5 7 9 17 -28 -50 0 28 33 10 -17 -13 -6 6 -9 -20 23 12 12 24 -44 -18 57 7 -85 -46 49 25 58 71 -51 -40 -49 -59 6 22 86 66 -37 -63 -106 -28 145 24 -23 62 -32 0 4 -98 -45 86 30 -71 15 -4 -25 53 108 26 -118 -37 1 -14 47 -30 -19 114 4 -119 -9 8 5 87 36 -11 -13 -85 -143 48 219 11 -124 -26 -7 -12 76 50 -31 -12 -30 -26 12 6 10 20 -3 -28 -14 13 23 -3 -10 4 4 0 -3 -1 -1 0 -2 -2 8 3 -18 -9 20 0 
-11 9 12 -10 2 29 -10 -10 -17 -50 9 81 45 -24 -113 -80 68 55 24 10 0 24 -37 -55 -14 4 49 33 -6 -8 -31 -19 20 25 3 -21 -24 -2 48 50 -33 -42 -34 -20 37 14 3 16 10 1 -30 -23 12 14 13 9 -16 -15 2 7 4 5 -3 -5 -1 1 0 -1 1 0 0 0 0 0 0 0 0 0 1 0 1 0 1 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 1 1 0 0 -1 -1 0 -1 0 1 0 -1 -1 0 0 0 1 0 -1 0 0 0 0 -1 0 -1 -1 0 0 0 0 0 0 -1 -1 0 0 -1 -1 0 -1 0 0 0 1 0 0 0 -1 -1 1 0 -1 0 0 0 0 0 -1 0 -1 0 0 0 -1 0 0 0 -1 0 0 1 0 -1 0 -1 0 -1 0 1 1 0 0 -1 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 -1 0 -1 0 -1 0 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 1 0 0 0 -1 -1 0 0 0 -1 -1 0 0 -1 0 0 -1 0 -1 -1 0 0 0 -1 0 -1 0 0 -1 -1 0 -1 0 -1 -1 1 0 -1 0 0 0 -1 0 0 1 0 0 0 -1 0 0 -1 0 0 1 0 -1 0 -1 0 0 -1 0 0 -1 1 1 0 0 1 0 -1 0 0 0 0 -1 1 -1 0 -1 0 0 0 0 -1 0 -1 0 0 0 -1 0 0 -1 0 1 0 0 0 -1 0 0 0 -1 0 0 0 1 0 0 -1 -1 0 +65533 4294770690 1900571 4293656496 4278255746 788753 1965625 2071250 1464500 936500 183560 77472 597896 727061 351369 466225 173501 2029 17890 9754 14698 18901 16200 13738 12221 1394 1921 1930 788 362 296 325 26 130 1073 2500 1873 389 205 117 929 288 2512 3573 7274 4517 3989 7642 4001 3517 7880 5725 15205 21809 1105 4868 16 11629 8296 5266 641 14473 14600 1370 2405 1261 13012 14242 89 8865 290 27674 50265 15497 725 5920 3461 1044 820 136 409 980 698 109 32 9 2 4 68 333 481 121 225 104 941 389 2581 8586 13345 11024 3601 100 1945 3221 2417 1125 1025 761 634 1017 2308 3589 2920 1769 205 356 901 673 365 337 229 65 4294770693 4294967291 1 131071 0 0 0 0 65536 65536 65536 0 0 0 0 0 65535 0 0 65537 0 4294967295 4294901760 65536 4294901760 65535 0 1 65535 0 4294901760 4294901760 65535 0 0 4294901760 65535 4294901760 65535 65535 0 1 0 4294967295 1 65535 0 0 65535 65535 0 65535 0 65535 65536 4294901760 4294901760 4294901760 65536 1 4294901760 65536 0 0 0 4294901760 0 0 65535 65535 65535 0 0 65535 4294901760 0 65535 4294901760 1 0 4294967295 0 4294901760 65535 4294901760 0 65535 4294967295 0 4294901760 4294901760 0 4294967295 4294901760 4294901760 131071 4294901760 0 4294901760 0 1 0 65535 4294901760 0 1 65535 65535 4294901760 0 131071 1 65536 4294901760 0 0 131071 65535 65535 0 4294901760 4294901760 0 4294901760 0 65535 1 0 65535 0 65535 0 1 4294901760 65535 +-3 0 1 1 -13 -10 92 -7 -440 12 825 453 -219 -1007 -670 382 91 322 926 397 -1109 -1269 1029 1488 -599 -1559 -249 1059 338 14 428 -544 -864 363 681 -127 -330 -37 50 134 10 -103 92 98 -97 -158 3 124 9 -46 16 57 6 -81 -27 59 26 -27 -26 -2 22 21 -11 -21 4 13 0 -11 -2 5 -1 -9 -9 14 15 -5 -7 -16 -9 43 23 -63 -17 57 -21 -24 96 -3 -144 -36 112 83 -62 -91 23 75 7 -60 -60 65 122 -76 -149 83 111 -72 -6 67 -53 -119 -13 169 139 -173 -230 97 197 34 -104 -96 104 55 -194 6 212 -42 -109 87 -18 -133 80 176 -6 -212 -134 154 173 -24 -99 -92 -14 141 58 -47 63 -123 -209 180 194 -159 -93 132 15 -103 4 72 4 -42 -13 19 9 2 -1 -9 7 5 -19 -10 15 9 -11 -3 7 -1 -6 2 7 2 -2 -12 -11 6 10 16 -3 -21 18 5 -44 4 56 5 -38 -22 6 12 -7 4 2 -4 4 17 5 -14 -1 1 2 -2 -4 -5 -2 5 7 -1 -7 -3 5 3 -4 -5 5 0 -11 -4 4 7 2 -8 -14 3 16 6 -12 -4 10 -2 -3 2 -7 -2 18 3 -18 -5 11 4 -5 -2 0 -1 0 1 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 -1 -1 0 -1 0 0 0 0 -1 0 0 0 0 -1 -1 0 0 0 0 0 -1 0 -1 0 -1 -1 -1 0 0 0 0 0 -1 1 0 0 1 -2 0 0 -1 0 -1 -1 1 0 0 0 -1 0 0 -1 0 0 0 0 0 -1 0 -1 0 -1 1 0 0 -1 -1 -1 0 -1 -1 -1 0 1 0 0 0 0 0 0 0 0 0 0 -1 0 -1 -1 0 1 -1 0 0 -1 -1 0 0 0 -1 0 -1 0 -1 -1 -1 -1 0 -1 -1 -1 0 0 1 0 -1 0 -1 0 0 -1 0 0 -1 0 0 0 1 -1 0 1 -1 0 0 0 0 -1 -1 0 -1 1 -1 0 -1 0 0 -1 0 -1 -1 -1 0 0 1 0 -1 0 0 -1 0 0 0 0 -1 0 0 -1 -1 0 0 1 -1 -1 0 0 -1 -1 0 0 -1 -1 0 0 -1 0 0 0 1 -1 0 0 -1 -1 0 0 
-1 -1 0 -1 0 -1 0 -1 0 0 -1 0 0 -1 0 -1 1 -1 0 0 0 0 -1 0 0 0 1 0 0 -1 0 0 0 1 0 -1 0 0 1 0 +65533 65537 4294377459 4294508636 851528 885834 1062010 594824 111965 1015085 2840242 3272985 2789282 1183482 114440 479120 878265 479890 110269 20456 10709 18068 34373 15385 2197 3505 6597 4210 1405 680 925 562 185 121 29 82 277 250 305 1930 4498 3538 1017 9225 22032 19433 12125 6154 3649 7825 20660 29090 17505 4525 16970 28730 49250 62309 39965 20032 13841 37672 46708 19450 18013 37376 44980 41672 30505 18265 20077 5573 19098 76081 62917 26073 10834 5200 1780 530 85 82 74 461 306 130 50 40 53 148 157 356 450 349 1952 3161 1928 180 65 20 305 221 2 8 41 29 50 58 34 41 25 137 65 68 205 292 160 104 13 53 333 349 262155 4294901755 4294901760 65536 0 0 0 0 65535 0 0 0 0 0 65536 0 0 4294901760 4294967295 4294901760 0 0 65535 0 4294901760 65535 0 0 65535 65535 4294967295 65535 0 0 131071 0 4294836225 0 65535 4294967295 1 0 65535 4294901760 0 0 4294901760 4294901760 4294901760 1 4294901760 4294967295 4294901760 4294967295 65536 0 0 0 0 0 65535 4294967295 65536 65535 4294901760 65535 0 65535 65535 4294967295 4294967295 4294901760 4294967295 0 1 65535 65535 4294901760 0 65535 0 4294901761 65536 65535 0 4294901760 65535 131071 65535 65535 4294901760 4294901760 4294967295 0 1 65535 4294901760 0 0 65535 4294901760 65535 65536 4294967295 0 4294967295 0 4294967295 0 65535 0 4294901761 0 4294967295 0 4294967295 4294901760 4294901760 4294901760 0 65535 4294901760 4294901760 4294901761 0 0 65535 0 1 4294901760 0 65536 4294901760 0 1 +0 0 -1 1 -17 13 -31 8 201 139 -95 -404 -110 289 14 -76 -17 -107 -99 760 931 -1023 -1102 109 158 314 134 -116 16 500 261 -915 -584 632 352 -66 28 -56 -104 15 126 19 -48 -81 -39 12 7 37 2 -14 4 26 26 -22 -18 -15 -13 16 22 -9 -28 2 16 4 -4 0 2 3 -1 -7 2 7 -2 -10 0 7 -1 3 14 -27 -54 29 63 20 -3 -44 -39 -17 -12 49 23 -3 14 2 5 -8 -18 -32 35 66 -25 -140 -72 137 79 -8 46 -35 -97 -37 75 40 -85 -40 -8 64 139 17 -207 -53 316 71 -287 -197 45 201 95 -1 4 -134 -94 61 41 -2 -15 2 -20 46 93 -74 -145 91 201 -71 -186 -61 -4 168 186 -57 -115 -91 -10 46 9 21 3 -14 15 4 -21 -9 14 10 -2 -10 -5 -4 -5 10 10 -2 -2 -5 -3 1 3 1 -1 -6 -4 7 9 -6 -16 -5 -5 26 40 -17 -35 -11 10 4 -10 5 7 4 7 -10 -17 2 11 4 -3 -4 1 0 1 1 0 0 -1 0 4 1 -1 -3 2 0 -4 -2 0 2 3 -1 -3 0 -1 1 4 3 3 -7 -11 -1 7 9 4 -6 -8 -9 -4 13 7 -3 -1 -2 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 0 0 0 0 -1 -1 0 0 0 0 0 -1 1 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 0 0 0 0 -1 0 -1 -1 0 0 0 -1 -2 0 -1 0 0 -1 -1 0 0 -1 0 0 -1 0 1 0 0 1 0 0 0 0 0 -1 0 0 -1 0 0 0 0 -1 0 -1 0 -1 0 0 0 -1 0 0 0 -1 0 -1 0 0 -1 -1 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 -2 0 -1 -1 0 -1 0 0 -1 -1 0 0 -1 0 0 1 0 0 -1 0 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 -1 0 0 1 0 0 0 0 1 0 0 0 0 -1 0 1 0 1 0 0 0 0 0 -1 -1 0 -1 0 -1 -1 0 0 1 -1 -1 1 0 0 -1 0 -1 0 -1 0 1 0 0 0 -1 0 0 0 0 0 -1 0 0 0 0 -1 -1 -1 0 0 0 0 0 0 1 0 -1 -1 0 0 0 0 -1 0 0 -1 -1 1 1 -1 0 0 +0 131071 917487 589793 9109705 172241 95621 5972 11738 587401 1913290 1226285 123560 31412 250256 905346 740480 128260 3920 11041 16237 8865 1665 1418 200 692 1160 549 425 565 788 272 16 13 50 53 104 49 10 925 3757 4369 1945 1810 2545 538 200 89 1348 5581 20225 23953 6305 3341 10778 7225 8825 4160 19610 45658 104897 121178 42426 9026 17972 12557 1685 229 2516 14125 29306 45442 38317 28240 37845 21506 2216 522 205 241 522 296 104 41 125 104 29 10 10 37 65 117 281 701 1889 1346 116 125 65 149 293 137 25 1 2 0 1 17 10 4 20 4 10 9 2 25 58 122 130 52 145 185 4294770695 4294901759 65535 65536 0 0 0 0 0 0 0 0 0 0 4294967295 0 
0 4294901760 65535 0 0 131071 4294901760 0 4294901760 0 65535 4294901760 0 0 0 65535 4294967295 0 4294901760 65534 65535 4294901760 65535 4294901760 0 65535 1 65536 0 0 4294901760 0 65535 0 4294901760 4294901760 4294901760 0 4294901760 0 4294901760 4294901760 0 4294967295 0 0 0 0 1 0 0 0 65535 0 0 4294836224 4294901760 65535 65535 4294901760 65535 4294901760 0 1 4294901760 4294901760 4294967295 0 0 0 0 0 0 0 65536 0 65535 65536 0 0 1 0 4294901760 65536 65536 0 0 4294901760 65535 65535 4294967295 0 4294901761 131071 0 65535 65535 65535 1 0 65535 0 0 65535 0 4294901760 4294967295 0 0 0 1 4294967295 0 0 65535 4294901760 131071 4294901761 0 +0 0 1 -1 6 -4 48 25 -95 -237 -138 352 252 -77 -76 -87 57 -255 -771 634 1327 47 -571 -651 16 270 -222 198 947 -525 -1469 -8 697 496 -33 -107 92 -62 -145 -86 69 153 16 -89 -17 23 10 -27 -20 32 23 -19 -6 5 3 -3 -15 -11 18 35 5 -40 -14 10 2 3 3 -6 -10 2 3 6 7 -5 -2 0 -11 -19 -6 45 35 -25 -22 -3 14 -17 -40 30 49 -9 -23 -11 9 10 -24 -9 71 28 -60 -102 -48 89 54 1 -5 34 58 -93 -110 19 28 78 24 -64 70 32 -272 -73 352 316 -43 -402 -102 99 -71 12 89 73 7 -47 -41 -31 36 31 -72 20 146 -27 -157 -86 -3 118 30 48 101 -92 -128 26 75 40 15 -53 -10 10 -45 -25 44 49 -8 -46 -19 15 9 2 -3 4 5 -2 -1 -6 -8 5 8 -2 -7 2 4 -2 0 -1 -4 2 0 -7 1 14 6 -24 -29 18 25 5 1 3 0 -25 -12 20 17 -6 -8 -7 -3 1 1 4 1 1 0 0 1 -1 -1 1 0 -4 -4 3 3 1 0 -2 -3 0 1 3 -1 -5 -1 2 -1 4 1 -7 -4 9 9 -4 -8 -6 -2 10 5 -5 0 1 0 -2 0 0 1 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 0 0 0 0 0 0 -1 0 -1 0 -1 0 0 0 0 -1 0 -1 -1 0 0 0 0 0 -1 0 0 0 0 0 0 -1 0 -1 0 1 0 0 -1 -1 0 0 -1 0 0 1 -1 0 0 0 0 -1 0 1 -1 -1 -1 0 0 0 0 0 0 -1 -1 -1 0 0 0 -1 0 -1 0 0 -1 -1 0 0 -1 -2 -1 -1 -1 -1 0 0 0 0 0 -1 0 0 0 0 -1 0 0 -1 0 -1 -1 0 -1 -1 -1 0 0 1 -1 0 0 -1 -1 -1 1 0 0 -1 0 0 -1 0 0 0 -1 -1 0 0 0 0 0 0 -1 -1 0 0 0 -1 0 1 0 0 0 -1 -1 0 -1 -1 -1 -1 0 -1 -1 0 0 1 -1 0 0 0 -1 0 0 0 0 0 -1 0 0 0 0 -1 -1 0 0 0 1 0 0 0 -1 0 0 -1 0 1 0 0 0 0 0 -1 0 -1 0 -1 -1 0 -1 0 0 -1 -1 0 0 0 0 -1 0 0 -1 0 0 0 -1 -1 0 -1 1 0 -1 -1 0 -1 -1 -1 -1 -1 0 0 0 0 0 0 +0 4294901761 4294705158 1638448 4279500705 142948 69433 13345 68274 996397 1763138 749842 73156 88488 1172434 2158025 731825 12538 12308 28421 28170 8177 818 829 1424 890 61 18 346 1549 1625 296 13 45 104 45 74 4 482 2061 1850 493 485 2500 2482 650 181 657 5825 14004 10225 2917 1181 12013 12461 6868 4672 5924 79313 223760 163453 20205 5185 13250 2258 2642 2257 5584 22045 32045 13933 3204 18665 17060 7225 3034 200 2650 4337 2180 586 85 25 29 37 89 68 53 20 1 20 49 197 612 1165 650 10 625 544 325 113 10 17 2 0 2 2 16 25 10 4 9 10 26 5 17 50 97 97 100 104 50 65536 4294836224 0 1 0 0 0 0 1 0 0 0 0 0 0 4294901760 4294967295 65535 0 0 0 65535 65535 65535 0 4294901760 4294901760 65535 0 0 65535 0 0 4294901760 4294901760 65536 0 4294967295 0 65535 65536 65535 0 4294901760 65536 4294967295 65535 0 0 4294901760 4294967295 0 4294901760 4294901760 0 4294967295 0 4294901759 4294967295 4294967295 0 0 4294901760 0 0 65535 4294901760 4294901760 65535 4294967295 65535 65536 65535 4294901760 4294967295 1 4294901760 0 65535 0 4294967295 0 0 0 4294967295 0 4294901760 65536 0 4294901760 65535 4294967295 4294967295 4294901760 65535 65536 65535 0 65535 0 0 65535 0 4294901760 65535 0 1 0 65535 4294901760 65536 0 0 4294901760 4294901760 4294901760 65535 65535 4294901760 65535 0 4294901760 0 65535 0 4294967295 4294901760 1 4294967295 4294901760 4294967295 4294967295 0 0 0 +0 0 0 -1 -3 2 -62 -34 67 208 130 -228 -103 0 -37 95 185 -472 -927 612 1064 131 -239 -271 -55 40 570 -538 -1697 259 1361 828 -85 -657 
-196 17 169 1 -208 -47 58 44 -21 50 37 -52 -21 53 38 -55 -35 9 -6 9 28 -4 -32 -18 1 32 15 -8 0 -3 -5 -9 -4 10 6 -8 -3 5 -5 -4 13 14 1 -28 -18 4 3 20 -4 -21 2 49 36 -42 -27 -10 -18 17 38 -12 -75 -3 51 49 20 -15 -23 -32 39 41 -20 -83 -42 46 -5 22 87 -24 -187 8 224 165 82 -274 -277 -10 72 90 -71 92 168 -35 -68 -50 -27 -8 43 94 32 -141 -106 32 19 80 46 -29 -2 25 46 -81 -100 1 -7 107 132 -101 -164 75 160 -28 -75 -39 -12 16 7 16 4 -13 -4 14 9 -9 -4 -3 -3 6 4 -2 2 -1 -5 -6 -6 6 14 3 -11 -13 -6 13 20 9 -19 -19 32 17 -30 -33 -5 28 16 3 -4 -9 5 2 -5 -8 -1 7 5 -1 -5 -3 0 4 4 -4 -6 0 3 2 -1 0 1 -1 -1 1 1 -2 -4 1 7 4 -5 -9 -2 8 3 -1 6 -1 -8 -7 4 10 -2 -10 -2 5 -1 -1 3 3 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 -1 0 -1 0 0 0 0 0 0 -1 -1 -1 1 0 -1 1 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 0 -1 0 0 1 0 0 0 1 0 0 0 0 0 0 -1 -1 -1 0 0 1 -1 0 0 0 0 -1 -1 0 0 0 0 0 0 -1 -1 -1 -1 0 -1 0 -1 0 0 -1 0 0 0 0 -1 0 0 0 0 0 0 0 0 -1 0 0 0 -1 -1 -1 -1 -1 0 0 -2 0 0 0 0 0 0 -1 -1 -1 0 -1 0 -1 0 0 0 0 0 -1 1 0 0 0 0 0 0 0 -1 0 0 0 0 -1 0 0 0 -1 -1 0 -1 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 -1 0 -1 -1 -1 0 -1 0 -1 0 0 0 0 0 0 0 -1 0 0 0 -1 0 0 0 0 0 0 0 -1 -1 0 1 -1 0 0 0 0 1 0 -1 0 0 1 -1 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 -1 0 -1 0 -1 0 0 0 0 -1 0 0 0 0 1 -1 0 -1 0 1 0 0 0 +0 4294901760 196605 4292804546 13631555 68884 10609 10394 257009 1233873 1149257 130562 4625 614344 2946890 2537905 438874 38705 28562 45473 5300 2941 4073 3250 4469 1306 117 800 1348 1025 289 9 106 116 100 34 41 365 785 340 409 457 2405 3060 829 613 1588 5634 5002 625 1553 3202 7289 3880 509 8145 35033 77401 81800 76829 13284 13505 29449 7124 793 10685 20905 12260 6761 2957 629 8677 10001 11498 27625 32521 26384 7146 400 305 185 212 162 25 45 20 5 61 72 205 290 205 481 722 1313 1989 809 265 97 29 89 50 26 34 16 32 36 13 1 2 2 5 17 65 106 68 10 37 113 116 104 29 4294967295 196611 4294901760 0 0 0 0 0 0 0 65535 0 0 0 0 4294901760 4294901760 0 0 0 4294967295 131071 4294901760 1 0 0 0 0 4294901760 4294967295 0 65535 65536 0 65536 0 0 0 4294967295 65535 65536 65535 0 4294901760 65535 0 0 4294901760 4294967295 65535 65535 65535 4294901760 0 0 65535 0 0 0 4294901760 0 4294901760 4294967295 4294967295 0 65534 0 0 4294901760 4294967295 4294901760 4294901760 0 0 4294901760 1 0 0 0 65535 0 4294901760 0 4294901760 65535 65535 0 0 0 4294901760 0 0 0 0 65535 4294967295 65535 65535 65535 0 0 0 65535 0 65535 0 0 0 4294967295 65536 65535 0 65536 4294901760 0 4294901761 65536 0 0 0 4294901760 0 0 4294901760 4294901760 4294901760 0 0 65535 0 65536 65535 65535 1 0 +2 0 0 3 4 -6 66 24 -98 -168 -56 209 185 -75 -82 17 -53 -512 -521 919 820 -263 -252 -138 -78 -321 -434 1180 1550 -993 -1229 -160 148 165 49 217 33 -309 -193 236 148 5 0 -37 24 -1 -69 -48 19 81 16 -35 12 7 -31 -18 19 18 -11 -2 10 -1 0 3 2 -6 -4 -2 -3 2 -5 2 12 7 3 -16 -15 3 9 0 3 14 5 -44 -36 31 29 3 -12 1 31 -19 -54 -8 19 43 21 -9 14 -40 -70 26 58 31 3 -42 -9 26 0 -84 -98 105 124 21 -1 -36 -63 -61 97 159 2 -249 -144 118 80 -4 -50 76 124 -86 -111 -23 9 59 10 -19 20 8 -40 -32 19 69 -24 -60 98 62 -173 -113 134 225 67 -201 -114 14 12 20 -6 18 16 -4 -3 -1 1 -11 -8 12 8 -1 2 -5 -13 6 16 1 -10 -4 6 6 -2 -2 9 -5 -20 0 24 19 1 -39 -28 8 2 24 20 -13 -15 -2 3 8 4 -5 -1 1 0 -2 0 1 -1 -1 0 1 0 0 0 -2 -3 0 0 2 2 0 -2 0 1 -2 -1 3 -3 -5 -1 8 2 -4 0 2 4 -5 -8 -2 0 3 0 2 6 0 -2 -5 -2 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 1 -1 0 0 0 1 0 -1 -1 0 0 0 -1 0 -1 0 -1 0 0 0 0 0 0 1 0 0 0 0 0 -1 -1 0 0 0 0 0 0 0 0 0 0 1 0 0 -1 0 0 1 -1 0 1 0 1 0 0 -1 0 0 1 1 0 -1 0 0 0 0 0 0 1 0 0 0 0 0 -1 0 0 0 -1 0 0 -1 0 0 -1 0 0 0 1 0 0 0 
-1 0 0 -1 0 1 0 1 0 0 0 -1 -1 0 0 0 -1 0 0 0 0 0 0 -1 0 -1 0 -1 0 0 0 0 0 0 0 -1 -1 1 -1 0 1 0 0 1 0 0 -1 0 1 -1 0 0 -1 -1 0 1 1 -1 -1 0 0 0 -1 0 0 -1 -1 -1 -1 0 -1 0 0 0 0 0 0 0 0 -1 0 -1 0 -1 0 -1 -1 -1 -1 0 -1 0 -1 -1 0 0 0 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 -1 -1 -1 0 0 0 -1 -1 -1 0 0 0 0 0 0 -1 1 0 -1 0 0 0 0 1 1 -1 0 -1 0 0 0 0 -1 0 -1 -1 0 -1 0 0 +2 196608 4294574084 1572930 4284022686 46817 39850 7013 264953 1116002 741569 82548 109125 1580756 3388549 1536041 49129 49490 96570 92945 21929 1369 577 7065 6922 1481 193 1285 685 125 101 9 40 20 13 29 193 265 234 81 205 1961 2257 850 145 1322 2980 2210 522 1796 5576 4325 1773 757 7056 20629 15817 1297 7690 34690 62005 34660 6416 8276 22772 12850 3562 461 464 2624 5122 4176 13448 42698 68581 44890 13192 544 360 272 10 122 208 65 29 205 257 116 72 8 106 400 937 1522 848 580 569 229 73 41 2 4 1 2 1 0 4 9 4 4 4 5 10 34 65 20 4 41 68 9 4 36 4294705150 131070 0 0 0 0 0 0 0 65536 0 0 0 4294901761 0 65536 4294901760 65535 0 65535 65535 65535 0 0 65536 0 0 4294901760 65535 0 0 0 0 65536 0 65535 65536 65535 1 1 4294901760 0 65537 4294901760 0 0 0 1 0 0 65535 0 65535 4294901760 0 65535 0 1 0 65535 4294901760 65536 65536 0 4294901760 65535 0 65535 0 0 4294901760 4294901760 4294901760 0 0 0 4294901760 131071 65535 1 65536 0 65535 4294901761 0 4294967295 65536 4294901761 65535 0 65535 4294901760 4294967295 65535 65535 0 0 0 4294901760 4294901760 4294901760 4294901760 4294967295 65535 65535 4294967295 0 0 0 4294901760 0 65535 4294901760 0 65535 4294967295 65535 0 4294967295 65535 0 0 4294901760 1 65535 0 65536 4294901761 4294901760 0 0 65535 4294967295 4294901760 0 +-2 0 2 -5 0 -6 -76 28 169 68 -93 -131 48 42 142 -73 -681 -304 490 1026 222 -736 -229 140 571 -149 -1118 -653 102 1283 683 -292 -136 -204 17 -185 -256 143 129 76 47 -53 -64 -12 57 40 -6 -49 -15 -12 -10 14 -27 1 48 38 -11 -57 -19 21 9 6 4 -5 -4 -2 2 0 -3 3 4 -7 -11 5 7 1 -6 -10 -3 26 28 -22 -20 -14 -12 11 11 -2 -21 9 25 14 12 -23 -28 8 31 -26 -63 31 53 10 -2 -15 -25 -4 47 33 -3 -65 -51 6 40 19 -103 24 214 46 -119 -179 -70 103 83 0 -127 1 130 97 17 -98 -60 0 49 20 -39 -43 -7 30 8 11 -24 -2 112 60 -31 -203 -155 105 90 52 -19 0 46 -5 -7 -9 0 -15 -5 1 -8 0 5 10 4 -5 -2 -7 -8 8 5 2 6 -5 -12 -5 5 4 -10 -6 -1 15 13 -8 -13 2 24 -4 -31 -11 14 24 4 -19 -11 5 7 -1 -4 -1 1 1 -1 0 -1 2 3 -2 -5 -1 5 0 -6 -1 6 2 -7 -3 2 3 2 -1 -3 -4 -4 3 6 3 -3 -4 3 0 -6 0 6 -4 -8 3 6 -3 -1 4 0 -5 0 2 -1 -1 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 -1 -1 0 -1 -1 -1 0 -1 0 0 -1 0 0 -1 0 -1 -1 -1 0 0 0 0 0 0 -1 0 0 -1 0 0 -1 -1 0 0 0 0 0 0 0 0 0 0 0 1 -1 0 0 0 0 1 0 -1 0 1 0 0 -1 -1 0 -1 -1 0 0 0 -1 0 0 0 0 0 -1 0 -1 -1 0 -1 0 -1 -1 -1 0 0 -1 0 0 0 0 0 0 -1 0 0 1 0 0 0 0 0 0 0 -1 0 0 0 -1 -1 0 0 -1 0 -1 0 0 0 0 -1 0 0 0 0 -2 0 0 0 0 -2 0 0 0 0 0 0 -1 -1 -1 -1 -1 0 -1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 -1 0 -1 -1 0 0 -1 -1 1 0 0 0 -1 0 0 -1 0 -1 0 0 -1 0 0 0 0 1 -1 -1 -1 -1 0 0 1 0 0 0 -1 -1 0 0 0 0 -1 0 -1 0 0 0 0 -1 -1 0 0 -1 0 0 0 0 -1 0 0 0 0 0 -1 -1 -1 -1 -1 0 1 0 1 0 0 0 -1 0 -1 -1 -1 -2 -1 0 0 +65534 4294639618 4294574080 1900468 4456617 25810 4068 25493 556177 1292776 590980 72041 348242 1676333 1656493 551753 60112 34514 85985 22417 5018 4240 4849 2437 369 296 730 3748 3370 802 117 41 20 4 18 65 146 50 136 685 1268 596 265 125 522 821 673 848 1637 4930 2909 229 641 3298 4234 2637 1961 11185 47912 46202 15509 6889 16130 26309 9893 3600 2801 3370 949 185 580 16144 42170 35050 10804 361 2141 130 225 26 64 125 41 53 128 29 61 169 41 136 226 233 173 592 1082 772 377 146 50 17 2 1 5 13 26 25 37 40 58 13 5 25 25 45 
25 9 36 52 73 45 17 25 131072 4294967295 0 0 0 0 0 1 0 0 0 0 65536 65536 0 4294967295 4294901760 4294967295 4294901760 0 65535 4294901760 4294901760 4294967295 0 0 0 65535 4294901760 0 4294967295 0 0 0 0 0 65536 65535 0 65536 4294901760 65536 0 4294967295 4294901760 65535 0 65535 0 0 65535 4294967295 4294901760 4294901760 4294967295 0 65535 0 0 4294901760 0 1 0 0 0 65535 0 4294967295 0 65535 65535 0 4294901760 0 0 65534 0 4294836224 0 0 0 4294967295 4294967295 65535 65535 0 0 0 65535 0 0 0 4294901760 4294901760 65535 4294901760 131071 0 4294901760 0 65535 65535 4294901760 0 0 4294901761 4294967295 65535 65536 0 4294901760 65535 0 4294901760 4294901760 0 0 4294967295 0 65535 0 4294901760 0 0 4294901760 4294967295 4294967295 65536 65536 0 4294901760 4294901760 4294967295 4294967294 0 +6 0 8 5 -14 26 79 -190 -317 173 282 80 -30 -65 -121 -544 -774 1348 1684 -487 -556 -432 -767 27 1644 790 -1216 -1749 107 1011 -154 -112 -44 -127 -49 565 407 -327 -230 23 56 -38 -51 75 101 -22 -52 -54 -25 23 21 54 54 -80 -76 6 1 16 17 27 -1 -24 8 16 -6 -15 -5 7 7 10 10 -16 -12 0 0 2 16 5 -23 -33 -7 39 11 -1 14 -24 -36 17 21 0 -4 15 13 -42 -67 68 137 2 -61 -90 -32 21 0 48 34 -15 20 -20 -68 -43 49 129 51 -246 -269 169 145 61 122 59 -116 -219 22 232 115 -84 -31 -137 -127 114 148 -30 -104 -21 36 -15 -49 58 66 -16 11 -70 -180 9 153 145 -93 -110 98 126 24 -105 -42 -23 -23 24 14 14 10 -11 -13 -8 -1 8 0 1 3 0 2 -9 -16 4 19 12 -1 -24 -8 11 0 -11 -3 11 -3 -8 -11 5 11 31 23 -31 -10 3 -7 -12 6 8 -18 -9 12 11 -11 -4 8 1 -7 -3 0 5 7 1 -4 -3 8 -6 -16 0 3 8 6 -2 -1 -6 -7 3 12 1 -9 -9 -7 15 21 -3 -11 -15 0 3 -14 2 18 17 5 -26 -16 9 7 2 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 -1 0 0 -1 -1 0 0 0 0 -1 0 0 -1 0 0 0 0 -1 0 0 0 1 -1 -1 -1 0 -1 -1 1 0 0 0 0 1 -1 -1 1 0 0 0 0 1 0 0 0 0 0 0 0 0 0 -1 0 0 0 -1 1 0 1 -1 0 0 -1 0 0 0 -1 -1 1 -1 -1 -1 -1 -1 0 -1 -1 0 -1 0 -1 0 0 0 -1 0 0 0 0 0 0 -1 -1 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 1 0 0 0 0 0 1 -1 0 -1 -1 1 0 0 -1 0 0 -1 0 0 0 0 0 0 0 -1 0 -1 0 0 -1 -1 -1 0 -1 -1 0 -1 0 1 0 0 0 -1 -1 0 0 -1 0 0 0 0 0 0 -1 0 -1 0 -1 0 0 0 0 -1 -1 1 -1 0 -1 0 0 0 0 0 1 1 -1 0 -1 0 0 0 -1 1 0 -1 0 -1 -1 -1 0 0 0 0 0 1 -1 0 0 0 -1 -1 -1 -2 0 -1 -1 1 0 -1 0 0 0 0 0 -1 -1 0 0 0 -1 1 1 -1 -1 -1 0 0 -1 0 0 +6 327688 1769458 4282515535 11402947 85924 5125 310577 2416180 3073025 495760 589018 3326836 4537657 1033570 36260 18065 321626 272578 53429 4580 8226 10685 5620 1154 3357 9316 5812 257 1018 577 320 261 74 149 356 144 4 281 1618 1570 122 772 1585 441 241 1933 9113 18773 11821 1465 2304 1381 800 6473 19042 63117 100922 24746 18365 61417 54308 20281 19730 29125 22804 11257 1521 5765 4612 5021 32481 44434 20749 25480 11601 2293 1105 392 221 233 65 1 9 85 272 505 577 185 121 130 73 146 1082 1490 109 193 100 405 265 137 65 58 25 50 25 100 256 73 40 37 58 145 162 274 450 346 9 200 613 701 337 131079 4294901760 0 0 0 0 0 0 1 0 0 0 65535 4294901760 65535 0 4294901760 0 65535 0 4294901760 0 65536 4294967295 65535 4294967295 1 0 65536 4294967295 1 0 65536 0 0 0 0 4294901760 0 4294901760 1 4294901761 0 65535 0 4294967295 4294901761 4294967295 4294967295 4294901760 65535 65535 65535 0 65535 0 0 4294901760 65535 0 4294901760 65536 0 0 0 0 0 0 0 131071 0 0 65536 65535 4294967295 1 4294901760 0 65535 0 0 0 65535 65535 4294901760 4294967295 4294901760 65535 65535 1 0 4294967295 0 65535 0 0 4294901760 4294901760 4294901760 0 0 4294967295 4294901761 4294901760 0 0 65536 4294901761 4294901760 0 4294901760 1 65535 4294967295 65535 0 0 4294901761 0 4294901760 4294967295 65534 4294967295 1 65535 0 0 4294967295 0 
4294901760 65537 4294967295 65535 4294901760 0 +-3 0 -15 2 -28 -6 157 182 -82 -450 -170 328 164 -134 -621 424 1594 71 -1192 -1103 155 697 -706 406 2165 131 -1573 -1665 -203 1183 306 103 235 -208 -172 -109 -169 21 115 207 1 -73 79 19 -54 -51 37 13 -54 -13 18 29 23 9 7 -39 -25 3 -10 12 19 17 7 -27 -14 5 -1 0 7 6 -3 -13 2 10 -13 -11 7 21 20 -9 -17 -16 -7 -1 4 39 27 -43 -30 6 8 -7 -32 13 19 27 24 -8 16 -7 -34 -55 -8 76 39 -39 -27 3 42 -21 -77 -35 -17 52 -12 42 135 86 34 -278 -280 130 238 86 -119 -52 133 -60 -170 42 78 32 -6 19 85 -24 -77 -82 -34 117 89 -78 -24 72 35 -178 -179 115 131 49 -22 -31 17 -12 -10 7 2 -6 0 3 -1 -8 -3 8 0 -2 2 -4 -5 7 4 -3 3 3 0 -2 6 -3 -10 -9 -4 15 9 -3 0 -5 -12 -3 -2 23 24 -14 -14 -8 -6 -3 -7 18 23 -5 -10 -11 0 3 -5 0 4 1 1 -6 -11 1 8 10 4 -13 -8 6 4 -5 -4 0 0 2 3 0 -2 0 2 0 -2 3 7 -5 -8 -5 -2 1 -1 1 -3 3 5 2 2 -5 -3 -1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 -1 0 0 -1 -1 0 1 0 0 0 0 0 0 0 -1 0 0 1 1 0 -1 0 1 1 -1 0 0 0 0 0 -1 0 0 0 0 0 -1 -1 -1 0 0 -1 -1 -1 -1 -1 0 -1 0 -1 -1 0 0 -1 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 -1 1 -1 0 -1 0 0 -1 -1 0 0 0 0 0 0 0 0 -1 -1 0 0 0 -1 0 0 0 0 -1 -1 -1 -1 1 0 -1 0 -1 -1 0 0 -1 0 0 0 -1 0 -1 0 -1 0 0 0 0 -1 -1 0 0 0 0 0 0 0 -1 -1 -1 -1 0 1 0 0 -1 -1 -1 0 0 0 0 1 0 -1 0 0 -1 -1 -1 0 0 -1 0 0 -1 -1 -1 -1 0 0 -1 1 0 0 -1 0 0 1 0 1 1 -1 0 -1 -1 0 0 0 -1 0 -1 0 0 0 0 0 0 0 0 1 0 -1 0 +65533 196593 4294639588 11927709 4265541550 136484 44852 565417 2545877 2637473 509834 663272 4704386 5246554 1440698 104245 98489 41465 29002 56074 5330 6602 5517 1538 3085 1165 610 1570 634 244 650 778 221 1 85 178 104 290 490 481 545 50 1537 2578 936 113 1193 1090 640 305 4181 5840 3042 738 2205 7154 2993 1908 25621 78440 95300 64040 16865 21289 30664 7108 397 7801 12653 14845 14005 5760 32909 45266 19562 1445 433 149 40 9 65 73 4 20 74 25 18 4 45 181 241 90 25 153 533 772 260 45 373 554 221 9 25 17 37 122 164 185 100 41 16 4 9 4 4 13 74 89 5 2 18 29 4294639618 4294967293 65537 0 0 0 0 0 0 0 0 0 0 0 65535 1 65536 0 0 0 0 0 0 0 4294901760 4294967295 0 0 0 0 0 0 0 0 4294901760 0 0 65535 4294901760 65535 1 0 0 0 65535 65536 1 65535 65537 65535 0 0 65535 0 0 4294967295 65535 4294901760 4294967295 4294967295 4294901760 4294901760 65535 4294901760 0 0 1 0 4294901760 0 0 131071 65535 65535 4294901760 65535 0 0 0 4294901760 65535 0 65535 0 4294901760 4294967295 131071 4294901760 4294901760 65535 4294901760 0 4294901760 4294901760 4294901760 0 0 4294967295 0 0 0 4294901760 4294967295 65535 1 4294901760 4294967295 0 0 1 65535 4294901760 4294967295 0 65535 4294901760 4294967295 65535 4294901760 1 4294901760 0 1 65537 65535 4294967295 0 4294901760 4294901760 0 0 0 0 1 65535 +-6 0 1 1 -3 -57 -196 131 360 141 -12 -282 -388 158 1157 -22 -1518 -949 391 1258 -201 -593 530 1538 764 -2036 -992 530 -69 -22 29 243 265 55 -180 -256 -18 230 99 -28 21 -48 -38 -38 -37 19 7 49 39 -21 -6 -18 -25 7 11 9 7 -5 -6 -5 3 -1 -7 -3 -2 10 9 -5 -2 2 6 -10 -15 -3 -2 17 22 -5 -23 -16 14 39 19 -52 -49 20 28 15 -7 -7 8 7 13 -11 -27 -15 26 34 -17 -59 -22 38 9 16 22 -15 -15 5 19 6 16 -47 -51 38 45 -45 -59 -1 -28 72 52 -37 -33 109 188 -52 -119 -240 -125 279 134 -159 -74 122 74 -45 -55 -31 58 70 30 -92 -129 -57 34 145 -16 -60 47 71 31 -69 -53 -2 20 15 -11 -4 9 2 -1 -1 -5 -11 -2 18 11 -9 -10 -4 -2 6 6 3 2 -7 -9 2 15 1 -22 -6 15 15 1 -15 -11 15 22 -20 -22 11 4 -4 -2 7 6 5 1 -9 -1 3 0 -5 -7 2 6 3 -3 -6 -1 5 1 -4 -4 3 3 4 3 -8 -5 1 2 0 -2 0 4 1 -3 -5 -2 3 0 3 3 -3 -1 1 -1 -2 2 4 -1 -4 0 2 1 -1 0 0 0 0 0 0 
1 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 -1 0 0 0 0 0 0 -1 -1 0 0 -1 -1 -1 0 -1 0 -1 0 0 0 -1 0 0 0 0 0 -1 0 0 0 -1 0 -1 1 1 0 -1 1 -1 -1 0 -1 1 1 -1 -1 -1 0 0 0 0 0 -1 1 0 0 -1 -1 0 0 0 0 -1 0 0 -1 -1 0 0 0 1 0 0 0 0 0 0 0 0 -1 0 0 0 1 0 -1 0 1 0 -1 -1 0 -1 0 -1 0 0 -1 0 0 -1 0 0 0 0 0 -1 0 0 -1 0 0 0 -1 -1 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 0 0 -1 -1 1 -1 0 0 -1 -1 0 -1 0 -1 0 -1 0 0 -1 0 -1 0 -1 0 0 -1 0 0 -1 0 0 0 -1 0 -1 0 0 -1 0 -1 0 0 0 0 0 0 0 0 -1 0 0 0 -1 0 0 0 0 1 0 0 0 -1 -1 -1 0 0 -1 0 0 0 0 0 0 0 0 -1 0 -1 0 1 0 -1 -1 -1 0 0 -1 0 -1 -1 0 0 +65530 65537 4291297277 8650556 9240936 79668 175508 1339133 3204925 1735445 392050 2646344 4728992 1264964 5245 59890 73250 97936 53224 10585 2745 2888 1730 2450 1962 360 674 202 74 61 10 58 104 106 8 136 234 293 509 785 1717 3065 2801 1009 98 113 290 954 1832 3770 1928 337 709 250 397 2465 4045 4050 3482 5968 4073 12970 38048 71761 93466 43237 20360 7501 3986 8264 9364 19890 22181 3856 7250 5722 2813 625 137 85 2 146 328 202 116 40 45 53 85 226 520 450 226 346 884 605 32 53 61 82 10 25 53 45 45 26 17 25 25 73 26 4 4 17 34 13 9 18 2 5 20 17 131072 4294901761 0 0 0 1 0 0 65536 0 0 0 0 0 0 0 0 0 0 0 65535 4294901760 0 0 0 4294967295 0 4294967295 65535 65535 65535 0 65535 0 0 65535 0 65535 131071 1 131071 4294967295 4294901760 65537 4294967295 65535 0 0 131071 0 4294967295 0 0 65535 4294901760 65535 0 1 0 0 0 4294901760 0 65536 4294901760 65536 4294901760 65535 65535 65535 4294901760 0 65535 0 0 65535 4294901760 0 4294901760 4294967295 65536 0 0 0 0 0 4294901760 4294967295 65535 0 4294967295 4294901761 0 4294967295 4294901760 4294901760 4294901760 0 65535 65535 65535 4294901760 0 65535 0 65535 65535 4294901760 4294901760 0 0 0 0 65535 0 65535 0 65536 0 4294901760 4294967295 0 65535 0 0 0 4294901760 4294901760 65536 4294901760 4294967295 0 65535 4294967295 0 +0 0 12 -14 36 25 -106 -176 44 272 -61 -181 319 -189 -1305 236 1476 914 -216 -1126 -203 827 1174 -1195 -2108 -45 879 1016 58 -341 -48 -91 -35 200 182 -101 -100 -68 -9 27 14 -1 -41 14 58 -6 -50 -16 13 26 19 -9 -23 -14 11 18 1 -8 -3 -5 -3 7 2 -4 -3 -1 -3 4 8 -2 -4 -2 1 -2 -4 5 7 0 -1 2 10 -5 -11 -16 -9 15 1 -11 -3 24 13 -23 -10 14 0 -16 -19 28 44 9 -13 -48 -20 31 14 -11 -7 13 11 -23 -40 31 81 18 -36 -102 -64 107 136 -40 -117 -60 40 55 -43 -10 76 -49 -224 35 200 205 80 -254 -188 123 174 -58 -107 0 0 5 66 47 -7 -123 -113 65 97 30 -57 -29 41 32 -16 -21 19 10 -11 -22 -11 15 14 4 -1 -8 -2 -1 3 2 -5 -6 0 10 3 -10 -11 9 19 2 -6 -21 -16 13 19 4 -18 -25 -11 35 33 -5 -15 -22 -4 18 11 -10 -6 -1 0 3 0 -1 -2 -1 -1 3 1 -2 -2 2 5 0 -3 -6 -2 8 7 -5 -7 -1 5 2 -3 -5 -1 5 0 -3 1 5 1 -9 -8 4 7 2 -1 0 2 -4 -5 1 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 -1 0 0 0 0 0 0 0 -1 -1 0 0 0 0 0 0 0 -1 0 0 -1 0 0 -1 1 0 0 -1 -1 0 -1 0 0 -1 0 0 0 0 -1 0 0 -1 1 0 0 0 0 0 0 -1 -1 1 0 -1 0 -1 0 0 1 0 0 0 -1 -1 0 0 0 0 0 0 -1 0 0 -1 0 0 -1 -1 0 0 0 0 0 -1 -1 1 0 0 0 0 -1 0 0 0 0 -1 0 1 -1 0 0 0 -1 0 -1 0 0 0 0 -1 1 0 1 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 -1 0 0 0 -1 -1 0 0 -1 0 -1 0 0 -1 0 -1 -1 0 0 -1 0 -1 0 0 0 0 0 0 -1 0 0 0 0 -1 0 0 0 0 0 0 0 -1 0 0 1 0 0 0 -1 0 0 -1 -1 0 -1 0 -1 -1 0 0 0 0 -1 -1 -2 0 0 -1 0 0 0 -1 -1 0 0 -1 0 0 1 0 0 0 1 0 0 0 -1 -1 0 0 0 0 0 1 0 0 -1 0 -1 0 -1 -1 -1 0 0 -1 0 1 -1 0 -1 -1 -1 -1 0 0 +0 4294049804 1638436 4283498390 17825836 36482 137482 1758721 3013972 1314532 725138 2806301 4445689 1804897 119645 10585 41225 43325 14624 810 197 1877 3400 2756 845 442 725 445 65 34 58 20 10 25 68 20 5 41 49 5 125 377 306 122 585 698 296 256 1145 2017 2473 1361 317 218 
650 2561 6885 11700 15545 20096 17289 4625 1949 8177 51401 82025 70916 50473 33640 11449 25 6565 15178 16994 10309 4090 2705 697 461 605 346 212 65 5 13 61 100 109 202 365 477 425 377 949 1346 1114 709 340 221 37 9 1 5 10 5 8 25 45 68 74 50 29 34 26 9 26 82 80 53 1 20 26 2 0 1 0 0 0 0 0 0 0 0 0 0 4294901761 0 0 0 4294901760 65535 0 0 0 65535 4294901760 0 131071 0 4294967295 4294901760 0 65535 0 4294901760 0 131071 0 0 0 4294967295 1 65535 65535 65536 0 4294901760 65535 0 0 4294901760 0 65535 4294901760 65535 0 0 4294967295 1 0 4294901760 0 0 65535 4294901761 0 4294901760 4294901760 0 0 131071 65536 0 0 0 4294901760 0 0 0 65535 0 4294967295 0 65535 65535 4294901760 4294901760 65535 4294901760 4294901760 0 0 0 65535 0 4294901760 0 0 0 4294901760 0 1 0 65535 4294901760 65535 65535 4294967295 0 0 4294967295 65534 4294901760 0 4294901760 65535 4294901760 0 1 0 1 0 4294967295 0 0 65536 0 65535 65535 4294967295 65535 4294901760 65536 65535 4294967295 4294967295 0 +8 0 -3 7 -40 150 455 -378 -970 59 547 471 166 563 1397 -1632 -2377 -33 1088 904 -294 -968 -928 418 150 1125 1148 18 2 -916 -460 220 146 -60 -97 115 122 -27 -17 -76 -91 31 76 52 33 -60 -59 -33 -5 36 12 2 12 -17 -39 -14 16 40 7 -24 -2 0 -9 3 6 3 1 -3 -2 -7 -11 8 11 1 -4 3 9 -12 -11 7 9 -11 -5 7 -9 -15 -10 27 21 9 25 -15 -31 -30 6 14 -19 9 12 -21 -22 21 9 14 14 -2 17 10 -1 -29 16 0 -29 -14 19 -10 -23 -22 -67 23 18 42 29 73 103 -26 46 -146 -181 -32 37 108 -3 -99 -167 70 140 253 196 -246 -186 -29 7 44 8 11 -37 -7 37 80 57 -47 -8 -42 -32 -11 -10 12 -5 9 16 5 -12 -12 4 17 10 -18 -20 2 8 4 -3 5 3 -4 5 7 -4 -15 -2 13 7 -9 -5 8 15 -10 -14 -7 -4 6 6 -3 -12 -6 -1 13 6 -5 -4 4 7 -2 -5 -2 2 1 0 0 -1 -1 -1 1 -1 0 2 4 1 -4 3 1 -1 -8 -2 6 2 -7 -8 1 0 10 9 -6 -5 2 10 -5 -8 -8 -6 3 4 5 2 -2 0 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 0 0 -1 0 1 0 0 0 0 0 -1 0 0 0 0 0 -1 0 -1 0 0 0 -1 0 0 0 0 0 0 0 1 0 -1 1 1 -1 0 0 -1 0 0 -1 -1 0 -1 0 0 -1 0 0 0 -1 0 1 0 0 -1 -1 -1 1 -1 1 0 -1 -1 0 0 -1 0 0 0 0 -1 -1 0 1 0 -1 0 0 0 0 0 -1 0 -1 0 0 0 -1 0 0 0 0 0 -1 0 0 -1 0 0 -1 0 -1 0 -1 0 -1 0 -1 1 0 0 0 0 -1 0 -1 0 -1 0 0 1 0 0 -1 -1 0 1 -1 0 0 0 -1 0 -1 0 0 0 0 -1 0 -1 -1 0 0 0 -1 0 0 -1 -1 -1 0 -1 0 0 0 -1 0 0 0 0 0 -1 0 0 0 1 0 -1 0 -1 -1 -1 -1 0 -1 -1 0 0 0 0 0 -1 0 -1 0 0 0 -1 -1 0 0 -1 0 0 1 1 1 0 0 0 1 0 -1 0 -1 -1 0 -1 -1 0 0 0 1 0 0 -1 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 -1 0 0 +8 524285 9895896 4270195143 3931190 521050 344525 4615033 5651218 2000960 1023460 1035908 1288125 1318228 839060 260000 24916 22634 15613 6065 9242 8480 4689 4570 1321 148 433 1717 1856 625 4 90 45 10 53 185 122 25 225 170 202 74 306 829 522 850 1861 232 442 585 925 277 200 389 842 256 1037 461 1013 5018 2088 6170 11285 23432 33785 13033 9810 32789 83609 98932 35437 1985 185 1418 7769 5458 1828 1145 244 106 281 288 305 424 404 80 34 25 74 241 173 130 89 325 245 52 45 180 170 61 32 53 29 5 0 2 2 1 20 17 10 65 40 53 65 100 117 29 125 128 45 41 4294836226 4294901760 65535 0 0 0 0 0 0 0 0 0 0 0 4294901760 4294967295 65535 0 65535 1 0 0 65535 0 0 65535 65535 0 65535 0 0 0 1 131071 4294901761 0 65535 4294901760 65535 65535 4294901760 0 4294901760 65536 0 4294967295 131071 131071 4294901760 65535 4294901760 0 0 4294967295 65536 4294901760 0 0 4294901760 4294901760 0 4294901760 0 0 4294901760 0 65535 4294901760 4294901760 4294901760 4294901760 4294901760 1 0 4294901760 4294901760 4294901760 0 1 4294901760 65535 4294901761 0 4294901760 4294901760 0 0 65535 4294967295 0 4294901760 0 4294967295 65535 65535 0 65535 0 0 65535 0 1 65535 4294967295 4294967295 4294901760 65535 0 
0 65535 65535 0 4294967295 0 65535 65536 65537 0 65536 4294901760 4294901760 65535 4294967295 0 65536 0 65535 0 4294901760 0 65535 4294901760 0 4294967295 0 +-14 0 -37 96 -275 -150 838 984 -114 -1874 -543 625 -1104 -126 684 1996 1613 -954 -876 -976 230 562 -744 -674 495 859 -274 -516 -68 335 454 111 -184 -365 6 8 -188 53 126 98 1 -95 -80 -2 63 98 -16 -104 -14 64 8 -32 -21 32 21 0 25 7 -16 -46 -18 27 6 5 11 -6 -16 4 21 -9 -31 8 14 3 15 12 -5 -20 2 -16 -40 30 40 3 2 1 -9 -38 -4 40 -12 -34 16 75 60 -102 -133 20 80 85 17 -90 -49 41 32 2 15 -13 -27 -39 -44 60 57 15 54 -54 -178 5 245 107 -171 -217 31 193 20 -69 70 -23 -147 -22 104 40 -80 -18 86 60 -29 -201 -152 193 108 -4 32 26 29 -61 -59 -31 -10 69 39 -24 -5 11 -4 -34 -3 35 -6 -35 5 45 9 -31 -5 8 4 -4 -1 -8 -13 -5 -7 19 24 2 -2 -19 -18 -4 11 16 -19 -11 21 20 -7 -12 14 -2 -19 -1 9 3 4 0 -7 2 11 -12 -17 5 7 -2 -11 1 6 7 6 -8 -12 2 8 0 -8 2 7 3 3 -5 -9 -5 4 4 -14 -4 16 13 -6 -19 -5 13 2 -3 4 8 5 -15 -12 2 5 4 0 -2 -1 -2 -1 3 1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 1 0 0 -1 0 0 0 0 0 -1 -1 0 0 -1 -1 0 0 0 0 0 0 1 0 -1 -1 0 1 -1 0 -1 -1 0 0 -1 -1 -1 0 -1 0 0 -1 0 0 0 0 0 -1 -1 0 -1 -1 0 -1 0 0 -1 0 -1 0 0 0 -1 0 0 1 0 -1 0 -1 -1 0 0 1 0 -1 -1 0 0 -1 0 -1 0 -1 0 -1 0 1 0 0 -1 -1 -1 0 1 1 -1 0 0 0 0 0 0 -1 0 -1 0 -1 1 0 -1 0 0 0 0 1 -1 -1 0 0 0 0 1 0 -1 1 -1 0 0 0 0 0 1 -2 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 -1 0 0 0 0 0 0 -1 0 0 0 1 0 0 -1 1 1 0 0 0 0 1 0 0 -1 0 0 0 1 -1 0 0 0 0 -1 1 0 -1 -1 0 0 0 0 0 0 0 1 1 -1 0 0 -1 -1 0 0 -1 -1 0 -1 0 0 -1 -1 0 0 0 0 0 -1 0 -1 -1 0 -1 1 0 -1 -1 0 0 +65522 6356955 4285202157 64488262 4172218254 685474 1234692 4451872 3511885 1719952 368744 1007812 982906 341332 116849 218437 167081 100 38153 25480 9026 6404 13573 11072 4292 1088 1465 441 674 2372 1053 61 157 272 522 1025 205 369 425 260 2500 1609 5 1525 1616 1300 5881 14004 18089 13625 8389 4082 1028 394 2250 5536 3474 5832 31709 71474 76330 38210 5161 5429 22093 12416 6724 10996 41242 60353 11680 1700 4562 4442 4861 2097 146 1172 1234 1261 2050 1042 89 32 65 194 410 580 365 340 377 482 841 193 200 362 90 16 53 265 314 53 122 85 100 148 64 68 58 34 106 32 212 425 397 194 13 80 250 148 41 4 4294901759 262143 4294901761 65535 0 0 0 0 0 0 0 0 0 1 0 4294901760 0 0 1 4294901760 0 0 4294901760 65535 4294901760 65535 0 0 65536 4294901760 65535 4294901761 4294901760 65535 4294901760 4294967295 4294901760 0 65535 0 0 4294967295 4294901760 65535 65535 4294901760 4294901760 0 4294901760 0 1 65535 4294967295 0 1 4294967295 0 65535 65535 65535 65535 1 4294901760 4294967295 65536 4294901761 0 0 0 65535 65535 131071 4294901760 0 0 4294901761 65535 0 65536 4294901760 4294901761 0 0 65536 4294967294 0 1 0 0 0 0 4294901760 0 0 0 65535 0 0 4294901760 0 65536 0 131071 1 0 65536 0 65535 0 4294901761 0 0 131071 4294901760 65535 0 0 0 65537 65535 4294901760 65535 4294901760 65535 65535 4294901760 65535 0 0 65535 4294967295 4294901760 1 4294967295 0 +12 0 8 -68 -111 -357 -724 1216 1908 -521 -1074 -547 195 -225 -657 609 607 -247 -401 -10 -123 198 177 646 968 -780 -1074 -205 289 568 274 -476 -460 136 170 68 -8 27 34 3 15 -71 -73 40 54 54 42 -80 -64 20 28 -31 -37 39 11 -19 18 34 1 -44 -24 16 12 3 -5 5 17 -2 -9 -26 -23 25 21 13 24 -28 -55 1 36 31 1 -20 12 -13 -41 -3 13 24 21 18 9 -45 -7 12 -10 -29 -20 21 7 6 -4 -9 -18 35 54 -16 -35 -5 27 2 -16 -20 -25 26 65 43 43 -117 -122 -36 -42 113 46 4 65 62 29 -136 -91 0 -29 53 71 62 -46 -133 17 221 122 -203 -120 29 3 11 28 9 -33 1 23 -25 -38 52 50 -25 -12 10 13 -41 -52 21 31 26 3 -12 2 -7 -11 -1 11 4 -17 -11 16 19 -19 
-22 22 31 -11 -42 -18 38 33 -5 -14 -9 9 -9 -32 1 23 21 3 -16 -15 -5 9 27 10 -31 -13 13 2 -11 -3 13 4 -6 -2 -2 -5 6 9 2 1 -5 -4 -1 0 2 0 5 7 -8 -6 -4 -7 2 0 7 3 0 1 2 5 -6 -5 -1 -1 1 1 -3 -5 8 6 -6 -3 3 0 -1 0 1 0 -1 0 0 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 -1 0 0 0 0 0 0 0 -1 0 -1 0 0 0 0 0 0 -1 -1 0 0 0 0 -1 0 0 -1 -1 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 -1 0 -1 -1 -1 -1 -1 -1 0 1 0 0 -1 0 1 0 0 0 0 0 0 -1 0 0 -1 0 0 0 -1 0 0 0 -1 0 0 0 1 -1 0 0 0 0 0 -1 0 0 -1 0 1 0 0 0 0 0 0 -1 -1 -1 -1 0 1 0 0 0 0 0 0 -1 0 0 1 -1 0 0 -1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 -1 -1 0 0 -1 0 0 0 0 0 0 1 0 -1 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 1 -1 0 0 -1 -1 -1 0 -2 0 0 0 0 0 -1 0 -1 -1 1 0 0 -1 0 -1 -1 0 -1 0 -1 0 -1 -1 0 -1 0 0 0 0 -1 0 0 0 -1 0 0 0 0 0 -1 1 0 -1 -1 0 -1 -1 0 0 0 -1 -1 1 1 -1 -1 0 0 -1 0 0 +12 4290510856 4271636369 79756588 4260824948 1452685 88650 802530 429458 160901 54333 448645 1545424 1195501 406145 301652 230096 33524 793 1165 5266 6929 5832 8164 4496 1745 2890 482 1480 1937 832 153 50 293 757 1154 610 1360 3026 2257 401 313 1690 745 765 2106 193 941 841 85 97 1549 3172 1250 733 656 1301 6074 15538 16180 14533 2132 8069 19337 8281 3650 8885 19805 49130 56093 15241 130 865 1090 1154 4148 3125 244 1850 3145 1637 153 53 122 137 410 617 845 1445 1885 1768 1114 277 162 1025 970 265 250 810 1061 338 125 178 52 8 61 85 26 17 4 25 113 52 53 49 9 5 61 26 2 10 89 4294574086 262141 4294901760 65536 4294901760 0 0 0 65536 0 65536 0 0 0 0 0 4294901760 65535 65535 0 0 0 65535 65535 0 0 4294901760 65535 0 4294901760 0 4294967295 0 0 4294901760 65536 0 0 0 65535 4294967295 4294967295 4294967295 65536 0 65535 1 0 0 4294901760 0 65535 0 65535 0 65535 0 4294901761 0 0 4294901760 0 65535 1 0 0 4294901760 4294967295 65535 1 0 0 4294901760 0 4294901761 0 65535 1 0 0 0 0 0 0 4294901760 0 4294967295 0 65535 0 0 65536 4294901760 0 0 4294901760 0 0 0 0 4294901761 0 4294967295 65535 65534 0 0 65535 4294967295 1 4294901760 4294901760 65535 65535 65535 4294967295 4294901760 0 0 65535 0 65535 0 0 131071 4294901760 65535 4294967295 0 4294901760 131071 4294901761 65535 4294901760 0 +-18 0 12 -34 376 -126 -1303 -409 932 1532 56 -1100 528 704 -824 -1697 120 1884 80 -1055 -46 586 375 -189 -56 -277 -589 -74 599 447 -446 -440 118 286 19 75 90 -144 -72 53 22 14 32 -49 -47 36 29 -1 12 -11 16 -28 -81 -7 44 59 21 -28 -20 -11 11 10 -9 -15 -6 13 11 -4 -10 -2 -5 7 21 2 -13 -22 -16 15 6 8 14 2 -6 1 15 -31 -44 25 38 16 8 -30 -20 0 -3 4 1 5 6 3 -3 -20 -9 13 -12 12 37 11 -12 -34 -11 26 8 -27 -15 44 27 -38 -15 16 -19 6 77 11 -54 -87 -41 77 55 -7 -42 -14 66 70 22 -147 -109 27 -9 57 31 28 69 -14 -73 -60 8 61 31 9 14 -43 -31 -15 -7 27 -7 0 21 8 -6 -4 8 -6 -10 -3 6 2 -14 -6 6 15 3 1 20 -3 -9 -27 -18 16 8 -4 -8 4 1 4 8 6 4 -7 2 -5 -14 -4 6 16 12 -6 -8 -12 -5 -2 -9 12 14 1 -2 -6 -1 0 -3 1 4 0 -3 -3 1 3 0 0 4 -3 -8 -1 3 3 0 -2 0 1 0 -2 1 2 -4 -5 1 12 6 -12 -15 4 8 6 1 -6 -1 -1 -2 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 -1 0 1 0 0 0 -1 0 -1 0 0 0 -1 0 0 -1 0 0 0 -1 0 1 -1 -1 0 0 0 0 -1 -1 0 0 -1 1 -1 0 0 0 0 -1 0 -1 0 0 -1 0 0 -1 -1 -1 1 -1 -1 -1 0 -1 -1 0 0 -1 -1 -1 0 -1 0 -1 -1 0 0 0 0 -1 0 0 1 0 0 -1 0 0 0 0 0 0 -1 0 0 1 0 -1 0 0 0 0 0 0 0 0 0 -1 0 0 0 1 0 0 0 1 0 -2 -1 0 -1 0 0 0 0 0 0 -1 -1 0 0 0 -1 1 0 0 -1 0 0 0 0 -1 0 -1 0 -1 -1 -1 0 0 0 0 -1 0 -1 0 0 1 -1 -1 0 0 -1 -1 0 0 -1 0 1 -1 0 1 1 0 0 0 -2 0 0 0 1 0 0 1 0 0 0 0 -1 0 -1 0 -1 0 -1 1 0 0 0 -1 -1 0 0 0 0 0 0 0 1 0 0 0 0 0 -1 1 0 -1 0 -1 0 -1 1 1 0 -1 -1 0 0 0 0 0 -1 0 0 0 0 0 +65518 4292739084 4286710136 4268227305 100402084 1213136 774400 
[Elided: machine-generated numeric test data added by this patch — delta-encoded sample arrays and 32-bit golden-value arrays spanning several hunks; the `diff --git` file headers for these hunks were lost in extraction, so the target file paths are not recoverable.]
-1 0 -1 0 0 -1 -1 1 -1 -1 0 -1 0 0 -1 -1 0 1 0 0 0 0 0 0 1 0 -1 -1 0 1 0 0 0 0 -1 1 0 0 -1 -1 0 0 0 -1 0 0 -1 0 0 -1 -1 -1 0 -1 0 -1 0 -1 0 0 -1 0 0 -1 -1 -1 0 -1 0 0 0 0 0 1 0 1 1 -1 0 -1 0 0 -1 0 0 0 0 0 0 -1 0 0 0 -1 0 -1 0 0 1 0 0 0 1 1 0 -1 0 0 0 -1 1 0 0 0 0 0 0 0 1 0 -1 0 0 0 -1 -1 -1 -1 1 0 0 0 0 0 -1 0 0 0 -1 0 0 0 -1 -1 1 0 0 -1 -1 0 1 0 -1 0 0 0 -1 0 -1 -1 0 -1 0 0 -1 1 0 0 -1 -1 -1 0 1 1 0 -1 0 0 -1 0 0 0 -1 0 -1 0 0 0 0 0 1 0 -1 0 0 -1 0 0 0 0 0 0 0 -2 0 0 1 0 0 0 0 0 0 +8 458775 786818 4229233948 106954487 1099658 52301 1517410 4093570 2666970 413621 262665 868036 684109 503197 2658553 1912914 251681 44273 14240 62957 253530 604196 933433 624905 155636 115229 215933 179218 124029 54260 3546 3370 4673 3109 3362 3497 3202 4981 7120 4628 4573 7298 9704 11057 9850 5945 3209 1609 610 394 680 680 313 512 725 522 205 37 32 40 180 305 365 260 98 145 145 25 5 50 116 173 442 653 793 884 954 562 85 41 80 40 80 16 50 130 200 97 25 41 261 905 1853 821 73 313 821 2210 5573 7940 5330 2897 1885 178 29 5 32 122 116 29 34 52 26 5 2 2 1 8 9 2 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 4294967295 65535 65535 4294901760 65535 65535 4294901760 4294901760 0 4294967295 4294901761 65535 65535 4294901760 65535 1 0 0 65536 4294901760 65535 1 0 4294901760 1 4294901760 65535 0 65535 4294901760 0 4294967295 65535 65535 65535 65535 4294901760 0 4294967295 65535 65535 0 0 1 65537 65535 65535 4294901760 0 0 0 65535 0 65535 65535 65536 0 65536 1 65535 0 131071 0 0 0 65536 4294901760 0 4294901760 4294967295 131071 0 0 4294901760 0 4294901760 0 4294901760 131071 0 4294967295 65536 4294901760 0 4294901760 4294901760 65535 65535 4294901760 1 4294901760 4294967295 65536 1 65535 4294901760 0 4294901760 4294901760 0 0 65536 4294901760 0 65535 0 0 0 65534 65536 0 0 0 +-23 0 35 39 196 432 860 -1386 -2071 128 568 1165 250 53 1353 99 -905 -1797 -393 771 -636 372 1373 227 -969 -1075 -257 1013 -83 -35 995 1098 195 -1637 -520 371 77 164 116 -440 -353 219 -7 -52 -218 169 234 636 550 -625 -374 89 199 -57 -83 -239 -195 261 280 -159 -261 -107 -63 204 106 -20 1 31 -5 -54 -13 43 47 -10 -51 -23 26 38 18 -30 -28 -15 -31 36 40 3 19 17 18 -33 -26 -33 -8 14 5 12 -21 -7 4 -7 14 24 -2 -9 -19 -18 11 25 14 1 -20 -13 19 -2 -23 -6 1 22 4 -2 16 -6 -8 0 -9 -6 9 5 -12 -7 1 19 13 -16 -7 2 -9 -1 11 7 -4 -6 -1 2 4 2 2 -6 1 0 -7 -1 9 1 6 -11 -46 -7 34 34 -12 -18 13 2 -15 -6 3 14 14 -5 -13 -8 8 7 -2 -6 -5 1 3 5 1 -7 -5 13 15 -13 -9 -8 -28 3 41 47 -1 -81 -21 55 2 -64 -44 57 13 6 20 43 53 -30 -19 -53 -35 18 5 16 17 -7 -12 -4 0 2 -4 -3 1 9 7 -8 -11 3 7 -5 -11 7 9 -2 -5 -1 4 0 -3 0 -1 -1 1 3 -3 1 3 -1 0 1 -2 0 1 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 -1 0 -1 -1 0 0 0 0 0 1 0 0 -1 0 0 -1 -1 0 0 0 0 0 -1 0 0 0 0 0 -1 0 0 0 0 -1 0 0 0 -1 0 1 1 0 0 -2 0 0 -1 -1 0 0 0 1 0 -1 0 -1 -1 0 -1 0 0 -1 0 0 0 0 0 0 -1 0 -1 0 1 0 0 0 1 0 0 1 0 0 -1 0 0 0 -1 1 -1 0 1 1 -1 -1 0 1 -1 0 0 -1 -1 -1 0 0 0 -1 0 0 0 -1 0 0 1 0 0 0 -1 0 0 -1 0 1 0 -1 -1 -1 0 0 0 1 0 -1 0 -1 0 0 1 0 0 0 0 0 -1 0 0 0 -1 -1 0 -1 0 0 0 0 -1 -1 0 0 0 -1 0 -1 0 0 0 0 0 0 0 0 0 0 -1 0 1 0 0 0 0 0 0 0 -1 0 0 -1 0 0 0 -1 0 0 0 0 -1 -1 0 -1 -1 0 -1 0 1 -1 -1 -1 -1 -1 0 -1 0 -1 0 0 0 0 -1 1 -1 0 -1 -1 0 -1 -1 0 -1 0 0 -1 0 +65513 2555939 28311748 4204135260 8452073 1679849 65309 1840410 4048234 748890 542880 1936658 2094586 1092218 8114 2195629 2717794 408041 32825 207056 172570 2753 76085 459252 693125 147797 42850 64010 106146 103681 79570 45585 11636 962 2941 2018 2309 3130 2120 1224 1009 2257 1609 650 1413 1765 260 169 490 65 772 85 685 746 197 569 365 565 485 20 292 
64 117 106 193 362 425 53 82 170 52 5 20 40 1 50 82 157 2165 2312 468 173 261 205 221 233 113 40 26 34 50 194 394 145 793 3890 6562 3466 4100 5185 205 2249 3709 3170 1549 281 338 160 4 25 82 113 130 74 170 85 26 16 9 2 10 10 4294901763 65536 65534 1 0 0 0 0 0 1 0 65535 0 0 0 0 0 0 4294901760 4294901760 4294901760 65535 0 0 1 4294901760 0 4294967295 0 0 4294901760 0 0 4294901760 0 0 65535 0 65535 65537 0 65534 4294901760 65535 0 1 65535 4294967295 4294901760 0 65535 0 0 4294901760 4294901760 65536 0 65536 0 1 4294901760 0 4294901760 4294901761 65536 4294901761 65535 4294901761 0 4294967295 65535 0 65535 0 65535 65536 0 4294901760 0 65535 1 4294967295 65535 0 1 65535 65535 65536 0 0 4294901760 0 4294901760 65535 65535 0 4294901760 65535 0 65535 65535 0 0 0 0 4294901760 65536 0 0 0 4294901760 0 65535 0 65535 0 4294901760 65535 4294967295 4294901760 65536 4294967295 4294967295 65535 65535 65535 0 4294901760 4294901761 4294901760 65535 4294967295 4294901760 0 65535 +-37 0 -35 -19 -197 366 998 -78 -747 -831 33 571 28 -315 -690 -67 436 1337 821 -1161 -493 201 -266 -755 -229 1175 287 -665 -382 797 1343 -195 -969 -990 -29 695 90 43 222 -182 -310 -137 62 310 131 -124 -73 -288 -353 423 409 -30 -54 -89 -105 -46 35 197 164 -140 -154 -46 72 99 -5 -109 -21 14 -59 23 82 22 -75 -56 46 92 11 -80 -45 21 73 39 -109 -115 101 223 -18 -309 -96 325 191 -286 -263 192 285 -60 -249 -86 139 202 4 -240 -120 197 180 -99 -174 -11 121 90 -45 -120 -24 89 49 -48 -62 20 59 14 -27 -37 -10 27 20 1 -5 -18 -4 12 7 -12 -15 3 7 17 11 -15 -12 -1 0 3 2 0 -2 1 3 -8 -13 8 16 6 -7 -22 -13 24 24 8 12 -38 -44 8 28 10 -23 -1 29 -2 -27 -4 13 14 6 -13 -10 4 3 0 2 -3 -8 9 16 -12 -14 -3 -6 13 10 -2 14 16 10 -33 -22 -28 -39 43 29 1 3 16 15 -28 -22 5 11 12 7 -23 -25 12 23 5 -10 -12 1 10 1 -9 -1 6 0 -5 -6 -1 -2 9 10 0 -1 -4 -4 -3 6 3 -13 -6 15 17 -9 -27 -4 28 11 -19 -11 6 6 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 -1 -1 0 -1 -1 -1 0 0 0 -1 -1 0 0 1 0 0 1 -1 -1 0 -1 0 1 0 0 0 0 0 0 0 0 -1 -1 0 -1 1 0 -1 -1 1 0 0 -1 0 0 -1 -1 -1 0 0 1 1 0 0 0 -1 0 1 -1 0 1 0 0 0 0 1 -1 0 0 0 0 0 0 1 0 -1 0 0 0 0 0 -1 0 0 0 0 1 -1 -1 0 0 0 -1 -1 0 0 1 0 -1 0 0 0 -1 0 0 0 -1 0 1 -1 -1 0 0 0 0 0 0 -1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 0 -1 0 -1 -1 0 0 -1 -1 0 1 -1 -1 0 -1 0 -1 0 0 -1 -1 0 0 0 0 -1 -1 -1 -1 0 0 0 0 -1 -1 -1 0 -1 -1 0 -1 0 1 0 0 0 0 0 -1 0 0 -1 0 0 -1 0 0 -1 0 0 0 0 0 -1 0 -1 -1 0 0 1 0 0 -2 0 -1 -1 1 0 -1 0 0 -1 -1 1 0 -1 -1 -1 0 -1 1 -1 -1 1 0 0 -2 -2 1 0 +65499 4293787613 24051515 4289856486 4240571669 327130 100009 480589 1977665 2021962 283450 640781 1433066 524594 781133 1841674 1919061 483866 9949 82408 114869 99944 32537 88273 303538 168181 10837 13141 40034 46496 25832 14985 11906 637 4010 7208 8761 10580 6521 2466 6850 25106 59930 95805 114841 118277 106033 84825 69397 60125 57616 53209 42201 30397 22741 16425 8497 4705 4244 3677 2098 829 401 349 160 193 234 338 346 145 9 4 5 73 233 292 533 745 640 1588 2000 884 530 845 745 365 205 116 9 13 145 400 205 205 104 452 1189 1268 3370 842 265 1009 509 265 578 769 554 244 101 82 37 25 37 85 100 17 25 45 205 514 810 800 4293722123 458741 6 0 0 0 0 0 0 0 0 4294901760 0 0 4294967295 4294901760 4294967295 0 4294901760 65535 65536 0 4294901761 65535 65535 1 0 0 0 4294901760 65535 131071 4294901760 131071 0 65535 4294901760 4294967295 0 65537 0 4294901760 65536 65535 1 0 65536 65535 0 0 65536 4294901760 0 0 4294901760 0 0 4294901761 65535 0 4294967295 0 1 65535 0 65535 0 65535 4294901761 65535 0 0 4294901760 1 0 0 0 0 0 0 0 4294901760 0 0 0 4294901760 4294901760 65535 4294901760 65535 
4294901761 65535 65535 65535 4294901760 65535 0 4294901760 4294967295 65535 0 4294901760 4294967295 4294901760 65535 65535 1 0 0 65535 4294901760 0 65535 4294901760 0 0 4294901760 4294901760 65535 65536 0 65534 4294967295 1 65535 4294901760 131071 4294901760 4294967295 4294901760 4294901761 131071 0 4294901758 1 +-36 0 -111 -62 -268 372 635 316 248 -657 -319 -85 -699 -104 1200 1247 -438 -1825 232 617 -1455 -752 147 1956 1425 -213 -112 -988 -539 366 967 -244 -1245 -885 -173 1228 602 -128 -191 -78 204 42 -18 -154 -272 -142 -3 687 674 -514 -558 -227 -35 341 75 -172 11 296 74 -270 -49 130 79 -45 -51 -129 -126 120 97 18 -6 45 64 -67 5 -22 -103 -39 52 117 107 -132 -199 -95 -106 147 6 129 216 206 175 -252 -24 -144 -132 -33 -79 -8 1 34 -86 20 -40 116 154 36 -30 -17 81 5 18 -91 -62 10 32 -21 -57 -16 1 27 -12 23 28 -18 -3 19 -13 -28 10 26 -1 -7 2 -2 0 -2 -4 11 16 -10 -20 -6 12 12 -8 -1 23 -19 -37 16 36 -15 -34 1 10 13 12 5 3 -41 -28 35 -7 -26 10 69 49 -31 -39 -45 12 45 -12 -22 4 -13 -11 36 22 -18 -16 -26 -17 40 27 -4 -6 -40 -38 64 80 -25 -54 -41 -17 63 55 -14 -1 -22 -31 -26 -15 35 29 -2 -22 -5 15 -10 -30 26 38 -4 -17 -9 11 10 2 -15 -9 -1 1 8 -3 -5 10 3 -11 -7 -1 9 15 3 -3 -16 -9 -7 -12 15 -1 6 17 17 14 -23 -11 -6 -4 1 0 1 0 0 -1 0 0 0 0 -2 0 0 0 0 0 0 0 -1 0 0 0 -1 0 0 1 -1 -1 -1 0 0 0 -1 -1 0 0 -1 1 1 -1 -1 1 0 0 0 0 0 0 0 0 -1 -1 -2 -1 0 -1 0 0 0 0 -1 0 -1 0 0 0 0 0 0 -1 0 1 0 0 1 -1 -1 0 0 1 -1 0 -1 -1 0 0 0 -1 0 -1 0 0 -1 0 0 0 -1 -1 1 -1 0 -1 -1 0 0 0 -1 0 0 1 0 0 0 -2 0 0 -1 -1 0 0 -1 0 0 0 -1 1 1 0 0 -1 0 0 -1 1 -1 -1 -1 0 0 1 0 0 0 0 0 1 1 -1 0 -1 0 -1 0 -1 0 0 1 -1 0 0 1 -1 0 -1 0 -1 0 1 0 -1 -1 0 0 1 0 -1 0 0 -1 1 -1 -1 -1 1 0 -1 0 0 0 0 0 -1 0 0 0 1 0 -2 -1 -1 0 1 -1 -1 0 -1 0 1 0 0 -1 0 -1 -1 0 -1 -1 1 -2 0 0 -1 -1 0 0 0 0 0 1 1 0 0 -1 1 0 -1 -1 0 0 -1 -1 0 0 -1 -1 0 0 0 0 0 0 -1 0 -1 -1 0 -1 -1 0 -1 0 0 0 0 0 +65500 4290969489 24444660 20710011 4251910392 108986 499417 2995009 3522469 434513 2682529 3847545 2075994 988688 424477 994625 2333250 1537913 378788 42565 43380 24040 94148 471978 718472 362893 117506 35209 87737 78376 19301 8266 19242 30276 9733 2061 8585 509 12130 16393 28873 48626 32845 16677 89092 94129 21312 18513 6305 1157 7796 15056 25012 1189 6586 8605 3944 1465 3505 730 673 1108 370 953 776 50 8 4 137 356 436 288 65 890 1625 1521 1157 269 169 1690 2009 725 4861 3362 3546 2169 628 185 1417 808 932 1889 745 1636 5540 7025 4597 4258 3221 485 1637 1450 845 509 325 1576 1460 370 221 229 82 65 34 109 170 82 234 265 130 369 37 578 4293459982 4294639605 131068 65536 0 65535 0 4294836224 0 0 0 4294901760 0 4294901760 0 4294901761 4294967295 0 4294901760 65535 4294901760 65537 4294967295 1 0 0 0 4294901760 4294901759 65535 65535 0 4294901760 4294901760 0 0 0 65535 1 65536 4294967295 0 4294901761 4294901760 65535 0 65535 65535 4294901760 0 4294901760 131071 65535 4294967295 0 4294901760 0 1 0 65534 4294901760 65535 4294901760 0 4294901760 65537 0 65535 4294901760 4294901761 4294967295 0 1 0 0 65537 65535 65535 65535 65535 65536 65535 65536 65535 65535 65535 1 4294967295 0 1 65535 4294901760 4294901761 4294967295 1 65535 0 0 65535 0 1 4294967294 65535 4294901761 65535 65535 1 4294901760 4294901760 65535 4294967295 4294836225 0 4294967295 0 0 65536 1 4294901760 1 4294967295 0 4294967295 0 4294967295 0 0 0 65535 4294967295 4294901760 65535 65535 0 0 +-44 0 6 8 -99 112 226 -85 -198 88 382 -22 -64 -559 -1232 43 842 1564 644 -855 -100 -610 -481 375 35 -99 169 113 -135 134 415 -294 -476 -193 -15 330 91 -121 -131 106 119 82 65 -64 61 -14 -139 -245 -142 252 139 112 63 -291 -284 369 443 -94 
-133 -164 -94 31 41 48 19 -18 -2 -23 -34 -31 -42 72 79 16 16 -65 -74 -14 34 74 8 -92 -74 83 120 -20 -170 -38 196 177 -30 -267 -142 131 152 53 -54 -161 -73 138 125 -38 -104 -63 -5 112 89 -38 -64 -28 26 39 15 -21 -16 -19 5 21 -22 -40 -3 66 39 -30 -15 -30 -44 32 48 13 -10 -28 -13 18 21 -5 -19 -14 9 18 -7 -21 -12 21 32 2 -10 -15 5 -28 -48 40 58 -6 -40 -1 45 -7 -40 -12 -17 8 36 50 15 -57 -40 4 32 52 21 -90 -86 47 77 36 -16 -65 -29 38 33 4 -9 -29 -26 26 41 9 -8 -30 -13 1 -8 8 19 0 -28 -21 4 49 23 -34 -21 12 18 6 0 -32 -30 15 9 20 17 -6 -6 -7 6 6 -2 -14 -8 12 12 -2 -7 -13 -8 10 6 4 4 -10 -10 3 11 8 0 -14 -7 2 -2 5 9 4 -1 -8 0 2 -2 0 1 0 0 0 0 0 -1 0 0 0 0 0 0 0 0 1 1 0 -2 0 0 0 0 0 0 -1 -1 0 0 0 0 -1 0 0 -1 0 1 -1 0 -1 -2 0 0 0 -1 -1 -1 1 0 0 0 -1 1 0 1 0 0 0 -1 0 -1 0 -1 1 1 0 -1 -1 0 0 1 -1 0 0 0 0 0 0 1 0 -1 -1 -1 -1 1 0 0 -2 0 1 -1 0 0 0 0 0 0 0 -1 1 0 0 -1 0 0 1 0 -1 0 1 0 -1 0 -1 -1 0 -1 -2 -1 -1 0 -1 -1 0 0 0 -1 -1 0 0 0 0 1 0 -1 0 0 0 0 0 0 -1 0 -1 -1 0 0 -1 0 0 0 0 -1 1 1 -1 0 1 0 -1 -1 1 0 0 -1 0 -1 0 0 1 0 -1 -1 0 0 0 2 -1 0 -1 0 0 0 0 0 0 -1 -1 -1 0 0 -1 1 -1 -1 -1 0 0 -1 -1 0 0 0 0 0 -1 -1 1 1 0 0 0 0 0 0 -1 -1 0 0 -1 0 -1 -1 0 -1 -1 -1 1 1 -2 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 1 1 0 0 0 0 0 1 0 0 0 1 -1 -1 1 0 0 0 0 +65492 524294 7405469 4289396962 5832506 146408 316577 1519673 3155060 1145761 382100 371986 11026 41330 36181 258661 263825 109125 22922 28397 20885 8321 3917 79346 83668 31865 88650 216817 205085 44585 9797 3985 685 533 2117 6948 6497 4481 5672 6632 8528 12365 14800 30344 69745 72189 37325 25913 28837 24373 17069 14785 12569 9365 4880 2197 666 617 466 2084 4365 2421 1125 2960 2473 884 493 466 557 405 490 585 1028 325 809 3904 3400 1601 2074 1744 353 3796 3474 1616 3728 8541 9605 7225 4481 2285 1105 922 1352 1762 964 170 128 361 1225 2417 1685 585 360 1024 1125 481 325 85 72 200 208 148 218 164 52 116 109 185 196 53 29 97 4294508543 131072 65534 1 0 0 65535 0 0 0 65536 1 65534 0 0 4294901760 65535 0 4294901760 0 65535 4294901761 4294901760 65534 0 4294967295 131071 0 4294901760 1 1 0 65535 65535 131071 1 4294967295 0 4294901761 0 0 0 1 4294967295 4294967295 1 4294836224 65536 65535 0 0 0 131071 0 65535 65536 4294901760 65536 4294901760 4294901760 65535 4294901759 4294967295 4294901760 65535 0 4294967295 0 0 1 65535 0 0 4294901760 4294901760 65535 4294901760 0 0 131071 4294901761 65536 4294901760 131071 0 65535 65535 65536 4294901760 65535 0 4294901762 4294901760 0 0 0 4294967295 65535 4294901760 4294901761 4294967295 0 4294967295 0 0 4294901760 131071 1 0 0 4294901760 65535 4294901760 4294901760 65535 4294967295 131071 4294836225 0 0 0 0 0 0 4294901760 65536 1 0 0 1 0 4294901761 131071 0 0 +-20 0 4 51 -376 34 1111 324 -1324 -814 1142 709 -1169 -294 1449 766 -277 -2166 -1844 1784 1842 -40 -141 -594 -1167 -158 880 1244 347 -1229 -695 402 307 -99 -182 -75 34 376 -78 -486 202 672 -194 -744 40 737 260 -382 -202 -119 -109 184 164 270 356 -424 -442 -154 -47 314 172 -101 -117 9 61 48 -26 -63 28 44 -77 -21 96 48 -48 -47 29 -13 -59 24 -18 -51 37 161 -63 -153 66 273 207 -215 -190 -46 43 -6 -27 97 48 -129 -73 34 25 31 -55 -58 52 104 -30 -113 -33 105 49 -40 -15 34 36 -56 -81 20 45 46 31 -17 0 -30 -43 -20 4 78 50 -55 -53 6 38 18 -24 -30 11 34 19 -42 -50 13 40 18 -23 -46 -35 51 50 31 33 -53 -24 -4 -15 -42 -65 52 80 56 23 -90 -62 13 1 26 60 3 -54 -58 -28 42 42 14 -47 -1 60 15 -26 -10 15 -12 -2 7 -31 -8 51 27 -42 -25 48 -1 -48 0 17 -18 -17 25 -11 9 42 11 0 -31 -37 8 40 16 -19 -33 0 24 -4 -17 6 21 -1 -23 3 24 -4 -35 -6 33 10 -27 -18 13 12 2 -5 -9 2 12 3 -18 -9 8 1 1 0 3 5 5 5 -15 -14 8 9 0 
-3 1 0 -1 1 1 0 0 0 0 0 0 0 0 0 -1 -1 1 1 0 0 0 0 0 0 0 0 -1 0 0 1 0 0 -1 -1 0 -1 -1 1 0 0 -1 0 0 1 -1 0 0 0 0 0 0 1 0 0 -1 -1 -1 0 0 -1 -1 -1 1 1 -1 0 0 0 0 0 1 1 0 0 -2 -1 0 0 1 0 0 -1 0 0 0 -1 1 3 0 -1 -1 1 0 -1 -1 -1 1 1 1 2 -1 0 0 -1 -2 0 1 0 -1 1 0 -1 0 2 0 0 -2 0 -1 0 0 0 -1 0 -1 -1 0 -2 -1 1 0 -1 -2 -1 1 0 -1 0 0 0 -1 0 1 0 0 1 -1 -2 -1 -1 1 -1 1 0 -1 -2 -1 2 0 -1 0 0 1 0 0 0 -1 -1 0 -1 1 -1 -1 -1 0 0 2 1 -1 -1 -1 0 -1 0 0 -3 -1 1 0 -1 -1 -1 0 1 1 -1 -1 0 0 -1 0 0 1 0 -1 0 1 1 -2 -1 -2 1 0 0 0 -1 -1 0 -1 1 -1 0 0 2 -1 -2 -1 0 0 0 -2 -1 0 -1 0 1 0 0 -1 -1 1 1 -2 -1 -1 -1 0 2 0 -1 -1 -2 1 1 -1 -1 1 1 0 -2 0 0 0 +65516 3342340 2293384 21234775 4241685204 1806845 1452997 2686357 4768285 6582992 3394564 372717 1386853 2321936 1630850 644629 104050 38749 142532 242280 492388 591172 544769 213524 54965 45737 99796 306512 219080 100805 39785 13770 6025 4645 2720 6370 11520 4513 1010 4057 2925 27290 27378 78885 89074 38216 1885 10138 18945 6485 1586 6389 13520 13669 12114 4001 1381 4432 6961 4141 1250 900 2249 6100 5525 2845 1768 1476 1277 2125 2669 1924 2645 3826 3461 3898 592 1989 6929 9536 8629 4013 677 3609 6280 2548 1960 2210 3825 776 369 53 1025 3330 2389 2305 2304 613 914 202 1885 961 1433 1856 1450 576 305 477 530 585 1241 1125 829 493 148 106 148 333 145 2 9 50 4293984261 589810 9 131069 4294901760 65537 0 0 0 0 4294901760 131071 1 0 0 0 4294901760 0 1 4294901760 65535 4294967295 1 4294901760 0 4294901761 0 0 0 1 4294901760 4294967295 0 4294967295 131071 4294901761 0 0 65536 1 4294836224 65535 65536 0 65535 0 131071 3 4294967295 1 4294967295 131071 65537 4294901762 0 4294901759 65536 4294901760 1 65535 2 4294836224 4294901760 0 4294901760 4294901760 65535 4294967294 1 4294901759 131071 4294901760 0 4294901760 65536 0 4294901761 4294967294 131071 131071 4294901760 4294967294 2 65535 65536 0 4294901760 65535 131071 4294967295 65535 131072 4294901761 4294967295 4294901760 0 4294967293 1 4294967295 65535 65537 4294967295 0 65535 65536 4294901760 65536 4294836225 4294901759 1 0 4294967295 4294901760 4294901761 0 4294901762 4294967294 0 4294836224 65535 65535 1 4294901760 131071 4294836225 4294967295 65535 2 4294967295 131070 4294901761 131071 1 65534 0 +40 0 -68 81 -262 -465 186 1786 1279 -1929 -1262 24 -221 98 -23 865 379 -1257 -1004 2083 1910 -1383 -834 42 -644 -281 648 951 -357 -941 506 1288 -150 -1650 -224 1203 415 -427 -106 -372 -394 85 -230 343 697 324 -284 -706 61 486 18 -279 -52 -37 -150 175 231 -46 -187 -43 100 97 15 -52 -46 8 62 13 18 -39 -85 -53 -24 81 104 39 -95 -89 154 17 -315 -25 309 221 -39 -267 -47 97 -10 -86 -13 39 -122 28 211 122 -59 -210 -66 116 93 -4 -44 -100 -56 84 21 -12 18 47 16 -30 16 -3 -16 -26 -7 9 -19 -5 20 32 -9 -40 -20 46 53 -15 -30 -12 25 0 -28 -12 21 5 -41 0 47 3 -54 -4 47 21 -29 -30 13 46 19 -63 -49 56 44 -17 -3 -6 -7 11 12 -30 -3 30 -9 -43 -14 20 3 50 57 -78 -90 43 83 14 -11 -58 -66 24 48 7 -19 34 41 -73 -76 40 41 34 27 -31 3 -16 -69 -25 39 93 28 -81 -42 18 8 21 19 -27 -29 4 7 18 9 -15 -10 29 37 -47 -45 33 29 -43 -43 45 34 -8 3 3 -7 -19 6 3 -20 7 25 -8 -21 2 -3 -2 16 16 -15 -18 14 11 -15 -5 13 9 -2 -13 -5 1 0 2 0 0 1 0 -1 0 0 0 0 -1 -2 0 1 1 -1 0 -1 -1 0 0 -1 -1 0 0 0 0 0 1 0 -1 0 0 1 0 -1 1 0 0 0 0 1 -1 -2 0 -1 1 0 -1 -1 1 1 0 -2 -1 0 1 0 -1 -1 -1 -1 1 0 -1 -1 0 1 1 -1 -2 -1 0 1 1 0 0 -1 1 0 -1 1 0 -2 0 -2 -1 0 1 1 0 0 0 0 0 0 0 2 0 0 -1 -1 0 1 0 0 0 -1 0 -1 -1 0 0 1 0 0 0 1 -1 0 -1 0 -1 -1 0 0 -1 -1 0 -1 0 0 1 0 -1 -2 0 1 -1 -1 -1 -1 1 1 1 -1 -1 0 0 -2 -1 -1 0 0 1 0 0 -1 1 1 0 -3 -1 0 2 1 -1 -2 0 0 0 -1 0 0 0 -1 0 1 0 -1 0 1 0 1 -1 -2 -3 0 1 1 1 -1 -1 
0 -1 0 1 1 -2 -1 0 1 0 -2 -1 -1 -2 0 0 -1 0 -1 -1 1 0 -2 1 1 2 0 -1 1 1 -1 0 0 0 1 0 -1 -2 -1 0 0 0 1 -1 -1 0 1 -1 -1 -1 0 1 0 -1 -1 -1 -1 1 1 -2 -1 0 -1 1 -1 1 1 -1 0 0 +40 5373884 4264558330 117047482 4168549631 1593220 58445 748754 1723690 5346905 5560789 697320 493697 1324305 1012930 1914980 2745000 1497385 354554 149620 162461 170549 590785 579092 239917 78165 4073 53125 55477 36818 19409 2929 2180 4013 1845 10034 7137 12337 16946 24005 99850 144322 72810 11618 7496 1690 15668 59405 47581 17812 8665 11936 10192 585 2533 1156 265 932 130 386 1424 1681 2516 3034 1044 625 928 466 1681 2218 2932 2650 1741 2285 4330 5537 2225 45 170 1044 909 1930 596 2509 9333 9949 7085 3485 4932 2353 1517 7010 7376 2837 1690 265 5386 10170 7345 2088 505 1090 857 373 306 941 3578 3114 2690 3874 1220 18 410 45 449 689 445 13 512 549 317 250 589837 4294180862 131067 131072 0 1 65535 0 4294901760 65534 65537 65535 4294967295 0 4294967295 0 0 65536 4294901760 0 1 131071 0 0 4294901761 65534 131071 4294901760 131071 1 4294967294 65536 4294901760 4294967295 131071 4294901760 65535 65537 4294901759 65535 65537 0 131071 4294901760 1 65534 4294967294 65536 1 0 0 0 2 4294901760 65535 1 0 65535 4294967295 0 1 0 4294901761 4294901760 4294901760 65535 4294901760 65535 65535 65536 4294901760 65534 4294901761 4294967295 131071 65537 4294967295 0 4294967294 65535 65536 0 131071 1 4294967293 131072 4294901761 65534 0 65535 0 65535 1 65535 1 4294901761 4294836222 65536 65537 4294967295 4294901760 65536 4294836225 65535 1 4294967294 4294901759 0 65535 4294967295 1 131070 131073 4294901760 65537 65535 0 1 4294901759 65535 0 4294901761 65535 4294901761 4294967295 65536 4294901760 4294967295 131071 4294836225 65535 131071 131071 4294901761 0 +9 0 100 -10 -63 -693 -1342 1280 2203 38 -934 -975 227 873 26 -1733 -1098 1634 286 -70 1493 796 -489 -2374 -1198 1554 977 -238 -849 289 1741 403 -864 -1640 -617 770 399 270 -15 -198 -78 19 -51 69 152 261 246 -302 -207 -236 -259 256 246 89 -10 -136 4 68 -50 -163 -64 199 114 -81 -88 -10 35 60 30 -34 -26 -30 -28 49 120 -6 -166 -181 -68 298 198 -43 10 -56 -23 -62 -14 2 -75 -25 3 109 80 9 29 -107 -60 38 -34 -40 53 70 -32 -40 1 -25 -26 55 22 15 35 -51 -46 33 27 -33 -26 36 14 -17 16 23 -6 -27 2 -5 9 14 -47 -35 44 45 -35 -14 31 23 30 -42 -59 4 13 -8 -9 42 13 -22 0 1 6 36 37 -73 -75 17 45 34 -15 -42 13 8 -74 7 113 46 -76 -92 2 63 8 14 23 -3 13 -34 -45 2 36 48 19 -69 -58 -12 -10 67 34 -17 14 -15 -57 17 97 3 -82 -48 41 19 -62 0 50 40 1 -56 -32 35 36 -10 -27 -2 16 -8 -26 24 29 11 41 -36 -68 -19 13 19 -10 23 34 -6 -17 -33 -11 32 5 -2 24 -4 -18 -16 -16 16 28 14 2 -24 -16 -2 -2 11 6 3 1 -5 0 0 -2 -2 1 0 0 0 0 0 -1 0 0 0 0 0 -1 0 1 0 0 -1 0 1 0 -1 0 -1 0 1 1 0 0 -1 -1 2 0 0 1 -1 1 0 -2 0 2 1 0 -1 -1 -1 1 1 0 -1 -1 1 1 1 -1 -1 -1 0 1 0 0 0 -1 0 0 0 1 0 0 -2 -1 1 0 -1 0 0 -1 0 -1 1 -1 0 2 -1 -2 1 -1 1 0 -1 0 0 -1 0 1 0 -2 1 2 1 0 0 0 -2 -1 1 -1 -1 -1 -1 0 -1 -1 -1 -1 0 -2 0 1 0 -2 1 0 0 -1 0 0 0 0 0 0 1 -1 -1 1 0 0 0 0 -1 1 0 -1 1 0 -1 0 0 1 1 0 0 1 -1 0 -1 1 0 -3 -2 2 1 0 -2 -1 -2 -1 0 -1 0 0 1 0 0 -1 0 1 0 -1 -1 0 0 0 2 0 0 1 0 0 2 -1 -2 0 1 1 -2 0 -1 -1 0 -1 -1 -1 1 0 -1 0 -1 1 0 0 1 0 -3 -1 1 0 0 0 0 0 -1 -2 1 -1 -1 0 1 1 0 -1 0 1 0 -1 1 0 1 1 0 -1 -1 0 2 0 -1 0 1 0 -1 0 -3 0 2 0 -2 -1 0 1 -1 -3 0 +9 4294312036 4249616321 83950274 2492571 1822981 813658 3003965 3875560 86696 2862665 5874997 3850120 1011173 804322 3193490 3436096 973589 232101 39429 6445 7362 91225 151720 98545 132617 68437 18596 4640 29069 43697 19557 7844 4825 2056 1576 3185 14436 60317 93428 41053 3236 4373 200 6250 11890 6481 12290 
5044 2756 7709 2624 626 3701 709 3826 3205 1818 1972 485 785 765 29 277 3434 3961 1421 1490 2664 3497 233 1845 653 1 1332 6698 5914 3181 1989 233 5525 14885 14240 3973 260 538 1325 2029 3600 5122 3508 4589 1445 421 3538 9418 9028 2042 3844 4100 3137 2249 1396 733 320 1252 962 2977 4985 530 629 1192 1378 1145 29 592 580 512 980 580 260 125 196614 4294639617 0 4294901758 1 0 0 65535 0 0 65535 1 4294901760 65536 4294901760 4294901760 65536 1 4294901760 196607 0 4294901761 1 65534 65538 4294901760 4294967295 65537 4294901760 131071 65537 4294967295 65535 1 0 65535 0 1 4294836224 131071 4294901760 0 65535 131071 65535 4294901762 131070 131071 4294901760 0 65535 1 131070 65538 0 4294836224 131071 4294967295 4294967295 4294901760 4294967295 65535 65534 1 131070 0 65535 0 0 65536 4294967295 1 0 4294901760 1 131071 4294901760 0 65537 0 4294901761 4294901760 1 4294901757 65538 4294836224 4294901759 65535 65535 65536 0 65535 1 4294967295 0 131072 0 1 131072 4294901759 65536 4294836225 4294901760 65535 4294967295 131071 4294901760 4294901760 1 65536 4294770688 131071 0 0 4294901760 131070 4294967295 65536 1 65535 1 131071 65536 1 4294967295 131072 4294901760 65536 4294901760 4294770688 131072 4294836224 65535 4294901761 65533 +-11 0 52 -51 85 -57 -529 -227 286 686 142 -269 -252 -291 403 1111 308 -1683 -846 810 265 -126 -245 282 558 107 -231 -393 45 482 391 -694 -652 276 266 -5 -50 10 -56 28 72 10 -33 -55 -46 139 147 -125 -149 26 75 50 -22 -47 19 30 -20 2 80 -17 -87 -47 26 40 -14 -8 -3 4 16 11 -14 -8 18 -1 6 12 -16 -65 -18 56 -10 -8 32 21 1 -24 -7 3 -9 -10 2 24 15 -8 -7 -28 -42 30 65 32 -8 -73 -44 22 27 44 18 -64 -46 39 38 2 -9 -9 8 -16 -31 10 14 12 13 -2 -7 -17 -17 14 31 9 -6 -40 -39 21 36 26 -10 -41 -13 30 23 2 2 -32 -25 20 6 -4 24 10 -32 -34 5 44 11 -25 -15 17 21 1 2 -18 -8 -7 -24 -4 -1 42 41 -14 -33 -25 16 41 0 -44 -10 30 23 -17 -38 -10 29 31 -8 -34 -16 15 12 12 8 -2 12 -18 -30 -18 -6 36 17 -3 -1 -6 3 5 1 -3 -1 -12 6 10 -25 -21 16 36 6 -28 -17 3 0 13 11 4 4 -20 -16 12 19 -4 -18 -14 1 25 12 -17 -11 4 2 -1 4 4 -5 -8 1 6 2 -4 -4 1 3 2 0 -3 0 1 0 0 -1 1 0 0 0 0 1 -1 -1 0 0 0 0 0 0 0 0 0 -1 0 0 -1 -1 0 0 0 -1 0 1 0 -1 0 0 0 -1 1 0 -1 -1 -1 0 0 0 0 0 -1 -1 0 0 0 -1 0 0 1 0 -1 -1 0 1 -1 -1 0 0 0 0 0 0 0 0 -1 -1 1 0 1 0 0 -1 1 0 -1 0 -1 -1 -1 1 0 0 0 -1 0 0 0 0 1 1 0 -1 -1 0 0 -1 -1 0 0 0 -1 0 1 1 0 -1 -2 0 2 1 0 0 -1 -1 0 0 1 0 -1 -1 -1 1 1 0 -1 0 1 0 0 -1 -1 -1 0 -1 1 0 0 -1 -1 -1 -1 0 1 1 0 1 -1 -1 1 1 -1 0 -1 0 0 -1 0 1 0 -1 0 0 -1 0 -1 0 -1 -1 -1 0 0 -1 0 -1 1 1 -1 -1 0 0 0 0 0 0 -1 0 -1 -1 -1 0 0 -2 -1 0 -1 0 0 0 -1 0 0 0 -1 0 -1 -1 0 0 -1 -1 -1 0 1 0 1 0 -1 0 -1 1 1 -1 0 0 1 -1 -1 -1 0 0 -1 0 1 -1 -1 0 0 0 -1 0 -1 -1 0 -1 0 0 0 0 -1 0 1 0 -2 -1 0 +65525 4291625012 4291231829 4280155631 44957982 92525 148185 1396730 2927353 1371816 86101 139549 322813 207810 234349 634517 501280 70781 2600 3920 5284 4114 21437 37234 22877 8125 2693 1261 404 6689 9778 2276 260 25 377 260 325 180 4481 3460 164 1465 577 58 181 580 289 833 2664 5249 5393 2420 2665 4420 3637 1448 162 320 1061 340 173 338 485 1042 1636 1962 1972 1781 1069 533 1028 1025 52 676 2180 1961 746 514 442 328 113 592 1765 1877 1714 1937 1936 1000 818 1544 1802 1220 481 288 68 468 1224 1332 298 37 34 10 145 136 1066 1552 820 298 169 137 416 400 377 520 626 433 137 5 32 89 37 20 131068 131075 4294770688 65536 0 131071 0 0 4294901761 65535 0 0 0 0 65535 4294901760 65535 0 65535 1 65535 0 131071 4294901760 4294967295 0 0 4294901760 65535 0 65535 65536 4294901760 65535 4294901761 65535 0 0 0 4294901760 131071 65536 0 131071 4294901760 4294901760 
4294967295 1 0 65535 0 65536 1 4294967295 0 4294967295 0 4294901760 65536 1 4294901759 131072 1 4294901760 65535 65536 4294901760 4294967295 65537 4294901760 65536 0 4294967295 65535 131071 0 4294967295 4294967295 65536 1 4294901761 131071 4294901761 4294901760 0 65535 1 65535 4294901760 4294901760 4294901760 4294967295 0 65535 131071 4294901761 65535 0 0 4294901760 4294901760 4294967295 0 4294967294 4294901760 0 4294901760 0 4294901760 4294901760 65535 4294901760 4294967295 65536 65536 4294901760 4294901760 65537 65535 65536 4294967295 65535 4294901760 65536 4294967295 0 4294901760 4294901760 65535 65535 0 4294901760 65536 4294836224 65535 +-28 0 -8 18 231 319 183 -1157 -1244 960 1333 327 -40 -997 -864 -661 -1230 2154 2656 -401 -1405 -629 806 472 -190 -546 -282 74 652 152 -901 -946 -105 1121 378 -67 208 -203 -365 -101 278 300 -131 -414 -135 383 254 -59 -47 -118 -14 75 33 -121 -93 53 -5 -18 4 142 114 -123 -68 -1 -40 -10 35 55 18 -46 -49 -17 -5 78 85 -28 -31 -95 -120 70 111 75 6 -104 -51 53 68 -16 -79 -39 47 79 0 -62 -8 24 10 -20 -33 -13 3 57 22 -35 20 -10 -99 -10 83 76 -23 -62 15 44 35 -40 -71 -24 41 64 -21 -53 19 42 -10 -27 -1 12 27 -11 -50 11 69 -16 -63 -6 19 -2 -19 36 59 -37 -81 -8 29 37 18 36 34 -102 -54 73 18 -68 -30 66 36 -23 -28 4 15 10 -3 -7 13 25 -9 -42 16 47 22 -48 -45 -23 -3 43 13 -12 -18 -24 -23 69 67 -33 -46 -5 25 16 2 -21 -16 12 32 -15 -45 -9 15 24 20 -4 -46 -29 46 62 -6 -77 -52 32 36 41 20 -27 -5 -10 -8 -6 -5 15 13 -22 -36 22 43 8 -24 -23 20 24 0 -22 -7 -1 8 -5 -35 2 37 23 -12 -30 -3 14 2 -2 0 -2 -1 2 -2 0 2 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 0 -1 1 1 0 0 -1 -1 0 1 0 0 -2 -1 0 -1 -1 1 -1 -2 1 0 0 0 -2 1 0 1 1 -1 0 -2 0 1 2 1 -1 1 0 -1 -1 0 0 2 1 -1 -2 -1 0 1 0 -1 -2 0 -1 -1 -2 0 1 0 -1 -1 1 0 0 -2 -1 0 0 1 1 -1 -2 0 1 2 0 -2 -1 0 1 1 -1 -1 0 -1 0 -1 -1 2 -2 -2 -1 -1 1 0 0 0 -2 0 2 2 -2 -2 0 0 1 0 -1 -1 1 1 -1 -1 -1 -1 1 -1 -1 0 0 1 0 0 -1 0 -1 -1 -1 0 1 -1 1 1 -1 -1 -1 1 0 1 -1 0 -1 -1 0 0 1 0 -1 0 -1 -1 -1 -1 0 0 -2 -1 0 -1 1 0 -1 -1 0 -1 0 -1 0 -1 -1 1 1 1 -1 -1 0 1 0 -1 0 1 0 1 -1 -2 -1 1 1 -1 -1 -1 1 0 0 0 -1 -1 0 -1 0 0 1 -1 0 1 0 0 -1 0 2 0 -1 -1 0 -1 0 -1 1 -1 1 0 0 0 -1 -1 0 -1 0 0 1 0 -1 -1 -1 -1 2 -1 -1 0 0 0 0 0 0 0 0 +65508 1245176 20906215 4219142327 62978852 1883818 995609 1183417 6152616 7215137 2369666 872420 334216 85000 448208 1706717 1267666 147373 84473 143426 167284 188557 164914 67997 16133 5821 15730 11458 349 20180 28125 4625 1700 4250 2440 2690 6109 8009 9986 19300 17946 10852 5410 4880 7762 8450 3844 640 500 1258 3258 1709 500 9901 12665 4373 2161 2825 5617 5777 3250 2125 829 145 850 2621 5017 4005 365 1657 4850 6625 2210 1620 11560 8245 4948 5256 1825 800 325 58 794 1845 2465 2788 2554 1858 313 900 5290 5578 2141 881 445 400 1249 2106 801 416 2957 5960 5965 3728 2977 1129 125 100 250 653 1780 1913 1105 976 484 50 89 1229 1898 1044 205 8 4294836224 196607 65534 2 0 1 0 0 0 0 65536 0 131071 1 4294901760 65535 1 4294836224 65535 4294967295 4294901761 131070 0 4294836224 1 65537 65535 65534 131073 4294901761 1 4294967295 0 65538 4294901759 65535 1 4294901759 4294901760 4294901759 65536 4294901760 131071 0 4294967294 0 65537 4294901759 65536 2 4294967294 65536 4294901761 65535 65535 4294967295 4294836226 4294967294 131071 0 4294836224 131072 4294836226 65534 65536 4294901760 131071 4294901761 4294967295 131071 4294967295 0 1 4294901760 4294901760 4294967295 65536 131071 4294901761 4294967295 1 4294901761 4294901760 65535 65536 4294901760 4294901760 4294967295 65535 4294836224 65535 131071 4294901760 65535 65535 65535 4294967295 65537 4294901761 65535 1 
65535 1 4294901761 4294967294 65537 4294967295 131071 0 4294901760 65535 65535 65536 65535 1 4294901760 131072 4294901760 65535 65535 131071 131071 0 4294901760 65535 65535 65536 4294901760 4294967295 196607 4294967295 0 0 0 0 +-97 0 -12 115 -217 435 1663 -424 -1909 -1128 524 1243 -301 -814 -993 1725 3273 -272 -1638 -1569 55 -366 -1196 859 685 380 451 -124 -359 -1076 -1148 1323 1417 151 -236 -391 193 -108 -308 -171 -89 64 -60 344 201 -113 138 40 -220 -214 142 273 12 -192 22 15 -66 -91 -110 32 30 190 171 -106 -115 -78 6 66 -17 4 57 -20 -54 17 7 -47 43 131 -16 -210 -22 167 22 -122 -83 70 113 -17 -152 -15 67 104 92 -59 -113 28 122 -86 -142 105 151 -50 -86 -69 -57 49 16 99 114 -48 -32 -53 -42 -4 23 46 22 -45 -36 -40 -50 40 3 74 98 -52 -81 -8 10 65 134 -54 -155 -103 -1 142 27 -39 27 45 34 -41 -35 -62 -32 31 -70 55 183 79 -10 -220 -147 48 36 96 19 -70 -11 94 53 -92 -84 63 124 -3 -68 -97 -42 73 46 11 -8 -44 -22 30 6 34 82 -80 -142 25 74 11 -39 32 53 -1 -28 -54 -10 80 36 -52 -6 19 -2 -52 -69 75 136 11 -51 -117 -56 73 45 -15 -34 7 21 -9 -26 27 23 -23 -19 23 27 24 18 -65 -21 43 6 -62 -45 44 28 14 9 -30 -50 19 47 21 3 0 11 -39 -22 10 -1 3 3 2 1 -5 0 6 -1 -1 2 1 0 -2 0 3 2 -1 0 -1 0 0 0 -1 1 1 -1 -1 1 0 -1 0 0 -1 -1 2 0 2 1 -1 1 -2 -1 2 2 1 -1 -4 -1 1 1 0 0 -2 -1 -1 2 1 0 -1 1 0 1 -4 -3 0 1 0 -2 -2 0 2 1 -1 -1 -1 0 1 2 0 -1 0 0 -1 0 -1 -1 -1 -3 0 3 1 -2 2 3 -3 -2 3 0 -1 0 1 2 -1 -1 1 0 -1 0 2 -2 1 3 1 -1 -2 -1 2 2 0 -2 0 4 1 0 -3 -2 3 4 -1 -1 -3 0 -1 -1 -2 -3 1 0 -1 0 2 2 -2 -3 1 1 0 1 -1 -1 0 -1 2 1 0 2 0 -1 -2 -2 3 3 0 0 -3 -1 -1 0 1 0 -1 1 -2 -1 -1 -1 0 0 0 0 -1 1 1 -2 0 0 -1 -3 -1 1 0 -1 -1 1 0 -3 0 -1 4 3 -2 -5 2 4 0 -1 -1 0 0 -1 -3 -1 2 -2 -1 1 0 0 1 1 -1 0 0 1 0 -1 0 1 -1 -2 -2 -1 0 0 2 1 -2 0 1 3 -1 0 -2 -1 0 1 -1 -1 -1 0 3 -1 -3 0 0 1 -1 1 1 -2 -4 -1 2 2 0 -1 -2 -3 1 3 0 +65439 7602164 28573479 4267181695 4221106315 1819625 753197 3961674 10786513 5144805 136981 2168297 613625 218777 1286657 3068233 2030690 208577 48913 124105 12017 121936 53170 20644 94196 94693 37008 709 12637 13124 37000 40477 19309 4392 305 3649 3205 2258 19010 44356 28373 15368 11789 13058 23329 15305 11945 13553 22280 31189 25301 12157 5650 10057 15300 3833 1780 2645 2509 2896 4100 5485 12308 6625 4325 20872 34634 20165 2250 2754 2837 5069 1985 7925 39730 48500 23913 10512 5261 8957 11273 11025 15385 14033 7093 2237 2000 1384 1192 13124 20789 5597 2545 2810 3700 6500 4000 397 2708 10386 18617 16290 8465 2250 1205 522 1405 1058 890 1305 4549 2290 3880 3961 980 981 2861 2650 9 1642 584 10 131075 4294639617 393216 4294967295 65538 4294836224 196608 4294901762 4294901760 0 4294901760 65537 4294967295 1 65535 4294901760 196607 131072 4294901761 4294836225 196607 65538 4294770687 131071 1 4294836224 4294967295 65538 4294901760 1 4294705153 65533 1 4294901758 131072 4294901761 4294967295 65536 2 65535 4294901760 4294901760 4294967295 65533 65539 196606 4294770691 262142 4294901760 65536 4294901762 131071 4294901760 131072 131070 65539 4294901759 196607 2 65534 65540 4294770688 262142 4294901764 4294836223 4294901760 4294901759 131069 4294901760 131072 4294836226 131069 1 4294901761 65535 196607 1 2 4294901759 262142 3 4294770688 4294967295 65536 4294901760 4294836225 4294967295 65535 0 4294901760 65537 65534 4294901760 4294967293 1 4294967295 1 65533 327679 4294836227 196603 4 4294967295 0 4294836223 196607 4294967294 1 65536 4294901761 0 1 65535 4294901761 4294901758 65535 131072 4294836225 65536 4294901763 4294836224 65535 4294901761 4294967295 196608 4294836223 0 4294901761 65537 4294770686 196607 2 4294901759 131069 3 +4 0 
-37 -23 -225 20 508 609 -75 -1016 56 618 -486 -518 858 537 -1078 -1165 79 1612 546 -697 47 124 -336 -290 139 212 -178 -51 141 328 241 -345 -208 50 66 -58 -120 -13 0 168 177 -138 -154 23 -17 -57 17 185 101 -117 -66 -13 5 -3 -25 8 33 42 -3 -80 -22 58 23 -27 -49 1 48 36 -12 -17 18 -28 -61 -4 24 83 60 -39 -13 -64 -55 46 44 -17 -42 -24 -7 73 52 -43 -47 -19 -20 77 120 -25 -87 -77 5 51 1 -1 -18 -16 35 52 -18 -81 -3 69 1 -55 -6 56 31 -52 -61 31 78 -9 -70 -18 26 17 3 18 13 -38 -48 9 42 38 -28 -35 46 48 -26 -71 -3 46 9 0 4 -49 -37 44 23 -17 -14 28 35 -14 -36 -23 8 56 21 -38 -2 11 1 -23 -15 5 1 -3 -5 6 3 12 9 -24 -26 15 25 26 17 -51 -28 11 -26 -7 37 54 -7 -69 -17 64 42 -38 -22 4 -5 -10 5 4 -21 -2 7 11 8 1 -16 -1 31 10 -25 -15 34 8 -43 -32 10 52 19 -44 -30 25 14 -10 4 30 17 -52 -33 30 20 -13 -15 6 25 2 -30 -24 2 38 22 -17 -19 -2 12 3 -9 -1 4 0 0 1 0 0 0 0 0 1 0 0 0 0 -1 0 0 0 0 0 0 0 0 -1 1 0 0 0 1 0 0 -1 -2 1 0 1 0 -1 0 0 1 0 -2 -1 0 0 0 -1 -1 0 0 0 0 0 -1 0 -1 2 0 -3 -1 1 0 -1 0 0 -1 1 0 -1 -1 0 0 1 0 -1 -1 0 0 -1 -1 0 0 -1 0 0 1 0 -2 -1 0 -1 0 1 -1 0 1 -1 0 -1 -1 0 1 1 0 0 -1 0 -1 1 -1 0 1 -1 -2 0 1 0 -1 -1 -1 0 1 1 0 -1 0 -1 0 -2 0 1 -1 -1 0 2 1 0 0 0 -1 -1 0 0 0 0 0 -1 0 0 0 1 2 0 0 -1 0 1 0 -1 1 0 -1 -1 0 0 0 1 1 -2 -1 1 -1 -1 -1 0 0 0 0 -1 -1 0 -1 0 0 -1 -1 0 0 0 0 -1 -1 0 -2 0 0 1 -1 -1 -1 0 0 0 1 -2 -2 0 1 1 -1 -1 -1 1 -2 0 0 0 0 1 2 0 0 -1 1 1 0 -1 -2 -1 1 0 -1 -1 0 -1 -1 1 0 0 1 -1 -1 -1 1 1 0 -2 -1 0 1 -1 -1 -1 -1 0 0 0 -1 0 0 0 0 1 1 -1 0 0 +4 4293525467 1376031 39911932 4228448181 385060 504520 1024533 2519309 2604785 783925 17585 196996 64265 34285 127465 177106 45764 7720 14569 28224 50373 24245 3538 34514 23890 4525 34 689 2853 6409 3848 1258 2402 3600 433 1108 3737 7465 5121 4265 5141 2225 2340 5378 4553 2570 6329 15025 13498 2626 2 580 3929 6885 4770 3026 3172 3665 4682 6165 5224 965 333 1613 2385 3208 2009 4420 5717 2125 81 2417 3305 818 980 1421 1825 3200 1885 125 530 250 10 61 153 657 901 1301 2890 905 725 4285 4810 4385 3208 500 125 41 445 170 65 257 1061 850 1220 2873 2804 2297 1525 296 916 2993 1989 569 261 629 1476 1448 773 365 153 4294967287 4 65536 0 0 65536 0 0 65535 0 0 0 4294901760 1 0 1 4294901760 131070 65536 4294901760 0 1 4294967294 0 4294901760 65535 0 0 65535 196607 4294770688 131071 4294901760 0 131071 4294901760 65535 65536 4294901760 65535 4294901760 65535 4294901760 0 1 4294967294 4294901760 65536 65535 4294901761 4294901760 65535 65537 0 65535 131071 65535 4294901761 65534 1 4294967295 65535 65537 4294901760 4294901760 4294836224 65536 4294967295 131072 1 0 4294967295 0 0 4294901760 0 65536 2 4294901760 65536 4294901760 1 4294967295 0 65536 4294836225 131071 4294967295 65535 0 4294901760 65535 65535 4294901760 65535 0 4294901760 65535 65534 65536 4294967295 65535 0 4294836225 65534 65537 4294967295 131071 65534 0 65536 2 4294901760 65537 4294901760 4294967294 1 4294967295 4294901760 131071 0 4294901761 4294967295 65537 4294836224 65535 4294901761 4294967295 65535 0 65535 0 65536 4294901761 0 +-111 0 16 12 11 -383 -963 1191 2107 -207 -593 -1109 -520 -551 -1811 1305 2320 358 -921 479 1587 -1281 -1288 -399 -400 862 648 24 -146 -415 7 696 322 -855 -389 438 63 -262 -12 246 108 -95 -235 -223 -20 480 303 -104 -75 -299 -100 243 49 -186 -106 165 168 -75 -205 -33 136 194 33 -211 -71 118 55 -36 25 -78 -130 59 85 36 -25 -88 0 151 48 -148 -54 26 -49 89 155 -75 -180 -3 143 88 -47 -129 -14 74 -23 -20 36 54 64 -47 -62 -51 -64 22 112 100 -25 -127 -7 -17 -100 34 43 64 32 -6 6 -11 23 25 40 -90 -110 -16 -5 95 74 -13 -34 -35 12 28 55 -20 -100 -73 25 66 -76 25 155 60 -69 -171 -62 
138 79 -24 -13 7 38 -25 -64 -25 66 55 -37 -91 -50 41 51 53 -17 -62 5 54 43 -50 -101 16 128 37 -104 -73 65 52 -34 2 28 -68 -64 50 6 50 93 -28 -43 -15 20 -62 -56 52 -24 -5 83 30 -36 -20 -30 -44 44 90 -15 -78 -19 24 1 6 -4 70 76 -95 -49 57 15 -82 -30 47 -20 -32 -3 82 45 -41 29 23 -52 -107 -4 100 8 -47 -24 8 19 35 -4 -16 18 -10 -14 28 27 -45 -36 14 8 7 6 -1 -4 -1 3 1 -1 -1 0 0 -1 2 0 1 0 -1 1 1 -1 -1 -1 -1 -1 -1 -1 0 1 2 2 -2 -2 -1 0 -2 0 1 -1 -3 -1 1 0 -1 -2 1 0 1 -1 0 0 1 3 -1 -2 -1 0 1 0 -1 0 -1 -1 -2 -3 0 0 3 0 -2 -1 0 -1 -1 2 3 -1 -2 1 0 0 -1 -3 1 3 1 0 -2 1 0 -1 -2 1 2 -2 -2 0 1 1 0 0 0 0 -2 1 0 -3 -2 1 3 1 -4 -2 2 0 -1 1 1 0 1 -1 -2 2 0 -3 -3 -2 0 2 1 -2 -2 0 2 0 -2 2 2 1 -1 0 -1 -1 0 3 0 -3 -1 0 1 0 -1 -4 -2 1 2 2 2 1 -3 -3 0 1 1 1 0 -2 -3 2 1 0 -1 0 0 1 1 0 -1 0 -2 -2 0 1 -2 -1 0 0 0 -2 -3 2 1 -2 1 -1 0 0 0 0 2 1 0 -3 0 1 1 0 2 2 -1 -3 2 2 -1 0 -1 -1 2 1 -1 1 -1 2 -1 -3 0 1 0 0 -2 -3 1 1 0 0 0 -1 -1 -1 1 0 0 -1 -1 1 1 -1 0 -1 0 1 1 0 -1 -2 1 0 -1 -2 0 2 -1 0 -1 -2 0 1 0 +65425 786448 4269867019 78117949 4281403451 1581530 574001 4982746 5510564 1077682 4159530 1818145 903044 420480 193541 484465 834709 343165 72613 60660 20689 104954 230800 102625 95026 69049 36997 38461 33849 43114 56132 45610 18965 4321 6709 20381 8521 8369 22801 24208 3592 10322 29650 32409 28193 18850 5672 929 4212 6305 6445 4580 22544 16754 338 11156 5945 1060 157 1154 9700 12356 9050 5645 2381 928 3425 15329 4981 6401 27625 34002 22888 6817 218 2069 4721 7381 9650 4181 5410 4133 2941 4349 10457 17753 16145 6929 1160 5408 6596 2536 9433 2074 4244 5840 601 7789 1696 2836 10036 6309 937 37 4916 14801 5650 6949 3109 1424 6733 3706 1370 14153 10016 2273 640 1586 272 424 980 2754 983004 458760 4294901766 4294967292 65539 4294967295 0 196607 65536 4294901760 65537 4294967295 4294967295 4294967295 65535 131073 4294836226 4294967294 4294836224 65536 4294836223 131071 4294901760 131070 65536 65535 65536 4294901763 4294967294 65536 4294901760 4294901760 4294901759 65533 196608 4294836224 65535 4294967295 196610 4294901759 1 4294901760 131069 65539 4294836224 1 4294901759 131073 4294901758 65536 1 0 4294836224 1 4294901757 196609 4294705153 196606 4294901760 65537 65536 4294901759 2 4294836221 65534 65538 4294901758 131072 4294836224 131074 4294901761 4294901760 65535 3 4294967293 65536 4294901760 4294901756 131073 131074 4294770689 65533 65537 1 4294836222 65538 4294901760 0 65537 4294901760 4294836224 65534 4294836225 65535 0 4294836222 65538 131070 65535 0 131072 1 65533 65537 131072 4294901762 196605 4294901762 4294901760 196607 4294901761 4294901761 4294901762 65533 1 4294836224 131069 1 0 4294967295 131071 0 4294967295 65537 65535 65535 65537 4294901760 131070 4294901760 65534 4294901762 4294901760 65534 1 +8 0 -18 -30 222 -18 -644 -345 389 992 160 -798 -93 459 477 -383 -957 -825 -186 2038 1358 -1478 -1134 620 609 -288 -228 53 -57 61 147 123 183 -291 -335 -6 23 105 58 77 72 -123 -216 49 231 108 -80 -163 -78 79 128 72 -46 -101 31 31 -16 -31 -22 9 18 -16 -46 9 4 40 54 4 -23 -69 -38 66 49 -19 -25 -41 -50 106 157 -87 -163 -4 85 27 -54 14 83 4 -47 -50 21 37 -16 -59 -43 74 73 -39 -62 5 32 40 30 -80 -90 56 73 7 -16 2 23 -52 -48 64 49 -61 -39 57 45 -34 -52 -14 59 34 -69 -31 55 18 -15 -22 -35 41 69 -45 -61 -2 -1 37 -2 -17 61 30 -58 -19 50 -51 -21 66 -50 -60 78 35 -77 -8 26 12 31 2 -31 -20 -5 18 31 4 -16 -21 -7 24 42 -20 -43 -30 -4 30 -27 -3 79 60 -49 -136 -12 133 34 -77 -21 25 15 -27 -64 42 96 -6 -40 -39 -48 16 77 50 -25 -94 -38 55 24 6 15 -19 -40 13 68 12 -42 -62 -23 29 11 33 14 -22 15 2 -27 -16 8 0 -33 18 46 28 8 -57 -38 20 
21 8 3 -20 -25 8 10 12 20 -8 -18 -13 -7 9 10 3 -4 -5 0 3 1 -1 -1 0 1 0 0 0 1 0 0 -1 -1 0 0 0 0 1 -1 -1 -1 0 -2 -1 0 0 -1 0 0 -2 0 1 1 0 0 -1 0 -2 0 2 -2 -1 0 0 -1 -1 1 0 -1 -1 -1 0 -1 0 0 0 -1 -1 -1 0 0 0 -1 0 -1 0 1 0 0 1 0 -1 1 0 0 0 -1 1 0 -2 0 0 0 -1 0 1 1 0 0 0 -1 0 1 0 -2 -1 0 2 0 -3 0 0 -1 0 -2 -1 0 2 1 0 0 0 -1 -1 -2 1 0 -1 0 1 1 -1 -1 -2 -1 0 0 0 -1 1 1 -1 -2 -1 1 1 -1 -1 0 0 0 -1 0 -1 0 1 -1 -1 -1 -1 2 0 0 0 -1 -2 0 0 -1 1 2 1 1 -1 -1 -1 -1 -1 -1 0 -1 0 0 1 0 1 -1 -1 1 2 0 0 0 0 1 0 1 -1 0 -2 0 0 -1 -1 0 1 1 0 1 0 -1 1 -1 0 -1 -2 -2 1 1 0 0 0 0 -1 1 -1 -1 0 0 0 -2 -1 0 1 -1 -1 0 1 -1 0 1 0 1 -2 -1 0 1 0 0 -1 -1 0 1 1 0 -2 0 -1 -1 -1 0 1 0 0 -1 -1 1 1 -2 0 2 0 +8 4293066734 4293787870 4272422268 65012101 662404 219330 374218 1596474 4188040 4028648 1670356 453825 54793 6970 36738 118170 112261 11554 9293 20313 49057 65025 32969 12325 21568 12317 1922 1217 565 580 2197 1616 2932 5290 5800 2762 2306 13736 32218 26585 7954 3112 6905 4709 1810 3737 7325 6850 3869 2624 7300 11236 5378 260 3233 6400 6122 4770 3181 2900 4637 5722 3349 709 2906 6786 3725 1370 293 4621 3725 5101 4797 6100 7309 5993 820 965 1361 349 977 697 625 2164 2749 916 738 9841 20897 17833 7085 1066 954 5860 9252 3121 2560 8429 9461 4469 612 586 1769 4768 5608 1370 1210 680 229 985 64 1413 2900 3313 1844 505 409 689 244 464 493 655353 196618 4294705148 196608 4294901761 65535 1 0 1 4294901760 65535 0 65536 4294967295 65535 4294967294 0 65535 4294836224 65536 1 4294901760 4294836224 131072 4294967294 0 4294967295 1 4294967295 65535 65535 0 4294967295 65535 0 65535 65535 1 65536 4294901760 1 0 131071 4294836224 0 4294901760 65536 1 0 65535 1 4294967294 131072 4294770688 0 65535 4294967294 131072 1 0 4294967295 131070 4294901760 65536 4294901761 4294901759 65535 0 131071 4294901761 4294967294 65537 4294967295 0 4294901760 4294901760 65536 4294967295 4294967295 2 0 4294901759 0 131071 65538 4294901761 4294967295 4294967295 65535 65535 65536 65536 4294967295 131073 0 0 1 4294901761 4294836224 0 4294967295 65536 1 1 131071 65535 4294901759 131070 1 0 4294901760 4294901761 65535 0 4294967294 65536 4294967295 65536 65535 1 4294836225 65535 1 4294901760 65535 65537 4294836224 4294901760 4294967295 65536 0 4294967295 65537 65534 2 +-82 0 10 158 498 -105 -315 -710 -547 311 -448 411 1399 335 -1083 -794 -76 306 -4 1303 1519 -100 331 -1962 -1912 362 726 753 -67 -444 -139 303 364 42 -176 -437 -144 302 109 -36 -181 -34 194 209 78 -72 -96 -207 -41 93 111 248 -36 -501 -86 376 66 -139 -53 24 19 3 22 54 -16 -67 20 -1 -38 35 -26 -36 98 104 -66 -197 -92 165 72 49 122 -4 14 -149 -151 7 80 27 -112 7 81 49 -28 -35 2 72 111 -47 -106 -70 5 99 114 -57 -134 -51 35 -5 -72 114 47 -35 147 52 -137 -205 -33 128 28 38 12 -117 -133 203 323 -71 -251 -175 27 246 195 -183 -311 -21 206 182 -72 -199 -55 133 103 127 148 -393 -460 275 385 122 75 -297 -372 47 195 204 114 -163 -243 7 204 84 -88 -109 -22 87 58 -52 -44 6 -15 53 62 -74 -75 122 135 -120 -42 30 -62 -132 -129 129 182 109 -66 -143 13 94 102 -20 -96 -161 -69 197 128 -91 -23 46 -46 -131 13 124 -65 -31 123 21 -71 -18 31 -53 -81 60 73 27 4 -58 -35 12 -28 39 121 -3 -93 -63 25 45 -1 -36 -27 39 17 -26 5 51 17 -73 -60 62 94 -28 -86 -20 42 36 -9 -26 -4 11 3 2 1 -2 1 0 0 0 2 -1 -2 -1 -1 3 1 0 0 0 0 0 0 0 0 0 0 -2 0 0 -1 -3 0 1 -2 -1 -1 1 -1 2 -1 -2 2 -1 -4 -1 -1 1 2 0 1 0 -1 -2 -1 0 0 -1 0 1 0 -1 -1 0 0 1 2 -2 -3 -2 -1 1 1 -2 -1 -1 0 -2 -2 -1 -1 1 2 0 -1 -4 -2 -1 -4 1 0 1 -1 -1 -1 0 -1 0 -1 0 0 3 -1 -1 -1 1 1 1 1 2 1 1 1 -2 1 1 1 0 1 0 -2 -1 1 -2 1 1 1 -3 -3 0 2 1 0 -1 1 -1 0 -1 1 -1 -1 -1 -1 -1 -3 0 1 1 -1 -1 
-1 2 0 0 0 0 0 0 -1 1 2 0 1 -2 0 0 -1 -1 2 -1 -3 0 3 1 -1 -2 -1 -1 -1 1 0 -3 -1 1 -2 2 0 1 1 0 1 1 -2 -2 -1 2 0 -2 1 5 3 -4 0 0 -2 0 0 1 1 -1 0 1 1 1 0 -3 1 3 0 -1 1 -2 -2 -1 1 2 -1 0 0 0 2 1 -1 0 0 -1 0 -1 0 0 0 0 0 -2 0 0 -1 -1 1 0 -4 0 1 0 2 -2 -1 -2 -3 0 1 -2 -3 0 -1 3 2 0 +65454 10354698 4288086514 4248501957 20446685 369625 2069426 1803325 99412 1697825 2317361 3959005 3786788 1094085 201625 111130 134260 221945 111940 13177 33917 81317 11268 52065 10330 73825 252297 148772 23677 3385 370 3400 4745 401 2669 1972 20420 43165 35689 7585 14900 22397 22850 7129 12593 8962 2009 5188 14530 16136 9826 16245 20557 1250 18180 3434 24313 60794 17473 2228 13833 58898 109370 93626 61245 71514 97162 75560 44785 20714 26738 176353 287225 163109 93834 140593 79641 39565 59098 48672 19625 8053 6068 1972 3034 9320 20509 32625 2664 21268 33282 45005 24805 9005 10804 35137 43570 24665 2645 19277 15545 5186 15570 5365 3770 10161 6058 3380 1369 2305 14650 12618 2650 1297 2250 965 2626 5618 7444 9620 7796 3060 4293328887 786428 131075 4294836225 1 0 4294901762 4294967294 262143 1 0 0 0 0 4294836224 0 4294836223 65536 4294967294 131071 196607 4294901759 4294901762 4294967292 131071 2 1 4294901759 65535 4294901760 65536 4294901760 65535 65536 4294836226 4294901757 131071 4294836225 4294967295 4294836224 4294967294 131071 2 4294770687 4294967294 131068 65536 4294967295 65535 65535 65535 196608 4294967295 131071 65537 131073 65537 4294836225 65537 1 1 4294967294 4294836225 65537 4294770689 65533 65538 4294901760 4294901761 4294901760 4294901761 4294967295 4294967295 65533 65537 4294967295 196607 0 0 0 131071 2 4294836225 0 4294967295 4294901762 65533 65539 4294901759 4294967295 131071 4294770688 131071 196606 65536 1 65537 4294901758 196607 4294836224 327681 4294705155 0 65534 65536 4294901761 65536 65537 4294770688 196609 4294901760 4294836225 4294967294 131073 65535 0 65538 65535 4294901760 4294901760 0 0 4294836224 0 4294967295 1 65532 1 4294836226 4294901759 65533 4294836225 65533 262143 2 +-43 0 60 -3 -37 35 12 -214 -20 327 -341 -418 746 1085 -267 -1771 -458 1379 416 -551 -11 71 -357 13 293 159 -11 -17 6 -123 54 90 -149 -156 150 273 -35 -320 -65 219 75 -96 29 -39 -272 72 441 34 -432 -209 180 387 193 -314 -260 20 93 92 -32 -90 -2 103 40 -42 -20 -47 -27 73 38 -25 23 -30 -90 -3 43 43 16 -1 -10 -6 30 -37 -56 56 73 -71 -98 28 49 37 0 -32 2 27 -1 -35 -10 43 33 -37 -36 7 17 27 37 -48 -77 -21 9 82 65 -13 -10 -101 -82 86 59 -8 0 5 33 -42 -97 6 71 -1 -79 57 93 -47 -91 7 63 58 -13 -41 46 1 -27 -34 -88 -8 89 138 52 -111 -61 -52 -39 93 94 -13 -63 -90 -23 127 90 -78 -52 3 -37 -28 68 82 -93 -116 77 148 -13 -137 -46 64 19 6 -1 36 111 -53 -179 -41 153 111 -85 -168 -36 175 105 -105 -103 49 90 -44 -122 41 111 20 -26 -12 46 -49 -123 -9 83 112 29 -92 -54 -10 25 35 -63 -21 121 30 -143 -27 134 10 -67 -13 -27 -6 40 21 -12 13 0 -38 11 47 13 -53 -33 14 12 8 -14 -11 7 37 19 -28 -11 5 3 -11 -11 7 7 0 -2 -1 0 1 0 0 0 0 -1 1 1 1 0 -1 1 0 -1 0 -1 0 0 0 0 0 0 -1 -1 0 0 1 0 -1 1 0 0 0 -2 1 0 0 -1 -1 -1 0 -1 0 0 -1 -1 -1 1 0 -1 0 0 -1 0 -1 0 -2 1 0 -1 -1 0 0 -1 -1 0 0 0 0 -1 0 0 -1 1 1 -1 0 0 0 0 0 -1 -2 -1 -1 0 1 -1 1 -1 -1 -1 1 0 0 0 0 0 1 1 -2 -1 1 1 0 -2 0 0 0 0 -1 -1 0 0 -1 -1 0 0 1 0 0 0 1 0 -1 0 1 0 -2 0 0 -2 0 0 0 1 0 -1 -1 0 0 0 0 0 0 0 -1 0 2 1 -1 0 0 -1 -2 0 0 -1 1 0 -2 1 0 0 -2 0 1 0 0 -2 -1 0 -1 -2 -1 -1 0 0 0 1 0 -1 -2 -1 0 -1 0 2 -1 -2 -1 -1 0 -1 0 -1 1 0 -1 1 1 -1 -1 -2 0 0 -2 -1 0 1 -1 -1 -1 0 0 -1 -2 0 -1 -1 1 1 0 1 1 -1 0 -1 1 1 0 0 0 -1 -2 0 1 -2 1 1 -1 0 1 1 -1 -1 -2 0 -1 0 -1 -1 2 1 -1 0 -1 -1 0 1 0 +65493 
4294770748 2359259 4280942604 21495788 291005 1733741 3207730 2111405 476657 5162 127618 111130 410 15165 11016 46537 97029 103625 52186 14841 2362 79168 195637 230305 182169 135845 68000 17113 9124 10613 3364 2609 6058 2069 1429 8109 3698 257 136 2269 6272 10370 10388 3770 1024 733 1226 1949 2458 1345 1018 3673 6370 6805 4394 10301 14120 3545 25 2853 9445 5042 9490 10858 8330 7333 1850 2117 1885 7808 26965 15025 6425 10170 9005 12069 16658 14184 2713 2153 11348 22105 27833 18938 6212 397 1297 15130 33722 35730 35449 31921 22050 13010 10036 16565 12721 820 4517 15210 19433 9305 3016 1850 4410 15541 21178 18056 4658 765 2041 313 1444 2330 2978 1285 208 317 1418 1145 146 4294246403 524277 7 4294967294 65536 0 0 131071 65537 4294901760 1 65535 65535 0 0 4294901760 65535 65536 4294901760 1 0 131070 0 4294967295 65535 65535 4294901760 4294967295 1 65535 4294901760 4294901760 4294836224 1 4294967295 0 4294967295 0 0 65535 4294901760 65537 65535 0 0 4294901759 4294967295 65536 131071 4294967295 131071 0 0 65536 4294836225 131071 1 65534 0 4294901760 65535 4294901760 65535 65536 0 65536 4294901760 65536 4294836224 0 65534 0 1 4294967295 0 0 0 4294901760 131072 4294901761 0 4294901759 0 131071 4294836224 1 4294836224 65536 0 4294967294 4294901760 4294967294 65535 0 1 4294901759 65535 65535 4294901762 4294967294 65535 65535 131071 4294901760 65537 4294967295 65534 4294836224 65535 4294901761 4294967295 0 4294901759 4294901760 131071 1 65537 65535 131071 1 0 4294901759 65536 131070 4294901761 65536 4294901761 4294901759 4294901760 4294901760 196607 4294901761 4294901760 65535 1 +31 0 -116 -80 -16 311 691 -247 -1062 -769 -359 1053 412 923 1876 -390 -1101 -1828 -427 615 -677 789 1389 -196 -689 -427 -88 211 101 285 223 -258 -6 -42 -305 -261 -123 468 285 -74 -16 77 207 -308 -514 -139 -106 683 689 -224 -422 -115 297 160 -70 -356 -141 292 216 -226 -275 28 110 125 25 -64 -8 -53 -118 77 125 46 53 -103 -167 4 80 95 90 -121 -214 46 64 40 141 105 -89 -183 50 66 -39 2 26 -63 -70 41 56 6 -88 23 181 2 -202 -77 139 126 -56 -144 -17 174 124 -198 -240 98 180 79 -2 -72 -40 -18 82 38 -64 -144 -174 78 188 240 48 -273 -83 112 98 -61 -134 -68 0 134 128 7 -82 -210 -106 204 150 29 -23 -176 -47 155 31 -108 -14 33 -59 -24 6 141 102 -90 66 6 -195 -204 -90 276 257 41 -85 -113 91 -46 -115 30 101 -2 -83 -148 -118 109 44 137 54 -68 57 69 35 -36 23 -133 -43 26 -119 -84 -40 226 212 -99 -189 67 233 -99 -162 15 -1 -56 28 135 18 -174 -89 51 -146 167 430 50 -141 -362 -284 206 348 99 -176 -261 -44 214 95 -29 9 -45 -29 14 -4 -46 -10 78 20 -79 -35 43 1 8 18 3 10 -3 -8 -14 -3 1 -3 3 3 4 1 -3 -1 1 2 0 -1 1 0 0 0 1 1 1 0 -2 1 -1 0 -1 0 -1 -1 0 -1 2 0 -1 1 1 2 -2 -2 1 2 -3 -2 -2 -3 -1 -2 0 0 -1 0 3 0 -3 0 0 -1 -1 0 2 1 -2 0 -3 -3 1 -1 -2 -2 2 0 -1 0 0 0 -1 -2 1 2 0 0 -1 -3 0 0 2 -1 -1 1 0 -1 1 0 -2 0 2 0 0 0 2 1 0 2 1 1 -1 -1 -1 -1 -3 -2 1 2 -2 -2 1 -1 0 1 0 -2 -1 1 0 0 -2 0 -1 -2 0 0 -1 -2 2 2 -1 -1 -1 -1 1 0 -2 -2 1 1 0 0 0 1 -1 0 0 -1 -2 -1 1 -1 0 0 2 1 -3 -2 0 2 2 2 -3 -2 -2 -2 1 0 -1 -1 0 0 1 0 -1 -1 0 3 0 -2 -2 0 -1 -2 1 0 3 2 -2 -1 -1 0 0 -2 0 -1 0 0 2 0 -1 1 -1 -1 1 1 0 -1 -1 1 0 2 -1 -3 0 2 -1 -2 0 1 0 -1 1 1 -3 -2 -1 0 2 0 0 -1 -1 -2 2 -1 -1 2 1 0 0 -2 1 2 -1 -2 2 0 0 2 -2 0 0 1 -2 -2 -1 0 1 1 0 +31 4289789836 20447216 4278780595 4244634586 1237690 1021673 3671476 4553785 560554 1080850 1967737 657050 52265 91426 116293 1800 161146 234153 86701 6185 137713 283517 477725 524897 191309 113809 131636 105145 97732 76409 27725 4721 2873 19853 17741 13418 27905 15425 22741 47912 5696 30906 41410 6856 1525 4645 6581 3172 8273 32765 46733 
35197 23872 30565 54580 67204 38641 5188 1924 8168 24832 36360 92944 76833 19433 13325 22580 17956 16433 50824 52852 23341 31505 26234 12625 1285 4057 19917 18504 4392 79641 84276 67730 19994 10397 14125 10205 28793 25805 20705 7540 8010 2521 18218 2525 21217 52676 54745 40210 64090 26469 3137 19009 30600 10522 49205 187400 150925 123092 130905 99097 47732 9866 2106 1037 2132 6184 6641 3074 65 333 4294770698 4294115320 131069 262141 262147 4294770689 131071 2 131071 0 65536 65537 4294836224 4294901761 4294901760 4294901760 65535 196607 4294901760 65537 4294836226 131070 4294770690 4294901758 4294967293 65534 4294901760 196608 4294770688 0 4294967295 131072 4294836225 4294770688 131069 4294901759 196606 4294901760 0 4294901760 131070 2 4294901760 65533 131072 4294967295 1 131071 4294836224 131072 0 131072 1 65538 4294901761 4294967295 4294836223 131070 4294836226 131070 65535 1 4294967294 1 4294836224 4294901760 65534 4294901760 196606 4294901762 4294967295 131071 4294836224 131070 1 0 4294901761 0 4294901759 131071 65535 131072 4294770689 65534 131074 4294770690 4294901758 131070 4294901760 65535 65536 4294901760 65535 3 4294901758 4294901760 131070 196608 4294836226 4294967295 0 65534 65535 131072 4294901760 4294901761 131071 1 4294967295 1 4294901762 65533 4294901762 65534 1 131071 4294770689 4294967294 131072 0 4294967295 196606 4294967295 65538 0 131070 4294901762 196606 0 4294836226 0 4294836225 4294967294 65536 1 +-94 0 -2 137 34 -199 -151 438 634 -259 -560 -567 -70 749 -17 -666 -307 1243 1587 -665 -1485 -1054 179 1301 110 -472 130 334 100 -404 -291 92 55 204 202 -112 -189 79 348 -173 -426 45 331 -49 -390 95 547 -107 -749 -108 482 383 -159 -275 16 241 104 -124 26 68 7 -154 -76 52 38 -16 -80 -28 45 155 70 -195 -151 109 119 57 6 -174 -145 182 239 -14 -101 -249 -212 255 286 11 -51 -147 -135 43 127 38 -100 1 103 -4 -38 32 112 -137 -245 26 138 84 -66 -44 -2 82 79 -15 17 15 11 -119 -70 59 -1 10 60 17 -10 -72 -129 92 271 3 -198 -145 62 95 -64 -37 71 18 -113 16 155 28 -55 -112 -118 61 180 73 -116 -175 -35 114 33 83 83 -116 -80 47 90 -28 -126 -94 -61 190 267 7 -245 -221 160 312 44 -315 -202 68 100 112 -11 -113 -59 99 133 -17 -112 -117 -44 209 227 -152 -253 -5 97 92 84 -2 -89 -120 71 102 -54 -135 -125 38 40 237 233 -169 -184 -71 9 69 -35 -14 95 179 109 -362 -331 225 283 -27 -137 -65 -26 35 13 41 9 39 76 -38 -52 -25 -12 11 31 39 27 -34 -21 -59 -61 54 74 13 -49 -40 20 34 -4 -15 0 1 0 2 1 -1 -2 0 0 0 0 1 0 -1 -1 0 0 1 -1 0 -2 0 -1 0 0 -1 -1 0 0 0 -1 -2 -4 -1 1 2 -1 -2 1 -1 -1 0 0 0 -1 -1 -1 -2 -1 2 0 -4 -3 0 -2 2 2 0 -2 1 -1 0 1 -1 -2 4 1 -2 -1 1 0 1 0 0 0 0 0 -1 1 -2 -2 -2 -1 1 0 -3 -3 0 0 0 -3 1 2 1 0 -3 -2 1 -1 1 0 0 1 1 -2 -1 1 3 0 -3 0 4 2 0 0 -1 1 -1 -1 1 -1 -2 0 -1 -1 -2 -2 0 0 1 0 -1 0 2 3 0 -1 0 2 -3 -2 0 1 -1 0 0 0 -1 0 -2 -1 -1 -2 0 1 2 -1 -2 1 -2 1 0 -2 -2 1 -1 1 1 0 0 0 -2 0 -1 -1 0 1 1 0 -3 -2 -1 0 2 1 -1 -1 0 -2 -2 -2 0 -1 -1 1 -1 -1 -3 -1 3 2 -1 -2 -2 -2 1 0 -1 0 0 1 2 -2 -2 2 2 0 0 -1 -1 1 1 -1 -3 1 0 -3 0 1 0 -1 1 3 -4 -3 2 1 -3 -2 1 2 0 -2 -1 1 0 -2 0 1 -1 -2 0 -1 1 2 1 2 1 -1 -2 0 0 -2 1 0 0 +65442 9043966 4281925666 28770153 4277994106 635089 565901 443845 1639298 2960794 3316141 1724642 234884 128456 173216 93145 44641 53348 41962 151033 183501 111962 161125 310658 572665 379013 100906 58337 26192 5300 23765 8480 1700 7184 26050 42925 34682 17410 30312 54149 57317 72202 109969 81917 24210 20074 17573 10001 10625 2468 31313 60701 26100 6292 6728 6466 514 14282 8381 101 3889 5284 25105 73450 60229 12869 5465 5365 13025 24809 15569 17645 37729 44081 14221 7978 20345 8609 8884 
[... large machine-generated numeric test-data arrays (thousands of integer values, apparently interleaved signed sample deltas and packed 32-bit words) elided here; this portion of the patch is not human-reviewable ...]
-3 4 4 -5 -2 3 1 4 6 -3 -6 -5 1 4 1 0 2 -1 -2 -4 0 2 -1 3 2 -2 1 -1 -2 -3 1 2 -1 -1 -1 -1 -2 -1 3 4 0 -5 -4 0 0 4 0 0 4 2 -3 -5 1 2 0 -2 -1 -2 0 1 3 -4 -7 -1 3 3 -4 -1 3 4 4 -4 -2 -5 -2 2 0 -5 -4 2 -4 0 5 4 -3 -3 0 4 5 0 +65415 5373654 1507908 21822767 4294312464 827713 2307780 822274 3331850 4827226 4347146 6191993 4376770 1055629 582317 151682 281538 356425 526525 112361 63125 293213 419525 273440 65650 86816 102589 69290 15668 34821 25605 725 13225 20693 11978 32162 34180 25636 82640 170953 111033 108496 51985 1853 35513 8513 13285 21001 22753 34120 9594 95042 62600 66664 62145 8552 52994 108980 63169 13850 36104 81325 37825 18866 20714 9945 10210 16829 12308 10525 1913 21841 40525 62900 59810 58066 58564 22805 40621 46196 18234 2745 925 11221 23764 16648 3442 8077 27325 21613 51080 72676 63522 33581 8641 25045 15794 8081 46490 18685 109234 160450 22736 20610 18317 32356 67477 41704 68725 53512 26792 26165 42797 26050 7328 4825 2005 1341 17722 15650 3285 2738 4291166223 1114088 65539 4294901761 4294836231 4294901753 262143 4294901765 65533 131073 4294836224 131072 7 4294705148 196604 4294901762 393217 4294770685 4294836226 196606 4294836223 4294967294 4294901760 131072 131071 4294836230 4294901754 196613 4294967291 65542 4294836221 4294901757 196607 65534 196609 4294705153 65536 65536 4294770688 2 4294967295 65534 196607 3 4294705153 196603 4294836230 4294836222 196607 65533 4 4294836225 131068 4294901758 4294901761 262143 4294770686 196606 4294901762 4294967295 65533 4294967295 262141 4294901763 262142 4294901759 65541 4294836223 65531 131073 1 4294901759 131071 2 65529 65544 65533 4294901763 4294705152 4294967294 65535 131071 65538 4294770691 4294770686 393215 4294705153 196604 4294967294 196607 327677 4294639620 262142 262145 4294770694 4294705146 262145 1 4294901762 4294770686 131072 262143 4294836226 4294901761 4294836222 131073 4294967295 4294967295 4294967294 262147 4294639616 65532 262144 0 131076 4294705149 131073 4294836224 4294901759 65536 4294705155 4294967289 196611 4294967292 262147 4294705156 4294705150 196606 4294639616 196604 65532 262149 4294836221 262144 5 +-107 0 109 58 182 -137 -731 -313 710 1257 243 -1912 -1544 1713 2293 -894 -1893 -24 610 31 -217 326 -134 428 1078 -391 -654 -327 -97 80 105 277 184 -415 -573 186 410 258 35 -186 -72 -49 22 -34 -97 16 -118 101 302 45 -217 -90 170 39 -106 -24 97 -51 -147 22 60 48 16 -17 -3 17 -4 -23 2 -30 -39 134 138 -169 -217 102 243 34 -107 -143 -41 24 -22 49 3 71 96 -63 -26 -21 -77 0 59 39 -10 -37 -2 63 1 -131 -46 190 117 -125 -102 -43 -9 168 111 -151 -87 71 52 -42 -33 -32 -22 47 -29 -23 90 47 -172 -46 224 148 -68 -203 -64 68 40 39 3 -40 -32 15 53 17 -47 -43 -6 64 80 -40 -121 -1 135 60 -69 -85 66 -11 -142 2 54 43 7 46 58 -33 -19 -75 -96 63 130 32 -82 -65 55 14 -53 23 17 -62 -42 103 97 -59 -97 -11 41 33 -13 3 24 15 30 -35 -41 -9 41 -17 -120 15 112 59 25 -40 -60 -112 -67 119 53 12 26 30 72 -105 -164 8 77 67 -33 19 114 -28 -88 -41 24 25 -11 -19 -19 36 76 -30 -103 -34 58 48 -63 -9 81 31 -43 -74 -22 79 43 -29 -11 -1 -6 16 24 -25 -25 11 10 -3 -2 -1 -1 1 0 0 0 0 1 0 -1 0 2 0 1 0 -1 -1 0 -1 -1 0 0 1 -2 -4 0 1 1 1 -3 -2 0 0 -1 1 2 1 -1 0 1 0 2 -1 -3 1 1 -2 0 2 2 -1 1 1 3 -1 -3 -2 1 2 0 0 1 -3 -2 -1 0 0 -1 0 0 -1 0 1 2 0 0 -1 1 -1 0 -3 -3 2 0 -2 2 2 -4 -3 0 3 0 0 -1 0 0 3 1 0 1 1 1 -3 -1 0 1 -1 -1 1 2 -1 -2 0 2 2 -2 -2 1 2 2 -2 -3 -3 0 3 -1 -3 0 0 0 -1 1 0 -1 -3 -2 2 0 1 1 -1 1 -3 -2 1 -1 0 0 1 -2 -1 -1 2 -1 1 -1 -2 -1 2 2 1 1 0 0 0 0 0 -2 0 1 -1 0 -4 -1 -1 0 1 -1 -2 -1 2 0 -3 -2 1 0 1 0 0 2 0 0 -1 -1 -2 -1 -1 1 -1 -3 1 0 2 1 0 0 1 2 -2 -3 
-2 0 3 3 -2 -1 -1 0 0 2 0 -1 -2 -2 -3 -1 2 1 0 -1 -2 -1 0 1 0 -1 0 0 1 2 -2 -2 -1 1 1 0 -2 -1 0 2 -1 -3 -1 0 2 3 -1 -2 -2 1 -1 -1 0 +65429 3801197 4285989046 4274519333 82379462 3714793 5318305 6057085 3584025 373061 153365 201140 1314965 534645 15809 87754 206081 362925 234664 35821 7585 1640 9665 24125 93229 55189 30421 11812 12010 22093 5904 545 298 545 904 19477 47605 57493 60205 31898 2257 2885 5050 13185 1117 5929 5002 1469 3973 17162 38216 29314 12253 28305 35122 12610 4468 2113 2693 1370 10309 31700 72080 45833 8720 3121 1609 1249 3098 4058 4132 8000 14642 21825 11986 4477 20168 4765 2165 4453 5986 13185 17924 10949 3221 3338 4133 12373 12890 9530 2770 178 801 2125 1762 1970 14625 16025 2225 16144 18650 2953 1576 16209 26960 10418 1450 13780 9425 1201 482 1657 6676 11765 5668 4050 7522 7325 6725 2690 122 292 4293328920 786407 4294770698 4294967294 131071 0 0 1 65535 2 1 4294967295 4294901760 65535 65536 4294770686 65536 65537 4294901757 0 131071 65538 65535 1 4294901762 131069 4294836225 131072 4294901762 65537 4294901763 4294901757 131073 0 4294770689 4294967294 0 65535 4294901760 65536 2 4294901760 4294901761 4294770688 196605 4294836224 131074 4294836220 196608 0 65535 196608 1 65537 4294770689 65535 4294901761 131071 4294901762 65534 131074 4294901758 131073 4294836226 4294836221 196608 4294836223 0 4294901760 1 4294836223 196606 65536 4294901761 4294770689 131070 65535 65536 4294967294 196607 131071 4294901759 196607 65538 1 0 0 65534 4294901761 4294705152 4294967295 65536 4294901759 196607 4294770688 131070 65536 0 2 4294901760 4294901759 4294967295 4294901761 131069 131072 1 65536 4294836226 4294901757 196608 4294836227 4294967295 0 2 4294901759 4294836222 196607 1 4294901759 65535 1 65535 65536 4294836226 4294967294 65537 4294836224 65535 4294901762 4294967293 131072 4294901763 4294901758 4294901761 65535 +33 0 -47 -85 178 330 146 -748 -615 130 68 293 -624 -160 681 1577 1013 -1258 -537 -600 -1109 722 1691 147 -1014 -847 -10 555 108 -52 30 140 45 -98 150 -57 -255 -100 5 127 154 31 -163 -115 17 158 273 -63 -376 -223 102 308 46 -50 33 -1 104 -103 -274 15 244 82 -156 -74 74 41 1 22 15 -133 -145 128 122 28 16 -20 35 -4 -20 -104 -77 44 62 95 0 -209 -121 227 204 -95 -173 -58 87 99 -40 -84 -15 36 -47 79 215 -30 -183 -126 28 138 77 -37 -26 -82 -88 8 35 102 33 -94 -64 45 12 45 116 2 -27 -157 -162 54 86 100 14 -37 2 -35 -14 62 23 -70 -45 29 36 44 -6 -86 -38 97 58 -21 28 -10 2 -54 -111 -47 23 162 76 -70 -40 -46 -22 67 51 -43 -43 39 65 -84 -103 16 -20 76 129 10 -47 -107 -46 61 16 -31 -29 38 27 44 38 -93 -69 86 73 -58 -59 37 105 -16 -135 -92 23 143 70 -29 -28 -69 -25 32 -2 17 4 -19 -28 67 88 -37 -16 0 -12 -72 -53 38 9 46 54 -27 -59 -3 44 5 -35 31 73 -35 -34 -31 -54 -7 19 57 8 -31 -23 38 64 -17 -48 -32 -2 40 30 -10 -13 -14 -1 6 0 1 0 0 1 0 0 0 -3 1 2 2 2 -1 -1 -2 -2 2 0 -1 0 2 -2 -2 1 3 0 0 1 -1 0 0 1 0 1 -2 -3 -1 3 0 1 -3 -3 -1 -1 1 0 3 2 -4 -3 0 0 1 0 0 -1 -1 -3 0 1 0 2 -2 -3 1 -1 1 0 2 1 -1 -3 1 3 -1 -2 0 0 -1 0 1 -1 -1 2 -1 -2 -1 -1 2 0 -1 -2 3 1 -1 -1 0 0 0 0 0 0 -2 -1 0 0 -1 -2 1 0 1 1 0 -1 -1 1 0 -3 1 1 1 1 -2 -2 2 2 0 -3 2 3 -2 -3 3 1 1 1 -1 2 0 1 -1 -1 -2 1 1 0 -2 0 0 2 0 -1 -1 -2 -2 -1 3 2 0 -1 0 2 2 0 -2 2 1 -1 -1 1 0 -1 0 1 0 2 0 -1 -1 -3 0 2 2 1 -2 0 1 2 -2 -1 -1 -2 0 2 -2 -1 -1 -3 0 3 0 -3 -1 2 1 3 -3 -5 -1 2 1 -3 -1 1 2 -1 1 0 -2 0 -1 0 -1 0 2 2 -1 0 -2 3 2 1 -4 -3 0 1 0 0 0 -1 0 0 1 0 0 2 -1 -1 -3 -1 2 2 -3 -2 1 1 0 1 -3 0 3 -1 0 +33 4289462225 21627058 4245946514 8584601 90473 414976 2950690 2608733 648369 1751165 2881090 1745605 308125 14368 20500 11629 25749 75025 16154 
24677 39794 25253 78498 191105 105268 4616 1090 21425 75301 66260 29812 7157 485 17914 37409 15668 656 1241 11216 7865 12869 43681 66170 50641 33293 17370 8656 1521 8450 47125 49365 19828 7298 7400 7808 11629 9925 6121 2169 13460 25378 29160 17396 1565 1229 4040 5429 2866 3232 7432 10853 3805 884 2920 14530 26773 10676 3716 4973 4450 3370 11281 10865 6176 16741 13658 5837 1217 2285 2665 10093 12157 8693 4850 11281 26689 20978 5741 5545 1649 293 377 5273 9113 256 5328 4253 2197 3645 3490 1961 2186 6554 2117 2965 3610 1025 1973 4385 3328 1604 4294311966 4294115315 458751 65536 0 1 0 131069 131074 4294901762 4294901759 196606 4294901760 131072 4294901758 196609 0 4294901761 0 1 4294836225 4294967293 3 4294770689 4294967293 131071 196608 4294705154 65533 65536 0 4294967295 65533 1 4294836226 131069 131071 131072 4294901761 131069 4294901763 65534 4294901760 65536 4294967295 4294901762 4294967294 196607 4294901760 262142 4294901761 65535 0 0 4294836224 65535 4294901760 131070 65536 1 4294967295 1 131069 65537 4294836225 196606 2 196605 4294836227 262141 65537 4294901761 2 4294901761 4294901759 65537 4294836224 0 2 4294967295 4294901758 262143 2 65535 131074 4294836224 65538 4294967295 1 65535 1 2 4294967295 65533 131074 4294836225 65536 4294836226 4294967295 65534 4294836226 4294967295 65533 3 4294967293 65538 4294770691 4294967291 65538 4294967293 131073 131071 4294836224 4294901760 4294901760 131072 4294901762 4294836224 131075 4294705153 65533 1 0 65535 65536 0 4294901762 4294836223 196607 4294770690 131070 1 4294770689 196608 65535 +-117 0 20 74 72 95 371 -233 -904 -101 1153 474 -785 -935 -374 746 751 -279 -553 1155 1158 -2381 -1721 1880 746 -627 200 338 -222 -111 255 -20 -95 -160 -141 65 80 180 139 -237 -241 37 95 242 171 -410 -429 368 424 -81 -130 -9 62 -136 -160 165 263 -98 -235 -112 -29 111 47 116 81 -95 3 -16 -112 -45 61 84 -64 26 158 -95 -213 105 218 -60 -161 -26 22 91 122 8 -45 -132 -41 61 35 -48 -105 26 62 53 -13 1 77 -3 -12 -70 -131 -14 110 109 -66 -55 51 7 -5 39 -13 -57 53 -14 -127 56 108 -20 -27 10 8 -5 -6 -3 38 -23 -77 -9 17 47 52 16 -27 -95 -32 83 53 -16 -17 -62 -50 53 23 -16 12 62 -10 -121 -32 166 86 -103 -36 -10 -37 44 42 -22 9 18 6 -32 -41 -58 -39 142 130 -90 -125 -15 66 58 -23 -82 -55 106 114 -64 -90 4 1 12 16 56 69 -29 -61 -58 52 94 -29 -178 -66 171 77 -71 -22 3 -31 21 44 -11 -7 18 2 -66 -53 52 23 23 72 -11 -94 -55 59 80 -33 -62 43 73 1 -128 -77 93 51 -13 14 12 -7 -46 -32 32 33 -12 -32 3 16 22 5 -19 0 1 -6 10 7 -12 -2 3 -2 0 3 1 -1 -2 2 0 -1 -1 -1 -1 2 1 0 -1 1 2 -1 -3 -1 1 1 1 -1 1 0 -2 2 2 -1 -2 -2 0 -1 -2 0 2 3 0 -4 -2 -1 1 1 0 -2 1 -1 2 0 1 3 1 -1 -1 2 -1 -3 -3 0 1 1 0 1 0 0 0 1 -2 0 -1 1 1 -2 -3 -1 -2 -1 1 0 -1 -1 0 -1 1 0 1 1 0 1 0 -1 -2 -1 -1 -2 0 -1 2 1 1 0 0 1 -1 -3 -2 3 2 -1 -3 0 0 0 2 2 -1 0 -1 0 0 -1 -1 1 2 2 -2 -4 2 3 0 1 0 0 -2 0 -2 -2 0 0 1 0 -2 -2 1 -1 1 3 2 -2 -2 1 0 1 1 0 -4 -1 2 1 -2 0 -1 1 2 0 -3 -2 0 -1 0 -1 0 0 -1 1 -1 -2 0 4 2 0 -4 -3 -2 -1 3 4 -1 -2 -1 2 3 0 -2 1 -1 0 1 -1 -2 1 2 1 -2 -3 1 3 1 0 -2 -1 -1 1 1 0 -2 -1 -1 0 -1 0 0 1 2 -2 -3 2 0 0 0 -2 -1 1 2 2 -2 -1 0 -2 1 2 -2 -2 0 1 1 -2 -1 1 2 -2 0 2 1 1 -1 0 1 -1 -2 -1 0 +65419 4849684 6225992 4279697779 4288412792 1554085 1490450 696392 641842 1639834 7010125 6496241 949645 154244 61605 65425 34625 24106 38800 75490 59450 67589 197341 319465 186337 16981 22340 52825 78773 67769 13162 15665 15586 265 14569 10777 4772 33989 56394 51124 26597 8765 14948 19449 5402 3529 11701 6653 170 5938 5044 17357 23981 7381 2650 1546 3418 3005 19265 12064 829 89 45 1973 6010 2498 2960 9754 7913 3065 4133 5309 785 
3988 14741 28580 18005 1396 3305 2248 405 1060 5045 21685 25000 15850 7720 7253 14261 17092 8116 145 3392 5602 7085 11540 32525 33597 10970 493 1402 2057 373 4360 5513 1058 5305 11861 9881 4933 7178 16385 14578 2770 340 2165 2048 1233 1033 740 386 1 720890 4294180871 262142 65534 65539 4294901759 2 4294967295 4294967295 65538 4294901760 131073 4294836223 131071 65537 131071 4294836224 131074 4294901759 65534 4294901759 131072 3 4294901756 131071 1 131070 196607 65536 65539 4294967295 4294901762 4294836221 65536 1 1 0 4294836225 4294901760 65537 4294836222 4294901759 131071 4294901760 65535 131071 65536 1 1 4294901759 4294967295 65534 196607 65537 0 4294901761 4294901757 131075 4294836223 0 131072 4294901762 4294901760 0 4294967295 131073 4294836226 196604 3 1 4294836224 4294836224 65534 65536 4294836224 131070 131071 131075 4294901758 1 65537 4294705152 196607 4294836225 4294901760 131073 4294770688 65534 65535 65535 4294901760 4294901761 65534 131076 4294705152 4294901757 262143 4294901764 4294967294 196610 4294836224 4294901761 65536 4294901759 131073 4294836225 131069 65539 4294836224 4294967295 65537 4294836224 4294967295 4294901760 0 131073 4294836222 2 0 4294967294 131073 4294836226 65535 131070 4294836226 65534 65537 4294967294 131073 65534 65538 4294901761 65536 4294901759 65535 +-35 0 3 -3 -22 56 2 29 268 67 -86 -502 -547 377 744 42 -671 -513 -348 954 1420 163 -412 -1356 -806 635 593 273 -143 -331 52 164 -85 -73 49 25 5 44 12 -47 -21 -23 -23 111 193 -132 -269 -49 78 96 -30 45 77 -13 8 -16 -11 -57 -28 35 -28 -17 60 73 -18 -98 -14 38 -20 18 36 6 48 -19 -96 -98 -21 153 67 -49 -2 9 1 -46 -38 39 33 -6 2 -8 -29 19 68 -30 -79 6 62 -4 -78 -16 89 77 -63 -143 -13 138 28 -45 31 -2 -43 -33 -31 64 134 -21 -145 -92 39 132 36 -82 -53 48 54 -25 -31 0 10 -8 -22 27 36 -17 -1 -2 -49 -15 57 29 -46 -14 30 -28 -39 51 14 -24 50 40 -24 -72 -30 2 8 80 32 -92 -40 55 7 -22 29 20 -40 -47 2 55 11 -11 10 -6 0 17 -8 -41 23 60 0 -86 -42 40 22 16 -4 -29 -4 43 7 -47 10 80 34 -115 -64 52 11 -21 -5 53 43 -51 -66 8 42 37 -11 -38 16 31 -16 -53 -24 59 45 -12 -13 -7 14 -12 -24 -15 -2 17 -18 -2 25 36 17 -44 -34 19 43 -14 -52 -5 35 24 -13 -33 -6 40 24 -36 -26 17 13 -1 0 -4 -2 0 0 1 0 1 1 0 -1 0 0 1 -1 0 0 0 0 0 0 0 0 0 -1 0 0 0 0 0 0 -1 -2 0 1 0 -1 1 1 1 0 -2 1 1 -2 -1 -2 1 1 0 -2 -1 0 0 -1 1 -1 1 0 0 0 -1 1 0 0 -1 -1 -1 -1 -1 0 1 -1 0 0 0 0 1 0 0 1 1 -1 -1 1 -1 0 -1 0 0 -1 0 1 0 -1 -2 0 2 0 -2 -1 2 2 0 -2 -1 0 -1 -1 0 -1 0 0 -1 -2 0 1 0 0 2 0 -2 0 -1 0 0 0 -1 0 -2 -1 1 0 1 0 -1 0 0 -1 -1 0 -1 0 1 0 0 0 0 -1 -1 0 2 1 -1 -1 -1 -2 -1 0 1 1 -1 0 1 0 0 -1 0 1 -1 0 -1 0 1 -1 -1 1 -1 0 1 0 0 0 -1 0 -1 1 0 -1 0 0 -1 -2 0 0 1 1 0 -2 0 2 -1 -1 0 0 -1 0 0 0 0 -1 0 0 0 0 0 1 0 0 -1 0 1 0 -1 -1 -1 -1 0 0 -1 0 0 -1 -1 0 1 1 0 0 1 0 0 -1 -1 0 0 0 -1 -1 0 1 1 -1 1 1 0 -1 2 1 -1 -1 0 0 0 1 0 +65501 4294770691 3735530 1900546 4391180 259400 441338 555300 713410 1031220 2042969 2008480 1052861 426178 130010 29600 12554 3026 1961 2353 970 12850 54673 74762 15300 2925 6098 320 3370 2009 1073 8929 9928 1640 724 1332 2665 18820 23850 6890 85 2117 2965 1125 68 1202 5524 6277 3860 6340 13850 24418 19213 2809 965 2938 5057 18397 29489 18945 8020 5113 3541 961 164 1213 1585 5 2626 4090 2312 1684 4122 772 4100 5760 904 6464 9488 4625 533 1241 3809 3029 242 136 289 1745 4129 7396 3364 740 857 1865 2258 6500 14381 6800 562 2834 4450 4420 3133 1565 1217 3065 4057 2169 218 340 801 293 328 1921 2225 1517 2045 2729 1801 1258 1636 1872 1179622 4294901773 4294705152 65534 65536 65536 1 65535 65536 65535 0 0 0 0 65535 0 0 4294901760 65534 1 131071 65537 
4294836224 65537 4294967294 131070 1 4294967294 0 131071 131071 0 4294901760 1 4294901760 4294967295 4294967295 65536 65535 0 65536 0 65537 4294967295 4294901761 4294901760 0 65535 1 4294901759 131072 4294836224 196607 2 4294967294 4294901760 65535 65535 4294901760 65534 1 131072 4294836224 4294901760 0 4294901760 4294836224 131071 65536 4294901760 0 4294967295 4294901760 65536 0 0 4294967295 131072 4294901761 4294967295 4294967294 65536 4294901761 65536 0 65535 4294901761 4294901760 65536 4294967295 4294901761 65536 0 4294901760 4294901760 1 65535 4294901760 65534 65536 1 65534 4294901762 65535 4294901760 0 0 65535 0 0 1 4294901760 65536 4294901760 4294967295 65535 4294901760 0 4294967295 65536 1 65536 0 4294967295 0 4294901760 65535 65537 131071 1 196607 4294901761 65535 0 1 +-20 0 -27 4 93 106 52 -246 -195 -13 -3 227 14 -471 -621 802 1420 -154 -1488 -799 842 1300 -230 -880 202 607 5 -499 6 369 356 -363 -682 -281 343 671 -31 -505 -127 212 134 2 -149 31 332 18 -196 -273 -179 123 130 144 15 -68 -4 -9 21 -56 -131 77 131 48 23 -31 8 -136 -146 94 109 69 6 -146 -58 79 -58 -15 80 109 75 -62 -81 -76 62 76 -102 -113 46 142 41 -60 -32 -69 -98 70 85 65 46 -12 -56 -52 174 53 -171 -210 -41 120 -19 100 144 -51 -114 -25 53 46 67 48 5 -214 -129 130 108 -55 -160 2 97 109 3 -78 -5 74 67 -108 -126 30 44 76 97 -48 -67 -93 -117 65 118 128 82 -154 -150 -34 0 86 78 87 18 -233 -136 162 62 34 89 -34 -8 -78 -116 25 44 24 23 32 3 -12 -1 -95 -69 155 124 -67 -5 -48 -110 -30 51 91 -7 -44 10 -7 -53 8 -4 63 109 43 13 -153 -70 20 -52 9 27 47 11 17 13 -24 19 24 -1 -30 33 -18 -81 -28 36 52 -37 -31 31 58 7 -53 -15 40 39 -29 -33 -8 9 -12 -27 23 10 -21 -34 42 61 26 -6 -54 -6 25 36 -9 -42 -60 -20 59 20 -11 -4 2 1 1 0 -3 1 1 0 0 0 0 0 0 2 0 0 0 -1 -1 -1 -3 1 1 0 -1 -1 -1 0 -1 -2 -2 -1 0 0 0 -3 0 2 0 0 -2 -1 3 0 -2 0 0 -1 3 1 1 1 -1 -1 -2 1 0 -2 -3 -1 2 0 -1 0 1 1 0 -1 -1 1 -2 -3 0 1 2 0 -1 0 -2 -3 0 1 2 1 -1 -1 -2 -2 0 2 2 -1 -2 0 3 1 -3 -1 -2 1 0 -3 -1 -2 -3 1 4 2 0 -2 0 2 0 1 -2 0 0 0 -1 0 -1 -1 -2 0 2 -1 -1 1 -1 -2 2 -1 0 -2 0 1 3 0 -2 -2 1 2 1 -2 0 2 0 0 0 -2 0 0 1 0 1 -1 0 2 -2 -3 1 1 1 1 1 0 -1 1 0 1 0 -2 -1 2 -1 -2 1 3 -2 0 -3 -3 0 3 1 1 -1 1 -2 -2 2 2 -1 1 -2 -1 -2 1 0 0 1 0 -3 -1 0 -2 1 2 -1 0 0 0 -2 -2 0 -1 2 2 -1 -2 -2 -3 1 0 1 0 1 0 1 0 1 3 -2 -2 -1 0 3 1 -3 -1 -1 0 0 -2 -3 1 3 -1 -1 3 1 -1 -3 0 2 0 -3 -1 0 -3 1 0 -1 2 0 +65516 327653 6946909 4278845492 4294180669 51538 222037 1028845 2040116 2852545 2398964 827300 409253 249026 136197 258505 544085 567890 255986 61073 17960 23162 110548 112945 47170 37636 4849 97 3577 23090 19465 1490 18560 30152 16642 21352 9605 3589 18281 9469 12337 9620 23173 22280 5281 5785 14504 11450 2260 5840 33085 73341 16081 10361 23337 13621 4925 6793 45821 33541 14689 25604 21290 6093 5501 16153 16776 7712 11713 13138 17914 30308 30440 23656 7396 13653 54613 44740 5000 9077 6148 14081 2512 1553 153 9026 28786 19865 2329 13000 10882 1985 149 2873 3985 13730 23578 5300 2785 2938 410 745 937 901 1413 7345 4000 2330 4325 2858 1825 2362 1153 225 1258 541 2920 4397 2952 661 1377 5364 3932140 4294246420 196604 65537 4294770688 65537 0 0 0 2 0 4294967295 4294836223 65537 4294901760 4294967295 4294901760 4294901758 65535 0 65533 2 4294836224 262143 4294836224 0 262143 65537 4294901761 4294901759 1 4294836222 196607 4294901760 65536 1 4294967295 4294836225 65533 131073 4294901760 4294836224 65533 131073 4294901761 4294901759 65534 131074 4294901759 196608 4294770689 4294901759 1 4294967293 4294836222 262145 2 65534 2 4294836225 0 4294901760 4294901760 4294901759 131072 4294967295 4294901761 
196606 65535 65534 196609 4294836224 131070 65538 65534 2 0 65534 65536 65536 65535 4294836226 131069 65537 65537 4294901760 1 1 4294967294 4294901762 131070 4294836227 4294770688 65533 65539 4294901761 4294836225 196606 4294901762 4294836225 4294901759 1 65536 4294770688 65535 131070 4294901762 0 4294836224 65534 196607 4294901762 4294901758 131069 65536 65536 65536 65536 4294836227 4294967294 196608 4294770689 4294967295 0 4294836222 196609 4294967295 65539 4294836223 131072 4294770688 65535 131069 4294901760 2 +-109 0 10 -39 89 173 19 -206 -210 -4 187 689 731 -1284 -973 956 532 -1652 -1395 1054 -74 1164 1625 -413 -418 57 443 -656 -760 -103 407 620 160 -917 -984 316 459 559 247 -288 -357 36 268 167 179 -22 -157 -361 -150 261 352 74 -387 -407 249 537 -25 -480 -170 197 98 35 -115 7 278 93 -181 -281 -77 282 354 -187 -577 -118 382 440 -75 -391 -47 237 52 -129 -56 239 358 -317 -549 14 353 208 -185 -174 125 169 -1 -209 -229 146 331 149 -17 -296 -217 -93 -48 390 182 -200 17 95 -49 -163 -83 19 -51 253 319 -163 -214 -38 80 -8 -21 -99 -175 188 206 -185 -207 202 135 -79 5 91 52 -155 -135 102 165 8 -127 -171 26 260 -6 -257 -3 268 -11 -163 229 151 -230 -460 -226 535 526 -164 -482 -148 343 286 -92 -264 -87 6 18 231 192 -281 -363 72 181 169 103 -38 -72 -178 -24 113 -85 12 161 70 -23 -169 -98 159 85 -137 -17 165 65 -168 -111 -22 -33 181 150 -142 -106 55 -23 -47 60 114 29 -114 -52 16 -43 -29 8 121 46 -45 20 8 2 -16 -3 -59 -32 62 5 -69 -44 87 71 24 22 -108 -68 89 77 -68 -83 13 31 36 44 -26 -76 -39 12 72 25 -23 1 3 -2 -9 -1 3 1 -4 1 1 -4 1 1 0 1 1 -3 0 4 1 -1 -2 -7 6 10 -1 -1 -2 1 -1 -4 1 4 -2 0 -1 -3 3 3 2 2 -4 -3 -1 0 3 2 -1 0 -2 0 -3 -4 2 2 0 0 -3 -3 3 -1 -4 -1 4 4 0 -1 -1 -1 1 0 -6 -1 2 -1 4 2 -1 -2 -1 4 0 -2 0 3 3 -2 -6 -2 1 0 3 -1 2 0 -2 5 1 -3 -3 3 -1 -3 0 3 -5 -3 0 -1 -2 -4 6 2 -2 1 1 1 -1 0 1 1 -1 -1 -2 3 0 -4 1 0 -3 -1 6 3 -3 0 1 -1 -3 -1 4 -1 1 1 -1 1 0 0 0 -1 1 3 -2 0 -3 -1 3 -1 -2 0 1 3 4 -2 -6 0 0 2 0 -2 -8 -3 3 0 0 -2 2 0 -2 -1 2 -1 0 2 2 0 1 -2 0 1 -1 3 4 0 -5 -6 4 9 -1 -2 1 -1 1 3 -1 0 -3 5 -1 -5 -1 3 -4 -5 -2 -1 2 3 -4 -7 1 1 0 0 3 3 -2 -2 -3 -2 5 4 -2 -4 -3 2 2 -1 2 3 1 -1 -3 3 3 -2 -2 -3 2 2 1 7 -2 -4 -4 0 -1 -6 2 5 3 -3 -5 4 5 -3 0 +65427 4292411402 11337817 4281466899 4294770478 509690 2183017 1860665 3012128 3056941 1360372 2811194 177973 626585 588209 550049 866489 1068112 523162 143953 128745 99713 32525 154970 90621 129380 315418 350370 231025 67709 10829 13274 85933 111722 85453 160285 346853 339524 158506 58378 19345 60257 228653 301597 167873 64501 44186 43682 73757 131762 87905 55738 154404 73124 9314 28970 7250 66610 128330 47240 6464 10242 65969 76661 83653 24466 8306 26729 28629 27289 45370 68276 66085 71833 26690 75242 264500 337301 303572 254228 199445 78160 7605 53685 115825 136953 61322 12053 36868 13345 7369 30821 29090 34885 25994 27514 32449 12805 33850 42664 14261 2738 16596 13837 2960 2690 14705 4141 464 260 3490 4868 4786 9505 5617 12148 12545 10553 7058 2257 2612 7297 4718604 4293459993 196609 4294443006 262143 4294705153 65537 131068 1 65537 65533 65540 4294901759 458745 4294901770 4294901759 4294901761 131068 4294836228 4294901760 262141 131075 4294705154 4294967293 196608 4294901762 4294836224 4294770688 196604 2 4294770688 262141 4294770687 327679 4 4294967295 131071 4294574080 196607 327679 4294901762 4294967294 4 65534 196611 4294639614 131070 196608 196607 4294836224 65541 4294836221 4294901763 65533 4294639619 65533 4294901759 458748 4294836226 65537 4294901761 65536 4294901761 4294901759 3 131068 4294770688 458751 4294770691 65536 4294836223 327679 
131071 4294901761 1 0 131071 4294836227 4294770688 262143 4294901759 65536 262147 4294639614 0 2 4294508542 262141 0 196606 4294836224 196607 65535 131074 65536 65534 4294901761 262147 4294639616 327674 4294901769 131070 131071 4294901763 4294770688 4294901765 4294967291 4294705155 4294901755 196607 4294705155 131065 1 196608 4294836227 4294836222 393214 4294836228 4294836220 131074 196607 65539 4294836223 196611 4294901758 196605 65538 4294836231 4294770684 4294901760 196602 196613 4294705149 327684 65533 +-44 0 -45 112 136 -135 -91 156 -156 -184 528 425 -492 -520 723 -346 -1948 786 2395 59 -1468 -742 241 520 250 238 -116 -474 148 366 -107 -275 79 138 -4 -145 -114 46 57 49 26 -13 -81 -171 -43 390 195 -362 -199 198 120 7 107 -98 -172 -91 -39 113 74 21 -34 -32 -8 12 -9 79 112 -84 -126 44 106 25 28 -73 -25 -36 -114 -18 63 64 -53 2 -2 19 55 63 54 -30 8 -89 -68 15 7 -12 -36 53 70 -62 -121 48 92 23 24 -8 -32 -113 -135 130 207 13 -113 -60 40 -30 -79 84 85 1 16 -32 -46 -62 -30 136 153 -102 -170 -95 -24 177 92 -44 -23 12 44 -26 -54 -25 24 37 11 -41 -80 -29 7 131 59 -26 88 -27 -91 -111 -47 84 40 -44 -83 74 44 -30 31 152 101 -212 -148 62 53 16 -39 12 79 32 -4 -83 -66 16 27 17 8 64 63 -148 -149 117 167 -73 -194 11 106 87 36 -28 -30 -68 -21 54 22 17 41 -41 -70 -49 -13 99 53 -31 -34 -7 63 1 -85 -22 46 6 -35 44 59 -38 -66 -6 44 30 -27 1 60 -26 -72 -8 39 41 15 -46 -35 3 3 3 -12 19 31 -6 -23 -10 5 1 -13 15 18 -5 -4 1 1 -2 -1 0 0 1 0 0 1 1 -3 1 3 1 0 -1 0 1 0 -1 0 -3 -1 3 0 -1 0 1 1 0 -1 -3 2 4 1 -2 0 -2 0 0 0 0 -2 -2 -2 1 3 2 0 -3 0 0 -1 0 1 0 0 0 0 -2 1 0 -1 0 -1 0 1 0 0 -1 -1 0 0 1 2 0 -1 -2 2 2 0 -3 -3 1 2 0 -1 -2 -1 -2 1 3 2 -3 -1 -1 -1 0 0 0 -1 4 3 -1 -1 0 -1 1 0 0 2 -1 -1 -2 0 2 3 0 -1 -2 -1 1 3 1 1 -2 -3 -1 0 -2 -2 4 2 -1 -1 -2 0 1 -2 0 3 1 -2 -2 1 0 -2 -1 1 0 -1 0 0 -2 1 1 -3 -2 2 2 1 -1 -4 -1 1 2 1 -2 -3 -1 1 0 -2 1 2 1 1 -1 -1 -1 -2 1 0 0 0 -1 -1 2 1 -1 -2 -1 -1 3 2 1 3 1 0 -3 0 0 0 1 -1 -5 -1 1 -2 3 1 -1 2 -1 -1 -1 -3 1 3 1 -1 -2 -1 2 2 -1 -1 -2 -1 -2 -1 1 1 -1 -1 0 0 -1 1 -1 0 -1 -1 1 1 -2 0 0 -1 -1 0 -2 -3 1 0 1 -2 -3 1 3 -1 -1 2 0 +65492 7405523 4286120072 10289061 4282974052 459409 512464 642445 4412500 5739506 2705588 328481 119144 238132 155860 87074 25285 21041 15112 5650 845 35802 153949 169069 78805 14449 21053 37865 14290 5917 2180 208 6322 19600 17812 11861 6113 1921 13320 8065 2813 365 6994 3816 7985 4849 193 4105 8744 16945 8993 640 13793 35125 43018 16369 2500 13297 7226 1280 5960 19396 33813 37925 31905 10400 673 2612 3541 1945 1802 7241 17210 4157 8473 20602 9265 3536 12365 2836 24065 55145 25748 3065 1665 7265 6905 4612 1018 4160 25873 35890 33218 37757 18805 2080 5524 3357 773 3362 7301 9970 3770 1205 3970 7709 2152 3161 4925 4392 2836 730 4276 5248 3202 2341 1234 18 505 997 629 26 1048563 4294639634 131068 4294836225 65535 65536 0 65537 131069 65539 4294901760 65536 4294901760 4294770688 262143 4294901760 65536 1 4294836223 262146 4294836225 4294836224 0 0 4294901758 131070 131075 4294770688 0 65535 1 0 4294836224 1 65535 65535 1 4294901760 65535 65536 2 4294901759 131074 4294770688 131069 2 4294901759 4294901759 196609 4294770690 4294967295 65535 0 327679 4294901763 65535 131071 0 4294901762 4294901759 131072 3 4294901759 131071 65539 4294836225 4294967293 4294836224 327678 4294901762 4294901759 65536 65534 65539 4294901758 1 4294967294 1 65535 4294836224 65537 4294901757 131074 4294901761 4294967292 131073 4294836225 4294967293 1 131070 65538 4294901761 4294967295 131070 0 4294901760 196607 4294901761 4294967294 262143 65538 65539 4294770688 0 65536 4294705151 131071 262142 
4294901761 4294901762 4294967295 131069 65539 4294901759 196607 4294901762 4294901759 4294901759 131071 4294901761 65535 4294901760 4294901761 4294901760 131071 4294836225 0 4294967295 4294836224 131069 65536 4294836222 196609 4294967295 2 +-53 0 51 33 3 -353 -341 716 489 -432 238 -36 -918 -627 -475 1512 1947 -58 -404 -1092 -736 -230 -77 587 268 314 313 -568 -673 1 403 406 -209 -229 225 130 -124 -108 52 55 56 -57 -233 -92 75 302 204 -34 -31 -292 -143 184 52 -46 16 62 17 -113 -75 131 121 -106 -115 41 69 -41 -121 55 145 64 -25 -145 -68 97 136 -5 -189 -170 110 288 6 -202 -41 44 31 20 -30 5 59 -57 -101 51 74 -12 -68 14 120 60 -73 -154 20 76 -99 12 130 6 -103 24 157 7 -86 -160 -88 123 67 27 30 -38 -38 15 59 -49 -123 14 97 44 -70 -35 44 39 -20 -1 63 -4 -66 -21 68 18 -51 -39 -3 41 42 -7 -26 -30 8 -35 -110 98 178 -45 -154 5 134 46 -29 -105 -43 -3 -91 49 107 93 24 -90 -19 11 -6 -2 29 -41 -89 46 107 -23 -121 -37 36 123 50 -53 40 15 -16 -122 -91 67 10 -17 38 109 9 -135 -51 64 46 27 -26 -63 19 78 32 -66 -53 1 40 50 -1 -117 -108 120 138 4 -36 -62 -4 25 0 -35 -37 14 -1 53 60 -24 -29 -18 4 9 -7 -15 11 11 -22 -9 10 3 -5 15 10 -6 11 -7 -25 -12 2 19 8 -6 -4 -1 0 0 0 0 0 1 1 -1 -2 2 3 -2 -2 -1 1 1 -1 -2 -2 0 2 0 -3 -2 -1 0 -2 -1 0 1 2 -1 0 0 -2 -1 1 0 -3 1 2 0 -1 1 0 1 1 0 -1 0 -1 1 -1 -1 3 1 1 -1 2 -2 0 0 2 -1 -2 -2 0 0 -2 -1 -1 2 0 0 -1 -1 0 3 0 -1 0 -2 1 2 1 -1 0 0 -1 1 0 -2 -1 0 -1 -2 -3 2 2 0 0 -1 0 -1 1 -3 -6 1 2 1 1 0 0 -1 -2 0 0 0 -2 -1 2 1 -2 -1 -2 0 1 2 2 -1 -2 -1 1 0 -1 -1 -1 0 0 1 1 -1 0 -2 -1 0 -2 1 3 -2 -4 -1 0 0 0 -2 -3 1 1 1 -1 -3 0 4 1 -2 0 -1 0 -2 -2 1 0 0 0 -1 1 0 0 -1 0 0 -2 -1 -1 0 -1 0 -1 -2 -2 0 2 2 0 0 2 -1 -3 -2 2 1 1 -3 -1 0 0 2 1 -1 -1 -1 -2 2 0 -1 -1 -1 -1 1 0 1 0 -1 0 -1 0 -1 -1 1 0 0 2 -4 -3 2 -1 -2 1 1 -2 -2 0 1 -3 0 0 -1 1 4 0 -2 0 0 0 -1 -1 -2 1 5 3 0 +65483 2162739 4271833091 46988971 4266656233 57940 1235853 2511769 3794173 1355680 594596 350498 170420 420593 452930 327245 96122 67525 27040 5729 6385 62753 96829 42772 86225 54305 4820 4100 13058 22786 25877 14906 6442 17666 25121 21650 14033 18521 64621 95044 40840 3617 1361 925 6730 12802 5620 4820 18000 29045 6176 9945 16936 11185 24698 32996 22873 5218 2344 1669 5882 15325 11345 6125 3457 401 3985 4797 4948 4122 1690 1813 1576 1289 21704 33709 23741 20072 11866 1858 10682 20098 8676 482 40 2522 10037 11978 16010 16425 5309 1825 15140 12770 389 13325 18306 6697 2845 4645 6445 5380 2810 4100 13690 26064 19060 5140 641 1225 1565 2810 4176 1165 97 274 242 565 109 250 136 170 4294246375 1245186 4294574088 4294967292 0 0 65536 4294901761 196606 4294836227 4294967294 65537 4294901759 65534 2 4294901757 65535 4294967294 65536 4294901762 0 4294967294 1 131069 2 131071 65536 1 65535 131071 4294967295 65539 4294901761 4294836226 0 4294901762 4294901758 0 4294967294 196607 0 4294967295 196608 4294901760 4294836224 131073 4294901761 0 131071 4294836224 65535 4294901759 196605 2 4294901760 4294901760 4294770689 131066 65538 1 4294901760 65534 0 4294967294 65538 4294967294 65534 131073 4294901762 4294967294 1 4294967295 65535 65536 4294901761 4294836224 65535 131070 4294836227 4294967292 0 4294836224 131069 65537 4294836223 262144 4294836225 4294901760 4294836224 131070 0 4294901760 1 4294901760 0 4294967294 65535 65535 4294901759 65534 131074 0 4294901762 4294901757 65538 4294770689 65535 131072 4294901761 4294967295 196606 4294901760 4294967295 131071 65536 4294901760 4294901760 4294901760 131071 0 4294705154 196605 4294901759 65537 4294901758 65536 65533 4294901760 262145 4294836224 0 4294901760 4294901759 327681 3 +-14 0 24 49 83 
-277 -729 213 994 898 -1 -1679 -794 1223 995 -632 -1103 -358 232 1408 875 -1343 -1287 722 1060 -62 -559 -213 319 430 124 -673 -394 351 61 -176 71 353 15 -293 -4 76 -131 77 240 7 -127 -169 -106 166 171 24 4 -91 -78 19 95 -66 -178 19 75 119 21 -91 53 120 -39 -165 24 65 -13 27 -34 -132 54 233 33 -387 -317 360 364 -63 -168 -72 48 82 20 -44 39 -33 -123 -30 55 113 -2 -119 -31 160 75 -87 79 -32 -249 -76 137 180 -3 -65 12 5 7 -25 8 9 -48 -43 11 79 42 -41 -21 -11 10 -26 -84 17 59 49 7 1 16 -31 3 -20 -34 3 -42 21 102 32 -81 -61 44 60 -21 -43 67 61 -42 -184 -110 166 83 -33 6 70 16 -92 18 56 -22 -83 -40 57 40 2 -31 -11 68 46 -53 -129 -46 129 59 -36 -9 18 4 1 45 -35 -45 0 0 -28 -32 32 -26 22 134 74 -74 -187 -26 122 55 0 -32 -88 -22 73 23 -16 -13 1 15 -15 -57 -8 40 90 11 -82 22 57 -29 -64 22 52 -12 -68 -29 84 68 -56 -69 12 46 7 -18 -22 -33 18 44 11 -14 -13 -6 -13 -3 32 18 -16 -8 -3 0 0 -4 0 1 4 -1 0 1 1 0 0 0 2 0 1 1 -1 0 0 -2 0 2 1 -2 -1 1 -1 -3 3 1 0 -1 2 1 -4 -1 2 -1 0 0 0 1 1 0 1 0 -3 0 3 -1 -2 0 0 1 0 0 0 -2 0 2 1 0 -1 1 0 -1 1 1 -1 1 -2 -2 0 -1 2 2 -1 -2 -2 0 2 0 -2 0 0 -1 0 0 1 1 -2 -1 2 1 -3 -6 1 3 2 0 -1 2 -1 -2 -2 -1 -2 0 0 -2 1 -1 -1 -1 2 3 2 -3 -4 0 3 1 -1 -1 0 0 1 1 -2 -2 -1 0 2 1 -3 0 1 -1 0 1 -1 -2 0 2 2 0 -2 -1 1 0 -2 1 -2 0 -1 -3 0 -1 -1 -2 1 -1 2 -1 -1 -2 1 1 1 -1 0 -2 0 4 2 -2 -1 1 0 1 3 2 -3 0 1 2 -1 1 -2 -1 -2 0 -2 -4 -1 0 0 -2 0 -1 1 1 0 0 -1 0 0 0 -2 -2 1 0 -1 -1 -1 1 2 0 -3 -2 0 1 3 0 -1 -1 0 1 0 1 -2 0 0 3 -1 -3 -3 2 2 -1 -2 2 0 -1 -3 -1 1 0 1 3 -1 0 -2 -2 1 3 0 -1 -4 0 1 -1 -2 -1 1 0 0 +65522 3211288 4276813907 14023975 58852322 2819042 2126165 1389449 1344773 2036288 2569274 2177653 1127444 357850 286661 468305 278437 34697 129650 86074 5792 23090 57649 44690 38792 29817 8297 6445 13381 32045 19786 8722 17209 28746 4801 898 18580 57205 150858 230089 136465 33408 9028 2336 2610 16029 15794 14165 26561 13194 7265 67777 51169 4234 169 674 145 4153 6362 3445 562 776 7345 5882 50 1217 409 1165 2205 11428 10282 5536 2290 8210 35620 39656 7978 4936 8720 3460 7373 4849 1604 1082 6740 19450 18757 4777 405 17 3250 2025 784 2048 1160 23432 40445 15560 3025 8768 5813 785 170 450 3313 9700 6845 3733 4937 3188 4768 7897 7760 4905 2165 808 1413 2057 365 205 1033 580 4294836216 0 65532 262145 65535 65537 0 131072 65536 4294901761 0 65534 65538 4294967294 4294901761 262141 1 196607 4294705153 196607 65535 0 65537 65536 4294770688 196608 4294901759 0 1 0 65534 65538 4294901760 1 131071 4294901761 4294836225 65534 196607 4294901762 4294901758 131072 4294836224 0 65535 65536 4294836225 196607 4294770689 131066 131075 4294901760 4294901762 4294901758 4294901759 0 131070 4294967295 196607 131075 4294770685 196608 4294901761 65535 65536 4294836225 4294967294 131072 4294770689 65536 65535 4294901761 65534 131074 4294836224 131071 4294836224 4294836225 4294901760 65533 4294967295 131070 196607 4294967295 131070 65537 65535 65534 131076 4294967294 1 196609 4294770690 65536 4294901762 4294836225 4294901759 4294836224 4294967292 0 65534 131071 1 4294901760 0 4294836224 131070 4294901760 4294967295 131073 4294770688 65534 196609 4294901760 65535 1 4294836225 0 4294901763 4294836221 131074 4294901759 2 4294836223 131071 65536 4294901763 4294836224 131070 3 4294770687 65536 4294901759 131071 0 +-130 0 316 -50 -284 -112 248 -265 -551 -92 -607 192 329 1307 1095 -924 -1070 129 472 304 415 544 913 -1213 -1243 -521 -7 842 173 -612 -321 319 -400 140 643 562 415 -559 -653 -344 -20 522 282 -91 -47 69 -13 -323 -33 404 152 -421 -372 314 471 -89 -260 -71 -29 -87 90 232 -132 -319 -143 391 440 35 -184 -484 -272 429 395 
81 2 -285 76 32 -308 -374 -135 515 222 -157 -75 148 52 -117 152 109 -190 -247 112 221 -111 -286 -108 351 225 -1 140 -252 -312 -63 -102 252 357 -4 -260 -141 153 188 14 -244 -140 139 79 -33 -55 11 76 63 -95 -91 168 146 -96 -232 -60 132 94 62 -38 -188 -45 220 131 -92 -45 -120 -128 77 67 68 -49 -54 81 154 90 -100 -49 -141 -1 73 -259 -56 273 268 19 -219 -125 16 136 -7 -163 21 149 -40 -151 -47 -15 150 89 -50 2 68 56 -150 -134 65 58 79 115 -117 -194 0 114 74 -110 -23 248 136 -111 -399 -198 305 219 -10 -50 -169 -139 168 172 -83 -152 48 114 13 -89 -44 75 134 69 -125 -67 -92 -110 123 93 -23 -70 53 125 15 -42 -57 43 -8 -72 -50 -4 86 71 -33 -80 -58 -5 95 63 -25 -21 -13 -8 -20 -4 32 8 -19 -10 7 2 4 6 -3 -5 0 4 0 -1 1 -3 -1 7 4 -6 -5 4 3 3 -2 -6 -3 -5 1 4 6 4 -2 -7 -1 5 0 0 0 0 -4 1 1 -4 -4 0 1 1 -1 -5 -2 7 3 -1 -8 -7 -7 -1 7 -4 3 3 -4 0 1 -2 3 2 -2 -3 0 1 0 -3 -1 -2 3 -3 4 7 0 0 -2 1 -1 -3 1 3 -5 -3 0 -2 4 2 -2 -3 1 3 5 0 -7 0 3 -4 2 3 2 -1 -4 -1 1 4 2 -1 -2 5 -2 -4 -4 2 3 -4 0 5 0 1 1 4 -9 -5 3 1 -3 -3 -6 -5 3 3 0 -7 -4 -2 5 -2 2 7 2 -2 -4 0 0 2 -3 -6 -1 -2 4 1 2 2 2 2 -1 -1 -5 -1 3 2 0 -4 3 6 -4 0 3 0 -8 -7 3 3 -1 0 2 -1 -1 -7 -3 4 2 -4 4 0 -2 -1 2 1 4 4 -1 2 0 0 -5 -4 2 3 0 -1 -2 0 3 0 -6 2 3 -3 -4 -4 -1 1 3 0 -3 -1 3 1 -2 2 0 -3 1 4 4 0 -6 4 2 -2 1 3 -5 -4 -1 2 2 4 -1 -4 -4 0 -2 -2 3 2 -2 -4 -1 -1 -1 -3 -1 -1 5 0 1 4 -1 -4 0 +65406 4291690812 4287692516 4277600504 4289002969 405313 1816490 2052801 1161541 315200 468161 2304938 1816490 709013 404473 204802 179600 729293 484706 544745 272884 87805 6970 104498 164305 200345 236980 229762 72641 8410 61924 119185 173330 194825 268112 258025 162586 81229 6800 234740 283450 73933 27529 16393 34985 97109 61385 94117 134865 50626 83104 101313 73908 127465 87481 58753 59732 38921 7330 3146 9745 17306 49540 63040 21024 12680 36788 50425 25625 16425 22313 9113 5317 30277 18100 22282 5330 70217 146353 48322 15881 18545 27010 23801 25010 22725 10421 4628 25636 22181 9605 26914 37636 18472 12629 80000 171522 132229 48061 31061 47545 36473 25408 13165 9857 23581 20386 12953 27229 9178 7709 15850 5013 1913 7684 7412 6130 9764 9050 4594 610 464 2162684 4293722120 524278 262146 4294770694 65531 4 131071 4294967293 262151 4294705146 196612 4294836227 4294836218 131067 393220 4294836228 4294967289 5 0 4294705152 65537 4294770684 65536 4294901761 4294901755 196615 4294508543 4294574073 524287 262140 4294705155 65536 262142 4294836226 65533 1 4294967293 262142 327677 7 4294836224 4294901761 131069 4294639619 65533 327678 4294836226 131069 327683 4294508544 196608 196604 131075 4294770687 131071 131076 4294901759 4294836229 4294770684 196610 65532 5 65537 4294377476 262139 4294770689 4294639613 262139 3 4294770681 393214 196606 131079 4294770686 0 4294770690 4294967290 327678 131073 131074 4294901762 4294705151 262143 2 262140 4294705158 196608 4294443008 262137 4294901763 131072 4294967295 4294836217 131076 327676 4294836224 196607 262145 4294901764 2 4294639616 196604 3 4294901759 196608 4294574080 196610 4294770685 4294967292 196609 4294770688 262143 4294836225 2 131069 262148 4294574080 131076 131070 4294639619 4294967292 131074 4294901764 4294770684 4294836224 262142 4294836226 4294967292 4294967295 4294967293 393215 65536 4294901764 65532 +15 0 -105 -106 -310 527 1126 -463 -1370 16 910 -5 -796 349 646 -45 160 161 0 -757 -645 232 473 215 -497 97 523 -292 -567 577 1042 -140 -824 -873 138 1077 226 -659 -341 101 199 287 -28 -450 -343 578 704 -248 -415 -57 207 -149 -233 95 -102 -72 117 396 278 -278 -280 -182 -22 246 87 -81 -43 34 -53 -113 20 282 88 -255 -170 165 169 30 
89 13 -48 -228 -47 63 -86 -54 -129 247 468 -62 -351 -220 147 104 -204 -54 44 169 192 -9 -148 -215 -30 249 184 -95 -143 -170 -74 208 72 -99 -55 115 43 -18 90 3 -126 -64 112 139 32 -205 -145 48 -18 64 102 59 5 -125 -116 26 32 167 169 -160 -197 27 139 66 -100 -148 49 226 -23 -164 150 48 -334 -35 446 228 -214 -467 -125 287 124 28 -24 -136 -94 97 101 151 100 -229 -146 88 121 25 -63 -128 10 72 -80 -21 60 83 84 -87 -205 -31 175 154 -156 -100 206 103 -41 -56 -8 -111 3 -10 -143 123 168 -63 -54 -45 -69 15 -50 66 158 6 -155 -25 164 65 -77 -119 -16 58 -26 55 145 -44 -138 -7 146 -28 -177 -24 113 72 -92 -70 89 94 -69 -100 35 93 4 -50 -9 14 36 11 -38 -55 1 47 5 -6 11 -4 -9 -4 1 3 0 2 -2 0 5 0 -5 0 3 4 6 -2 -8 -4 2 2 -1 0 0 2 -1 -4 1 3 -1 1 0 -4 -1 1 5 1 1 2 -2 -8 -2 7 4 -7 -6 3 3 0 -2 -3 -2 -1 -3 5 2 1 -1 -4 3 2 -4 -1 4 -1 -6 -1 3 1 -5 0 2 4 -1 0 3 -2 -4 3 0 2 3 -4 -2 6 1 0 1 -4 0 3 -1 0 0 1 -4 -2 3 2 0 3 1 -1 4 -4 -1 1 1 -1 -1 0 -1 -3 -1 4 1 -4 0 4 -2 -4 3 5 2 -3 1 -3 -1 -2 -5 1 0 4 4 -3 -2 3 0 1 0 0 3 -2 -2 0 -1 4 0 -4 -1 4 1 2 2 2 4 -2 3 -3 -3 1 1 -1 2 0 -1 2 3 -1 3 -5 -1 2 1 -2 0 -2 0 -6 -3 4 -1 0 2 -5 -2 3 4 0 -6 -2 3 3 3 -3 -2 -4 2 1 -6 1 4 -3 -4 0 2 1 1 -2 -4 2 5 -2 -1 -1 -2 0 2 3 1 -3 -6 1 6 0 -3 -3 -3 2 -2 -3 2 3 -3 4 2 -5 -2 4 3 0 -4 -1 2 -1 0 2 1 -2 -5 -1 3 4 0 -2 -4 3 3 2 3 -5 2 0 -5 0 +15 4288085911 34602698 4264625254 1112742 828125 755417 419341 51521 573049 469849 269954 256418 358793 654418 1105364 1441105 1178973 485357 126482 121970 203284 451733 557120 175474 65050 63314 15588 170505 154568 111524 61000 14130 3005 15578 79924 72769 56125 29461 8090 54288 6178 10312 77650 222868 171601 32425 44532 30497 36945 68129 62901 42881 49349 48740 14985 16250 2173 8109 19972 31865 43049 23329 4420 13885 15650 14132 28913 54161 39538 23677 31904 53477 27425 24804 112781 250900 263885 97994 16160 19072 18245 33002 62441 29060 15266 20353 5284 6841 10489 14625 42986 54341 34336 53045 4817 12385 109 35578 32193 4941 4986 6856 25000 24650 31121 20090 3620 3701 22961 19093 22100 31905 17953 13364 16757 14761 9874 2516 277 1417 4469 3080193 4294574085 4294705163 4294770679 196609 131072 65534 5 65531 262147 4294836230 4294770680 131074 65535 131072 4294770687 196609 131071 4294705152 131071 65541 131073 4294508542 524286 4294508548 262138 3 4294836222 4294967294 393213 65538 4294770687 131075 4294967292 4294901764 4294967290 65539 65531 262146 65535 4294836227 262140 131072 4294705155 458750 1 4294705153 196608 65535 65536 4294901756 131075 196608 4294901761 4294705156 131071 4294901761 65535 4294836223 327679 4294705153 262144 4294770686 327683 4294770690 4294770689 4294901759 131067 262144 4294770692 262142 65536 0 4294836227 65534 327679 4294705152 327679 131073 131074 4294836228 4294770691 131069 4294901761 2 196607 4294901763 4294639619 196607 4294836225 4294836224 4294574080 327677 65535 4294639618 262142 4 4294901754 196611 4294770691 4294770686 65538 131066 4294770692 65532 65538 4294836225 196604 4294836229 4294967295 65534 196610 4294770689 131066 6 4294836221 196605 4294836222 196610 327677 4294639618 327678 3 4294967292 4294901762 131072 4294836225 4294967291 262147 4294836224 262140 131075 4294639619 2 65531 +-185 0 77 158 -353 -82 818 849 295 -1878 -1611 1251 852 -869 -55 1950 341 -2134 -488 1029 488 -89 -630 -647 -128 1085 996 -176 -494 -625 5 481 409 -555 -959 79 494 450 42 -184 -166 -129 159 179 -414 -62 492 313 -9 -156 45 -328 -302 140 54 197 209 -75 -140 -105 32 140 100 -99 -187 -29 265 49 -381 -168 142 346 167 -203 -184 78 251 -69 -328 -151 137 383 59 -327 -70 205 51 -16 177 -150 
-260 -44 -17 50 9 166 169 -116 -217 -43 174 104 -208 -20 326 -5 -369 -43 298 117 -161 -60 217 -17 -259 -119 120 218 -29 -135 80 73 -33 -47 -20 -71 -10 68 -11 20 24 -31 8 -11 -42 4 -44 64 171 6 -151 -105 147 183 1 -307 -217 103 -7 141 216 115 27 -239 -4 1 -201 -75 100 217 59 -181 -157 90 168 58 1 -134 -151 5 172 105 -165 -189 24 198 68 -47 -32 -34 -23 32 44 56 91 -55 -24 -123 -266 -17 252 361 8 -443 -257 219 284 161 -42 -366 -165 285 225 -168 -236 33 99 70 8 32 80 -90 -83 8 23 -39 -95 68 108 31 35 -13 -30 -149 -64 107 -21 -40 0 78 52 18 44 -33 -39 -67 -22 53 7 3 32 -23 -54 -8 16 42 20 -22 -7 -2 -2 2 3 0 5 -5 -6 0 4 2 0 -6 -5 -2 -3 3 2 0 -1 -3 -1 2 1 -7 -6 5 3 -1 0 -2 -4 -2 -1 3 2 2 0 -1 3 -5 -5 0 1 1 -2 0 0 -2 3 1 -4 -3 1 0 -2 -2 -5 2 1 1 0 1 3 -3 -4 -3 -3 0 3 1 -3 -2 -3 2 2 4 3 -1 -1 -1 -4 -1 3 3 -2 -1 1 4 5 1 0 -8 -3 -1 2 1 -1 -4 -3 -3 -1 3 -1 1 0 -3 -3 1 1 -2 -3 -2 3 0 -8 -7 -3 5 1 2 -3 -3 -1 2 -1 2 2 -3 -4 2 1 1 -5 -1 2 0 -3 4 2 2 1 -4 0 3 0 -1 -1 3 0 1 3 3 0 -3 0 1 2 1 -5 1 3 1 -1 2 4 -1 0 -1 0 -2 0 0 1 -1 0 3 1 -2 3 4 1 -6 0 -1 -2 2 0 0 1 -6 -1 4 -3 -2 2 0 0 1 0 -5 -6 3 2 1 1 -2 -2 2 4 3 -3 -6 0 4 0 -1 2 2 1 -3 -1 -2 -1 -2 0 -5 -9 2 -3 3 6 2 -3 -1 -1 2 3 2 0 -4 0 5 4 -3 -2 -1 2 1 -2 -2 0 -1 0 2 1 0 1 -2 4 -1 -7 -2 1 0 +65351 10354765 4289658527 55640882 4171890983 4160322 1481065 3805525 4670237 1296985 246065 815509 1193609 1022992 634661 231386 475306 925922 446536 35620 44197 57322 175240 340033 24417 109609 110804 41725 49306 30625 20624 19801 35810 72626 173385 139880 69098 39940 67762 130385 165458 110410 46925 2857 53829 69536 2789 27637 42017 48938 41092 43664 106301 138010 102493 29521 47378 81242 61924 19066 11729 3298 5441 4724 521 1537 185 1780 6032 29277 33826 55098 94250 57698 19930 59881 57850 17 46026 57089 36242 32749 31588 17957 22826 40609 62946 39780 6833 2180 1553 5072 11306 15705 71045 193825 196313 114010 106577 135720 108450 78849 56785 14701 1088 14500 6953 2050 13649 12625 1394 23101 15545 2041 6084 3028 3025 6010 3293 58 1553 2980 2752528 4293525524 4294901753 196606 3 4294639621 65530 131076 4294574080 4294901755 262141 2 4294836223 196607 4294508545 393210 4294901763 4294836224 4294901756 262143 131074 4294901760 4294639619 65531 65537 65534 4294836224 65539 4294836220 1 4294901758 196603 65537 65536 4294770691 4294836220 65533 65539 4294901757 196605 262146 4294901763 4294967295 4294967292 196611 4294967294 262145 65541 4294443008 4294967293 65538 4294770687 4294836221 262143 131071 4294770688 131069 4294836225 4294901757 3 4294574072 393213 131073 4294836221 196607 196607 4294770690 196604 65537 4294967291 2 327677 131074 4294705153 196608 4294901760 262143 65536 196611 4294770688 65536 65538 131067 65539 196607 4294901764 4294901760 4294836224 0 4294901761 196608 4294836225 262147 4294574081 4294901760 196606 0 4294574081 327679 4294901757 2 65536 4294639616 262138 65538 4294836225 196606 196612 4294639613 262144 4294901760 131074 4294770689 4294901759 4294901759 4294639616 196599 262141 131078 4294967293 196607 131075 4294705152 327680 4294770692 4294967294 65538 4294901758 4294901760 131072 1 4294836225 4294901764 4294901753 1 +-36 0 21 -103 -188 51 68 273 138 -158 -66 479 1163 -1141 -2291 170 1164 956 417 -556 -1000 2 1020 200 -728 -186 453 135 -216 -66 122 -71 -75 213 176 -288 -195 45 -19 72 57 58 138 -174 -331 29 129 180 166 -35 -143 -174 -4 165 72 -66 -71 10 79 -1 -86 -31 62 57 15 -87 -166 -3 132 172 -11 -153 -25 106 51 -53 2 14 15 -31 -12 -11 -16 -88 -114 158 161 -34 -31 -55 -70 3 25 51 24 -5 -1 -20 12 44 51 -126 -135 28 -22 79 103 57 57 -84 -53 
-33 -33 -23 -20 51 -2 12 20 22 79 -3 -122 -132 59 226 32 -210 -94 113 105 -1 -77 -74 30 103 28 -79 -35 8 25 4 -96 -24 97 79 -51 -39 64 19 -39 -80 -16 69 -46 -18 109 74 -53 -107 24 69 -17 -58 6 22 -32 7 50 14 18 -42 -93 -40 38 115 14 -80 -21 42 35 -14 -43 -10 37 5 -34 3 -24 36 157 7 -132 -119 17 87 43 -9 -35 -49 9 -3 -73 13 32 37 12 22 17 -42 18 14 -61 -36 32 39 -14 -8 6 32 53 -70 -97 34 41 9 19 16 -10 -41 -12 32 23 -25 -41 14 47 16 -17 -30 -9 8 12 3 -13 -4 10 5 -3 -3 1 0 -1 0 2 1 0 -2 -2 0 3 1 -1 0 0 1 0 -4 0 3 -1 -2 -1 2 1 0 1 0 3 -2 -3 0 1 0 0 -2 0 2 -1 -4 -2 -1 -3 0 3 -1 -2 -2 1 -1 -3 -1 0 2 -1 0 -1 0 1 1 0 -2 -2 1 0 1 -1 0 1 3 2 -2 -3 2 2 -2 -1 1 -1 -1 1 1 -2 1 0 -2 0 -1 -2 1 1 -2 -2 3 4 -2 -3 0 2 0 -2 -1 2 0 -3 1 0 -1 0 0 1 1 -2 -1 0 -1 0 3 0 -3 0 0 0 1 0 -1 1 1 2 -1 1 0 -3 -1 1 -2 -2 -2 -1 3 0 -5 -4 1 2 -1 1 2 -2 -5 -2 1 1 2 1 0 -1 -1 -1 -2 2 5 0 -4 -2 -2 2 0 -4 -1 1 0 0 1 -2 -3 -2 1 -1 1 -1 -1 -1 0 -1 2 0 -2 0 1 1 -1 -3 -2 0 2 1 0 1 -2 0 1 0 0 1 0 -2 1 2 0 -1 0 0 -2 0 0 0 0 0 0 1 -1 0 1 -2 -4 -1 0 2 0 -1 -1 -2 2 3 -2 0 1 1 0 0 -2 -1 0 1 -1 -1 0 0 -1 0 -1 -2 0 1 -1 -3 -2 -2 2 -1 0 0 0 +65500 4288217109 3407684 17891396 4284612746 233797 2654450 5277581 2268832 483025 1000004 1080400 564580 223434 51012 19925 50994 113920 40050 5545 6613 49320 110402 49041 28781 50725 27241 9540 5141 6242 8357 7093 7794 27565 47008 23530 11861 5410 200 1186 265 8000 37960 27077 3986 4909 3226 601 401 2080 18477 19009 6725 13858 10305 3898 1618 3001 148 884 6250 32308 54557 45124 21605 11026 11405 11509 7025 1289 641 9792 15650 4122 4457 7921 5017 2440 17357 14258 5337 3653 520 1073 2696 2088 10249 14669 6596 2205 1421 1949 1394 1165 1872 24698 31585 7858 1930 3626 90 5498 2393 628 2053 520 5017 2545 260 1060 7709 10565 1762 617 1781 1168 1154 1877 2465 1189 145 153 4294770675 327690 4294836221 1 65535 65538 4294836224 65534 65539 65535 65536 4294705152 196608 4294901759 196607 1 1 4294836227 65533 1 4294836224 131072 4294770687 4294967294 65533 4294901763 4294901758 4294901761 4294967293 131072 65535 65535 65537 4294836224 131070 65536 65535 196609 4294836226 196605 4294836226 131071 4294967295 65537 131070 4294836224 4294901760 131070 4294836225 262142 4294836228 65533 2 4294967294 2 131069 4294901760 0 65537 4294967294 4294901760 196608 4294770688 0 65536 4294901760 65537 4294901762 1 4294967293 4294836225 4294901758 262143 4294639616 131068 4294901762 131073 4294705150 131070 131073 1 4294967295 4294901759 327682 4294705152 4294901758 2 4294967292 1 65536 4294836222 131070 131071 4294967295 65535 196607 4294836224 65536 4294901761 4294901757 131072 1 4294836225 65536 0 1 131070 2 65535 4294836224 0 0 0 4294901761 65536 4294770686 65535 2 4294967295 196606 4294836227 65536 1 4294836224 65535 4294901761 65535 4294901760 4294901760 65534 4294901761 4294901757 196606 65535 0 +-87 0 -37 -22 167 190 -411 -321 506 529 -475 -261 961 620 -112 -2026 -1619 1842 1831 -515 -844 -414 -310 107 68 715 475 -350 -135 -14 65 -156 -414 13 438 423 -129 -541 -38 434 136 -385 -129 253 -95 -216 184 342 -23 -347 -139 172 128 -37 -72 44 -3 -34 72 48 -46 -14 50 -35 -85 -7 97 125 10 -199 -91 102 70 -57 -149 55 193 44 -141 -99 92 150 26 -199 -120 103 35 -12 29 76 31 -131 -105 121 144 -80 -143 44 172 1 -199 -124 119 226 -15 -170 -18 65 51 14 -67 -80 32 97 -4 -44 28 12 -54 -25 67 85 40 -107 -62 -43 -89 41 77 96 -5 -131 -46 143 78 -99 -55 85 92 -106 -162 103 270 -56 -278 -106 112 170 -14 -123 -32 134 79 -92 14 10 -112 -71 33 119 4 -58 46 88 -27 -149 -2 104 -37 -35 83 13 -82 -40 13 18 -24 53 74 -36 -81 5 79 29 -20 -68 -52 42 41 35 
82 -51 -139 -74 30 130 48 -57 -57 -24 11 63 14 -14 26 -10 -28 -26 -37 43 75 18 -6 -70 -60 23 44 37 1 -68 -74 60 90 27 4 -62 -54 3 38 15 -40 6 41 -5 -30 13 45 -6 -32 -26 0 25 14 -10 -13 -2 1 0 0 4 0 1 0 -3 0 0 0 0 -1 1 0 -1 -1 1 0 0 -1 1 1 2 0 -1 1 0 0 0 2 1 0 0 0 0 2 -1 0 -2 0 -2 -2 -1 -1 1 -1 -2 -1 2 -1 -4 -1 2 0 -1 0 2 0 -2 0 0 -1 0 -2 -1 0 1 1 2 -1 -2 0 2 1 -2 -4 1 0 0 0 2 0 -1 0 -2 0 1 1 1 -1 -3 -1 4 0 0 -1 0 -1 0 1 1 -1 0 -1 1 1 0 2 -2 -3 0 2 0 -1 -3 0 4 1 -2 0 -1 0 1 -1 0 -1 -3 2 3 0 -1 0 -2 -1 -2 2 1 0 -1 -3 -1 1 -1 -1 -1 -1 1 0 -1 -1 1 2 1 0 -1 -2 1 1 -1 -1 1 1 -1 -1 -1 1 0 -3 0 4 -1 -2 -4 0 2 -2 0 3 -1 -2 0 0 0 -3 0 1 0 1 0 2 -2 -2 -1 0 0 0 -2 -3 1 1 1 0 -1 -1 -1 1 1 0 0 -1 -2 -3 0 0 0 1 0 -1 -1 1 2 1 -1 0 -2 -3 -1 0 2 0 -2 -2 0 1 -1 0 2 -2 -1 1 0 2 1 -2 -2 -1 0 0 1 0 0 0 -4 -2 3 1 -2 -3 1 1 -1 0 0 1 0 -1 0 +65449 4293591003 12452007 4273995365 34669050 293746 1307921 4117220 6014125 3617786 883732 107549 515849 348125 18421 28561 171565 370773 309322 189800 166721 80650 55681 150820 120938 48905 17753 7120 1165 7488 2312 3725 7274 25034 39701 18685 8149 25226 39185 29682 30964 40277 25009 1369 6617 18122 25666 27136 22385 29585 54977 65237 29125 4549 2797 10889 10433 1952 928 3541 11714 13049 5693 9602 15145 17186 22565 15885 10250 19700 36853 76036 88520 41444 15325 18980 14705 296 17585 15250 3380 9860 22930 10820 2594 7058 8324 493 3385 6772 6586 7082 5024 4468 2906 9325 24797 17800 5553 3825 4090 392 776 1460 3218 5949 4936 4129 3305 4625 9076 8829 3860 2925 1669 1636 1706 1069 2061 1700 625 296 4294901747 1 262144 65536 4294770688 0 0 131071 4294901760 131071 0 131071 131073 4294901760 1 0 65538 0 0 4294901762 4294836224 4294836224 4294967294 131071 4294901759 196607 4294770687 196607 4294901760 131072 4294836224 0 65535 4294967294 65536 131073 4294901759 131072 4294836225 131068 0 131072 4294901760 4294836224 65536 65537 4294836223 327679 0 65535 65535 65537 65535 131071 1 4294836226 65533 2 4294836223 262144 4294836225 4294901760 65536 65535 4294836223 196610 4294901760 4294836224 4294901759 65538 4294901760 4294967293 4294901761 4294967295 131071 4294901760 131071 65538 4294901760 131070 4294901761 131071 4294901761 4294967295 1 65533 4294901764 4294770686 131072 65534 4294901763 65534 0 65533 1 1 4294836226 4294967294 0 4294836224 131069 65537 4294901760 4294967295 65537 0 4294901759 65533 0 1 4294967295 131073 4294901761 4294836224 4294967293 131072 4294836224 65534 4294901761 131072 4294967294 1 65538 4294901758 65535 65536 0 4294705152 262142 4294836225 131069 4294901761 0 1 65535 +-37 0 -62 -15 268 252 11 -856 -952 722 733 166 65 -222 -644 788 2164 -916 -2154 -264 924 55 -890 452 817 -41 -265 -51 65 -191 -52 225 106 -248 -308 -9 92 277 -63 -62 270 173 52 -178 -112 -369 -303 468 332 -106 -156 -37 113 150 70 -237 -230 74 172 108 -17 -103 -40 -24 49 89 -41 -169 -80 144 106 -3 -116 -48 136 132 -27 -110 37 5 -37 -48 -10 17 -66 -34 14 123 67 -60 -18 54 69 -104 -71 -11 -50 24 20 -2 -52 48 39 91 173 -127 -226 -9 107 55 38 5 -91 -129 44 142 -72 -90 80 105 -21 -87 22 39 -44 -67 14 72 -29 -35 11 66 92 -12 -47 -87 -6 12 -39 -31 -77 52 47 68 59 34 66 -101 -124 20 175 -7 -231 -125 79 235 48 -140 -12 60 29 -60 -56 -40 -60 58 56 107 102 -139 -118 22 2 -14 18 92 37 -73 -16 15 9 -31 -59 4 53 52 -35 -66 9 79 66 -47 -83 -51 20 59 -13 -18 24 24 -29 -66 1 120 43 -111 -60 61 58 -5 -33 -28 6 30 10 -34 -22 47 27 -50 -2 45 2 -68 -43 62 65 -29 -37 -8 -29 -14 23 70 13 -63 1 48 0 -60 -16 45 24 -22 -28 0 17 10 -7 -8 -2 3 4 0 -1 0 -1 -1 0 0 0 3 1 0 1 0 -2 -2 -1 2 2 0 2 0 -3 -1 0 1 0 -1 -1 -2 -1 1 
[... large regenerated numeric test-data hunks elided: each hunk replaces a
block of whitespace-separated int32 sample/feature values with new golden
values; in the added (+) lines, negative values are encoded as their
unsigned 32-bit equivalents, e.g. 4294049730 == -917566 mod 2^32 ...]
4294967295 65535 4294901759 65535 65536 4294901759 65535 4294836224 131071 4294901761 65535 65535 1 4294967295 4294901760 65535 2 4294901760 65536 4294901760 2 131069 4294901761 0 131070 0 1 4294901760 65533 65536 65535 65537 4294967294 131071 1 0 0 4294901760 4294901760 131071 4294901761 65535 1 65536 4294901761 65536 4294901760 4294901762 65534 65536 4294836224 4294901760 0 4294901761 65535 65536 1 4294967295 0 4294836224 65536 65535 0 4294901759 65536 131072 4294901759 0 65535 4294901760 131069 2 131070 4294901762 1 4294836224 131071 65535 65534 1 +21 0 -88 -32 -16 75 134 222 361 -235 -515 -695 -515 991 1192 34 -895 -654 845 197 -1466 -242 942 1305 566 -1167 -833 176 502 239 -59 -483 -470 75 186 406 102 -116 -18 -130 -65 233 294 -36 -106 -411 -240 229 44 73 75 -75 -113 195 332 -156 -347 -179 -5 321 171 -55 -14 -13 30 -73 -48 13 25 3 -91 -57 83 127 4 -92 -104 -61 60 219 82 -176 -113 25 66 34 -57 -57 0 123 109 -78 -90 -46 -1 61 16 12 24 -22 26 -13 -39 -29 -30 -49 -74 153 203 -51 -128 -108 -85 38 40 222 248 -208 -351 -97 115 336 159 -232 -194 86 281 99 45 -505 -587 228 378 181 -71 -255 -445 224 498 393 219 -315 -159 -247 -171 118 88 -62 -332 330 598 -87 -293 54 546 -183 -496 -456 -133 430 162 -308 -673 277 539 590 328 -223 262 -372 -514 -290 -25 378 164 -168 -132 26 -138 -240 -224 631 680 -205 -603 -285 264 573 319 -223 -116 -504 -477 404 575 77 -260 -331 -20 122 -65 -57 -69 176 50 -8 181 108 -56 -265 -88 93 -57 52 166 68 -58 -138 28 82 -23 -133 -107 134 157 12 -70 -115 -29 107 36 -31 54 22 -50 -80 2 27 -40 17 45 11 -28 -26 9 40 18 -21 -6 -7 -3 2 -2 3 2 0 0 0 1 0 0 -4 -3 1 0 0 -1 1 -1 -2 1 1 1 -2 -4 -2 1 4 1 -1 1 3 0 -2 -1 -1 0 3 0 -1 2 -3 -2 2 0 -1 2 2 -2 -2 3 1 -1 -2 -1 -1 0 0 -1 2 2 -3 -2 1 -2 0 3 1 -1 -2 -1 1 -1 0 3 0 -1 0 1 0 -2 0 2 0 0 0 0 -2 -1 0 1 -2 -2 -1 0 2 -1 -3 1 3 3 -1 -1 -2 2 -2 -4 0 0 0 1 0 -3 -2 0 2 0 -1 -1 -1 2 1 0 1 1 -4 -2 1 1 3 1 -1 1 -1 -1 1 1 -2 -1 0 2 0 0 -3 -1 -1 -1 0 -1 -1 -1 -1 1 3 1 1 1 -2 0 -1 0 0 1 -2 -2 -2 1 0 -1 1 0 -1 -1 -1 0 1 0 -1 -1 2 1 -2 1 1 -2 -1 1 0 0 1 0 1 1 -2 -1 0 0 1 0 -2 1 0 0 -1 -3 1 1 0 -1 1 1 1 -1 0 -1 -1 1 3 1 -2 -1 -1 -1 2 0 -1 0 1 2 0 -1 -1 0 1 3 -2 -5 -1 1 3 0 1 1 -2 1 2 -2 -1 0 -1 3 0 -2 -2 -1 0 -1 -1 1 0 1 -1 -3 0 +21 4292935592 4980720 14549126 4279566697 748250 1247306 1422020 1228741 752834 2207720 2590389 1682245 724865 309125 236770 226525 199432 23860 17224 58514 87732 180157 110041 7265 11250 50794 134560 152450 103066 32266 365 6229 2473 634 11530 23018 8480 14537 51561 37700 13394 5512 6498 15129 17965 10216 3722 400 1060 845 2362 3301 28885 43810 28048 8669 50884 104768 132610 126121 79105 45032 88762 257050 396553 175645 70066 248201 402453 147186 86290 43165 11588 219124 365173 88765 331605 453952 202589 121108 529658 638621 157313 207028 348296 143509 55120 18100 76644 448337 504425 444834 398025 151490 267472 390745 336554 177161 15284 7474 35737 2564 44425 73361 16393 5953 32180 22408 7508 18218 29405 24793 18125 12290 2257 3400 8900 733 1889 2146 1460 2621449 4293591058 4294574074 196605 262142 2 0 1 4294705152 131069 0 131071 4294901759 65537 4294836225 4294901756 262145 4294901761 196609 4294836224 4294967295 196608 4294901760 4294770690 196606 4294901760 131074 4294901758 65539 4294901759 4294967295 0 196607 4294770690 131070 65534 65539 4294901759 131071 65535 3 65535 1 65534 2 0 4294836224 65535 4294836225 4294967294 131072 4294836223 196609 4294901763 4294901759 4294836226 65532 0 1 4294901757 131072 4294901760 4294967295 65538 65536 4294705153 131070 196609 4294901761 4294901761 131071 4294836225 65535 2 
4294770688 4294967295 65535 4294967295 4294967295 196609 65537 4294836225 4294901760 0 4294836225 4294901758 1 131071 4294901760 4294967295 65536 4294901760 196607 4294836225 65537 4294967294 1 65536 65536 4294836225 65535 65536 4294836224 1 4294901760 131069 1 131071 65537 65535 4294967295 196609 4294836225 4294967295 196607 4294901760 65536 2 4294967295 65536 4294836227 4294967291 196609 65536 4294836225 131073 4294967294 4294901760 3 4294901758 65535 4294967295 1 4294901761 65533 +-55 0 39 66 37 -166 -203 232 367 -236 -561 68 563 263 -589 -486 424 912 382 -760 -581 -25 364 128 -414 -84 235 85 -124 25 37 26 116 -37 -193 -32 119 145 62 -146 -137 51 113 29 -31 -96 -74 60 60 66 58 -117 -105 43 37 -20 -42 56 48 -26 -18 28 14 -17 29 8 -48 -47 40 74 1 -97 -79 51 95 62 -23 -136 -68 89 76 1 -67 -28 67 37 -50 -30 30 32 13 -26 -37 0 59 7 -82 -23 62 30 -9 -21 -50 -6 44 36 4 -13 -1 3 10 -64 -63 62 35 -13 -9 27 21 -5 9 -11 -6 5 20 -24 -66 25 96 7 -7 -11 -88 -135 30 245 69 -175 -74 -7 -34 95 88 -43 -85 -5 140 83 -82 -304 -185 306 163 -97 -28 146 75 -146 -59 64 88 2 -47 -73 46 -15 -127 -20 13 76 45 11 -22 7 67 -34 -34 80 112 -233 -247 170 170 -82 -168 41 101 170 131 -225 -185 81 95 -50 -134 39 117 109 9 -133 -43 77 91 -25 -81 -90 -16 91 12 -1 45 7 -5 -47 -27 2 -16 11 12 10 -5 16 22 -15 -7 12 6 -21 11 21 -3 -50 -37 27 16 19 27 -7 -24 -27 7 22 -9 -19 -3 18 9 -8 -5 3 2 0 1 0 0 -1 0 0 0 0 0 0 -1 0 0 0 1 0 -2 -1 0 0 -2 0 0 -1 -1 0 1 1 -1 -3 -1 1 1 0 -2 0 1 -1 -1 1 0 -1 -1 -1 0 0 -1 -1 0 -1 0 0 -1 1 0 -1 0 0 0 0 0 0 0 0 2 0 0 -2 -1 -1 -1 0 0 0 0 -2 0 0 0 0 0 0 -1 -1 -1 0 -1 -1 -1 0 0 0 1 0 0 -1 1 1 -2 -2 0 1 -2 -1 0 1 -2 0 1 -2 0 1 0 0 -1 -1 1 1 0 0 -1 0 2 0 -1 0 1 -1 -1 -1 0 0 -1 0 1 0 0 -2 1 0 -2 -1 0 1 0 -1 0 0 -1 -1 -1 0 0 -1 0 1 1 -1 -1 0 1 0 -2 -2 1 0 0 0 -1 1 0 -1 0 -1 1 1 -1 0 0 0 -1 0 1 -1 0 0 0 0 0 0 -1 1 0 0 0 1 0 -2 0 0 0 1 -1 -1 -1 -1 0 1 -1 -1 0 0 0 0 -1 1 0 0 1 0 1 -1 0 0 -1 -1 -1 1 0 0 0 0 0 -1 -2 1 2 1 1 -1 -1 0 0 -1 1 1 1 0 -1 0 1 -1 0 0 -1 0 -1 -2 -1 1 1 0 +65481 4325415 4284088357 15269685 4279501167 319345 386138 583117 1011520 723524 338186 148880 178452 62450 16001 2045 14825 38273 35186 25160 21370 13610 10177 9076 7956 17053 12874 1769 4900 2980 1108 485 905 4513 7076 9410 8842 12869 19025 12545 5777 5273 5858 3400 1924 845 1369 3530 7253 4744 522 2536 3232 185 10 4196 7813 1394 810 466 202 61 976 4981 9265 170 25969 60925 35386 5525 10181 9593 7250 26489 99140 127861 35978 22100 26941 7577 7748 7538 2341 16529 5945 2146 533 5645 7556 66833 89909 35624 29905 39101 67786 40786 11525 19477 25570 17770 7778 8906 14661 8537 145 2074 2234 733 377 244 281 709 193 477 562 2509 2098 617 778 1305 533 442 1245181 4294443017 262139 2 1 4294901760 0 0 0 65535 0 1 4294967294 0 65534 4294901760 65535 65537 4294836223 131071 1 65534 4294901761 131071 4294901760 4294967295 0 4294967295 4294901760 0 131071 4294901760 0 0 0 0 2 4294836224 4294967295 65535 0 4294836224 0 0 0 4294967295 65535 4294967295 65535 0 1 4294901760 65537 4294901758 65536 4294967294 65536 65534 4294836225 65536 0 4294967295 65537 0 65535 2 65535 4294901761 4294967295 0 65535 1 4294836224 1 4294967294 65536 4294901760 0 4294967295 65535 4294901760 65536 4294901761 65535 1 4294901758 1 0 131071 4294901760 4294901760 65537 65535 0 65535 4294901761 0 0 0 131071 0 65536 4294836224 0 65536 4294967295 4294967295 65536 4294967295 0 0 131071 0 1 4294901761 0 4294967295 131071 0 0 4294901760 131070 65538 4294901761 65535 4294901760 65537 1 65535 4294901761 0 65535 4294901759 131071 1 +0 0 61 -175 -131 136 -126 -254 -365 620 1035 -35 -1071 
-4 1937 93 -1693 -1391 520 1575 -137 -1291 -211 513 -407 541 929 -160 -344 -363 -108 266 31 37 328 -128 -419 -22 318 77 -145 -135 -131 69 299 30 -422 -46 453 100 -285 -164 95 66 -152 5 158 124 -8 -108 12 -21 -83 8 7 64 93 -5 -15 -17 -14 -87 -43 99 112 -35 -118 -100 -4 112 79 -32 -185 -63 138 215 38 -148 -50 29 29 -30 -24 58 105 -45 -132 -147 -69 236 222 -71 -201 -141 60 236 63 -175 -124 53 100 125 62 -189 -97 67 7 -29 -11 52 76 -47 -114 6 83 6 -117 -14 179 202 15 -438 -335 215 163 272 239 -269 -234 -25 9 115 65 -59 -66 60 32 49 182 -19 32 -144 -272 -224 -78 273 108 228 106 -365 -130 274 200 -69 -119 -220 -65 186 62 5 -10 -74 -12 10 -122 191 409 -106 -179 -162 -161 -203 -171 524 394 -303 -310 90 52 42 124 153 114 -214 -170 38 157 -8 -122 -94 -14 85 39 -22 -28 -46 -89 69 116 42 -21 -48 -5 -12 29 24 -80 -49 78 96 -15 -105 -16 52 -26 -32 20 75 52 -53 -46 -21 -2 25 0 -21 -4 57 44 -60 -41 9 -3 -3 0 15 -2 0 15 6 -7 -11 0 1 0 1 -2 0 2 1 -1 -2 -1 3 0 -1 0 2 -3 -2 0 4 0 -2 0 2 1 0 -1 0 2 3 0 -3 0 1 2 -1 -3 -3 -2 0 0 -1 -1 1 0 -2 0 -1 -2 1 2 1 0 -4 -2 0 0 -2 -1 3 0 -1 0 0 -1 -1 -1 -1 -1 3 1 -1 -1 -1 2 1 -1 1 0 -2 1 2 0 -3 -2 -1 1 0 -1 0 0 -1 1 1 1 -2 -2 -3 -3 2 3 1 -1 -1 -2 -1 -2 1 0 1 0 -1 -1 0 2 0 1 -3 -3 1 1 -2 0 0 -1 1 3 2 0 -5 -3 0 0 3 0 -3 -2 1 4 1 -1 -3 -1 1 -1 -3 0 2 1 -1 0 -1 1 0 -1 -3 0 2 -2 -1 0 -1 -3 0 2 1 -1 1 0 0 2 0 -1 -2 -1 0 1 -1 -1 1 4 -1 -2 -4 -2 2 3 0 -1 -1 -1 1 1 -2 1 1 0 -3 -3 0 -1 0 2 0 -1 0 0 1 3 -1 -3 -3 -2 0 -1 0 0 0 0 -1 0 -1 -2 -1 -1 -1 0 -1 0 0 -1 -2 -2 2 1 0 -3 -2 1 5 2 -3 1 -1 -1 -2 1 2 -1 -3 1 -1 -3 2 -1 -2 1 1 2 2 0 0 +0 4283498557 8978301 4278386562 40697491 1072450 1147057 3760618 4801130 2751025 1685450 307690 458330 888641 250105 82420 2330 123968 176045 107053 39250 21922 90301 180200 215209 108121 13381 23129 40340 11728 585 6953 4145 8674 514 7765 11650 13769 23924 12560 7265 38194 65269 23348 3341 1741 3940 13050 39033 60457 54325 60282 59296 34594 18185 25625 39565 13898 890 2825 7985 13032 6925 13885 72845 192069 158450 100553 129482 55381 13306 7706 7956 3425 33485 21760 124160 80613 63648 144461 91976 44761 62561 38821 3869 5576 244 51365 178517 58285 67130 303817 247045 104200 4468 38785 58792 30344 24713 23720 7421 2005 2900 12682 15220 2745 169 1417 8801 15300 11250 2960 1700 6025 5513 2557 629 441 3265 5536 1762 18 983040 65534 393231 4294311929 65536 65536 65534 65538 4294901759 262143 4294901760 131072 4294901757 262144 4294836224 131072 1 65535 196610 4294770688 65536 4294901762 4294836221 65534 4294901760 131071 4294836224 4294901760 131070 65538 4294705152 65534 4294836224 262143 4294901760 0 4294967295 4294967295 262143 4294901761 4294967295 65538 131071 4294836224 131073 4294770688 4294967294 1 65535 4294901760 65537 4294836225 4294836222 196605 65539 4294967295 4294967294 131070 65536 4294901760 65535 2 4294770689 131069 4294836225 0 131071 131075 4294639616 65533 196608 4294770688 131070 65540 4294836223 131071 4294836223 131072 4294901761 4294901760 1 4294836223 131072 4294967294 4294901760 65533 65538 131071 0 2 4294901759 65535 4294901761 131071 4294901764 4294770686 196606 3 4294967295 131071 4294836225 65537 4294770688 65533 65535 2 65535 65536 4294901763 4294836221 65534 65535 0 4294901760 4294901760 4294967294 4294967295 4294901760 0 4294901759 196606 1 4294901757 327681 4294770690 4294901761 4294901759 131073 4294836223 4294901761 196605 4294901759 65537 131074 0 +-23 0 -61 64 148 10 -13 -55 -51 -64 152 40 -505 -599 10 1453 417 -1269 -123 1273 563 -1317 -917 394 402 310 72 -254 -132 79 112 -26 -14 -109 -246 116 289 65 -127 -69 121 -31 
-242 3 320 181 -124 -338 -109 144 24 39 21 23 32 -5 20 -43 -34 13 -17 -30 24 73 2 -72 -34 38 38 27 25 -71 -79 47 92 -25 -84 -11 32 50 36 -60 -71 5 0 55 68 -20 -62 -9 58 36 -21 -82 -27 94 82 -87 -119 37 122 -17 -109 -5 43 -7 -34 39 40 -3 -7 -41 -55 48 83 13 -31 -40 6 18 21 -16 -33 -33 -20 45 39 -30 -87 20 87 73 46 -100 -137 1 112 90 -26 -99 -30 43 44 0 -28 -4 26 -51 -82 101 176 -78 -231 -63 134 238 92 -264 -232 56 117 141 41 -144 -124 78 133 16 -56 -57 -21 35 73 22 1 -88 -138 -9 88 136 3 -16 120 -105 -132 -16 -71 18 88 108 13 -80 -30 34 66 -45 -72 -1 10 19 9 -13 -16 29 30 -17 -17 -10 9 6 -47 -3 78 35 -57 -73 2 56 -6 -9 25 20 8 -23 -14 -3 1 -13 -10 34 6 -42 2 45 -10 -45 -8 43 34 -13 -23 -8 12 -4 -18 8 13 1 -3 -5 -2 2 0 0 -1 0 1 1 0 -1 0 2 1 -1 0 0 0 0 0 -2 -1 1 -1 -1 0 0 0 -1 -1 0 0 0 0 -1 0 -1 0 0 0 -1 -2 0 1 1 -1 1 0 -2 -3 1 2 1 -1 -1 -1 -1 1 -1 -1 1 0 -1 -1 -1 -1 0 1 1 1 -2 -1 -1 0 0 1 1 0 -1 -1 0 0 -1 -1 0 -1 1 -1 0 0 -2 -1 -1 -1 0 0 -2 -2 1 3 0 -2 0 0 -1 0 1 -1 -1 -1 -1 0 0 0 0 -2 0 0 -1 -1 1 0 1 0 -2 -1 0 1 1 0 -2 -1 0 1 1 0 0 2 1 -1 0 0 -1 0 0 -1 -2 -2 0 0 0 0 1 -1 0 0 1 0 0 1 -1 0 0 -1 -1 0 1 0 -2 0 0 0 0 1 -2 0 -1 1 0 2 -1 0 -1 -1 -1 -1 1 1 0 -2 0 -1 1 0 -1 0 2 0 -2 0 0 -2 0 0 -1 -1 -1 0 0 -1 1 1 -1 -1 -1 1 1 -2 -1 1 0 0 0 1 -1 -1 0 -1 0 -2 -1 0 0 0 1 0 -1 0 -1 -1 -1 0 0 -2 -1 -1 1 0 0 0 -2 -1 0 0 1 -1 -1 -1 -1 1 -1 -1 1 1 0 +65513 4259779 655508 4291428339 4290838477 24704 613826 2111309 1784250 1635658 2051458 996125 257704 69700 23665 13220 12077 73972 87746 20890 15602 58573 135161 129620 32617 2097 970 1049 2249 1325 1189 5905 5188 2600 2173 5666 8450 9089 7177 3524 4896 5066 3025 5024 3925 4660 7165 9565 14293 15530 15173 11906 1898 2677 1609 1730 5329 7058 2561 360 697 2178 2425 2421 7969 12898 12116 18770 20644 10477 2749 1936 800 3277 16925 37060 57330 74600 78160 56960 33570 22417 21460 17945 6385 1666 5813 7745 19125 26240 265 25425 17680 5365 19408 6569 2056 6381 5185 461 250 1097 1189 389 117 2218 7309 8578 3140 117 1025 593 205 170 1256 1800 2029 2125 1913 1325 593 160 589806 65549 4294705149 196606 0 65535 65537 4294901760 131072 4294901761 0 0 4294836224 131071 4294967295 0 4294901760 65535 0 4294901760 4294901760 0 4294901760 65534 65537 131071 4294836224 131069 65538 4294967295 4294967295 4294901761 131071 4294901760 4294967295 65535 65537 4294836225 4294967295 0 65537 4294901760 65535 4294901760 65535 131071 65535 4294836224 4294967295 65535 4294836224 131070 3 65534 4294901760 65536 4294967295 4294967295 0 0 65534 4294901760 131071 65536 4294836224 65535 65537 4294836224 65535 65537 0 65538 65535 4294901760 0 4294901759 65534 0 65536 65535 65536 0 4294901761 0 4294967295 65536 4294836224 0 0 4294836225 4294901760 1 4294901762 4294901760 4294967295 131071 1 65534 131071 4294901760 131072 4294836224 0 65534 4294901760 4294967295 0 131071 4294901761 4294967295 65537 4294967294 1 0 4294901761 65535 65535 4294967294 0 65536 4294901760 4294901760 4294967295 0 4294967294 131071 0 4294836224 65535 65536 4294967295 4294967295 4294901761 131071 1 +-94 0 144 46 -60 -200 -299 187 652 181 -507 -339 1058 -118 -1890 -1355 -559 2557 1910 183 107 -1178 -192 29 -421 -186 226 217 -81 -67 -129 -119 18 381 368 -279 -380 -189 -63 176 27 125 -68 -3 352 279 -36 -555 -182 243 79 -67 -89 21 63 49 31 -68 -141 0 83 123 95 -55 -103 -95 25 85 -5 -9 30 12 44 -62 -121 -62 -5 176 108 -128 -125 52 82 37 27 -79 -94 13 25 47 21 -22 -44 46 75 21 46 -51 -24 7 41 -209 -313 215 311 34 -120 -65 56 -10 -69 32 69 17 4 -43 -20 -23 -42 39 61 5 -39 -34 53 54 -53 -87 41 19 -101 49 130 -29 -81 -65 -115 85 194 
82 -129 -141 113 124 -94 -20 247 -99 -294 -55 85 55 -92 17 25 68 39 30 46 -20 -10 -65 -34 112 56 -111 35 136 3 -266 -175 213 240 -67 -233 -51 102 124 104 31 0 -313 -322 194 303 218 50 -371 -275 162 236 29 -184 -109 68 167 53 -59 -13 -63 -57 82 109 -31 -84 -60 29 98 17 -133 -79 72 -20 31 117 36 -36 -56 26 -50 -80 56 67 -30 -56 14 14 11 27 55 69 -133 -169 39 80 64 10 -35 -33 16 62 20 -23 -61 -15 29 3 -3 2 5 0 0 0 0 0 0 1 0 -2 2 4 1 -1 -2 2 3 1 -1 1 -1 1 -1 0 0 0 -1 0 1 3 -1 -2 0 3 -2 -2 -1 -1 3 1 -3 0 0 1 -1 -3 0 4 -1 0 -1 1 -1 -1 -2 2 -1 -4 -2 0 0 2 0 -1 -1 -1 1 0 -1 0 -2 -1 2 1 -1 -1 -2 0 0 0 0 0 0 -3 1 4 2 1 -3 0 -1 -2 0 4 -1 -3 -3 0 1 -3 0 5 -1 -2 -1 -1 -1 2 -2 -1 0 1 0 2 -4 -3 2 3 -1 -1 -2 -1 -1 1 -2 -2 1 0 -1 -1 -1 -1 0 0 1 1 0 0 -1 0 -1 0 -2 -2 -1 -1 1 -1 -3 -1 1 -1 -1 2 0 -3 -2 1 0 -3 0 0 2 -2 -1 1 0 0 1 -4 0 3 1 -2 -1 0 0 -1 0 2 -3 -3 1 0 -1 2 1 0 1 0 -1 2 -1 -2 -3 0 1 -1 -2 -2 0 1 -1 -1 -1 -1 -1 -2 0 1 1 0 0 0 -3 -3 1 1 0 -2 1 1 -1 0 -1 -1 1 2 0 -2 -1 2 -1 1 -1 -2 -1 1 -1 -2 -1 1 -2 -5 -1 -1 2 -1 -1 2 0 -1 -3 0 -2 -4 1 0 -1 -2 -1 -2 3 4 0 +65442 3014800 4281925572 12320469 11862668 371970 1133288 5408125 6850730 3681589 1399133 37705 211837 98165 11050 30802 145485 213265 180121 34945 16354 4633 201745 309321 92173 10730 8362 6370 5585 19881 22018 12050 19634 7850 106 1044 5780 18485 31001 28048 18329 8093 6970 9005 2834 925 4052 6066 4717 625 45362 144194 97877 18625 3236 5785 5050 1865 929 3285 3746 2677 5725 10378 2042 12602 17741 10786 20450 44360 36522 28145 9236 70810 89461 10250 8753 5249 2421 2516 4325 13700 15457 19721 70765 75994 62089 56890 25780 11777 97969 141320 139333 140141 101869 56537 45737 32513 6290 4138 9973 12842 10656 10445 17978 11425 1361 14985 4432 3176 9536 5389 3332 317 3754 22450 30082 10496 1325 1345 4244 4250 1966065 4294770691 327682 0 0 0 1 196606 65540 4294901759 196610 4294901761 4294901761 4294901761 0 4294901760 65536 4294901763 65534 4294836227 4294967294 262143 4294770689 0 4294901761 65533 4294901764 4294901760 4294901761 4294901759 4294901762 4294901756 0 2 4294967295 131071 4294901760 4294836224 196607 4294901761 4294901759 0 0 0 131069 131076 4294770689 4294901760 65534 4294901764 4294836221 65536 65533 4294901765 4294967294 4294967295 4294836226 65535 1 4294705154 196605 4294901763 4294901759 4294967295 4294836225 131070 4294901760 4294967295 65535 65536 1 4294901760 4294901760 4294836224 4294967294 131071 4294836223 131071 4294967295 2 4294901757 1 65533 131072 4294967294 1 65536 65532 65539 4294967294 0 65535 4294770690 131069 4294901760 65538 65536 4294901760 4294901762 4294836222 65536 4294901759 65534 4294901761 4294967295 4294967295 65534 65537 0 4294770688 131069 1 131070 4294901761 4294901760 131071 2 4294967294 4294901762 4294901761 4294967294 4294901761 4294967294 4294836225 4294967291 196607 4294967295 2 4294836223 4294836224 131068 4294901760 4294967294 262142 4 +9 0 -42 -130 16 280 -10 -384 -206 92 -288 870 1090 -793 -449 625 108 -1207 -227 919 40 -308 205 -414 -807 676 842 -177 -285 -96 -74 0 20 55 20 30 -33 62 207 -80 -256 -66 99 30 -80 327 306 -405 -169 142 -88 -99 112 68 -119 -26 88 76 7 -137 -124 113 164 2 -102 -121 -12 124 27 -34 15 -29 -39 67 11 -97 -10 128 26 -39 56 -27 -71 -28 6 52 48 -24 -69 -39 65 99 -49 -107 58 41 -88 44 164 -89 -156 -47 -75 53 81 138 46 -58 45 -46 -70 -33 -7 63 26 -60 -36 69 65 -29 -52 -33 40 61 -12 -125 -120 129 157 27 13 -98 -115 -21 46 106 44 -79 -13 10 -69 -108 -91 187 170 77 100 -218 -176 4 -51 50 121 53 -96 -33 117 2 -116 -7 101 40 10 -110 -179 4 86 169 40 -58 45 -3 -1 -38 6 -53 -133 -52 19 
249 136 -179 -93 81 54 -52 -1 50 18 -109 -13 66 -59 -108 -20 106 17 31 59 -47 -43 0 9 -8 -29 12 36 -17 -106 32 153 72 -45 -136 -32 52 -30 12 77 58 18 -101 -78 16 34 12 -38 25 51 10 10 -20 0 -45 -66 20 53 39 -18 -46 16 21 -34 -19 13 31 9 -10 -2 0 1 -2 0 -2 -1 0 1 0 0 -3 -2 0 0 1 0 -1 -2 0 1 -1 -1 -2 0 1 1 0 -2 -2 0 3 1 -1 0 -1 -2 -1 0 0 0 -1 1 1 0 -3 -4 1 2 2 1 -1 -3 -1 1 -3 0 -1 -2 1 -1 1 1 0 -2 1 0 1 1 0 -1 1 0 -1 -1 1 1 -1 -1 1 0 3 2 -1 0 -1 2 1 -2 -1 0 1 2 -3 -1 0 0 0 1 -2 1 2 0 -6 -2 1 0 -1 3 -1 -1 0 -1 -4 -3 -1 -1 -1 -1 -1 -2 1 0 -1 -1 2 -1 -1 0 -1 -2 3 -1 -1 1 -1 -2 2 1 0 1 -2 -1 -3 -2 3 1 -2 0 1 0 0 1 2 2 -3 0 1 -1 -2 -1 1 2 1 1 -3 -2 -3 -2 1 -1 -3 0 3 -1 -3 0 4 -1 -1 1 0 1 0 -2 2 2 0 0 -2 -1 0 0 2 1 -2 -2 0 1 2 1 -2 -2 -2 -1 -2 -1 3 0 -2 -1 -1 -2 2 -1 -2 2 1 -3 -1 0 2 -1 -1 -1 0 1 -3 -3 4 0 -1 3 1 -3 -3 1 6 1 -2 0 -1 -1 0 1 0 -1 -3 -3 0 2 0 -1 -1 -2 1 0 0 0 -2 0 1 0 0 3 0 +9 4286513110 18350096 4269866998 6094642 839844 1816949 592226 1468513 896090 96464 213421 1108225 740293 90441 5476 3425 1300 4933 49249 69892 10701 113329 257661 48725 17545 17168 14837 13520 18818 28145 26900 25045 15520 1885 1066 6010 9530 16484 2197 3865 5825 2740 2880 6282 14026 13850 5045 9680 34817 26545 8434 25605 5480 4141 5989 4018 4276 6057 5066 3793 5321 15769 31041 25378 9773 13666 13352 8177 269 16425 43250 34829 57524 30992 5101 17450 10305 13693 13505 11801 12200 32057 35957 4964 2034 1445 2845 20393 62362 50537 15210 5620 2501 12205 4525 15145 11636 1250 5690 1849 145 985 1585 12260 28593 20521 3728 1044 9293 10525 6340 1300 2069 2701 500 2025 4756 4330 2440 697 1517 2031629 4294311945 65534 4294836225 4294836224 65535 1 4294770688 65534 65536 4294901760 65534 4294901761 4294901759 65536 1 4294901758 196608 4294901761 4294901760 4294967294 0 4294901760 65537 4294770688 131068 131074 4294901761 4294967293 4294770689 4294901760 131070 131071 1 131070 65536 1 131071 4294901760 131071 4294901761 131071 196608 4294901762 4294901760 65538 4294967294 65536 4294770690 65535 0 4294836225 131073 4294574080 131070 4294901760 4294901763 65535 4294770687 4294967293 4294967295 4294967295 131070 4294901760 196607 4294967295 4294901760 262142 4294967295 4294901761 196606 1 4294836225 4294836223 262142 4294836225 65536 0 131073 4294770690 65536 4294901759 131071 65538 4294770689 4294836222 131070 4294836223 196608 4294836223 262144 4294967295 1 1 196606 2 4294836224 65535 131072 4294836225 65534 131073 4294836225 4294901758 4294901759 262143 4294836224 4294967295 196606 4294901759 65538 4294967293 131072 4294967295 65535 4294770689 327677 4294901760 65539 4294836221 393217 4294836225 4294901760 65535 1 4294836223 65533 2 4294967295 131070 0 4294836224 65536 0 3 +-74 0 -67 105 229 26 34 -230 -691 3 863 993 412 -1580 -1278 248 -227 684 1513 512 -926 -1453 30 788 -250 319 937 -202 -669 -495 111 592 109 -495 -203 373 127 -202 13 169 67 -161 -93 -166 -387 353 706 129 -276 -536 -188 317 160 -19 -6 21 -19 -73 32 73 9 -65 -30 -8 16 26 -31 -74 -43 127 124 -52 -63 -76 -102 53 130 119 17 -164 -96 39 73 43 -53 -59 38 50 -14 -72 -87 93 128 27 76 -119 -258 -31 149 158 -12 -61 54 -37 -126 -14 68 98 49 -75 -88 8 91 34 -46 -86 -40 59 44 3 -27 -46 -21 84 31 -78 -12 111 62 -94 -62 -12 -7 64 37 -46 -39 59 98 -45 -84 -39 1 24 -12 53 110 -83 -202 -13 124 167 71 -173 -109 32 -31 -24 33 127 59 -91 -77 -9 33 52 -11 1 77 -26 -81 -45 17 55 4 -38 -55 15 38 86 89 -48 -25 -84 -82 18 30 13 -73 56 176 23 -91 -143 -58 74 65 58 -7 -80 -11 20 -7 13 -11 -36 -14 65 36 -10 25 11 3 -88 -45 63 -8 -65 -5 105 31 -72 -45 45 77 5 -48 -57 14 36 -23 -22 9 
29 6 -6 0 2 33 -1 -38 -48 -8 46 13 -9 -1 4 4 -1 -2 -3 -1 2 0 -1 0 2 0 -1 0 0 0 0 1 -1 1 -1 -1 1 1 -1 1 -2 0 0 -2 0 -1 0 2 -1 0 2 -1 -2 2 -2 0 0 -3 1 1 0 3 0 -1 -1 0 -1 -2 1 3 -2 0 1 -2 -2 3 1 -2 -2 1 -1 -3 -1 -1 -1 -3 3 4 0 -3 -1 1 0 1 -2 -2 -2 0 -1 -1 3 0 0 1 -2 -3 -1 1 4 1 -1 -1 1 2 1 1 -3 -2 1 -1 2 2 0 -1 1 0 -1 -2 2 1 -1 0 0 0 0 -1 -2 1 -2 -2 3 2 -2 -1 -1 -1 1 2 -2 -2 0 0 0 0 0 0 -1 0 2 -1 -2 -1 0 -2 1 1 -1 0 3 -1 -3 0 1 1 0 0 -1 0 -1 0 -1 -1 0 -1 0 1 -1 -1 -1 -2 0 -1 1 1 0 -1 -1 -1 -1 0 1 -1 -2 -1 0 -1 1 1 -1 0 0 0 0 -1 0 0 0 -2 -1 -1 -2 -4 4 3 -1 -1 1 1 0 0 -2 -2 1 2 1 1 -1 0 -1 -2 -1 2 0 -1 -4 -3 0 -1 1 -1 0 2 0 -4 1 2 -1 -1 2 2 0 -1 -2 -1 0 2 -2 -3 2 2 0 3 0 -1 -1 -2 -2 0 1 0 0 +65462 6946749 1704165 4279894050 261453 1730818 2666144 1694788 519385 2551313 2968685 621844 164261 918773 692586 362785 256906 180338 56933 28730 30410 36205 274378 515077 363472 135833 25961 477 5690 6353 4306 964 932 6437 17978 18080 9745 13213 31061 27185 10737 7178 6290 3944 5380 16218 17113 19937 67525 47165 3865 4285 16072 14228 8026 7808 9437 9512 5081 1945 2845 7497 7045 12465 12680 3988 4145 3485 5002 11629 8577 577 2953 18989 40973 43265 34970 12905 1537 17218 11762 6010 3793 122 6605 8586 3314 1460 3250 8840 10225 7681 7048 1069 8465 31505 28730 8840 7589 6449 521 218 1417 4421 1396 746 7753 5994 4289 11050 6145 4050 5954 5553 1492 1013 922 72 4 1090 3748 2180 4294377485 327679 4294901764 4294836222 196607 4294901760 131072 4294901760 0 0 4294901761 4294901761 131071 4294901761 4294836225 0 65534 65535 4294901762 131072 4294901759 4294836226 0 131069 1 3 4294967295 4294901760 131070 4294836227 65536 4294901758 65539 4294901758 4294901761 4294967293 4294967295 262141 4 4294967293 1 4294836225 4294901758 4294901760 262143 0 4294836225 4294967293 262145 4294901761 131071 65538 4294770689 131070 196607 2 131071 4294901760 196606 4294901761 0 0 4294901759 4294836225 262142 4294836226 4294967295 131071 4294836226 65534 0 0 4294901760 131072 4294901759 65535 131070 4294901761 196608 4294836223 65536 1 4294901760 4294901760 4294901760 65535 65535 4294901761 4294967295 65534 131071 1 4294967295 4294967295 65536 4294901759 65535 131071 4294901761 0 0 65535 0 4294967294 4294901759 327676 4294901763 131071 1 4294836224 131070 65538 4294901761 4294901760 4294967294 2 4294770687 65533 131071 65535 2 131068 4294901762 196607 2 4294901759 65535 4294836226 196605 2 3 4294967295 4294901758 65536 0 +-150 0 52 18 118 -231 -582 530 1003 215 -239 -1206 125 884 -1096 -1166 -24 1415 1715 1061 -256 -3136 -1531 1078 323 1079 421 -332 327 -136 -245 222 436 -632 -463 9 -482 200 674 316 -243 -295 219 -62 -584 -110 272 556 171 -239 -81 66 207 -79 -72 -97 -186 -103 93 297 16 -240 -14 130 -52 -74 22 94 104 11 -94 -154 62 114 -169 -74 185 174 -11 -161 -90 27 127 65 -43 -101 -75 -56 84 220 -89 -345 -44 436 293 -274 -298 -141 -83 266 236 -10 -184 10 346 -88 -434 -10 382 56 -278 -37 157 -93 -112 164 14 -102 74 29 -68 40 -15 -170 -84 330 268 -169 -159 -67 8 54 19 43 55 -84 -61 0 1 -39 -100 99 100 -28 -10 85 115 -65 -103 -151 -9 154 23 -130 -184 61 149 190 93 -161 -105 -5 48 43 20 -74 -84 -23 -43 136 137 -94 -98 104 106 -141 -123 94 123 -14 -124 -54 147 61 -223 -51 216 124 -62 -99 83 24 -120 -198 -39 307 154 -208 -145 27 25 56 22 33 57 -146 -157 142 123 -80 -96 82 145 39 -13 -178 -85 24 -102 80 185 57 -127 -102 114 117 -37 -90 42 -12 -73 -19 0 52 26 -23 3 9 -49 -32 33 88 28 -68 -21 21 8 -15 -5 3 -1 0 -1 -2 0 4 -1 -3 3 5 0 -2 0 -1 2 1 1 -4 -5 -2 -3 -2 0 3 -1 0 -2 1 0 -3 2 5 2 -3 -5 -1 5 0 -2 -2 -5 -2 2 0 -1 0 1 -3 -6 -3 -1 4 -1 -1 0 
3 -4 2 5 1 3 2 -1 -3 -2 -3 1 3 0 3 -3 -1 5 -1 -4 1 2 4 3 -8 -3 0 0 2 -3 1 4 -3 0 5 1 -7 -3 7 5 -5 -1 -1 -1 0 3 -4 -2 -1 -4 1 3 -4 -3 3 -2 -6 -4 3 -1 -2 2 6 -5 -1 3 2 1 1 1 -1 -5 -2 4 1 -2 -2 2 4 0 -3 3 0 0 -5 -2 2 -3 -2 4 1 -4 -3 3 1 -1 -1 -6 -3 2 7 2 -3 -1 3 2 -3 0 -1 -1 4 0 -3 0 2 3 -2 -5 3 5 2 0 -2 4 0 -3 -4 2 2 -5 -3 2 -1 1 1 -4 -4 -2 3 2 1 -2 -1 0 3 3 0 -3 -3 1 4 0 -3 0 0 -2 0 4 -1 -2 -2 1 -2 -3 -2 1 2 -4 -4 0 2 -3 -1 1 -4 -4 7 2 -5 -4 -1 -4 4 5 3 0 -2 -1 -1 2 4 2 0 -2 -4 0 3 2 1 -2 0 3 -4 -4 3 4 -2 -4 0 +65386 1179700 4279828598 34799034 14091243 1511557 797081 2560772 2002801 4066946 9900032 3506045 1268570 287465 125425 109309 589520 214450 272324 554132 146074 51805 353156 383120 86362 10917 49090 14593 45205 96858 57856 17096 8180 9320 10937 32552 16840 34037 64501 26042 8829 20354 12050 8761 55456 126946 192032 160925 108685 77645 55796 33956 127460 188456 149060 78653 33298 39440 10600 6317 6224 29125 115956 100385 29770 2980 2210 10081 3721 1522 19801 10784 7325 17450 33410 23797 17429 37577 58301 34570 11050 4153 5876 7585 20345 27605 20420 31117 23965 15325 18292 25330 52330 62032 13645 7465 53604 95770 66980 21754 3761 1573 24565 44813 21529 15940 22546 31853 7801 16804 37474 26533 26685 9469 1908 5690 2704 1205 90 3425 8833 5408 1441771 4293984264 262139 65535 4294901759 262144 4294836223 327683 4294836224 4294901760 65538 4294705153 4294901755 4294901757 196608 65535 131070 4294770688 327682 4294770690 4294967291 5 4294901758 4294901755 2 65535 4294770689 4294836218 327679 4294967295 196608 196604 65541 131075 4294836223 4294836222 196609 196608 4294967293 4294901765 131068 262146 4294443011 65533 131072 131069 4294770692 327680 4294508545 524285 4294639621 4294967295 65535 4294705155 4294967294 131068 4294705155 262141 4294639614 262140 4294901759 393218 4294967291 131075 65537 4294901761 4294901755 65540 4294901758 262146 4294770688 3 4294639616 196606 4294901757 65540 4294836220 65539 4294967295 4294836218 458754 4294770690 262143 4294770690 4294901760 327679 4294770688 131072 4294836227 262139 131077 4294836224 4 4294770685 131074 4294836219 4294901762 65537 4294770684 262142 65538 4294967294 196608 3 4294836221 262145 4294770688 0 65534 4294901764 4294901758 4294836225 4294901757 131073 4294770684 131072 4294967293 4294705153 524284 4294639618 4294967292 327676 196613 4294836224 4294967295 262146 2 4294770686 196608 65538 65534 4294705155 262140 4294836228 65532 +-35 0 7 10 105 22 -330 -355 303 879 -328 -1332 -83 2009 1346 -1758 -2076 339 1551 1014 -146 -1154 -710 189 710 435 -445 -528 16 232 24 237 312 -228 -394 -197 46 432 258 -186 -149 -77 101 31 -255 -116 183 219 -88 -90 55 3 -7 62 75 -86 -132 -38 32 123 27 -43 59 23 -56 -105 -24 111 70 -62 -38 -25 -72 22 38 49 62 14 -37 -96 -39 82 93 -24 -94 -47 4 57 33 63 111 -100 -141 -50 21 78 22 -39 -54 44 120 -50 -190 -37 57 116 47 70 95 -127 -39 -57 -184 2 152 213 50 -251 -165 111 121 43 -23 -38 78 -57 -167 19 123 30 -95 -33 83 80 -23 -98 -21 47 50 -9 -131 -34 171 130 -97 -161 23 109 49 -43 -59 -36 7 58 37 -65 -69 66 58 -69 -49 77 41 -57 -28 33 25 -4 0 -27 -30 -25 -54 89 126 -16 -66 -34 31 10 2 -14 -24 -61 -104 92 154 36 -69 -75 46 58 5 -64 -21 -21 -74 25 81 50 -46 -54 27 71 64 -70 -95 -31 -1 66 36 -10 -14 -7 -9 23 65 -10 -58 -53 5 36 -36 -4 35 60 59 -59 -85 -12 53 27 -37 -19 11 12 -6 7 25 1 -12 -26 -12 11 0 4 1 3 7 0 -3 -4 0 2 2 0 0 -1 -3 0 1 0 -2 2 2 -1 -3 -1 -1 3 2 -1 0 1 0 -1 0 -1 0 0 0 -2 -2 -4 0 1 -1 -1 -2 1 1 1 -1 1 0 -3 -2 3 2 1 -3 -2 2 1 -1 0 -1 1 1 1 2 -2 -2 0 1 -1 1 1 -1 -2 1 -2 -3 1 0 -1 -1 1 -2 -1 2 3 -2 -4 -2 
3 -1 -2 -1 4 1 0 0 -2 -2 -2 2 3 -3 -4 1 2 0 0 -2 0 0 0 -1 0 2 1 1 -3 -2 1 -1 0 3 -1 -1 -2 -4 -1 0 1 1 1 0 1 0 1 2 -1 -1 -2 1 2 -2 -1 2 1 -1 0 -2 -2 0 1 0 0 1 -3 -3 -1 0 2 0 2 1 0 0 -1 -1 0 3 0 -4 0 1 0 -1 -1 -1 2 -1 1 1 -1 0 3 2 -2 -3 0 1 0 2 1 0 -3 0 1 -1 0 1 -3 -1 1 -1 -1 1 -1 -1 0 1 1 -1 -2 -2 -1 1 1 -3 1 1 -1 0 0 -2 2 2 -1 0 -1 2 2 2 -1 -3 -1 1 1 -1 0 2 -2 -2 0 0 1 0 -2 -2 0 0 0 0 -2 -1 1 0 0 -1 -1 0 2 -1 -1 1 -1 -3 0 3 0 +65501 655367 1441897 4271767222 57606447 1881808 4042970 4902280 4424697 3433797 1353032 539821 693325 476809 54080 56745 149328 194045 188740 101160 28130 11162 78481 81450 15844 3034 3893 13021 18868 16153 2578 4010 14161 12897 8744 2069 5668 3845 4040 10585 8245 9225 11045 3265 5058 22321 22381 6525 2005 4852 16900 37469 16705 7109 25154 4770 33860 68473 65501 39546 16490 1973 9333 28250 16029 10114 13289 10133 2650 2581 18317 46141 35330 12410 4250 4777 3413 5594 9117 8125 8330 4930 1873 641 729 1525 10837 16132 5512 1061 200 4297 19280 25012 10386 5480 4121 882 6101 9061 5032 5770 8996 9986 4357 1396 245 610 4325 6173 1321 1312 4825 6962 7369 3538 1730 265 85 626 820 265 262144 196609 7 4294770685 131072 2 4294901760 65533 1 196606 4294901762 4294967293 262143 4294901762 65536 4294901760 4294901760 0 4294836224 4294770686 65536 4294967295 131070 65537 131071 4294770688 262142 65538 4294901757 65538 65535 131071 65537 4294836226 65534 4294901761 65537 4294901759 4294836225 131069 4294901760 131071 4294967294 196610 4294770686 262142 4294901759 327679 1 4294836224 4294901758 196610 4294770685 131073 0 65534 0 65535 65538 4294770689 131070 65535 4294901763 4294901759 4294967292 65536 65537 65536 65536 4294901762 4294901759 131073 4294967294 65538 65535 4294901758 65536 0 4294770689 4294967293 131072 131072 1 4294901760 65535 3 65532 1 4294967295 196607 131071 4294901761 196608 4294836226 65533 1 65538 4294770688 65536 65535 4294770689 131071 4294967295 4294901761 65535 65537 4294901759 4294967294 65537 131069 4294901761 0 196606 4294901762 4294901760 131074 4294901762 4294967293 65537 65535 4294836226 65534 65536 4294836224 65534 0 4294836224 131071 0 4294967295 131072 4294967295 4294901761 65533 3 +-82 0 62 -103 8 283 269 -490 -493 -180 -589 288 554 1065 243 -914 151 611 308 -456 -558 -715 -41 1203 656 -787 -754 -164 108 527 160 -229 -139 185 209 -145 -132 66 96 -88 -109 32 137 24 -173 -247 -142 420 404 -165 -287 -61 140 34 -104 4 51 2 -35 8 48 55 15 -74 13 3 -87 -14 92 -16 -147 118 219 -157 -220 110 118 -26 16 -5 -76 1 102 3 -85 -59 8 54 -21 2 49 25 -9 -18 24 -47 -66 96 112 -123 -90 82 2 -130 -84 203 192 -88 -126 -56 -22 39 34 68 18 -31 56 19 -39 -121 -7 126 12 -160 -111 99 66 74 39 -50 -2 -17 35 10 -92 -74 44 90 28 -38 -64 -82 -84 114 124 69 31 -121 -110 39 124 10 -116 -40 93 88 9 -133 -77 29 -20 35 47 40 31 -65 -56 37 54 -36 -51 12 5 -20 -20 83 85 -67 -62 17 24 -41 -37 43 -1 -21 17 71 43 -69 -17 14 -27 -33 -3 12 -4 44 2 -18 65 3 -85 -43 62 65 -33 -95 -15 94 40 -72 -64 43 64 5 -24 -22 6 -3 -17 -10 -28 32 57 -6 -58 -17 25 52 45 -28 -35 -31 -1 25 -2 -19 -2 31 3 -25 2 24 1 -25 -3 18 3 -9 -1 2 0 -1 0 0 1 0 -1 -1 -1 -2 -3 2 0 0 -1 0 -2 0 1 1 2 -1 -1 -3 -1 3 1 -2 0 2 2 0 -2 -2 1 2 -1 -2 1 3 1 -3 -1 0 -1 1 0 -1 0 -2 -3 2 1 -2 -1 0 0 1 -1 0 2 1 0 -3 -1 0 0 0 1 0 0 -2 -4 -2 0 4 3 1 2 -2 -2 -2 0 0 0 0 0 0 0 -1 -2 -2 0 2 1 -3 1 2 -1 -3 -2 0 2 0 -2 -1 0 1 -1 -2 0 0 0 -1 -1 1 -1 -2 0 2 0 -1 2 0 -2 -1 0 0 -1 0 -2 0 0 -1 -1 0 -1 3 2 1 1 -2 -2 2 3 0 -3 -2 2 0 -2 0 0 -2 0 1 2 -2 -2 -2 -2 2 0 -1 -2 1 -1 2 -1 0 2 0 0 -2 1 0 -3 -2 2 2 -1 -1 0 -2 -3 1 2 0 -1 -1 -2 1 1 1 1 -1 -1 0 -3 
-1 2 0 -1 -1 -2 -1 0 0 -1 1 -1 -1 -3 2 3 1 0 0 2 -3 -2 2 1 1 0 -1 2 1 -3 -2 1 -1 -2 1 1 0 -1 0 0 -1 2 2 1 -4 -2 -1 0 0 -1 2 3 1 -1 -2 -1 -2 0 0 -3 1 4 0 +65454 4288217150 18546696 4262854925 4283235859 429865 1441141 894445 396122 302800 822589 1448890 1049705 595412 289393 78041 53546 64706 21780 16960 12905 19345 90938 196564 190441 86090 20756 10832 2605 1289 5329 5701 178 7765 8720 35533 72610 60500 14600 281 5777 10413 10706 2980 445 3026 405 2785 13572 27673 14824 16904 48265 44608 19012 2005 5780 1285 3497 16162 15925 25744 22122 9832 4021 293 1325 13940 10036 2228 10820 20052 20137 15602 13621 15476 15056 16393 17770 6770 1625 3809 5186 4505 4212 2745 425 7289 11714 4133 2257 3218 442 5330 6610 485 1818 153 1952 328 4234 9074 8069 10114 9061 6784 5945 4121 1060 45 389 1808 3285 3653 3329 2809 2186 626 365 965 634 580 4293328897 1245181 4294377475 196607 4294901760 0 1 4294967295 4294901759 196605 0 65535 65534 65537 4294901762 4294836223 262143 4294836225 131072 2 4294901758 131073 4294901759 196609 4294770689 65535 131071 4294901760 4294836224 196605 4294836225 65535 65536 65535 65538 4294770688 65535 0 1 4294836224 4294901756 262144 65539 4294836226 4294901758 0 0 0 4294901760 4294901758 131072 4294770689 131073 4294836223 65534 2 4294967294 65536 4294901759 0 4294901760 131071 4294901759 131072 4294901760 2 4294967294 0 65535 65534 4294901760 65535 262143 65538 4294836225 196606 3 4294901757 2 65534 4294836224 65536 4294836226 4294901758 196606 4294901760 131070 196607 65535 2 4294836224 1 4294901757 131074 4294967295 4294836224 131069 2 4294967295 131070 65537 4294901761 65535 4294967293 2 4294967295 4294967294 0 131071 4294967295 196605 65539 0 4294770690 196606 65537 4294901760 65538 4294901757 4294901761 131070 1 65535 4294901760 131074 4294705153 4294967294 0 196607 65539 4294901759 4294901759 0 131069 4 +-30 0 -188 49 370 244 -90 -441 -280 160 257 111 -589 -9 1290 746 -820 -1933 -349 2060 1079 -1173 -764 66 118 176 -73 -219 62 360 -131 -336 76 235 -76 89 188 -84 -33 -39 -33 8 30 6 32 -96 -122 39 2 -63 -87 196 165 -4 20 -102 -51 18 -13 15 50 -71 -93 90 47 -33 54 55 40 -194 -310 125 237 108 72 21 -72 -303 -84 301 111 -137 -38 46 32 -70 -73 37 39 -6 -33 34 77 -13 -52 -57 -29 1 -74 152 236 -71 -111 -70 -13 -7 -31 6 -38 59 115 7 -81 -57 56 17 -106 7 94 21 -64 69 179 -113 -219 -16 139 114 -61 -160 -38 162 146 -81 -168 -68 56 129 23 -51 2 -21 -41 33 68 -32 -86 17 124 -3 -185 -87 109 180 -46 -154 29 167 41 -148 -52 59 35 -13 -29 -27 -29 29 43 38 6 -35 25 2 -52 -62 -8 65 -13 10 65 16 -2 -73 -55 31 13 -2 -20 21 70 33 -41 -115 -46 115 107 -41 -80 -48 -27 48 52 28 -16 -13 38 5 9 -53 -49 28 14 -39 -20 46 2 0 18 -10 -6 4 -26 0 48 31 -12 -55 -14 32 12 -16 -6 1 8 5 -17 -23 1 15 -19 10 28 14 2 -23 -12 1 -1 2 0 5 2 -2 -2 0 3 4 0 0 2 -2 -2 0 1 4 0 -1 -1 0 1 1 0 -1 0 0 -2 -1 1 0 -1 0 0 1 0 -1 0 1 -2 -1 -1 -1 1 1 -1 0 -2 0 1 -1 -1 1 0 1 -2 1 2 0 0 1 -1 0 0 0 2 0 -1 0 0 -2 1 -1 -2 -3 -1 1 0 1 2 -1 -1 -2 0 1 0 0 2 0 -1 1 1 0 -1 -1 0 -2 -3 0 -1 0 0 -1 -2 2 1 0 0 -1 -1 -1 -2 2 2 -1 -2 -1 -1 1 -1 0 1 1 -2 -2 1 -2 0 4 -1 -1 2 -1 -3 -1 -1 4 1 2 0 -1 2 1 -2 1 0 2 1 -3 0 0 0 1 -2 1 4 -2 -1 0 -1 1 2 -3 1 2 0 -2 -1 1 3 0 -3 -3 -1 3 2 0 1 -1 0 0 0 0 1 -1 1 -1 -1 0 0 0 -1 1 -1 0 1 0 -1 -2 -3 4 3 -2 -3 -2 0 1 -2 0 1 -1 -1 -1 0 3 -1 0 1 0 1 -2 0 1 0 0 1 -1 -2 0 2 0 -2 -4 -3 3 2 1 2 0 -1 0 1 1 0 -1 -1 1 0 0 1 0 1 -1 -1 -2 0 0 -2 -1 1 1 0 0 2 0 +65506 3276612 15991154 4266131366 10551016 78370 347002 2220616 4408889 4365401 2540170 588052 44900 53290 133444 130057 61001 13697 42400 2610 1153 936 10240 16405 
3973 45985 27241 10804 2925 394 7541 16749 3298 5941 39236 111725 67833 5625 96993 97657 31090 3560 5924 6698 1557 2245 6098 5953 842 28580 60737 17221 218 997 4925 13274 9810 3425 11285 9277 8857 44810 48217 32317 29321 27688 27877 32848 19777 3130 445 2770 5648 7685 15385 41794 44281 25832 28730 23585 6185 1394 1570 1682 3293 1261 629 6548 4289 269 4481 5333 3986 173 841 5989 14906 15341 13130 8704 3033 3488 425 1469 2890 3185 1717 2516 4 424 52 676 3265 3169 1220 400 37 89 818 226 461 980 4293459970 131060 196607 327680 4294836226 65534 262147 0 4294836226 65534 262145 4294901760 65535 65537 4294901760 0 4294967294 1 65535 65536 4294901760 65536 4294967294 4294967295 65537 65535 65534 4294901761 131071 65536 131070 2 65536 65535 0 2 65535 4294836224 4294901761 4294836222 131071 65536 4294901762 4294901759 65536 0 2 131071 1 4294967295 4294836224 65533 65535 4294901760 196606 1 4294901760 4294967295 196606 4294901762 4294967294 131071 65535 65537 4294901758 4294836225 262144 4294967295 4294901762 4294967293 327679 131073 4294901760 65538 131070 131072 4294770689 0 65536 131070 4294836228 65535 131071 4294770690 131073 4294836224 131071 3 4294836221 262143 2 4294901761 0 0 4294901761 4294901761 65535 0 131071 65535 1 4294901759 327677 4294836227 4294901757 65536 65534 4294901761 4294967295 196608 65535 1 4294836225 65536 0 4294901761 65534 2 4294770686 262141 65538 2 65535 65537 4294901760 131071 0 1 4294901761 4294901759 0 4294967294 65537 0 2 +-191 0 113 197 444 -345 -829 -76 703 332 -186 -313 -227 -881 -910 1116 -251 204 1758 1485 -65 -1935 59 145 -865 -25 893 440 -472 -1192 -797 1024 1137 402 -371 -1164 -187 935 223 -564 -102 428 -33 -363 253 334 -266 -420 -45 257 92 132 140 -189 -217 24 155 145 55 -332 -363 318 426 41 -60 -326 -191 283 273 -333 -475 167 208 236 355 7 -198 -656 -318 578 174 -161 49 152 51 -72 -29 -41 51 -39 -129 13 61 30 -45 -9 46 129 203 -183 -313 -133 22 281 139 -111 -127 -7 126 23 -262 -15 343 150 -152 -154 105 -58 -254 -38 24 281 251 -49 -107 -109 99 27 -34 -119 -154 64 174 -12 -187 -74 16 184 51 -87 75 74 -47 -118 -70 -21 3 129 -28 -58 48 254 237 -320 -159 160 30 -325 -227 265 190 12 -42 -73 -48 96 160 -131 -245 98 234 -23 -216 22 275 16 -182 -150 -38 124 110 2 -121 -89 119 211 -62 -235 36 146 26 15 0 -175 -25 113 -12 -61 -76 8 122 114 -58 -172 -47 106 81 28 -59 -142 -57 213 207 -99 -152 -81 -13 95 97 -8 -74 -79 -20 79 48 -2 -36 -7 89 3 -105 -42 56 44 -10 -7 -14 -61 -20 113 30 -72 22 35 -41 -27 32 3 -25 5 8 -3 0 4 0 0 2 2 2 -2 2 -2 -5 2 2 0 2 -1 -1 -1 -2 -1 -1 3 3 -1 2 -2 -2 0 -3 -1 4 -3 -3 4 2 -4 -5 3 4 -3 -2 -1 -2 0 -1 2 1 -1 -3 3 6 -2 -2 2 -1 -3 2 3 -1 -2 -1 2 3 0 -2 -3 4 -2 -2 -1 -1 0 3 -4 -6 -4 2 2 -7 1 3 2 0 -4 -5 1 1 5 2 -2 -1 -3 -1 -4 -3 7 0 -3 1 -4 -2 6 1 0 -2 -2 2 3 5 -2 -6 -4 -1 -1 -1 1 1 0 -2 4 4 -6 -3 3 4 2 -3 -7 -5 3 3 4 -3 -1 2 0 -1 3 4 1 0 -4 2 0 -5 3 4 -2 -1 -3 -1 4 -1 -6 4 4 -7 -1 10 5 1 -8 -3 4 -1 -4 4 4 2 -7 -4 0 -4 -1 8 0 -4 -3 -6 -1 3 -2 -3 0 -2 -3 -5 4 4 2 -2 -1 -1 -3 0 3 -3 0 -1 3 4 1 -3 -2 2 4 -1 -2 5 -1 -7 -3 8 3 -4 -7 -6 2 4 1 -2 -1 -1 3 5 -3 -3 -3 -4 2 3 2 -2 -1 -1 2 1 0 -4 -4 0 8 0 -1 4 -5 -2 0 -2 1 3 1 2 -2 -4 -2 0 -1 1 -1 -5 0 +65345 12910705 4272357820 4290051267 21758655 132565 827690 2073556 104617 5295789 3748450 24506 748850 991049 1643648 1683785 1454373 1492537 909194 367825 193588 132858 175565 247156 68074 25888 55321 47665 45050 113249 232893 183157 109876 116570 185418 253514 98960 126074 469540 435208 56197 25505 7785 2522 4122 16810 4621 2106 18757 74698 115658 79445 31642 16178 16405 68869 140149 46820 14389 65960 
79537 65402 23330 10530 15317 27812 30420 40445 34112 10170 11101 16133 5341 16650 4148 66820 158569 50881 106525 121754 36244 7093 11520 42761 69629 55285 47140 75881 55624 16820 12104 22562 58682 59069 22612 901 30625 13394 3865 5840 27880 32948 13445 7345 23645 48618 52650 29665 9194 9473 11717 6641 2308 1345 7930 12789 5072 149 3917 13169 6084 1709 4293263319 196640 393191 4294770696 262144 0 131074 4294836226 4294836226 196603 2 4294901762 4294967295 4294967294 262143 4294901763 4294836226 65534 4294967293 4294770692 327677 4294705154 262139 4294770692 4294967294 65534 196607 4294901761 262141 4294836230 196606 4294836223 196610 4294901759 196607 3 4294836222 4294836228 4294967294 65535 4294705155 4294770682 131074 131065 131075 4294705152 131067 327681 4294836226 4294836223 4294770687 524285 4294770688 4294705153 458750 1 4294901758 196610 4294836229 4294770682 4294967295 131071 1 327678 4294574084 262141 131076 4294574077 262139 262147 4294967293 2 262143 65540 4294705152 2 262139 4294836228 4294836223 327679 4294639615 262148 4294967289 327690 4294443009 327677 4294770687 262148 4294508546 65532 4294967292 8 4294836220 4294967290 4294836227 65533 4294836222 327675 131076 4294967294 4294836223 196608 65533 262143 65540 4294901757 262146 4294901759 4294901765 4294836217 196616 4294574076 196602 65540 4294967294 262143 4294770693 4294836221 196604 131075 4294967294 196607 1 4294770684 524288 4294901760 4294639620 65534 131070 65539 4294836226 4294901756 4294901760 4294901761 65531 +-15 0 28 -52 170 119 -525 -632 118 1012 267 -251 559 -219 -1517 -983 947 2034 -303 -368 1583 -1556 -2185 805 830 230 -278 -334 382 723 -39 -671 -303 261 458 374 176 -660 -312 146 -37 -370 -517 582 851 139 -318 -547 -126 254 182 -57 -253 168 354 -241 -345 198 266 5 39 -181 -264 32 238 144 -45 -224 -146 -42 -115 268 276 -41 -107 27 264 -290 -557 60 171 335 344 -188 -426 -9 368 55 -190 -63 23 -61 11 159 -32 -189 53 188 -104 -199 69 269 4 -255 -10 242 36 -196 -45 187 278 -171 -405 -204 46 383 145 -153 -122 72 168 -61 -195 24 217 67 -102 -188 -86 174 245 -54 -251 -144 93 189 3 -146 -162 91 242 139 -85 -285 -75 212 105 -15 -7 -104 45 138 43 -344 -455 191 430 277 -136 -309 81 218 5 -275 -185 314 387 -199 -306 -60 105 40 -196 -74 104 296 112 -224 -139 55 121 21 -156 -36 204 189 55 -363 -347 146 271 191 6 -323 -171 202 101 -66 38 23 -156 -101 80 221 70 -177 -101 69 19 -38 11 99 34 -71 -9 30 18 -89 -66 116 58 -76 -3 22 -35 0 40 25 42 -88 -143 42 90 19 -13 48 48 -115 -96 33 -11 70 121 -13 -83 -69 -1 59 25 -9 -4 -4 0 -1 0 0 3 1 0 -6 -1 7 -1 -6 0 1 3 0 -6 1 3 1 -1 -3 2 5 2 -1 4 -8 -5 4 2 -4 1 -1 -1 4 -1 -9 -4 3 3 2 1 0 -5 -4 -1 1 -3 0 3 5 0 -2 1 -3 -7 1 6 3 -2 -4 -2 1 4 -3 -8 2 2 0 -2 -1 -1 0 -4 -2 0 6 0 4 4 -1 0 -4 -1 7 -1 -1 5 0 3 0 -3 -7 -2 4 2 0 -2 -5 -1 4 4 -1 2 -5 -1 0 -1 -3 -3 -2 -1 0 0 2 0 -3 1 4 0 -5 1 -3 -4 5 2 -2 0 -2 0 -4 -8 2 2 1 0 0 1 2 -1 -7 -5 4 3 3 -3 -4 -1 -1 0 2 2 -2 -6 1 4 4 1 -5 -1 2 3 -3 -4 -2 -3 2 3 4 0 2 0 -4 -2 -1 1 1 -3 -1 -6 3 6 2 0 0 0 -1 0 1 -1 -1 -2 1 5 0 -5 -2 1 3 1 -2 1 -2 -1 1 -2 1 5 0 -2 -8 -4 1 -3 2 3 -2 0 1 -2 -3 2 2 -1 -4 -2 2 0 -1 2 -2 -3 2 -1 -4 -3 2 2 3 -3 2 7 -3 -1 -2 -3 -1 2 -1 -4 1 2 0 0 -1 -2 2 1 0 1 0 +65521 4291559452 7798954 4253613555 66322550 134290 360442 3267578 5033965 227233 4927025 5422250 741800 188840 668653 451762 159930 349640 466576 118660 138269 606013 743522 400333 80392 36373 92233 183397 158229 70781 34282 70720 77380 52201 23080 85049 77857 12178 153796 313849 141466 153680 181557 138449 40069 4250 25402 36745 38153 50417 77122 65041 58664 39712 36994 
106525 205641 148805 44434 20068 31945 38601 51578 45748 37672 62941 83737 44370 21325 34525 77885 88450 50569 11250 10865 21069 120185 243506 261629 113977 54085 75650 132821 189370 97236 12625 43892 98432 62720 22346 15082 25632 77337 134794 141725 109922 104365 70045 14557 1973 34537 55241 36229 14962 1805 9922 6197 981 8245 17812 9140 493 1225 2225 9508 22213 8461 2473 15529 10305 5021 14810 4290510765 3932159 4294377497 4294770684 4294901760 0 65539 4294574080 524287 4294639615 65536 3 131066 65539 4294836223 327682 4294901762 4294443012 327675 4294705154 4294901761 327679 4294443007 262140 131075 1 4294770683 131071 65533 327683 4294836224 4294770689 131065 196614 4294770686 131070 4294770692 196600 2 4294967294 65535 4294901756 393216 262144 4294901764 4294705152 524287 4294967295 5 3 4294574077 327678 2 4294705150 327679 4294901764 4294639618 65535 4294836223 4294901757 65535 131072 4294770688 262145 4294639616 4294770689 393212 4294836226 4294836224 4294705152 196600 65538 0 131073 4294574079 327675 196611 4294770685 4294967295 131072 4294836226 131066 262148 4294639617 196607 4294770691 4294901756 196605 262147 131072 4294705152 4294967294 65537 4294967293 262138 131078 0 4294901760 65536 4294967295 131070 5 4294901755 196609 4294836225 4294836225 131071 131070 5 4294508542 131068 196605 4294836227 65536 4294836222 131074 4294770687 196606 4294901760 4294836226 196605 4294770687 196605 196610 196605 4294770695 4294901759 4294967293 4294901762 131068 2 4294901760 196606 1 1 +-24 0 -3 33 43 -17 68 -16 -283 -209 209 565 -27 -652 -6 857 493 -1351 -1417 1189 1506 -204 -595 -368 -80 284 318 -195 -387 5 212 156 -78 -136 51 91 -64 -92 0 105 42 -1 6 -45 29 74 6 -150 -76 102 98 -18 -73 -102 -67 121 99 5 -19 -50 -14 17 -4 -6 12 33 23 -35 -34 -16 1 36 -13 -16 27 45 48 -38 -68 -61 -3 90 57 -34 -31 -60 -71 45 62 51 7 -48 5 -6 -59 -17 32 74 14 -35 28 0 -19 -34 -22 -7 -24 44 50 -19 -59 8 56 28 -17 -33 13 15 0 -9 2 -18 -9 -7 -26 22 22 -13 -3 12 -20 -15 9 31 12 -6 7 -19 -34 22 51 -5 -34 2 44 -28 -82 10 82 39 -35 -70 -3 45 11 -44 -42 13 -6 33 29 -5 -11 15 17 10 43 -33 -48 -16 8 12 -8 -13 -20 5 11 32 7 -28 -21 19 33 11 -17 -20 17 17 2 -30 -18 21 28 -16 -22 -25 -31 30 24 12 16 -3 -3 -30 -17 24 11 -31 -40 34 58 9 -41 -37 34 40 -16 -49 -24 30 31 14 -6 -23 -4 1 1 12 8 -21 -29 8 15 23 19 -11 -11 -17 -7 6 3 15 12 -17 -14 2 6 -1 -5 2 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 -1 -1 0 -1 0 0 0 1 0 -1 -2 -1 0 0 0 -1 0 2 0 -1 -1 -1 0 -1 0 0 -1 0 -1 0 0 0 0 0 1 2 -1 0 0 1 -2 -3 -1 0 1 0 -1 -1 -1 0 1 -1 -1 1 1 -1 -1 0 1 0 -1 -1 0 1 0 -1 0 0 -1 1 -1 -1 0 -1 0 1 -1 0 0 -1 0 0 0 -2 -1 -1 1 0 0 0 -1 0 0 0 -1 0 0 -1 0 0 1 0 -1 -2 0 0 0 0 0 0 1 0 -1 -1 -1 -1 1 1 0 0 -1 -1 0 0 -1 -1 1 1 0 0 -2 -1 0 0 0 -2 0 0 0 1 -1 -2 0 -1 1 -1 -1 1 0 -1 0 1 0 -2 -1 1 1 0 -1 -1 -1 0 0 0 -1 -1 1 -1 -1 1 0 -1 0 1 -1 -1 -1 0 0 0 1 1 1 1 -1 -1 -1 0 -1 -1 1 -1 0 0 -1 -1 -1 0 1 -1 -2 0 0 0 0 -2 0 1 0 -1 -1 1 -1 -1 1 0 -1 -2 0 0 1 0 0 0 0 0 -2 0 0 -1 -1 0 1 -1 0 0 1 0 -1 1 1 1 -1 0 0 +65512 2228221 4293853227 4293918788 4281335525 362906 425833 734485 2068250 3421610 2309652 489449 87056 139149 149794 69280 24580 10882 12560 11025 1765 2061 6317 22536 16180 9928 15733 19130 9826 2861 485 52 1233 1754 1412 1297 425 2754 3748 8345 8109 4405 4561 7066 6445 2353 61 3770 6500 1421 784 1517 533 2512 2861 3545 3920 1378 394 81 328 130 1160 653 153 625 1042 180 410 1640 2626 1160 2720 6824 8245 6125 2034 2057 1933 1125 866 346 389 2938 2560 208 233 425 1145 833 802 1210 689 578 904 765 1040 1109 1861 720 265 909 865 1082 2756 3445 3050 2756 2657 1476 
[... integer test-vector data of the preceding signal-ops testdata file elided: extraction re-wrapped the diff's long one-vector-per-`+`-line structure at arbitrary points, so the original line breaks are not recoverable ...]
\ No newline at end of file
diff --git a/python/tflite_micro/signal/ops/testdata/fft_auto_scale_test1.txt b/python/tflite_micro/signal/ops/testdata/fft_auto_scale_test1.txt
new file mode 100644
index 00000000000..c72880ae7e1
--- /dev/null
+++ b/python/tflite_micro/signal/ops/testdata/fft_auto_scale_test1.txt
@@ -0,0 +1,546 @@
[... 546 added lines of integer test vectors elided; the visible fragments repeat a pattern of an input vector, the same vector scaled by a power of two, and the exponent on a line of its own (e.g. `+8`, `+7`) ...]
-2 -2 -2 -1 -1 -1 -1 -1 -1 0 +0 -32 0 0 0 0 0 0 0 0 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -64 -64 -96 -96 -96 -96 -32 32 96 96 96 128 160 128 96 128 192 224 224 192 256 352 384 320 288 288 288 224 96 96 160 160 160 160 192 128 -32 -192 -320 -416 -576 -736 -768 -704 -672 -704 -800 -960 -1184 -1408 -1536 -1536 -1568 -1728 -1760 -1600 -1312 -1152 -1152 -1120 -992 -896 -928 -992 -928 -768 -576 -384 -224 -128 0 160 288 288 256 256 416 576 640 672 736 768 640 448 256 192 224 224 160 -96 -544 -928 -928 -608 -384 -352 -384 -256 -128 -256 -576 -672 -480 -96 96 -32 -224 -256 -64 320 736 992 1216 1440 1696 1760 1600 1344 1280 1728 2496 3200 3648 3808 3968 4320 4832 5216 5472 5632 5952 6560 7296 8160 8832 9216 9216 9120 8992 8832 8448 8000 7840 7936 7936 7456 6720 6080 5440 4736 3936 3392 3136 2848 2208 1408 640 0 -736 -1536 -2528 -3648 -4928 -6176 -7104 -7744 -8384 -9280 -10592 -11936 -13024 -13824 -14560 -15520 -16480 -17152 -17536 -17600 -17408 -17248 -17184 -17248 -17088 -16512 -15456 -14304 -13376 -12512 -11328 -9760 -8192 -6912 -5664 -4096 -2208 -320 1344 3008 4832 6848 8640 10048 11040 11680 12032 12320 12448 12544 12480 12416 12096 11520 10752 9984 9280 8480 7552 6688 5984 5344 4704 4000 3488 3072 2752 2496 2400 2272 2048 1728 1664 1792 1664 1344 1152 1440 1888 2112 2016 2144 2560 2880 2880 2752 2816 2976 3072 2912 2784 2496 2080 1696 1664 1632 1152 128 -896 -1504 -1856 -2400 -3008 -3424 -3648 -4064 -4736 -5376 -5856 -6400 -7104 -7680 -7712 -7296 -7040 -7072 -7136 -6880 -6560 -6368 -6304 -5920 -5216 -4448 -3872 -3296 -2560 -1792 -1184 -608 96 896 1440 1728 1952 2304 2784 3136 3360 3520 3648 3584 3360 3136 3008 2976 2848 2528 2144 1728 1408 1120 832 512 160 -96 -288 -352 -448 -576 -800 -1024 -1120 -1152 -1120 -1120 -1088 -992 -832 -704 -672 -608 -512 -320 -160 -64 -32 32 160 288 352 384 416 512 576 576 544 480 480 448 416 416 384 352 320 320 288 256 192 128 96 64 32 0 -32 -32 -32 -64 -64 -64 -64 -64 -64 -64 -64 -32 -32 -32 -32 -32 -32 0 +5 +0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 -1 -2 -3 -4 -5 -7 -9 -10 -12 -14 -17 -20 -24 -27 -30 -33 -37 -41 -44 -47 -49 -51 -53 -56 -58 -58 -57 -55 -54 -53 -50 -45 -39 -34 -29 -22 -12 -2 7 18 30 44 58 69 79 86 92 97 101 105 107 110 110 108 104 99 95 89 82 74 68 63 56 49 44 40 37 34 34 33 30 26 26 29 27 22 20 25 34 39 38 42 51 59 60 59 62 67 70 68 66 61 52 44 44 44 32 4 -26 -44 -56 -74 -95 -110 -120 -137 -164 -190 -213 -237 -269 -298 -306 -297 -292 -302 -310 -307 -299 -298 -302 -290 -261 -229 -204 -177 -141 -101 -69 -36 6 56 94 114 131 160 198 228 251 271 287 291 281 267 265 270 266 243 209 175 148 122 93 57 20 -11 -33 -45 -57 -79 -112 -146 -168 -176 -177 -182 -187 -177 -154 -134 -129 -127 -107 -68 -31 -14 -6 15 52 86 107 123 150 184 213 225 225 221 223 227 230 231 228 227 232 243 240 220 184 152 121 85 42 4 -20 -35 -57 -93 -134 -172 -205 -238 -274 -309 -335 -346 -352 -353 -353 -353 -349 -340 -326 -310 -291 -270 -250 -231 -209 -170 -116 -65 -29 -2 34 80 115 125 127 145 188 234 259 272 286 305 318 319 317 317 312 303 295 293 288 275 255 230 195 148 98 61 37 9 -27 -64 -97 -129 -155 -162 -156 -169 -210 -243 -237 -215 -221 -246 -241 -195 -165 -182 -201 -169 -110 -90 -108 -94 -27 28 18 -9 22 91 118 88 74 118 163 148 105 102 139 149 112 84 101 126 113 78 68 83 81 49 25 30 38 20 -8 -16 -4 -4 -23 -38 -35 -30 -37 -49 -49 -39 -35 -43 -47 -41 -33 -31 -32 -30 -23 -20 -20 -19 -15 -11 -10 -9 -7 -4 -3 -3 -2 0 1 1 1 1 1 2 2 3 3 3 2 2 2 2 2 1 1 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 64 0 0 0 0 -64 -128 -192 -256 -320 -448 -576 -640 -768 -896 -1088 -1280 -1536 
-1728 -1920 -2112 -2368 -2624 -2816 -3008 -3136 -3264 -3392 -3584 -3712 -3712 -3648 -3520 -3456 -3392 -3200 -2880 -2496 -2176 -1856 -1408 -768 -128 448 1152 1920 2816 3712 4416 5056 5504 5888 6208 6464 6720 6848 7040 7040 6912 6656 6336 6080 5696 5248 4736 4352 4032 3584 3136 2816 2560 2368 2176 2176 2112 1920 1664 1664 1856 1728 1408 1280 1600 2176 2496 2432 2688 3264 3776 3840 3776 3968 4288 4480 4352 4224 3904 3328 2816 2816 2816 2048 256 -1664 -2816 -3584 -4736 -6080 -7040 -7680 -8768 -10496 -12160 -13632 -15168 -17216 -19072 -19584 -19008 -18688 -19328 -19840 -19648 -19136 -19072 -19328 -18560 -16704 -14656 -13056 -11328 -9024 -6464 -4416 -2304 384 3584 6016 7296 8384 10240 12672 14592 16064 17344 18368 18624 17984 17088 16960 17280 17024 15552 13376 11200 9472 7808 5952 3648 1280 -704 -2112 -2880 -3648 -5056 -7168 -9344 -10752 -11264 -11328 -11648 -11968 -11328 -9856 -8576 -8256 -8128 -6848 -4352 -1984 -896 -384 960 3328 5504 6848 7872 9600 11776 13632 14400 14400 14144 14272 14528 14720 14784 14592 14528 14848 15552 15360 14080 11776 9728 7744 5440 2688 256 -1280 -2240 -3648 -5952 -8576 -11008 -13120 -15232 -17536 -19776 -21440 -22144 -22528 -22592 -22592 -22592 -22336 -21760 -20864 -19840 -18624 -17280 -16000 -14784 -13376 -10880 -7424 -4160 -1856 -128 2176 5120 7360 8000 8128 9280 12032 14976 16576 17408 18304 19520 20352 20416 20288 20288 19968 19392 18880 18752 18432 17600 16320 14720 12480 9472 6272 3904 2368 576 -1728 -4096 -6208 -8256 -9920 -10368 -9984 -10816 -13440 -15552 -15168 -13760 -14144 -15744 -15424 -12480 -10560 -11648 -12864 -10816 -7040 -5760 -6912 -6016 -1728 1792 1152 -576 1408 5824 7552 5632 4736 7552 10432 9472 6720 6528 8896 9536 7168 5376 6464 8064 7232 4992 4352 5312 5184 3136 1600 1920 2432 1280 -512 -1024 -256 -256 -1472 -2432 -2240 -1920 -2368 -3136 -3136 -2496 -2240 -2752 -3008 -2624 -2112 -1984 -2048 -1920 -1472 -1280 -1280 -1216 -960 -704 -640 -576 -448 -256 -192 -192 -128 0 64 64 64 64 64 128 128 192 192 192 128 128 128 128 128 64 64 0 0 0 0 0 0 0 0 0 -64 -64 -64 0 +6 +0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -3 -4 -5 -6 -6 -7 -7 -8 -7 -7 -7 -7 -7 -5 -2 -1 -1 1 4 7 9 11 14 18 22 25 26 27 28 30 32 33 34 35 37 40 42 39 34 29 24 17 9 1 -5 -9 -14 -24 -35 -47 -57 -68 -81 -93 -104 -111 -116 -119 -123 -126 -128 -128 -127 -124 -119 -114 -108 -102 -95 -79 -55 -32 -15 -1 18 43 63 71 73 86 114 145 165 177 191 208 223 229 232 238 240 238 237 241 243 237 224 207 180 140 95 61 37 9 -29 -69 -108 -146 -180 -192 -189 -210 -266 -316 -316 -293 -307 -351 -352 -292 -252 -285 -323 -278 -185 -155 -191 -170 -49 54 35 -17 47 193 255 194 169 276 390 365 264 265 369 406 315 243 299 384 354 251 228 285 285 180 95 117 151 83 -33 -66 -18 -18 -109 -187 -179 -157 -204 -281 -288 -235 -224 -281 -321 -290 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 0 27 34 28 30 42 60 86 115 132 126 112 115 134 146 133 106 88 81 76 58 34 23 29 40 36 12 -12 -17 -5 5 5 0 -3 -3 -8 -15 -23 -31 -38 -40 -40 -45 -52 -52 -43 -34 -39 -52 -55 -43 -32 -35 -45 -48 -46 -47 -51 -53 -50 -50 -53 -42 -22 -10 -16 -28 -23 -5 5 1 -3 5 19 28 29 28 29 26 22 21 22 22 23 34 48 58 56 50 52 58 59 55 53 58 63 67 67 63 55 42 32 26 20 11 1 -6 -11 -17 -26 -33 -35 -39 -45 -52 -53 -50 -46 -46 -48 -48 -46 -42 -41 -40 -38 -35 -31 -27 -25 -26 -27 -28 -26 -22 -19 -16 -13 -11 -8 -6 -8 -10 -11 -9 -6 -3 0 3 6 7 6 7 9 11 12 11 10 10 10 10 9 9 8 8 7 7 6 4 3 2 2 1 1 1 1 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 -64 -64 -64 -64 -128 -192 -256 -320 -384 -384 -448 -448 -512 -448 -448 -448 
-448 -448 -320 -128 -64 -64 64 256 448 576 704 896 1152 1408 1600 1664 1728 1792 1920 2048 2112 2176 2240 2368 2560 2688 2496 2176 1856 1536 1088 576 64 -320 -576 -896 -1536 -2240 -3008 -3648 -4352 -5184 -5952 -6656 -7104 -7424 -7616 -7872 -8064 -8192 -8192 -8128 -7936 -7616 -7296 -6912 -6528 -6080 -5056 -3520 -2048 -960 -64 1152 2752 4032 4544 4672 5504 7296 9280 10560 11328 12224 13312 14272 14656 14848 15232 15360 15232 15168 15424 15552 15168 14336 13248 11520 8960 6080 3904 2368 576 -1856 -4416 -6912 -9344 -11520 -12288 -12096 -13440 -17024 -20224 -20224 -18752 -19648 -22464 -22528 -18688 -16128 -18240 -20672 -17792 -11840 -9920 -12224 -10880 -3136 3456 2240 -1088 3008 12352 16320 12416 10816 17664 24960 23360 16896 16960 23616 25984 20160 15552 19136 24576 22656 16064 14592 18240 18240 11520 6080 7488 9664 5312 -2112 -4224 -1152 -1152 -6976 -11968 -11456 -10048 -13056 -17984 -18432 -15040 -14336 -17984 -20544 -18560 -15488 -15232 -16704 -15872 -12992 -11520 -12224 -12224 -9984 -7808 -7232 -6976 -5184 -2816 -2048 -2368 -1856 0 1728 2176 1792 1920 2688 3840 5504 7360 8448 8064 7168 7360 8576 9344 8512 6784 5632 5184 4864 3712 2176 1472 1856 2560 2304 768 -768 -1088 -320 320 320 0 -192 -192 -512 -960 -1472 -1984 -2432 -2560 -2560 -2880 -3328 -3328 -2752 -2176 -2496 -3328 -3520 -2752 -2048 -2240 -2880 -3072 -2944 -3008 -3264 -3392 -3200 -3200 -3392 -2688 -1408 -640 -1024 -1792 -1472 -320 320 64 -192 320 1216 1792 1856 1792 1856 1664 1408 1344 1408 1408 1472 2176 3072 3712 3584 3200 3328 3712 3776 3520 3392 3712 4032 4288 4288 4032 3520 2688 2048 1664 1280 704 64 -384 -704 -1088 -1664 -2112 -2240 -2496 -2880 -3328 -3392 -3200 -2944 -2944 -3072 -3072 -2944 -2688 -2624 -2560 -2432 -2240 -1984 -1728 -1600 -1664 -1728 -1792 -1664 -1408 -1216 -1024 -832 -704 -512 -384 -512 -640 -704 -576 -384 -192 0 192 384 448 384 448 576 704 768 704 640 640 640 640 576 576 512 512 448 448 384 256 192 128 128 64 64 64 64 64 64 0 0 -64 -64 -64 -64 -64 -64 -64 0 0 0 0 +6 +0 0 0 0 0 0 1 0 1 1 2 1 0 1 2 1 -1 -2 -1 -1 -3 -6 -6 -6 -8 -12 -13 -11 -12 -15 -19 -18 -16 -17 -19 -19 -17 -16 -17 -18 -16 -13 -13 -13 -10 -6 -5 -5 -5 0 4 5 4 5 7 10 16 22 26 26 24 25 30 34 32 26 22 21 20 16 10 7 9 12 11 4 -5 -6 -2 2 2 0 -2 -2 -4 -7 -11 -15 -18 -20 -21 -23 -27 -28 -24 -20 -23 -31 -34 -27 -21 -23 -30 -33 -32 -34 -37 -40 -38 -40 -42 -35 -18 -8 -14 -25 -21 -5 5 1 -3 5 20 31 32 32 33 31 27 26 28 29 31 46 68 83 82 76 80 91 94 90 90 100 112 121 125 121 107 85 65 56 44 26 3 -13 -25 -41 -65 -83 -93 -104 -126 -148 -157 -150 -144 -148 -158 -163 -159 -153 -150 -151 -148 -143 -129 -116 -110 -117 -128 -136 -130 -117 -101 -88 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 4 34 60 74 74 79 106 141 163 164 160 163 171 177 180 183 192 198 199 194 178 154 124 98 87 86 88 88 95 110 114 89 32 -31 -74 -95 -103 -103 -78 -25 38 78 84 61 19 -33 -89 -124 -115 -65 11 84 142 176 189 173 126 59 0 -24 -6 34 81 139 212 277 291 239 150 64 -1 -50 -82 -81 -40 19 68 96 103 86 37 -40 -119 -172 -193 -190 -170 -136 -90 -48 -24 -28 -55 -98 -145 -184 -206 -206 -187 -153 -108 -61 -24 -7 -9 -20 -33 -46 -60 -69 -65 -46 -17 13 38 54 57 49 36 22 5 -16 -35 -46 -45 -36 -24 -13 -4 -3 -10 -23 -36 -44 -49 -52 -53 -46 -32 -14 2 12 17 16 10 2 -6 -11 -11 -7 0 9 18 27 32 33 28 21 14 9 6 5 6 9 12 15 16 15 13 9 6 3 0 -2 -2 -1 1 3 4 5 5 4 2 1 0 0 0 -1 0 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 64 0 64 64 128 64 0 64 128 64 -64 -128 -64 -64 -192 -384 -384 -384 -512 -768 -832 -704 -768 -960 -1216 -1152 -1024 -1088 -1216 -1216 -1088 -1024 -1088 -1152 -1024 -832 -832 -832 -640 -384 -320 
-320 -320 0 256 320 256 320 448 640 1024 1408 1664 1664 1536 1600 1920 2176 2048 1664 1408 1344 1280 1024 640 448 576 768 704 256 -320 -384 -128 128 128 0 -128 -128 -256 -448 -704 -960 -1152 -1280 -1344 -1472 -1728 -1792 -1536 -1280 -1472 -1984 -2176 -1728 -1344 -1472 -1920 -2112 -2048 -2176 -2368 -2560 -2432 -2560 -2688 -2240 -1152 -512 -896 -1600 -1344 -320 320 64 -192 320 1280 1984 2048 2048 2112 1984 1728 1664 1792 1856 1984 2944 4352 5312 5248 4864 5120 5824 6016 5760 5760 6400 7168 7744 8000 7744 6848 5440 4160 3584 2816 1664 192 -832 -1600 -2624 -4160 -5312 -5952 -6656 -8064 -9472 -10048 -9600 -9216 -9472 -10112 -10432 -10176 -9792 -9600 -9664 -9472 -9152 -8256 -7424 -7040 -7488 -8192 -8704 -8320 -7488 -6464 -5632 -4928 -4032 -3072 -2560 -3264 -4288 -4864 -4224 -2816 -1280 256 2176 3840 4736 4736 5056 6784 9024 10432 10496 10240 10432 10944 11328 11520 11712 12288 12672 12736 12416 11392 9856 7936 6272 5568 5504 5632 5632 6080 7040 7296 5696 2048 -1984 -4736 -6080 -6592 -6592 -4992 -1600 2432 4992 5376 3904 1216 -2112 -5696 -7936 -7360 -4160 704 5376 9088 11264 12096 11072 8064 3776 0 -1536 -384 2176 5184 8896 13568 17728 18624 15296 9600 4096 -64 -3200 -5248 -5184 -2560 1216 4352 6144 6592 5504 2368 -2560 -7616 -11008 -12352 -12160 -10880 -8704 -5760 -3072 -1536 -1792 -3520 -6272 -9280 -11776 -13184 -13184 -11968 -9792 -6912 -3904 -1536 -448 -576 -1280 -2112 -2944 -3840 -4416 -4160 -2944 -1088 832 2432 3456 3648 3136 2304 1408 320 -1024 -2240 -2944 -2880 -2304 -1536 -832 -256 -192 -640 -1472 -2304 -2816 -3136 -3328 -3392 -2944 -2048 -896 128 768 1088 1024 640 128 -384 -704 -704 -448 0 576 1152 1728 2048 2112 1792 1344 896 576 384 320 384 576 768 960 1024 960 832 576 384 192 0 -128 -128 -64 64 192 256 320 320 256 128 64 0 0 0 -64 0 64 64 64 64 0 0 0 0 0 0 0 0 0 0 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -3 -3 -3 -3 -3 -4 -4 -5 -4 -4 -4 -4 -3 -3 -3 -3 -5 -5 -5 -4 -2 0 2 5 6 7 8 11 15 19 20 20 21 23 25 27 28 31 33 34 35 33 29 24 20 18 19 20 21 23 27 29 24 8 -9 -22 -29 -32 -33 -26 -9 13 28 31 23 7 -13 -37 -52 -50 -29 5 39 67 86 94 88 66 31 0 -14 -4 20 49 86 135 181 194 164 105 46 -1 -38 -63 -64 -33 15 57 83 91 78 34 -38 -115 -170 -195 -196 -180 -147 -100 -54 -28 -33 -66 -121 -184 -239 -274 -281 -261 -218 -157 -91 -37 -10 -14 -32 -55 -80 -107 -125 -120 -87 -33 27 78 113 123 109 83 52 12 -39 -88 -117 -117 -97 -67 -35 -12 -8 -31 -74 -117 -150 -172 -189 -196 -177 -125 -55 10 56 77 75 51 12 -29 -56 -58 -35 4 56 117 180 227 240 215 167 115 76 53 46 60 92 135 173 197 196 173 137 98 55 11 -20 -25 -2 35 74 117 153 168 140 86 42 24 16 3 -4 21 75 129 150 143 128 114 98 71 41 20 15 30 56 80 87 81 61 35 3 -30 -59 -85 -111 -135 -143 -127 -100 -85 -94 -122 -153 -175 -188 -195 -198 -201 -193 -176 -152 -127 -114 -113 -116 -113 -103 -90 -79 -68 -55 -37 -24 -19 -23 -24 -17 -8 -6 -7 -7 0 7 12 16 24 37 45 45 40 36 37 43 50 54 54 52 53 56 59 59 56 53 52 51 48 43 39 38 39 33 20 7 2 4 4 0 -5 -7 -8 -15 -26 -31 -26 -16 -12 -12 -12 -7 -2 -2 -7 -10 -9 -5 -2 0 0 -1 0 2 4 5 4 3 1 -1 -3 -3 -2 2 4 5 5 7 8 8 6 6 6 7 7 6 6 6 6 5 3 1 0 -1 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 +0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -128 -128 -128 -192 -192 -192 -192 -192 -256 -256 -320 -256 -256 -256 -256 -192 -192 -192 -192 -320 -320 -320 -256 -128 0 128 320 384 448 512 704 960 1216 1280 1280 1344 1472 1600 1728 1792 1984 2112 2176 2240 2112 1856 1536 1280 1152 1216 1280 1344 1472 1728 1856 1536 512 -576 -1408 -1856 -2048 -2112 -1664 -576 832 1792 1984 1472 448 -832 -2368 -3328 -3200 
-1856 320 2496 4288 5504 6016 5632 4224 1984 0 -896 -256 1280 3136 5504 8640 11584 12416 10496 6720 2944 -64 -2432 -4032 -4096 -2112 960 3648 5312 5824 4992 2176 -2432 -7360 -10880 -12480 -12544 -11520 -9408 -6400 -3456 -1792 -2112 -4224 -7744 -11776 -15296 -17536 -17984 -16704 -13952 -10048 -5824 -2368 -640 -896 -2048 -3520 -5120 -6848 -8000 -7680 -5568 -2112 1728 4992 7232 7872 6976 5312 3328 768 -2496 -5632 -7488 -7488 -6208 -4288 -2240 -768 -512 -1984 -4736 -7488 -9600 -11008 -12096 -12544 -11328 -8000 -3520 640 3584 4928 4800 3264 768 -1856 -3584 -3712 -2240 256 3584 7488 11520 14528 15360 13760 10688 7360 4864 3392 2944 3840 5888 8640 11072 12608 12544 11072 8768 6272 3520 704 -1280 -1600 -128 2240 4736 7488 9792 10752 8960 5504 2688 1536 1024 192 -256 1344 4800 8256 9600 9152 8192 7296 6272 4544 2624 1280 960 1920 3584 5120 5568 5184 3904 2240 192 -1920 -3776 -5440 -7104 -8640 -9152 -8128 -6400 -5440 -6016 -7808 -9792 -11200 -12032 -12480 -12672 -12864 -12352 -11264 -9728 -8128 -7296 -7232 -7424 -7232 -6592 -5760 -5056 -4352 -3520 -2368 -1536 -1216 -1472 -1536 -1088 -512 -384 -448 -448 0 448 768 1024 1536 2368 2880 2880 2560 2304 2368 2752 3200 3456 3456 3328 3392 3584 3776 3776 3584 3392 3328 3264 3072 2752 2496 2432 2496 2112 1280 448 128 256 256 0 -320 -448 -512 -960 -1664 -1984 -1664 -1024 -768 -768 -768 -448 -128 -128 -448 -640 -576 -320 -128 0 0 -64 0 128 256 320 256 192 64 -64 -192 -192 -128 128 256 320 320 448 512 512 384 384 384 448 448 384 384 384 384 320 192 64 0 -64 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -64 -64 -64 -64 -64 0 0 0 0 0 0 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -2 -1 0 1 1 1 1 0 -1 -2 -3 -2 0 2 5 9 12 14 13 11 8 5 4 3 5 8 13 17 21 22 20 16 12 7 1 -3 -4 -1 5 12 20 27 31 27 17 8 5 3 0 -1 5 19 33 40 39 36 33 29 22 13 6 5 10 20 29 33 31 24 14 1 -13 -26 -39 -52 -64 -70 -64 -52 -45 -51 -68 -86 -102 -112 -119 -124 -128 -126 -118 -104 -89 -82 -83 -87 -87 -81 -73 -65 -57 -48 -33 -22 -17 -21 -23 -17 -8 -6 -8 -7 0 9 14 19 30 46 57 59 53 49 52 62 74 81 83 83 86 93 100 102 100 96 96 97 93 85 80 81 84 73 46 17 6 10 10 0 -13 -18 -21 -41 -75 -93 -80 -52 -37 -41 -40 -23 -6 -8 -26 -41 -37 -22 -5 1 0 -3 0 15 27 33 27 18 9 -3 -16 -21 -8 15 34 45 52 65 78 82 75 71 78 89 95 94 96 101 101 89 64 37 10 -14 -35 -50 -57 -59 -66 -76 -83 -92 -103 -115 -124 -127 -125 -120 -110 -90 -57 -25 -2 6 7 11 20 28 38 52 77 109 137 154 162 166 167 162 153 148 151 154 151 145 141 140 132 114 89 67 41 7 -32 -62 -79 -89 -103 -121 -138 -149 -160 -175 -193 -206 -206 -197 -184 -173 -171 -176 -178 -170 -151 -134 -122 -114 -101 -80 -58 -37 -18 -2 12 21 31 42 51 56 57 59 62 63 62 64 69 74 73 65 52 41 34 31 32 31 31 34 40 44 41 33 22 11 1 -7 -12 -13 -11 -10 -10 -12 -15 -19 -24 -32 -38 -40 -37 -30 -25 -22 -20 -15 -10 -7 -5 -3 0 4 6 10 14 17 19 21 22 24 25 25 25 23 21 18 15 14 12 11 10 8 6 4 3 2 1 0 -1 -2 -1 -1 -2 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -384 -256 -128 0 128 128 128 128 0 -128 -256 -384 -256 0 256 640 1152 1536 1792 1664 1408 1024 640 512 384 640 1024 1664 2176 2688 2816 2560 2048 1536 896 128 -384 -512 -128 640 1536 2560 3456 3968 3456 2176 1024 640 384 0 -128 640 2432 4224 5120 4992 4608 4224 3712 2816 1664 768 640 1280 2560 3712 4224 3968 3072 1792 128 -1664 -3328 -4992 -6656 -8192 -8960 -8192 -6656 -5760 -6528 -8704 -11008 -13056 -14336 -15232 -15872 -16384 -16128 -15104 -13312 -11392 -10496 -10624 -11136 -11136 -10368 -9344 -8320 -7296 -6144 -4224 -2816 -2176 -2688 
-2944 -2176 -1024 -768 -1024 -896 0 1152 1792 2432 3840 5888 7296 7552 6784 6272 6656 7936 9472 10368 10624 10624 11008 11904 12800 13056 12800 12288 12288 12416 11904 10880 10240 10368 10752 9344 5888 2176 768 1280 1280 0 -1664 -2304 -2688 -5248 -9600 -11904 -10240 -6656 -4736 -5248 -5120 -2944 -768 -1024 -3328 -5248 -4736 -2816 -640 128 0 -384 0 1920 3456 4224 3456 2304 1152 -384 -2048 -2688 -1024 1920 4352 5760 6656 8320 9984 10496 9600 9088 9984 11392 12160 12032 12288 12928 12928 11392 8192 4736 1280 -1792 -4480 -6400 -7296 -7552 -8448 -9728 -10624 -11776 -13184 -14720 -15872 -16256 -16000 -15360 -14080 -11520 -7296 -3200 -256 768 896 1408 2560 3584 4864 6656 9856 13952 17536 19712 20736 21248 21376 20736 19584 18944 19328 19712 19328 18560 18048 17920 16896 14592 11392 8576 5248 896 -4096 -7936 -10112 -11392 -13184 -15488 -17664 -19072 -20480 -22400 -24704 -26368 -26368 -25216 -23552 -22144 -21888 -22528 -22784 -21760 -19328 -17152 -15616 -14592 -12928 -10240 -7424 -4736 -2304 -256 1536 2688 3968 5376 6528 7168 7296 7552 7936 8064 7936 8192 8832 9472 9344 8320 6656 5248 4352 3968 4096 3968 3968 4352 5120 5632 5248 4224 2816 1408 128 -896 -1536 -1664 -1408 -1280 -1280 -1536 -1920 -2432 -3072 -4096 -4864 -5120 -4736 -3840 -3200 -2816 -2560 -1920 -1280 -896 -640 -384 0 512 768 1280 1792 2176 2432 2688 2816 3072 3200 3200 3200 2944 2688 2304 1920 1792 1536 1408 1280 1024 768 512 384 256 128 0 -128 -256 -128 -128 -256 -256 -256 -256 -384 -384 -384 -384 -384 -384 -384 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 -1 0 0 1 1 1 0 0 -1 -1 -2 -1 1 2 3 4 5 7 7 7 7 8 10 11 11 12 13 14 12 9 5 1 -3 -7 -9 -11 -12 -14 -16 -18 -21 -24 -27 -31 -32 -33 -32 -31 -26 -17 -8 -1 2 2 3 7 10 14 19 30 43 56 64 69 73 75 75 73 72 75 79 79 78 78 79 76 67 54 42 26 4 -21 -43 -56 -64 -75 -91 -106 -117 -129 -144 -162 -177 -181 -178 -170 -164 -165 -174 -180 -176 -160 -145 -135 -130 -117 -95 -70 -46 -23 -2 16 29 43 60 75 84 88 94 100 104 106 110 122 134 136 124 103 82 71 67 69 69 71 81 97 108 105 86 60 31 5 -20 -36 -39 -34 -30 -32 -40 -53 -69 -89 -120 -149 -163 -153 -128 -109 -102 -92 -74 -52 -37 -27 -15 3 24 44 68 97 126 149 168 185 204 222 238 247 245 228 204 187 177 169 159 147 132 111 87 67 50 28 0 -23 -31 -29 -30 -42 -56 -70 -88 -120 -158 -188 -209 -225 -239 -252 -261 -267 -273 -273 -267 -257 -251 -247 -240 -229 -219 -215 -207 -186 -152 -118 -86 -55 -21 8 31 49 67 85 99 110 122 139 160 179 194 197 194 188 178 168 155 141 125 106 89 78 72 66 54 36 18 2 -15 -30 -42 -47 -47 -48 -49 -52 -54 -59 -69 -82 -95 -103 -106 -104 -99 -90 -79 -71 -62 -53 -41 -29 -20 -13 -3 12 29 44 52 54 55 56 54 51 50 50 50 50 51 54 58 59 57 53 51 48 46 47 51 54 53 49 46 44 42 37 32 28 25 20 17 16 15 13 7 1 -4 -5 -5 -6 -7 -9 -10 -11 -11 -11 -12 -13 -13 -13 -12 -10 -9 -9 -9 -9 -8 -7 -6 -6 -5 -5 -5 -5 -5 -5 -4 -4 -4 -4 -4 -3 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 +0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 0 0 -64 0 0 64 64 64 0 0 -64 -64 -128 -64 64 128 192 256 320 448 448 448 448 512 640 704 704 768 832 896 768 576 320 64 -192 -448 -576 -704 -768 -896 -1024 -1152 -1344 -1536 -1728 -1984 -2048 -2112 -2048 -1984 -1664 -1088 -512 -64 128 128 192 448 640 896 1216 1920 2752 3584 4096 4416 4672 4800 4800 4672 4608 4800 5056 5056 4992 4992 5056 4864 4288 3456 2688 1664 256 -1344 -2752 -3584 -4096 -4800 -5824 -6784 -7488 -8256 -9216 -10368 -11328 -11584 -11392 -10880 -10496 -10560 -11136 -11520 -11264 -10240 -9280 -8640 -8320 -7488 -6080 
-4480 -2944 -1472 -128 1024 1856 2752 3840 4800 5376 5632 6016 6400 6656 6784 7040 7808 8576 8704 7936 6592 5248 4544 4288 4416 4416 4544 5184 6208 6912 6720 5504 3840 1984 320 -1280 -2304 -2496 -2176 -1920 -2048 -2560 -3392 -4416 -5696 -7680 -9536 -10432 -9792 -8192 -6976 -6528 -5888 -4736 -3328 -2368 -1728 -960 192 1536 2816 4352 6208 8064 9536 10752 11840 13056 14208 15232 15808 15680 14592 13056 11968 11328 10816 10176 9408 8448 7104 5568 4288 3200 1792 0 -1472 -1984 -1856 -1920 -2688 -3584 -4480 -5632 -7680 -10112 -12032 -13376 -14400 -15296 -16128 -16704 -17088 -17472 -17472 -17088 -16448 -16064 -15808 -15360 -14656 -14016 -13760 -13248 -11904 -9728 -7552 -5504 -3520 -1344 512 1984 3136 4288 5440 6336 7040 7808 8896 10240 11456 12416 12608 12416 12032 11392 10752 9920 9024 8000 6784 5696 4992 4608 4224 3456 2304 1152 128 -960 -1920 -2688 -3008 -3008 -3072 -3136 -3328 -3456 -3776 -4416 -5248 -6080 -6592 -6784 -6656 -6336 -5760 -5056 -4544 -3968 -3392 -2624 -1856 -1280 -832 -192 768 1856 2816 3328 3456 3520 3584 3456 3264 3200 3200 3200 3200 3264 3456 3712 3776 3648 3392 3264 3072 2944 3008 3264 3456 3392 3136 2944 2816 2688 2368 2048 1792 1600 1280 1088 1024 960 832 448 64 -256 -320 -320 -384 -448 -576 -640 -704 -704 -704 -768 -832 -832 -832 -768 -640 -576 -576 -576 -576 -512 -448 -384 -384 -320 -320 -320 -320 -320 -320 -256 -256 -256 -256 -256 -192 -192 -192 -128 -128 -128 -64 -64 -64 -64 -64 -64 -64 -64 0 0 0 0 0 0 +6 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -3 -3 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 0 1 2 3 5 7 9 11 13 15 17 20 21 22 22 21 20 19 19 19 18 17 15 12 10 7 4 0 -4 -6 -6 -6 -9 -12 -15 -20 -28 -38 -46 -53 -59 -64 -70 -75 -79 -83 -85 -86 -85 -85 -86 -86 -84 -83 -84 -83 -76 -64 -51 -38 -25 -10 4 15 24 34 44 53 61 69 80 95 109 121 125 126 125 121 117 111 103 93 82 70 63 59 56 46 32 16 1 -14 -29 -42 -47 -48 -50 -54 -58 -61 -68 -81 -100 -118 -131 -138 -139 -134 -125 -113 -103 -93 -80 -64 -46 -33 -22 -5 21 53 83 99 105 110 115 115 111 110 114 118 121 125 136 150 156 155 151 148 144 142 150 166 180 181 173 168 166 161 148 132 120 109 94 80 78 78 67 42 8 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -103 -102 -101 -105 -108 -112 -117 -122 -125 -116 -101 -89 -83 -79 -66 -49 -36 -30 -26 -20 -7 12 34 46 45 37 32 33 28 7 -19 -33 -29 -26 -37 -57 -72 -76 -83 -105 -132 -153 -162 -168 -172 -172 -164 -152 -139 -130 -115 -102 -88 -73 -52 -29 -9 6 22 43 66 88 105 117 121 119 115 112 115 122 132 136 134 130 130 136 143 147 147 145 143 142 147 153 155 148 135 121 112 106 97 85 75 68 65 58 46 32 21 15 12 5 -1 -5 -7 -10 -16 -21 -23 -22 -25 -30 -34 -37 -39 -44 -48 -48 -46 -46 -48 -51 -52 -52 -51 -49 -48 -50 -52 -53 -50 -44 -39 -35 -31 -25 -20 -14 -11 -9 -6 -3 0 3 6 8 9 9 9 8 8 7 6 3 1 0 -1 -2 -3 -3 -4 -4 -5 -6 -6 -6 -5 -5 -5 -5 -4 -4 -3 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 +0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -384 -384 -384 -384 -384 -384 -384 -384 -256 -256 -128 -128 0 128 256 384 640 896 1152 1408 1664 1920 2176 2560 2688 2816 2816 2688 2560 2432 2432 2432 2304 2176 1920 1536 1280 896 512 0 -512 -768 -768 -768 -1152 -1536 -1920 -2560 -3584 -4864 -5888 -6784 -7552 -8192 -8960 -9600 -10112 -10624 -10880 -11008 -10880 -10880 -11008 -11008 -10752 -10624 -10752 -10624 -9728 -8192 -6528 -4864 -3200 -1280 512 1920 3072 4352 5632 6784 7808 8832 10240 12160 13952 15488 16000 16128 16000 15488 14976 14208 13184 11904 10496 8960 8064 7552 7168 5888 4096 2048 128 -1792 -3712 -5376 -6016 -6144 -6400 -6912 -7424 
-7808 -8704 -10368 -12800 -15104 -16768 -17664 -17792 -17152 -16000 -14464 -13184 -11904 -10240 -8192 -5888 -4224 -2816 -640 2688 6784 10624 12672 13440 14080 14720 14720 14208 14080 14592 15104 15488 16000 17408 19200 19968 19840 19328 18944 18432 18176 19200 21248 23040 23168 22144 21504 21248 20608 18944 16896 15360 13952 12032 10240 9984 9984 8576 5376 1024 -2304 -3584 -3968 -4224 -5376 -7296 -9216 -10112 -10624 -11136 -12160 -13824 -15232 -15744 -14976 -13440 -12416 -12544 -13568 -13952 -13184 -11904 -11136 -10880 -10752 -10880 -11776 -12800 -13184 -13056 -12928 -13440 -13824 -14336 -14976 -15616 -16000 -14848 -12928 -11392 -10624 -10112 -8448 -6272 -4608 -3840 -3328 -2560 -896 1536 4352 5888 5760 4736 4096 4224 3584 896 -2432 -4224 -3712 -3328 -4736 -7296 -9216 -9728 -10624 -13440 -16896 -19584 -20736 -21504 -22016 -22016 -20992 -19456 -17792 -16640 -14720 -13056 -11264 -9344 -6656 -3712 -1152 768 2816 5504 8448 11264 13440 14976 15488 15232 14720 14336 14720 15616 16896 17408 17152 16640 16640 17408 18304 18816 18816 18560 18304 18176 18816 19584 19840 18944 17280 15488 14336 13568 12416 10880 9600 8704 8320 7424 5888 4096 2688 1920 1536 640 -128 -640 -896 -1280 -2048 -2688 -2944 -2816 -3200 -3840 -4352 -4736 -4992 -5632 -6144 -6144 -5888 -5888 -6144 -6528 -6656 -6656 -6528 -6272 -6144 -6400 -6656 -6784 -6400 -5632 -4992 -4480 -3968 -3200 -2560 -1792 -1408 -1152 -768 -384 0 384 768 1024 1152 1152 1152 1024 1024 896 768 384 128 0 -128 -256 -384 -384 -512 -512 -640 -768 -768 -768 -640 -640 -640 -640 -512 -512 -384 -384 -256 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 0 0 +7 +0 0 0 0 0 0 0 0 0 1 1 1 1 1 2 2 2 2 2 1 2 2 2 1 0 -1 -2 -2 -2 -3 -4 -5 -6 -6 -7 -8 -9 -11 -11 -11 -11 -10 -11 -12 -13 -13 -12 -12 -12 -13 -13 -15 -17 -18 -18 -19 -20 -21 -23 -25 -27 -28 -27 -24 -22 -21 -21 -18 -14 -10 -9 -8 -7 -3 4 11 16 16 13 12 12 11 2 -8 -15 -13 -12 -17 -27 -35 -38 -43 -55 -71 -85 -92 -97 -102 -105 -102 -97 -91 -87 -79 -72 -64 -53 -39 -23 -7 5 18 36 57 77 94 108 114 115 113 113 119 129 143 151 152 151 154 165 178 187 192 193 195 199 210 224 233 228 211 194 184 178 168 152 136 127 124 113 92 65 44 33 27 13 -2 -12 -17 -24 -40 -56 -62 -62 -71 -88 -104 -113 -125 -143 -162 -167 -166 -169 -182 -200 -212 -220 -221 -220 -222 -237 -259 -272 -264 -240 -221 -206 -190 -160 -128 -97 -78 -63 -46 -24 4 32 58 78 94 102 103 101 100 97 85 58 29 6 -8 -24 -43 -58 -72 -95 -133 -167 -183 -185 -189 -203 -217 -223 -220 -214 -202 -185 -172 -172 -180 -180 -167 -150 -134 -112 -75 -35 -8 1 9 27 45 58 63 75 100 130 155 173 186 196 202 201 197 201 213 230 244 247 242 234 224 209 186 159 135 119 109 98 83 65 49 36 23 6 -13 -26 -31 -34 -39 -47 -49 -46 -38 -29 -24 -25 -27 -28 -24 -17 -7 2 13 21 27 30 34 40 47 56 62 65 61 54 50 51 54 52 46 42 40 38 34 30 25 19 10 -2 -12 -19 -24 -30 -38 -46 -51 -54 -57 -63 -69 -75 -77 -80 -82 -83 -81 -76 -70 -62 -56 -51 -47 -41 -33 -24 -16 -8 -1 5 8 11 13 16 19 20 20 19 17 16 15 13 11 9 7 5 4 2 1 1 1 1 0 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +0 0 0 0 0 0 0 0 0 64 64 64 64 64 128 128 128 128 128 64 128 128 128 64 0 -64 -128 -128 -128 -192 -256 -320 -384 -384 -448 -512 -576 -704 -704 -704 -704 -640 -704 -768 -832 -832 -768 -768 -768 -832 -832 -960 -1088 -1152 -1152 -1216 -1280 -1344 -1472 -1600 -1728 -1792 -1728 -1536 -1408 -1344 -1344 -1152 -896 -640 -576 -512 -448 -192 256 704 1024 1024 832 768 768 704 128 -512 -960 -832 -768 -1088 -1728 -2240 -2432 -2752 -3520 -4544 -5440 -5888 -6208 -6528 -6720 -6528 -6208 -5824 -5568 -5056 -4608 -4096 -3392 
-2496 -1472 -448 320 1152 2304 3648 4928 6016 6912 7296 7360 7232 7232 7616 8256 9152 9664 9728 9664 9856 10560 11392 11968 12288 12352 12480 12736 13440 14336 14912 14592 13504 12416 11776 11392 10752 9728 8704 8128 7936 7232 5888 4160 2816 2112 1728 832 -128 -768 -1088 -1536 -2560 -3584 -3968 -3968 -4544 -5632 -6656 -7232 -8000 -9152 -10368 -10688 -10624 -10816 -11648 -12800 -13568 -14080 -14144 -14080 -14208 -15168 -16576 -17408 -16896 -15360 -14144 -13184 -12160 -10240 -8192 -6208 -4992 -4032 -2944 -1536 256 2048 3712 4992 6016 6528 6592 6464 6400 6208 5440 3712 1856 384 -512 -1536 -2752 -3712 -4608 -6080 -8512 -10688 -11712 -11840 -12096 -12992 -13888 -14272 -14080 -13696 -12928 -11840 -11008 -11008 -11520 -11520 -10688 -9600 -8576 -7168 -4800 -2240 -512 64 576 1728 2880 3712 4032 4800 6400 8320 9920 11072 11904 12544 12928 12864 12608 12864 13632 14720 15616 15808 15488 14976 14336 13376 11904 10176 8640 7616 6976 6272 5312 4160 3136 2304 1472 384 -832 -1664 -1984 -2176 -2496 -3008 -3136 -2944 -2432 -1856 -1536 -1600 -1728 -1792 -1536 -1088 -448 128 832 1344 1728 1920 2176 2560 3008 3584 3968 4160 3904 3456 3200 3264 3456 3328 2944 2688 2560 2432 2176 1920 1600 1216 640 -128 -768 -1216 -1536 -1920 -2432 -2944 -3264 -3456 -3648 -4032 -4416 -4800 -4928 -5120 -5248 -5312 -5184 -4864 -4480 -3968 -3584 -3264 -3008 -2624 -2112 -1536 -1024 -512 -64 320 512 704 832 1024 1216 1280 1280 1216 1088 1024 960 832 704 576 448 320 256 128 64 64 64 64 0 -64 -128 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 0 0 0 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -4 -5 -5 -6 -7 -8 -9 -10 -9 -9 -9 -9 -8 -7 -6 -5 -5 -4 -2 0 2 4 7 8 9 10 10 11 11 10 7 3 0 -2 -4 -7 -10 -12 -17 -24 -32 -36 -37 -40 -44 -49 -52 -52 -53 -51 -49 -47 -48 -52 -53 -51 -47 -43 -37 -26 -12 -3 0 3 10 18 23 26 32 44 58 72 82 91 98 103 105 106 110 120 133 145 150 150 149 146 139 127 111 97 87 82 75 65 52 40 31 20 5 -12 -24 -29 -33 -39 -47 -51 -48 -41 -32 -28 -29 -32 -34 -30 -21 -9 3 18 30 38 44 52 62 75 90 103 110 106 96 91 95 103 101 93 86 85 82 75 68 59 47 24 -5 -31 -50 -65 -84 -109 -135 -154 -168 -184 -207 -235 -261 -279 -296 -314 -329 -332 -321 -302 -277 -258 -245 -231 -208 -173 -130 -87 -46 -4 32 59 78 97 125 152 169 172 169 165 161 155 146 131 110 90 73 58 41 28 26 28 23 2 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -34 -40 -46 -48 -43 -33 -28 -25 -18 -12 -12 -19 -23 -13 3 16 17 15 16 22 25 25 24 34 50 64 66 66 74 92 104 105 101 106 115 124 125 117 108 98 89 79 68 54 44 38 37 35 29 19 7 -8 -21 -31 -36 -39 -42 -41 -37 -33 -34 -39 -41 -38 -32 -27 -21 -9 2 3 -5 -9 -4 7 15 20 26 37 47 53 52 52 54 56 54 49 45 46 48 47 37 21 6 -5 -14 -25 -35 -46 -57 -67 -74 -78 -81 -85 -87 -85 -81 -77 -74 -71 -65 -57 -47 -40 -33 -29 -24 -19 -14 -9 -5 -2 1 1 1 2 3 4 3 2 3 6 8 9 8 8 8 7 5 3 2 0 -1 -2 -3 -3 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -128 -192 -192 -256 -256 -320 -320 -384 -448 -512 -576 -640 -576 -576 -576 -576 -512 -448 -384 -320 -320 -256 -128 0 128 256 448 512 576 640 640 704 704 640 448 192 0 -128 -256 -448 -640 -768 -1088 -1536 -2048 -2304 -2368 -2560 -2816 -3136 -3328 -3328 -3392 -3264 -3136 -3008 -3072 -3328 -3392 -3264 -3008 -2752 -2368 -1664 -768 -192 0 192 640 1152 1472 1664 2048 2816 3712 4608 5248 5824 6272 6592 6720 6784 7040 7680 8512 9280 9600 9600 9536 9344 8896 8128 7104 6208 5568 5248 4800 4160 3328 2560 1984 1280 320 -768 -1536 -1856 -2112 -2496 -3008 -3264 -3072 -2624 -2048 -1792 -1856 -2048 
-2176 -1920 -1344 -576 192 1152 1920 2432 2816 3328 3968 4800 5760 6592 7040 6784 6144 5824 6080 6592 6464 5952 5504 5440 5248 4800 4352 3776 3008 1536 -320 -1984 -3200 -4160 -5376 -6976 -8640 -9856 -10752 -11776 -13248 -15040 -16704 -17856 -18944 -20096 -21056 -21248 -20544 -19328 -17728 -16512 -15680 -14784 -13312 -11072 -8320 -5568 -2944 -256 2048 3776 4992 6208 8000 9728 10816 11008 10816 10560 10304 9920 9344 8384 7040 5760 4672 3712 2624 1792 1664 1792 1472 128 -1152 -1600 -1152 -768 -832 -1088 -1216 -1344 -1600 -1920 -2176 -2560 -2944 -3072 -2752 -2112 -1792 -1600 -1152 -768 -768 -1216 -1472 -832 192 1024 1088 960 1024 1408 1600 1600 1536 2176 3200 4096 4224 4224 4736 5888 6656 6720 6464 6784 7360 7936 8000 7488 6912 6272 5696 5056 4352 3456 2816 2432 2368 2240 1856 1216 448 -512 -1344 -1984 -2304 -2496 -2688 -2624 -2368 -2112 -2176 -2496 -2624 -2432 -2048 -1728 -1344 -576 128 192 -320 -576 -256 448 960 1280 1664 2368 3008 3392 3328 3328 3456 3584 3456 3136 2880 2944 3072 3008 2368 1344 384 -320 -896 -1600 -2240 -2944 -3648 -4288 -4736 -4992 -5184 -5440 -5568 -5440 -5184 -4928 -4736 -4544 -4160 -3648 -3008 -2560 -2112 -1856 -1536 -1216 -896 -576 -320 -128 64 64 64 128 192 256 192 128 192 384 512 576 512 512 512 448 320 192 128 0 -64 -128 -192 -192 -128 -128 -192 -192 -192 -192 -192 -192 -192 -128 -128 -64 -64 -64 -64 -64 -64 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -3 -3 -4 -5 -6 -6 -6 -6 -7 -7 -7 -7 -6 -5 -4 -2 -1 1 3 4 5 8 10 12 13 13 13 14 14 14 13 11 10 8 7 5 3 3 4 3 0 -3 -5 -4 -3 -3 -4 -4 -5 -6 -7 -8 -10 -12 -12 -11 -9 -8 -8 -6 -4 -4 -6 -8 -5 1 5 6 5 6 8 10 10 10 15 22 29 31 32 37 47 54 56 56 59 67 73 76 72 69 64 59 54 47 39 32 29 28 28 23 15 5 -7 -18 -28 -33 -37 -40 -41 -37 -34 -36 -42 -46 -43 -37 -32 -25 -11 3 4 -6 -13 -5 10 22 30 40 58 76 87 89 91 96 101 100 94 89 92 100 99 79 47 14 -10 -32 -59 -88 -119 -150 -182 -207 -225 -240 -257 -270 -272 -267 -260 -259 -255 -242 -217 -187 -160 -140 -124 -107 -88 -66 -45 -25 -6 5 10 11 14 23 27 22 18 28 50 70 78 76 77 79 78 63 43 26 11 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -60 -55 -44 -29 -14 -5 -7 -12 -10 0 11 19 30 51 72 83 88 106 140 168 167 151 138 148 169 186 191 181 165 152 148 143 129 101 76 59 51 44 33 23 15 9 2 -6 -16 -25 -30 -31 -31 -29 -22 -9 2 3 -10 -28 -43 -55 -62 -67 -68 -69 -71 -75 -77 -78 -77 -74 -70 -66 -63 -54 -38 -22 -15 -14 -13 -5 2 3 0 3 12 22 24 20 17 17 18 15 13 14 17 19 17 16 14 12 9 3 -4 -12 -18 -21 -21 -18 -16 -13 -13 -14 -15 -15 -12 -8 -3 2 7 12 15 16 18 20 24 27 27 25 22 18 16 16 16 13 9 7 6 6 6 6 7 7 6 5 5 5 6 6 4 4 4 4 3 2 1 0 0 0 -1 -1 -1 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -192 -192 -256 -320 -384 -384 -384 -384 -448 -448 -448 -448 -384 -320 -256 -128 -64 64 192 256 320 512 640 768 832 832 832 896 896 896 832 704 640 512 448 320 192 192 256 192 0 -192 -320 -256 -192 -192 -256 -256 -320 -384 -448 -512 -640 -768 -768 -704 -576 -512 -512 -384 -256 -256 -384 -512 -320 64 320 384 320 384 512 640 640 640 960 1408 1856 1984 2048 2368 3008 3456 3584 3584 3776 4288 4672 4864 4608 4416 4096 3776 3456 3008 2496 2048 1856 1792 1792 1472 960 320 -448 -1152 -1792 -2112 -2368 -2560 -2624 -2368 -2176 -2304 -2688 -2944 -2752 -2368 -2048 -1600 -704 192 256 -384 -832 -320 640 1408 1920 2560 3712 4864 5568 5696 5824 6144 6464 6400 6016 5696 5888 6400 6336 5056 3008 896 -640 -2048 -3776 -5632 -7616 -9600 -11648 -13248 -14400 -15360 -16448 -17280 -17408 -17088 -16640 -16576 -16320 -15488 -13888 -11968 -10240 -8960 
-7936 -6848 -5632 -4224 -2880 -1600 -384 320 640 704 896 1472 1728 1408 1152 1792 3200 4480 4992 4864 4928 5056 4992 4032 2752 1664 704 -320 -1472 -2112 -2112 -1728 -1792 -2624 -3520 -3904 -3712 -3584 -3712 -3840 -3520 -2816 -1856 -896 -320 -448 -768 -640 0 704 1216 1920 3264 4608 5312 5632 6784 8960 10752 10688 9664 8832 9472 10816 11904 12224 11584 10560 9728 9472 9152 8256 6464 4864 3776 3264 2816 2112 1472 960 576 128 -384 -1024 -1600 -1920 -1984 -1984 -1856 -1408 -576 128 192 -640 -1792 -2752 -3520 -3968 -4288 -4352 -4416 -4544 -4800 -4928 -4992 -4928 -4736 -4480 -4224 -4032 -3456 -2432 -1408 -960 -896 -832 -320 128 192 0 192 768 1408 1536 1280 1088 1088 1152 960 832 896 1088 1216 1088 1024 896 768 576 192 -256 -768 -1152 -1344 -1344 -1152 -1024 -832 -832 -896 -960 -960 -768 -512 -192 128 448 768 960 1024 1152 1280 1536 1728 1728 1600 1408 1152 1024 1024 1024 832 576 448 384 384 384 384 448 448 384 320 320 320 384 384 256 256 256 256 192 128 64 0 0 0 -64 -64 -64 0 0 0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 0 0 0 0 1 1 1 1 1 3 5 5 6 6 7 7 6 4 2 1 -1 -3 -5 -5 -4 -5 -7 -9 -10 -10 -10 -11 -12 -11 -9 -6 -3 -2 -2 -3 -3 0 2 5 8 14 21 25 27 33 46 56 58 53 50 56 65 74 78 76 71 67 67 66 61 49 38 30 26 23 18 13 8 5 1 -4 -11 -16 -20 -22 -22 -21 -16 -7 2 2 -8 -23 -36 -47 -54 -60 -63 -65 -69 -74 -78 -80 -81 -80 -78 -75 -73 -64 -46 -27 -19 -18 -17 -7 3 4 0 4 19 34 39 33 29 30 32 28 25 26 34 38 36 34 31 27 21 8 -8 -28 -45 -55 -55 -49 -43 -37 -37 -41 -46 -47 -40 -26 -10 10 29 49 62 69 76 90 109 127 134 129 113 98 92 96 96 84 63 47 44 47 49 52 58 62 57 49 48 59 70 68 58 53 55 58 49 32 18 16 16 12 -2 -13 -13 0 6 0 -12 -22 -29 -36 -40 -40 -41 -52 -77 -103 -117 -124 -131 -139 -143 -145 -148 -155 -163 -174 -183 -185 -173 -147 -113 -82 -59 -47 -48 -53 -49 -32 -18 -22 -41 -60 -73 -82 -85 -78 -64 -55 -53 -49 -27 4 25 38 54 78 96 102 104 112 118 112 96 81 71 57 39 23 11 -2 -23 -48 -59 -53 -41 -35 -36 -36 -30 -20 -17 -20 -19 -7 7 12 6 -2 -5 -8 -15 -21 -16 -2 8 7 1 -1 4 10 12 13 17 24 31 36 37 33 26 18 12 8 5 5 7 9 10 10 10 13 18 21 20 14 7 4 9 17 19 14 6 0 -2 -5 -9 -12 -12 -12 -13 -13 -12 -10 -9 -10 -10 -8 -6 -4 -2 0 3 5 7 8 9 9 10 9 8 6 4 2 1 -1 -2 -2 -2 -2 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -384 -384 -384 -384 -384 -384 -384 -384 -384 -256 -256 -128 -128 0 0 0 0 128 128 128 128 128 384 640 640 768 768 896 896 768 512 256 128 -128 -384 -640 -640 -512 -640 -896 -1152 -1280 -1280 -1280 -1408 -1536 -1408 -1152 -768 -384 -256 -256 -384 -384 0 256 640 1024 1792 2688 3200 3456 4224 5888 7168 7424 6784 6400 7168 8320 9472 9984 9728 9088 8576 8576 8448 7808 6272 4864 3840 3328 2944 2304 1664 1024 640 128 -512 -1408 -2048 -2560 -2816 -2816 -2688 -2048 -896 256 256 -1024 -2944 -4608 -6016 -6912 -7680 -8064 -8320 -8832 -9472 -9984 -10240 -10368 -10240 -9984 -9600 -9344 -8192 -5888 -3456 -2432 -2304 -2176 -896 384 512 0 512 2432 4352 4992 4224 3712 3840 4096 3584 3200 3328 4352 4864 4608 4352 3968 3456 2688 1024 -1024 -3584 -5760 -7040 -7040 -6272 -5504 -4736 -4736 -5248 -5888 -6016 -5120 -3328 -1280 1280 3712 6272 7936 8832 9728 11520 13952 16256 17152 16512 14464 12544 11776 12288 12288 10752 8064 6016 5632 6016 6272 6656 7424 7936 7296 6272 6144 7552 8960 8704 7424 6784 7040 7424 6272 4096 2304 2048 2048 1536 -256 -1664 -1664 0 768 0 -1536 -2816 -3712 -4608 -5120 -5120 -5248 -6656 -9856 -13184 -14976 -15872 -16768 -17792 
-18304 -18560 -18944 -19840 -20864 -22272 -23424 -23680 -22144 -18816 -14464 -10496 -7552 -6016 -6144 -6784 -6272 -4096 -2304 -2816 -5248 -7680 -9344 -10496 -10880 -9984 -8192 -7040 -6784 -6272 -3456 512 3200 4864 6912 9984 12288 13056 13312 14336 15104 14336 12288 10368 9088 7296 4992 2944 1408 -256 -2944 -6144 -7552 -6784 -5248 -4480 -4608 -4608 -3840 -2560 -2176 -2560 -2432 -896 896 1536 768 -256 -640 -1024 -1920 -2688 -2048 -256 1024 896 128 -128 512 1280 1536 1664 2176 3072 3968 4608 4736 4224 3328 2304 1536 1024 640 640 896 1152 1280 1280 1280 1664 2304 2688 2560 1792 896 512 1152 2176 2432 1792 768 0 -256 -640 -1152 -1536 -1536 -1536 -1664 -1664 -1536 -1280 -1152 -1280 -1280 -1024 -768 -512 -256 0 384 640 896 1024 1152 1152 1280 1152 1024 768 512 256 128 -128 -256 -256 -256 -256 -384 -384 -384 -384 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 1 1 2 3 3 3 3 3 3 3 4 3 3 2 2 2 3 3 4 4 4 4 4 5 6 7 6 6 6 7 6 4 2 2 2 2 -1 -3 -3 0 1 0 -3 -5 -7 -8 -10 -10 -10 -14 -20 -28 -33 -36 -39 -42 -45 -47 -49 -53 -57 -63 -67 -70 -67 -59 -46 -35 -26 -21 -22 -25 -23 -16 -9 -11 -22 -32 -40 -46 -50 -47 -39 -34 -34 -32 -18 3 18 27 39 58 74 80 84 92 99 96 85 73 65 54 38 23 11 -2 -24 -52 -66 -60 -48 -41 -44 -45 -38 -26 -23 -27 -26 -10 10 18 9 -2 -8 -13 -26 -36 -28 -4 15 15 2 -1 9 21 26 30 39 56 76 90 94 87 70 50 35 24 17 16 22 30 35 35 37 49 67 83 80 58 29 20 44 82 95 70 31 4 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 7 41 74 100 117 135 155 170 171 152 121 91 65 37 -1 -39 -57 -64 -71 -89 -113 -128 -126 -111 -93 -80 -79 -88 -91 -80 -58 -41 -35 -34 -34 -33 -33 -32 -28 -25 -27 -24 -9 8 23 30 32 37 46 57 62 60 56 66 85 98 96 85 86 97 94 70 41 27 25 20 11 9 8 -8 -38 -66 -74 -73 -78 -84 -82 -72 -68 -73 -74 -68 -60 -59 -57 -43 -20 -2 8 15 30 48 65 74 80 81 80 81 84 83 76 67 64 63 58 48 46 53 61 59 49 39 34 31 26 20 14 12 9 6 2 -7 -16 -21 -20 -16 -16 -19 -26 -32 -36 -35 -30 -25 -27 -33 -38 -37 -33 -34 -37 -39 -38 -37 -38 -39 -36 -31 -27 -26 -26 -23 -18 -14 -12 -12 -11 -10 -8 -7 -6 -5 -4 -4 -3 -2 -1 0 1 1 1 1 1 1 2 2 2 1 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 128 128 128 256 384 384 384 384 384 384 384 512 384 384 256 256 256 384 384 512 512 512 512 512 640 768 896 768 768 768 896 768 512 256 256 256 256 -128 -384 -384 0 128 0 -384 -640 -896 -1024 -1280 -1280 -1280 -1792 -2560 -3584 -4224 -4608 -4992 -5376 -5760 -6016 -6272 -6784 -7296 -8064 -8576 -8960 -8576 -7552 -5888 -4480 -3328 -2688 -2816 -3200 -2944 -2048 -1152 -1408 -2816 -4096 -5120 -5888 -6400 -6016 -4992 -4352 -4352 -4096 -2304 384 2304 3456 4992 7424 9472 10240 10752 11776 12672 12288 10880 9344 8320 6912 4864 2944 1408 -256 -3072 -6656 -8448 -7680 -6144 -5248 -5632 -5760 -4864 -3328 -2944 -3456 -3328 -1280 1280 2304 1152 -256 -1024 -1664 -3328 -4608 -3584 -512 1920 1920 256 -128 1152 2688 3328 3840 4992 7168 9728 11520 12032 11136 8960 6400 4480 3072 2176 2048 2816 3840 4480 4480 4736 6272 8576 10624 10240 7424 3712 2560 5632 10496 12160 8960 3968 512 -1152 -3072 -6400 -8960 -9600 -9856 -11008 -11776 -10880 -9088 -8704 -9984 -10624 -9216 -6528 -4096 -1920 896 5248 9472 12800 14976 17280 19840 21760 21888 19456 15488 11648 8320 4736 -128 -4992 -7296 -8192 -9088 -11392 -14464 -16384 -16128 -14208 -11904 -10240 -10112 -11264 -11648 -10240 -7424 -5248 -4480 -4352 -4352 -4224 -4224 -4096 -3584 -3200 -3456 -3072 -1152 1024 2944 3840 4096 4736 5888 7296 7936 7680 7168 8448 10880 
12544 12288 10880 11008 12416 12032 8960 5248 3456 3200 2560 1408 1152 1024 -1024 -4864 -8448 -9472 -9344 -9984 -10752 -10496 -9216 -8704 -9344 -9472 -8704 -7680 -7552 -7296 -5504 -2560 -256 1024 1920 3840 6144 8320 9472 10240 10368 10240 10368 10752 10624 9728 8576 8192 8064 7424 6144 5888 6784 7808 7552 6272 4992 4352 3968 3328 2560 1792 1536 1152 768 256 -896 -2048 -2688 -2560 -2048 -2048 -2432 -3328 -4096 -4608 -4480 -3840 -3200 -3456 -4224 -4864 -4736 -4224 -4352 -4736 -4992 -4864 -4736 -4864 -4992 -4608 -3968 -3456 -3328 -3328 -2944 -2304 -1792 -1536 -1536 -1408 -1280 -1024 -896 -768 -640 -512 -512 -384 -256 -128 0 128 128 128 128 128 128 256 256 256 128 128 128 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 1 2 2 0 0 -1 -1 -3 -4 -4 -5 -5 -6 -6 -5 -5 -6 -7 -7 -5 -3 -2 0 4 8 11 14 17 20 23 24 22 18 14 10 6 -1 -8 -12 -13 -15 -20 -26 -30 -30 -28 -24 -21 -21 -25 -26 -24 -18 -13 -12 -11 -12 -12 -12 -12 -11 -10 -11 -10 -4 3 10 13 14 18 22 28 32 31 30 36 48 57 57 52 54 61 61 47 28 19 18 14 8 7 6 -6 -32 -55 -63 -64 -70 -78 -77 -69 -68 -74 -77 -72 -65 -66 -64 -50 -23 -2 10 19 39 64 88 104 114 118 120 125 132 133 125 114 110 112 105 90 88 103 123 122 104 84 76 71 61 47 36 31 25 18 5 -19 -45 -62 -61 -50 -49 -63 -87 -112 -128 -128 -113 -98 -107 -138 -162 -163 -153 -160 -182 -199 -202 -202 -217 -229 -219 -194 -177 -179 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 14 26 30 28 23 28 49 74 85 77 63 53 48 35 11 -13 -31 -43 -48 -50 -48 -45 -48 -52 -59 -68 -73 -67 -47 -22 -7 2 19 53 92 130 161 191 219 238 249 261 274 282 279 275 273 268 249 216 188 180 183 170 133 94 73 71 70 58 36 18 5 -6 -21 -41 -66 -85 -95 -96 -100 -109 -117 -112 -100 -97 -105 -113 -105 -87 -79 -84 -90 -85 -71 -61 -60 -67 -75 -78 -72 -60 -51 -48 -50 -47 -36 -25 -17 -13 -7 2 9 8 1 -9 -17 -22 -25 -29 -36 -44 -50 -52 -53 -56 -58 -56 -51 -47 -43 -36 -22 -6 6 15 24 35 41 41 39 39 42 47 49 48 44 39 35 32 29 23 18 15 16 18 20 20 19 18 15 12 9 9 10 9 7 5 5 4 3 2 1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 64 0 0 0 0 64 128 128 0 0 -64 -64 -192 -256 -256 -320 -320 -384 -384 -320 -320 -384 -448 -448 -320 -192 -128 0 256 512 704 896 1088 1280 1472 1536 1408 1152 896 640 384 -64 -512 -768 -832 -960 -1280 -1664 -1920 -1920 -1792 -1536 -1344 -1344 -1600 -1664 -1536 -1152 -832 -768 -704 -768 -768 -768 -768 -704 -640 -704 -640 -256 192 640 832 896 1152 1408 1792 2048 1984 1920 2304 3072 3648 3648 3328 3456 3904 3904 3008 1792 1216 1152 896 512 448 384 -384 -2048 -3520 -4032 -4096 -4480 -4992 -4928 -4416 -4352 -4736 -4928 -4608 -4160 -4224 -4096 -3200 -1472 -128 640 1216 2496 4096 5632 6656 7296 7552 7680 8000 8448 8512 8000 7296 7040 7168 6720 5760 5632 6592 7872 7808 6656 5376 4864 4544 3904 3008 2304 1984 1600 1152 320 -1216 -2880 -3968 -3904 -3200 -3136 -4032 -5568 -7168 -8192 -8192 -7232 -6272 -6848 -8832 -10368 -10432 -9792 -10240 -11648 -12736 -12928 -12928 -13888 -14656 -14016 -12416 -11328 -11456 -11712 -10688 -8640 -6976 -6464 -6336 -6080 -5632 -5184 -4736 -4096 -3392 -2944 -2752 -2368 -1472 -192 896 1664 1920 1792 1472 1792 3136 4736 5440 4928 4032 3392 3072 2240 704 -832 -1984 -2752 -3072 -3200 -3072 -2880 -3072 -3328 -3776 -4352 -4672 -4288 -3008 -1408 -448 128 1216 3392 5888 8320 10304 12224 14016 15232 15936 16704 17536 18048 17856 17600 17472 17152 15936 13824 12032 11520 11712 10880 8512 6016 4672 4544 4480 3712 2304 1152 320 -384 -1344 -2624 -4224 -5440 
[Large machine-generated test-data hunk elided: the removed (-) and added (+) lines are long arrays of integer sample values. Each added block is preceded by a small integer scale value and contains paired arrays that differ by a power-of-two factor; the raw numbers carry no reviewable content and are omitted.]
-6346 -9100 -9704 -8410 -6401 -5038 -5215 -6856 -8845 -9750 -8884 -6648 -3915 -1424 241 644 -48 -796 -432 1250 3407 5136 6159 6547 6280 5438 4506 4016 3961 3911 3638 3282 2895 2242 1205 86 -686 -938 -708 -90 630 881 273 -844 -1575 -1262 41 1777 3220 3671 2845 1331 349 712 2052 3301 3727 3229 1840 -561 -3823 -7150 -9413 -9865 -8655 -6882 -6165 -7731 -11430 -15628 -18229 -17997 -15032 -10483 -6008 -3210 -2830 -4048 -4841 -3424 391 5151 8948 10766 10937 10613 10780 11602 12587 13244 13472 13319 12658 11251 9170 6841 4673 2825 1323 275 -299 -753 -1758 -3746 -6363 -8538 -9241 -8280 -6454 -4938 -4466 -4997 -5928 -6509 -6122 -4530 -2028 590 2388 2782 1902 619 110 1094 3246 5388 6377 5884 4396 2673 1346 812 1124 1837 2176 1623 329 -1083 -2055 -2385 -2166 -1644 -1131 -854 -813 -847 -847 -823 -744 -462 126 903 1613 2062 2163 1920 1470 1131 1212 1690 2174 2240 1800 1073 328 -318 -876 -1390 -1873 -2273 -2471 -2412 -2237 -2247 -2619 -3204 -3641 -3626 -3123 -2315 -1475 -856 -600 -642 -718 -534 8 722 1286 1498 1401 1223 1176 1293 1450 1509 1424 1260 1085 922 754 579 411 266 145 48 -20 -65 -106 -163 -240 -306 -327 -290 -216 -146 -111 -109 -115 -105 -77 -43 -16 2 9 9 7 5 3 2 1 1 0 +0 +0 0 1 1 2 1 -8 -24 -43 -58 -61 -55 -51 -62 -93 -135 -168 -173 -144 -94 -38 6 20 -2 -30 -18 54 157 255 327 371 378 347 305 287 300 312 306 290 269 219 123 9 -78 -111 -87 -12 84 122 39 -127 -246 -205 6 310 582 688 552 267 72 152 455 756 882 788 464 -146 -1024 -1974 -2679 -2892 -2610 -2138 -1970 -2539 -3862 -5428 -6508 -6600 -5666 -4057 -2388 -1310 -1186 -1740 -2136 -1550 181 2451 4368 5387 5607 5578 5807 6403 7116 7670 7995 8097 7877 7170 5984 4569 3197 1978 948 202 -225 -578 -1382 -3014 -5238 -7195 -7966 -7300 -5824 -4559 -4218 -4829 -5862 -6582 -6335 -4796 -2197 654 2709 3227 2257 752 137 1392 4225 7177 8694 8208 6278 3908 2015 1244 1764 2951 3579 2735 569 -1915 -3722 -4427 -4121 -3206 -2259 -1749 -1709 -1824 -1870 -1864 -1731 -1101 310 2273 4170 5472 5899 5381 4234 3350 3690 5293 7002 7430 6142 3772 1189 -1185 -3369 -5513 -7667 -9601 -10774 -10858 -10417 -10815 -13026 -16508 -19425 -20043 -17882 -13742 -9089 -5484 -3990 -4436 -5169 -4002 67 5891 10962 13330 13040 11930 12013 13865 16364 17909 17828 16639 15151 13610 11794 9602 7262 5013 2922 1045 -460 -1606 -2826 -4750 -7569 -10577 -12448 -12166 -9992 -7508 -6419 -7137 -8505 -8920 -7647 -5072 -2113 405 1972 2536 2520 2492 2691 3039 3506 4204 5015 5481 5249 4358 2973 1152 -769 -1890 -1472 -23 878 91 -2020 -4051 -4815 -3983 -2011 109 1292 1110 231 -183 419 1659 2889 3801 4330 4444 4277 4220 4500 4822 4686 4040 3383 3163 3318 3381 2889 1713 180 -1115 -1860 -2440 -3591 -5509 -7443 -8392 -8075 -7158 -6613 -7010 -8278 -9800 -10679 -10175 -8182 -5390 -2865 -1402 -1073 -1227 -911 476 2698 4848 6090 6322 6121 6100 6395 6690 6656 6247 5667 5102 4543 3847 2928 1886 895 50 -632 -1109 -1340 -1440 -1702 -2319 -3109 -3589 -3406 -2648 -1796 -1322 -1335 -1562 -1601 -1255 -633 -10 425 673 845 1005 1121 1130 1015 830 663 590 637 761 837 731 390 -75 -438 -523 -342 -85 39 -46 -257 -443 -490 -388 -211 -65 -5 -19 -51 -55 -16 53 123 165 168 151 143 151 158 148 122 96 81 77 75 64 41 13 -8 -15 -12 -8 -9 -13 -14 -11 -8 -4 0 +0 0 1 1 2 1 -8 -24 -43 -58 -61 -55 -51 -62 -93 -135 -168 -173 -144 -94 -38 6 20 -2 -30 -18 54 157 255 327 371 378 347 305 287 300 312 306 290 269 219 123 9 -78 -111 -87 -12 84 122 39 -127 -246 -205 6 310 582 688 552 267 72 152 455 756 882 788 464 -146 -1024 -1974 -2679 -2892 -2610 -2138 -1970 -2539 -3862 -5428 -6508 -6600 -5666 -4057 -2388 -1310 -1186 -1740 -2136 -1550 181 2451 
4368 5387 5607 5578 5807 6403 7116 7670 7995 8097 7877 7170 5984 4569 3197 1978 948 202 -225 -578 -1382 -3014 -5238 -7195 -7966 -7300 -5824 -4559 -4218 -4829 -5862 -6582 -6335 -4796 -2197 654 2709 3227 2257 752 137 1392 4225 7177 8694 8208 6278 3908 2015 1244 1764 2951 3579 2735 569 -1915 -3722 -4427 -4121 -3206 -2259 -1749 -1709 -1824 -1870 -1864 -1731 -1101 310 2273 4170 5472 5899 5381 4234 3350 3690 5293 7002 7430 6142 3772 1189 -1185 -3369 -5513 -7667 -9601 -10774 -10858 -10417 -10815 -13026 -16508 -19425 -20043 -17882 -13742 -9089 -5484 -3990 -4436 -5169 -4002 67 5891 10962 13330 13040 11930 12013 13865 16364 17909 17828 16639 15151 13610 11794 9602 7262 5013 2922 1045 -460 -1606 -2826 -4750 -7569 -10577 -12448 -12166 -9992 -7508 -6419 -7137 -8505 -8920 -7647 -5072 -2113 405 1972 2536 2520 2492 2691 3039 3506 4204 5015 5481 5249 4358 2973 1152 -769 -1890 -1472 -23 878 91 -2020 -4051 -4815 -3983 -2011 109 1292 1110 231 -183 419 1659 2889 3801 4330 4444 4277 4220 4500 4822 4686 4040 3383 3163 3318 3381 2889 1713 180 -1115 -1860 -2440 -3591 -5509 -7443 -8392 -8075 -7158 -6613 -7010 -8278 -9800 -10679 -10175 -8182 -5390 -2865 -1402 -1073 -1227 -911 476 2698 4848 6090 6322 6121 6100 6395 6690 6656 6247 5667 5102 4543 3847 2928 1886 895 50 -632 -1109 -1340 -1440 -1702 -2319 -3109 -3589 -3406 -2648 -1796 -1322 -1335 -1562 -1601 -1255 -633 -10 425 673 845 1005 1121 1130 1015 830 663 590 637 761 837 731 390 -75 -438 -523 -342 -85 39 -46 -257 -443 -490 -388 -211 -65 -5 -19 -51 -55 -16 53 123 165 168 151 143 151 158 148 122 96 81 77 75 64 41 13 -8 -15 -12 -8 -9 -13 -14 -11 -8 -4 0 +0 +0 1 2 3 4 7 15 25 34 36 27 10 -12 -40 -75 -117 -165 -210 -235 -248 -284 -377 -522 -666 -745 -717 -594 -422 -273 -212 -252 -312 -256 4 422 830 1064 1098 1057 1120 1357 1678 1924 2003 1952 1856 1739 1571 1332 1050 753 455 169 -78 -281 -512 -891 -1469 -2126 -2586 -2613 -2220 -1722 -1520 -1744 -2145 -2319 -2049 -1401 -602 118 594 787 805 818 909 1055 1251 1541 1890 2121 2086 1778 1245 495 -340 -855 -683 -11 428 45 -1036 -2130 -2594 -2199 -1137 63 766 674 143 -117 273 1108 1976 2662 3104 3260 3211 3242 3537 3880 3857 3404 2916 2788 2994 3121 2729 1655 178 -1127 -1925 -2583 -3890 -6105 -8442 -9736 -9582 -8695 -8219 -8917 -10775 -13053 -14559 -14194 -11686 -7879 -4289 -2148 -1683 -1972 -1499 802 4659 8576 11035 11737 11646 11898 12780 13705 13985 13465 12526 11566 10572 9185 7177 4747 2314 134 -1723 -3107 -3856 -4262 -5180 -7259 -10011 -11901 -11619 -9305 -6505 -4932 -5136 -6193 -6552 -5302 -2760 -42 1980 3243 4208 5178 5984 6250 5816 4931 4089 3781 4247 5263 6031 5486 3053 -606 -3728 -4648 -3180 -825 406 -483 -2894 -5252 -6131 -5112 -2944 -949 -71 -303 -895 -1033 -315 1157 2868 4123 4536 4422 4537 5232 6041 6235 5691 4985 4760 5122 5621 5502 4148 1630 -1024 -2496 -2444 -2039 -2995 -6011 -10156 -13600 -14852 -13662 -11201 -9488 -10062 -12770 -15771 -16887 -15094 -10986 -6122 -2157 -88 248 155 1207 4004 7682 10736 12270 12537 12427 12619 13105 13360 12869 11597 10082 8931 8228 7404 5810 3345 595 -1631 -2847 -3150 -3089 -3344 -4372 -6107 -7937 -8999 -8741 -7295 -5437 -4042 -3516 -3587 -3594 -2962 -1563 228 1793 2680 2877 2769 2766 2967 3159 3070 2599 1854 1093 604 521 633 461 -320 -1509 -2476 -2707 -2227 -1506 -1030 -980 -1237 -1531 -1585 -1245 -581 145 665 897 965 1059 1296 1683 2123 2464 2595 2535 2426 2354 2278 2090 1775 1451 1231 1109 978 742 378 -70 -525 -883 -1053 -1040 -988 -1097 -1470 -1996 -2412 -2482 -2162 -1661 -1273 -1187 -1365 -1601 -1666 -1457 -1031 -534 -115 138 221 202 183 249 407 593 708 709 635 551 502 471 
428 362 299 254 222 185 133 73 23 -8 -22 -25 -22 -21 -23 -28 -31 -30 -24 -15 -7 -3 -2 -1 0 +0 1 2 3 4 7 15 25 34 36 27 10 -12 -40 -75 -117 -165 -210 -235 -248 -284 -377 -522 -666 -745 -717 -594 -422 -273 -212 -252 -312 -256 4 422 830 1064 1098 1057 1120 1357 1678 1924 2003 1952 1856 1739 1571 1332 1050 753 455 169 -78 -281 -512 -891 -1469 -2126 -2586 -2613 -2220 -1722 -1520 -1744 -2145 -2319 -2049 -1401 -602 118 594 787 805 818 909 1055 1251 1541 1890 2121 2086 1778 1245 495 -340 -855 -683 -11 428 45 -1036 -2130 -2594 -2199 -1137 63 766 674 143 -117 273 1108 1976 2662 3104 3260 3211 3242 3537 3880 3857 3404 2916 2788 2994 3121 2729 1655 178 -1127 -1925 -2583 -3890 -6105 -8442 -9736 -9582 -8695 -8219 -8917 -10775 -13053 -14559 -14194 -11686 -7879 -4289 -2148 -1683 -1972 -1499 802 4659 8576 11035 11737 11646 11898 12780 13705 13985 13465 12526 11566 10572 9185 7177 4747 2314 134 -1723 -3107 -3856 -4262 -5180 -7259 -10011 -11901 -11619 -9305 -6505 -4932 -5136 -6193 -6552 -5302 -2760 -42 1980 3243 4208 5178 5984 6250 5816 4931 4089 3781 4247 5263 6031 5486 3053 -606 -3728 -4648 -3180 -825 406 -483 -2894 -5252 -6131 -5112 -2944 -949 -71 -303 -895 -1033 -315 1157 2868 4123 4536 4422 4537 5232 6041 6235 5691 4985 4760 5122 5621 5502 4148 1630 -1024 -2496 -2444 -2039 -2995 -6011 -10156 -13600 -14852 -13662 -11201 -9488 -10062 -12770 -15771 -16887 -15094 -10986 -6122 -2157 -88 248 155 1207 4004 7682 10736 12270 12537 12427 12619 13105 13360 12869 11597 10082 8931 8228 7404 5810 3345 595 -1631 -2847 -3150 -3089 -3344 -4372 -6107 -7937 -8999 -8741 -7295 -5437 -4042 -3516 -3587 -3594 -2962 -1563 228 1793 2680 2877 2769 2766 2967 3159 3070 2599 1854 1093 604 521 633 461 -320 -1509 -2476 -2707 -2227 -1506 -1030 -980 -1237 -1531 -1585 -1245 -581 145 665 897 965 1059 1296 1683 2123 2464 2595 2535 2426 2354 2278 2090 1775 1451 1231 1109 978 742 378 -70 -525 -883 -1053 -1040 -988 -1097 -1470 -1996 -2412 -2482 -2162 -1661 -1273 -1187 -1365 -1601 -1666 -1457 -1031 -534 -115 138 221 202 183 249 407 593 708 709 635 551 502 471 428 362 299 254 222 185 133 73 23 -8 -22 -25 -22 -21 -23 -28 -31 -30 -24 -15 -7 -3 -2 -1 0 +0 +0 -1 -2 -4 -6 -11 -21 -37 -56 -70 -68 -56 -50 -61 -84 -100 -92 -54 -1 47 85 121 163 205 232 233 213 189 187 225 298 363 350 207 -44 -283 -372 -268 -74 37 -48 -297 -565 -689 -600 -361 -122 -10 -43 -130 -156 -50 187 483 720 820 828 880 1051 1254 1339 1264 1143 1126 1251 1417 1429 1111 450 -292 -732 -737 -634 -957 -1975 -3431 -4724 -5302 -5011 -4222 -3672 -3999 -5212 -6606 -7258 -6659 -4971 -2841 -1027 -43 124 79 634 2156 4239 6069 7106 7441 7554 7853 8351 8718 8595 7933 7059 6402 6035 5558 4464 2629 479 -1343 -2399 -2716 -2723 -3018 -4036 -5769 -7669 -8899 -8839 -7550 -5756 -4378 -3896 -4068 -4169 -3515 -1899 283 2281 3488 3832 3775 3859 4237 4618 4596 3982 2910 1756 994 879 1093 816 -580 -2801 -4710 -5279 -4450 -3085 -2165 -2111 -2733 -3469 -3688 -2973 -1423 367 1718 2380 2633 2968 3734 4983 6464 7714 8356 8407 8276 8276 8252 7801 6830 5756 5039 4688 4269 3341 1763 -334 -2611 -4545 -5616 -5749 -5656 -6514 -9058 -12787 -16058 -17159 -15573 -12463 -9959 -9682 -11629 -14237 -15493 -14197 -10529 -5720 -1293 1644 2772 2670 2562 3672 6380 9831 12495 13370 12774 11899 11624 11759 11524 10593 9463 8780 8472 7801 6179 3784 1342 -518 -1604 -2062 -2190 -2403 -3153 -4645 -6536 -8005 -8226 -6978 -4920 -3241 -2818 -3531 -4381 -4367 -3250 -1577 -82 817 1143 1205 1335 1610 1776 1545 982 483 333 482 766 1095 1261 864 -216 -1375 -1717 -1036 -46 435 299 -37 -246 -245 54 701 1461 1922 2010 2078 2420 2913 3282 3455 
3495 3384 3111 2855 2779 2719 2336 1606 892 510 418 392 273 -52 -673 -1462 -2024 -2077 -1784 -1538 -1496 -1594 -1909 -2656 -3743 -4625 -4768 -4182 -3336 -2700 -2528 -2882 -3610 -4264 -4327 -3633 -2494 -1383 -586 -174 -95 -157 -40 483 1309 2083 2530 2643 2591 2527 2526 2581 2653 2696 2675 2578 2384 2093 1725 1336 969 660 437 296 167 -55 -403 -790 -1082 -1203 -1163 -1024 -879 -830 -897 -988 -975 -827 -621 -444 -319 -223 -149 -90 -21 74 167 210 190 146 122 135 178 224 242 205 127 51 16 22 40 50 46 33 14 -2 -8 -5 -2 -2 -4 -3 -1 1 0 0 0 0 0 0 0 +0 -1 -2 -4 -6 -11 -21 -37 -56 -70 -68 -56 -50 -61 -84 -100 -92 -54 -1 47 85 121 163 205 232 233 213 189 187 225 298 363 350 207 -44 -283 -372 -268 -74 37 -48 -297 -565 -689 -600 -361 -122 -10 -43 -130 -156 -50 187 483 720 820 828 880 1051 1254 1339 1264 1143 1126 1251 1417 1429 1111 450 -292 -732 -737 -634 -957 -1975 -3431 -4724 -5302 -5011 -4222 -3672 -3999 -5212 -6606 -7258 -6659 -4971 -2841 -1027 -43 124 79 634 2156 4239 6069 7106 7441 7554 7853 8351 8718 8595 7933 7059 6402 6035 5558 4464 2629 479 -1343 -2399 -2716 -2723 -3018 -4036 -5769 -7669 -8899 -8839 -7550 -5756 -4378 -3896 -4068 -4169 -3515 -1899 283 2281 3488 3832 3775 3859 4237 4618 4596 3982 2910 1756 994 879 1093 816 -580 -2801 -4710 -5279 -4450 -3085 -2165 -2111 -2733 -3469 -3688 -2973 -1423 367 1718 2380 2633 2968 3734 4983 6464 7714 8356 8407 8276 8276 8252 7801 6830 5756 5039 4688 4269 3341 1763 -334 -2611 -4545 -5616 -5749 -5656 -6514 -9058 -12787 -16058 -17159 -15573 -12463 -9959 -9682 -11629 -14237 -15493 -14197 -10529 -5720 -1293 1644 2772 2670 2562 3672 6380 9831 12495 13370 12774 11899 11624 11759 11524 10593 9463 8780 8472 7801 6179 3784 1342 -518 -1604 -2062 -2190 -2403 -3153 -4645 -6536 -8005 -8226 -6978 -4920 -3241 -2818 -3531 -4381 -4367 -3250 -1577 -82 817 1143 1205 1335 1610 1776 1545 982 483 333 482 766 1095 1261 864 -216 -1375 -1717 -1036 -46 435 299 -37 -246 -245 54 701 1461 1922 2010 2078 2420 2913 3282 3455 3495 3384 3111 2855 2779 2719 2336 1606 892 510 418 392 273 -52 -673 -1462 -2024 -2077 -1784 -1538 -1496 -1594 -1909 -2656 -3743 -4625 -4768 -4182 -3336 -2700 -2528 -2882 -3610 -4264 -4327 -3633 -2494 -1383 -586 -174 -95 -157 -40 483 1309 2083 2530 2643 2591 2527 2526 2581 2653 2696 2675 2578 2384 2093 1725 1336 969 660 437 296 167 -55 -403 -790 -1082 -1203 -1163 -1024 -879 -830 -897 -988 -975 -827 -621 -444 -319 -223 -149 -90 -21 74 167 210 190 146 122 135 178 224 242 205 127 51 16 22 40 50 46 33 14 -2 -8 -5 -2 -2 -4 -3 -1 1 0 0 0 0 0 0 0 +0 +0 0 1 2 6 13 22 30 39 49 59 69 77 80 77 76 80 82 72 41 -9 -76 -144 -193 -214 -227 -282 -420 -636 -853 -974 -940 -797 -676 -694 -882 -1138 -1306 -1259 -982 -560 -133 176 311 313 314 469 850 1364 1806 2008 1993 1929 1957 2053 2084 1985 1836 1764 1759 1675 1372 867 317 -127 -405 -536 -587 -664 -897 -1362 -1971 -2486 -2628 -2292 -1662 -1126 -1006 -1295 -1652 -1690 -1292 -644 -35 351 504 545 619 766 867 773 503 254 179 266 433 634 748 525 -134 -876 -1121 -692 -32 305 214 -28 -185 -188 42 564 1203 1620 1733 1832 2183 2689 3100 3338 3456 3422 3220 3023 3010 3013 2650 1864 1059 620 520 498 355 -69 -918 -2040 -2890 -3035 -2671 -2357 -2348 -2562 -3140 -4474 -6462 -8180 -8638 -7763 -6347 -5265 -5050 -5903 -7585 -9190 -9563 -8233 -5801 -3301 -1435 -437 -245 -416 -108 1355 3769 6168 7706 8275 8343 8382 8618 9071 9611 10065 10296 10223 9761 8841 7527 6016 4514 3181 2176 1526 892 -304 -2308 -4690 -6667 -7705 -7738 -7078 -6332 -6223 -7015 -8060 -8309 -7358 -5772 -4327 -3252 -2389 -1673 -1060 -255 988 2343 3102 2972 2421 2159 2560 3582 4844 
5611 5133 3424 1497 533 782 1560 2120 2174 1714 832 -102 -554 -398 -119 -184 -428 -384 -2 304 301 173 133 115 82 268 863 1598 1951 1703 1152 800 979 1697 2624 3226 3133 2488 1809 1484 1503 1647 1768 1746 1403 645 -381 -1454 -2557 -3813 -5128 -6126 -6471 -6257 -5959 -6123 -7013 -8408 -9615 -9828 -8735 -6761 -4743 -3334 -2695 -2599 -2591 -2100 -704 1483 3707 5086 5306 4879 4655 5134 6161 7174 7697 7605 7055 6262 5378 4542 3874 3382 2896 2208 1279 272 -617 -1345 -1994 -2600 -3071 -3274 -3218 -3092 -3122 -3355 -3613 -3647 -3338 -2762 -2091 -1506 -1135 -1001 -984 -894 -607 -151 358 812 1137 1286 1277 1200 1171 1221 1281 1289 1249 1190 1103 959 759 558 408 326 296 277 222 98 -78 -239 -315 -277 -156 -33 21 -17 -95 -135 -92 15 137 230 269 254 211 183 198 242 274 272 241 206 174 138 95 51 14 -13 -32 -44 -52 -65 -91 -124 -147 -151 -134 -104 -77 -64 -62 -61 -54 -43 -30 -17 -8 -4 -2 -1 0 +0 0 2 4 12 26 44 60 78 98 118 138 154 160 154 152 160 164 144 82 -18 -152 -288 -386 -428 -454 -564 -840 -1272 -1706 -1948 -1880 -1594 -1352 -1388 -1764 -2276 -2612 -2518 -1964 -1120 -266 352 622 626 628 938 1700 2728 3612 4016 3986 3858 3914 4106 4168 3970 3672 3528 3518 3350 2744 1734 634 -254 -810 -1072 -1174 -1328 -1794 -2724 -3942 -4972 -5256 -4584 -3324 -2252 -2012 -2590 -3304 -3380 -2584 -1288 -70 702 1008 1090 1238 1532 1734 1546 1006 508 358 532 866 1268 1496 1050 -268 -1752 -2242 -1384 -64 610 428 -56 -370 -376 84 1128 2406 3240 3466 3664 4366 5378 6200 6676 6912 6844 6440 6046 6020 6026 5300 3728 2118 1240 1040 996 710 -138 -1836 -4080 -5780 -6070 -5342 -4714 -4696 -5124 -6280 -8948 -12924 -16360 -17276 -15526 -12694 -10530 -10100 -11806 -15170 -18380 -19126 -16466 -11602 -6602 -2870 -874 -490 -832 -216 2710 7538 12336 15412 16550 16686 16764 17236 18142 19222 20130 20592 20446 19522 17682 15054 12032 9028 6362 4352 3052 1784 -608 -4616 -9380 -13334 -15410 -15476 -14156 -12664 -12446 -14030 -16120 -16618 -14716 -11544 -8654 -6504 -4778 -3346 -2120 -510 1976 4686 6204 5944 4842 4318 5120 7164 9688 11222 10266 6848 2994 1066 1564 3120 4240 4348 3428 1664 -204 -1108 -796 -238 -368 -856 -768 -4 608 602 346 266 230 164 536 1726 3196 3902 3406 2304 1600 1958 3394 5248 6452 6266 4976 3618 2968 3006 3294 3536 3492 2806 1290 -762 -2908 -5114 -7626 -10256 -12252 -12942 -12514 -11918 -12246 -14026 -16816 -19230 -19656 -17470 -13522 -9486 -6668 -5390 -5198 -5182 -4200 -1408 2966 7414 10172 10612 9758 9310 10268 12322 14348 15394 15210 14110 12524 10756 9084 7748 6764 5792 4416 2558 544 -1234 -2690 -3988 -5200 -6142 -6548 -6436 -6184 -6244 -6710 -7226 -7294 -6676 -5524 -4182 -3012 -2270 -2002 -1968 -1788 -1214 -302 716 1624 2274 2572 2554 2400 2342 2442 2562 2578 2498 2380 2206 1918 1518 1116 816 652 592 554 444 196 -156 -478 -630 -554 -312 -66 42 -34 -190 -270 -184 30 274 460 538 508 422 366 396 484 548 544 482 412 348 276 190 102 28 -26 -64 -88 -104 -130 -182 -248 -294 -302 -268 -208 -154 -128 -124 -122 -108 -86 -60 -34 -16 -8 -4 -2 0 +1 +0 -1 0 3 8 16 23 30 39 51 65 81 100 120 137 148 151 146 130 107 83 62 48 30 -12 -93 -203 -309 -384 -411 -402 -382 -398 -476 -578 -630 -588 -487 -384 -304 -234 -172 -114 -29 116 287 396 396 336 312 384 559 785 944 896 619 280 103 157 324 455 483 393 197 -25 -140 -104 -32 -51 -122 -113 -1 94 96 57 44 39 29 98 325 618 775 695 482 344 432 768 1217 1535 1529 1245 927 780 809 909 999 1011 833 392 -237 -927 -1669 -2547 -3508 -4290 -4638 -4590 -4473 -4704 -5513 -6765 -7915 -8282 -7529 -5961 -4280 -3078 -2545 -2511 -2562 -2124 -728 1570 4015 5636 6019 5661 5524 6237 7657 9126 10019 10130 9619 
8736 7680 6639 5800 5183 4546 3548 2104 459 -1065 -2379 -3612 -4826 -5842 -6384 -6430 -6333 -6560 -7231 -7986 -8266 -7767 -6593 -5123 -3788 -2932 -2655 -2682 -2502 -1747 -446 1091 2543 3661 4267 4356 4220 4244 4557 4929 5115 5112 5026 4813 4319 3537 2689 2033 1681 1579 1536 1274 584 -478 -1531 -2096 -1912 -1124 -242 164 -138 -810 -1197 -854 154 1408 2469 3038 3024 2650 2423 2763 3571 4297 4509 4265 3886 3504 2981 2207 1289 400 -370 -1003 -1493 -1954 -2729 -4194 -6342 -8545 -9897 -9883 -8816 -7681 -7457 -8463 -10167 -11540 -11677 -10302 -7904 -5429 -3778 -3292 -3581 -3748 -2941 -931 1683 3879 4946 4987 4754 5017 6004 7365 8535 9125 9082 8601 7955 7357 6914 6619 6351 5928 5230 4323 3373 2447 1455 331 -767 -1567 -2003 -2372 -3050 -4121 -5292 -6156 -6447 -6145 -5461 -4779 -4474 -4631 -4911 -4784 -3953 -2583 -1141 -59 501 665 742 1052 1712 2545 3209 3464 3321 2985 2673 2514 2544 2675 2728 2518 2012 1368 824 533 499 611 707 654 415 80 -198 -323 -302 -207 -110 -61 -75 -128 -166 -140 -50 67 172 245 271 248 195 146 128 149 192 228 218 144 30 -68 -114 -122 -126 -149 -197 -284 -429 -613 -760 -789 -682 -518 -423 -481 -683 -926 -1082 -1067 -901 -684 -535 -515 -586 -654 -641 -526 -352 -182 -65 -22 -35 -51 -16 86 213 308 338 317 282 260 254 254 252 243 220 188 153 120 92 70 54 41 28 15 5 0 -3 -4 -3 -3 -2 0 +0 -2 0 6 16 32 46 60 78 102 130 162 200 240 274 296 302 292 260 214 166 124 96 60 -24 -186 -406 -618 -768 -822 -804 -764 -796 -952 -1156 -1260 -1176 -974 -768 -608 -468 -344 -228 -58 232 574 792 792 672 624 768 1118 1570 1888 1792 1238 560 206 314 648 910 966 786 394 -50 -280 -208 -64 -102 -244 -226 -2 188 192 114 88 78 58 196 650 1236 1550 1390 964 688 864 1536 2434 3070 3058 2490 1854 1560 1618 1818 1998 2022 1666 784 -474 -1854 -3338 -5094 -7016 -8580 -9276 -9180 -8946 -9408 -11026 -13530 -15830 -16564 -15058 -11922 -8560 -6156 -5090 -5022 -5124 -4248 -1456 3140 8030 11272 12038 11322 11048 12474 15314 18252 20038 20260 19238 17472 15360 13278 11600 10366 9092 7096 4208 918 -2130 -4758 -7224 -9652 -11684 -12768 -12860 -12666 -13120 -14462 -15972 -16532 -15534 -13186 -10246 -7576 -5864 -5310 -5364 -5004 -3494 -892 2182 5086 7322 8534 8712 8440 8488 9114 9858 10230 10224 10052 9626 8638 7074 5378 4066 3362 3158 3072 2548 1168 -956 -3062 -4192 -3824 -2248 -484 328 -276 -1620 -2394 -1708 308 2816 4938 6076 6048 5300 4846 5526 7142 8594 9018 8530 7772 7008 5962 4414 2578 800 -740 -2006 -2986 -3908 -5458 -8388 -12684 -17090 -19794 -19766 -17632 -15362 -14914 -16926 -20334 -23080 -23354 -20604 -15808 -10858 -7556 -6584 -7162 -7496 -5882 -1862 3366 7758 9892 9974 9508 10034 12008 14730 17070 18250 18164 17202 15910 14714 13828 13238 12702 11856 10460 8646 6746 4894 2910 662 -1534 -3134 -4006 -4744 -6100 -8242 -10584 -12312 -12894 -12290 -10922 -9558 -8948 -9262 -9822 -9568 -7906 -5166 -2282 -118 1002 1330 1484 2104 3424 5090 6418 6928 6642 5970 5346 5028 5088 5350 5456 5036 4024 2736 1648 1066 998 1222 1414 1308 830 160 -396 -646 -604 -414 -220 -122 -150 -256 -332 -280 -100 134 344 490 542 496 390 292 256 298 384 456 436 288 60 -136 -228 -244 -252 -298 -394 -568 -858 -1226 -1520 -1578 -1364 -1036 -846 -962 -1366 -1852 -2164 -2134 -1802 -1368 -1070 -1030 -1172 -1308 -1282 -1052 -704 -364 -130 -44 -70 -102 -32 172 426 616 676 634 564 520 508 508 504 486 440 376 306 240 184 140 108 82 56 30 10 0 -6 -8 -6 -6 -4 0 +1 +0 -1 -2 -2 -1 2 7 13 19 25 30 35 45 57 68 77 86 93 93 84 70 58 53 54 57 51 25 -23 -77 -112 -109 -68 -16 11 -10 -62 -96 -72 13 131 241 311 325 297 284 338 456 572 625 616 583 546 483 371 225 72 -70 
-195 -301 -406 -586 -932 -1454 -2023 -2418 -2492 -2292 -2058 -2059 -2408 -2981 -3480 -3627 -3291 -2596 -1834 -1312 -1175 -1314 -1413 -1139 -370 686 1624 2125 2200 2151 2327 2857 3595 4271 4678 4773 4633 4390 4159 4004 3928 3860 3689 3333 2821 2253 1674 1019 237 -563 -1176 -1539 -1864 -2454 -3393 -4460 -5306 -5684 -5545 -5042 -4514 -4324 -4579 -4967 -4951 -4185 -2798 -1265 -67 582 789 902 1307 2178 3313 4275 4722 4634 4264 3907 3764 3898 4198 4383 4143 3390 2363 1458 967 926 1162 1380 1308 850 169 -427 -714 -683 -480 -262 -148 -188 -331 -439 -380 -139 194 512 747 850 801 648 498 450 539 718 877 865 589 130 -293 -514 -567 -607 -742 -1013 -1515 -2368 -3506 -4513 -4863 -4367 -3448 -2921 -3463 -5124 -7245 -8821 -9088 -8017 -6364 -5216 -5251 -6274 -7376 -7597 -6576 -4645 -2532 -957 -342 -573 -894 -290 1729 4613 7143 8435 8550 8246 8247 8793 9701 10624 11247 11375 10954 10069 8944 7864 7051 6468 5809 4751 3264 1617 105 -1188 -2326 -3344 -4191 -4865 -5489 -6248 -7188 -8085 -8559 -8379 -7725 -7114 -6955 -7198 -7387 -7052 -6038 -4580 -3139 -2151 -1766 -1748 -1628 -1042 46 1397 2640 3439 3654 3484 3384 3701 4349 4917 5081 4840 4406 3974 3686 3607 3670 3651 3325 2666 1859 1139 643 397 309 186 -147 -677 -1192 -1490 -1560 -1522 -1472 -1421 -1374 -1341 -1281 -1106 -781 -383 -31 211 347 427 511 650 837 1010 1130 1208 1228 1073 638 21 -469 -581 -373 -171 -288 -790 -1503 -2159 -2530 -2527 -2230 -1858 -1665 -1790 -2172 -2567 -2688 -2372 -1729 -1079 -720 -707 -827 -793 -476 36 553 936 1152 1232 1234 1229 1296 1462 1668 1782 1714 1496 1259 1129 1105 1091 994 807 589 408 287 209 140 56 -50 -167 -272 -343 -372 -372 -361 -355 -365 -388 -410 -410 -380 -325 -265 -212 -172 -147 -135 -126 -110 -81 -42 -7 13 18 15 10 8 10 13 13 10 6 3 1 0 +0 -2 -4 -4 -2 4 14 26 38 50 60 70 90 114 136 154 172 186 186 168 140 116 106 108 114 102 50 -46 -154 -224 -218 -136 -32 22 -20 -124 -192 -144 26 262 482 622 650 594 568 676 912 1144 1250 1232 1166 1092 966 742 450 144 -140 -390 -602 -812 -1172 -1864 -2908 -4046 -4836 -4984 -4584 -4116 -4118 -4816 -5962 -6960 -7254 -6582 -5192 -3668 -2624 -2350 -2628 -2826 -2278 -740 1372 3248 4250 4400 4302 4654 5714 7190 8542 9356 9546 9266 8780 8318 8008 7856 7720 7378 6666 5642 4506 3348 2038 474 -1126 -2352 -3078 -3728 -4908 -6786 -8920 -10612 -11368 -11090 -10084 -9028 -8648 -9158 -9934 -9902 -8370 -5596 -2530 -134 1164 1578 1804 2614 4356 6626 8550 9444 9268 8528 7814 7528 7796 8396 8766 8286 6780 4726 2916 1934 1852 2324 2760 2616 1700 338 -854 -1428 -1366 -960 -524 -296 -376 -662 -878 -760 -278 388 1024 1494 1700 1602 1296 996 900 1078 1436 1754 1730 1178 260 -586 -1028 -1134 -1214 -1484 -2026 -3030 -4736 -7012 -9026 -9726 -8734 -6896 -5842 -6926 -10248 -14490 -17642 -18176 -16034 -12728 -10432 -10502 -12548 -14752 -15194 -13152 -9290 -5064 -1914 -684 -1146 -1788 -580 3458 9226 14286 16870 17100 16492 16494 17586 19402 21248 22494 22750 21908 20138 17888 15728 14102 12936 11618 9502 6528 3234 210 -2376 -4652 -6688 -8382 -9730 -10978 -12496 -14376 -16170 -17118 -16758 -15450 -14228 -13910 -14396 -14774 -14104 -12076 -9160 -6278 -4302 -3532 -3496 -3256 -2084 92 2794 5280 6878 7308 6968 6768 7402 8698 9834 10162 9680 8812 7948 7372 7214 7340 7302 6650 5332 3718 2278 1286 794 618 372 -294 -1354 -2384 -2980 -3120 -3044 -2944 -2842 -2748 -2682 -2562 -2212 -1562 -766 -62 422 694 854 1022 1300 1674 2020 2260 2416 2456 2146 1276 42 -938 -1162 -746 -342 -576 -1580 -3006 -4318 -5060 -5054 -4460 -3716 -3330 -3580 -4344 -5134 -5376 -4744 -3458 -2158 -1440 -1414 -1654 -1586 -952 72 1106 1872 2304 2464 
2468 2458 2592 2924 3336 3564 3428 2992 2518 2258 2210 2182 1988 1614 1178 816 574 418 280 112 -100 -334 -544 -686 -744 -744 -722 -710 -730 -776 -820 -820 -760 -650 -530 -424 -344 -294 -270 -252 -220 -162 -84 -14 26 36 30 20 16 20 26 26 20 12 6 2 0 +1 +0 -1 -1 0 0 1 2 2 3 2 3 4 7 10 11 8 2 -6 -12 -14 -16 -22 -32 -52 -89 -141 -195 -226 -218 -184 -166 -209 -328 -492 -633 -689 -641 -537 -463 -490 -615 -757 -817 -739 -545 -311 -123 -46 -80 -130 -44 269 748 1202 1473 1546 1545 1600 1767 2015 2281 2498 2608 2593 2459 2255 2043 1889 1785 1653 1393 984 502 33 -391 -786 -1162 -1496 -1784 -2069 -2419 -2857 -3300 -3585 -3601 -3408 -3219 -3227 -3426 -3606 -3529 -3096 -2407 -1691 -1187 -999 -1012 -966 -634 28 890 1722 2297 2499 2439 2425 2714 3265 3778 3994 3894 3626 3349 3177 3180 3312 3371 3141 2576 1838 1151 666 421 335 206 -167 -785 -1414 -1811 -1938 -1936 -1915 -1892 -1873 -1870 -1829 -1616 -1169 -587 -48 339 571 719 882 1150 1517 1876 2151 2357 2455 2198 1340 46 -1036 -1317 -867 -407 -704 -1987 -3884 -5727 -6897 -7078 -6419 -5499 -5067 -5603 -6994 -8512 -9168 -8335 -6263 -4027 -2768 -2804 -3382 -3349 -2074 162 2576 4508 5735 6348 6585 6799 7422 8687 10287 11425 11414 10347 9077 8475 8649 8909 8479 7182 5483 3979 2940 2245 1581 671 -621 -2203 -3788 -5047 -5813 -6165 -6356 -6675 -7334 -8358 -9479 -10222 -10209 -9464 -8366 -7320 -6545 -6155 -6203 -6485 -6395 -5278 -3091 -567 1338 2161 2118 1807 1878 2793 4539 6474 7651 7568 6577 5582 5311 5897 6898 7581 7317 6019 4287 2996 2610 2862 3113 2884 2094 968 -119 -810 -983 -854 -791 -986 -1347 -1668 -1817 -1753 -1497 -1155 -879 -739 -646 -473 -201 80 294 429 509 567 653 781 864 769 454 24 -377 -690 -933 -1129 -1309 -1545 -1915 -2421 -2955 -3368 -3562 -3528 -3361 -3215 -3204 -3308 -3381 -3269 -2915 -2395 -1871 -1505 -1364 -1361 -1293 -989 -443 188 684 914 901 817 875 1172 1617 2001 2165 2101 1919 1759 1718 1812 1953 1993 1830 1498 1135 879 778 783 788 687 442 118 -152 -277 -268 -212 -191 -242 -358 -495 -581 -563 -455 -336 -283 -306 -353 -371 -339 -271 -195 -136 -109 -109 -116 -113 -100 -77 -47 -17 1 -2 -15 -18 -2 24 45 54 56 57 60 64 71 78 83 81 75 69 65 62 58 52 46 38 31 25 21 17 13 9 6 3 1 0 0 0 0 +0 -2 -2 0 0 2 4 4 6 4 6 8 14 20 22 16 4 -12 -24 -28 -32 -44 -64 -104 -178 -282 -390 -452 -436 -368 -332 -418 -656 -984 -1266 -1378 -1282 -1074 -926 -980 -1230 -1514 -1634 -1478 -1090 -622 -246 -92 -160 -260 -88 538 1496 2404 2946 3092 3090 3200 3534 4030 4562 4996 5216 5186 4918 4510 4086 3778 3570 3306 2786 1968 1004 66 -782 -1572 -2324 -2992 -3568 -4138 -4838 -5714 -6600 -7170 -7202 -6816 -6438 -6454 -6852 -7212 -7058 -6192 -4814 -3382 -2374 -1998 -2024 -1932 -1268 56 1780 3444 4594 4998 4878 4850 5428 6530 7556 7988 7788 7252 6698 6354 6360 6624 6742 6282 5152 3676 2302 1332 842 670 412 -334 -1570 -2828 -3622 -3876 -3872 -3830 -3784 -3746 -3740 -3658 -3232 -2338 -1174 -96 678 1142 1438 1764 2300 3034 3752 4302 4714 4910 4396 2680 92 -2072 -2634 -1734 -814 -1408 -3974 -7768 -11454 -13794 -14156 -12838 -10998 -10134 -11206 -13988 -17024 -18336 -16670 -12526 -8054 -5536 -5608 -6764 -6698 -4148 324 5152 9016 11470 12696 13170 13598 14844 17374 20574 22850 22828 20694 18154 16950 17298 17818 16958 14364 10966 7958 5880 4490 3162 1342 -1242 -4406 -7576 -10094 -11626 -12330 -12712 -13350 -14668 -16716 -18958 -20444 -20418 -18928 -16732 -14640 -13090 -12310 -12406 -12970 -12790 -10556 -6182 -1134 2676 4322 4236 3614 3756 5586 9078 12948 15302 15136 13154 11164 10622 11794 13796 15162 14634 12038 8574 5992 5220 5724 6226 5768 4188 1936 -238 -1620 -1966 -1708 -1582 
-1972 -2694 -3336 -3634 -3506 -2994 -2310 -1758 -1478 -1292 -946 -402 160 588 858 1018 1134 1306 1562 1728 1538 908 48 -754 -1380 -1866 -2258 -2618 -3090 -3830 -4842 -5910 -6736 -7124 -7056 -6722 -6430 -6408 -6616 -6762 -6538 -5830 -4790 -3742 -3010 -2728 -2722 -2586 -1978 -886 376 1368 1828 1802 1634 1750 2344 3234 4002 4330 4202 3838 3518 3436 3624 3906 3986 3660 2996 2270 1758 1556 1566 1576 1374 884 236 -304 -554 -536 -424 -382 -484 -716 -990 -1162 -1126 -910 -672 -566 -612 -706 -742 -678 -542 -390 -272 -218 -218 -232 -226 -200 -154 -94 -34 2 -4 -30 -36 -4 48 90 108 112 114 120 128 142 156 166 162 150 138 130 124 116 104 92 76 62 50 42 34 26 18 12 6 2 0 0 0 0 +1 +0 -2 -4 -6 -8 -11 -17 -26 -40 -55 -61 -54 -41 -33 -38 -52 -58 -41 3 61 118 165 200 225 252 297 375 476 568 606 586 547 541 586 638 642 573 462 352 274 219 162 72 -70 -259 -465 -645 -775 -856 -920 -1003 -1145 -1356 -1597 -1785 -1847 -1774 -1624 -1472 -1360 -1322 -1378 -1487 -1514 -1290 -780 -148 358 596 602 529 566 867 1450 2126 2584 2628 2348 2047 2002 2282 2741 3094 3064 2587 1891 1355 1211 1362 1519 1443 1073 509 -64 -447 -556 -495 -469 -600 -838 -1063 -1186 -1171 -1025 -809 -630 -542 -485 -363 -158 64 242 362 438 500 589 721 816 743 449 25 -390 -731 -1011 -1252 -1485 -1792 -2273 -2941 -3672 -4285 -4636 -4699 -4582 -4485 -4576 -4835 -5062 -5009 -4574 -3848 -3078 -2535 -2355 -2407 -2342 -1836 -842 366 1368 1873 1893 1762 1934 2658 3763 4778 5307 5287 4959 4667 4685 5076 5625 5900 5573 4689 3657 2915 2657 2753 2855 2566 1702 471 -621 -1169 -1168 -952 -886 -1161 -1779 -2548 -3100 -3111 -2602 -1994 -1745 -1958 -2348 -2562 -2439 -2034 -1521 -1106 -927 -966 -1071 -1099 -1013 -818 -524 -192 17 -17 -200 -258 -22 408 796 1021 1138 1248 1394 1607 1920 2304 2639 2823 2876 2917 3034 3216 3386 3477 3450 3310 3118 2977 2935 2923 2811 2524 2088 1597 1137 752 440 154 -177 -588 -1050 -1506 -1910 -2235 -2466 -2620 -2749 -2919 -3138 -3328 -3392 -3303 -3151 -3076 -3146 -3279 -3291 -3073 -2707 -2406 -2323 -2416 -2511 -2471 -2275 -1973 -1634 -1345 -1187 -1167 -1180 -1086 -854 -575 -363 -267 -260 -278 -225 -14 351 734 968 1002 944 937 1041 1228 1431 1580 1620 1548 1442 1397 1432 1479 1462 1364 1205 1016 834 710 670 681 675 613 500 358 209 91 38 56 100 124 119 103 78 32 -33 -88 -100 -61 10 78 103 61 -31 -131 -193 -203 -175 -129 -86 -62 -67 -99 -139 -165 -157 -115 -55 -5 12 -4 -28 -34 -11 32 74 96 93 72 53 50 73 114 158 181 174 145 112 94 93 100 107 109 100 82 56 32 17 6 -6 -18 -28 -34 -36 -40 -44 -46 -48 -48 -48 -44 -38 -30 -24 -20 -19 -16 -13 -10 -7 -4 -3 -2 -1 -1 0 +0 -8 -16 -24 -32 -44 -68 -104 -160 -220 -244 -216 -164 -132 -152 -208 -232 -164 12 244 472 660 800 900 1008 1188 1500 1904 2272 2424 2344 2188 2164 2344 2552 2568 2292 1848 1408 1096 876 648 288 -280 -1036 -1860 -2580 -3100 -3424 -3680 -4012 -4580 -5424 -6388 -7140 -7388 -7096 -6496 -5888 -5440 -5288 -5512 -5948 -6056 -5160 -3120 -592 1432 2384 2408 2116 2264 3468 5800 8504 10336 10512 9392 8188 8008 9128 10964 12376 12256 10348 7564 5420 4844 5448 6076 5772 4292 2036 -256 -1788 -2224 -1980 -1876 -2400 -3352 -4252 -4744 -4684 -4100 -3236 -2520 -2168 -1940 -1452 -632 256 968 1448 1752 2000 2356 2884 3264 2972 1796 100 -1560 -2924 -4044 -5008 -5940 -7168 -9092 -11764 -14688 -17140 -18544 -18796 -18328 -17940 -18304 -19340 -20248 -20036 -18296 -15392 -12312 -10140 -9420 -9628 -9368 -7344 -3368 1464 5472 7492 7572 7048 7736 10632 15052 19112 21228 21148 19836 18668 18740 20304 22500 23600 22292 18756 14628 11660 10628 11012 11420 10264 6808 1884 -2484 -4676 -4672 -3808 -3544 -4644 -7116 
-10192 -12400 -12444 -10408 -7976 -6980 -7832 -9392 -10248 -9756 -8136 -6084 -4424 -3708 -3864 -4284 -4396 -4052 -3272 -2096 -768 68 -68 -800 -1032 -88 1632 3184 4084 4552 4992 5576 6428 7680 9216 10556 11292 11504 11668 12136 12864 13544 13908 13800 13240 12472 11908 11740 11692 11244 10096 8352 6388 4548 3008 1760 616 -708 -2352 -4200 -6024 -7640 -8940 -9864 -10480 -10996 -11676 -12552 -13312 -13568 -13212 -12604 -12304 -12584 -13116 -13164 -12292 -10828 -9624 -9292 -9664 -10044 -9884 -9100 -7892 -6536 -5380 -4748 -4668 -4720 -4344 -3416 -2300 -1452 -1068 -1040 -1112 -900 -56 1404 2936 3872 4008 3776 3748 4164 4912 5724 6320 6480 6192 5768 5588 5728 5916 5848 5456 4820 4064 3336 2840 2680 2724 2700 2452 2000 1432 836 364 152 224 400 496 476 412 312 128 -132 -352 -400 -244 40 312 412 244 -124 -524 -772 -812 -700 -516 -344 -248 -268 -396 -556 -660 -628 -460 -220 -20 48 -16 -112 -136 -44 128 296 384 372 288 212 200 292 456 632 724 696 580 448 376 372 400 428 436 400 328 224 128 68 24 -24 -72 -112 -136 -144 -160 -176 -184 -192 -192 -192 -176 -152 -120 -96 -80 -76 -64 -52 -40 -28 -16 -12 -8 -4 -4 0 +2 +0 1 2 4 7 11 13 13 13 15 19 24 25 19 6 -10 -21 -23 -21 -22 -31 -52 -81 -107 -116 -105 -87 -81 -98 -125 -146 -148 -130 -104 -80 -71 -78 -91 -98 -95 -81 -54 -21 2 -2 -25 -33 -3 56 115 153 177 202 234 280 347 431 512 567 597 626 674 737 801 849 870 860 835 821 835 856 847 783 667 524 384 261 157 56 -67 -228 -417 -615 -800 -961 -1088 -1186 -1276 -1390 -1532 -1666 -1739 -1736 -1698 -1698 -1779 -1899 -1954 -1868 -1685 -1533 -1516 -1614 -1718 -1730 -1631 -1447 -1227 -1034 -933 -939 -971 -916 -736 -507 -328 -246 -246 -269 -222 -14 364 778 1049 1110 1071 1087 1236 1492 1778 2010 2109 2062 1966 1949 2045 2162 2189 2091 1892 1633 1373 1196 1158 1204 1223 1138 951 698 419 186 81 122 222 281 278 246 192 81 -84 -232 -271 -170 30 232 314 193 -100 -432 -657 -713 -632 -480 -329 -243 -272 -417 -606 -740 -731 -552 -271 -24 65 -18 -158 -202 -66 207 493 669 672 546 415 415 625 1022 1470 1771 1787 1556 1266 1118 1165 1328 1501 1608 1579 1360 993 618 345 137 -119 -446 -752 -964 -1139 -1364 -1647 -1924 -2184 -2468 -2740 -2868 -2762 -2514 -2331 -2341 -2505 -2673 -2696 -2498 -2123 -1754 -1583 -1641 -1762 -1736 -1490 -1099 -683 -357 -231 -348 -584 -696 -523 -130 269 513 560 444 227 9 -91 -22 153 326 422 400 244 10 -167 -144 79 344 469 404 235 90 46 112 251 396 487 498 452 390 338 308 326 424 609 830 1011 1113 1145 1144 1137 1149 1219 1364 1526 1591 1498 1312 1175 1176 1265 1320 1256 1084 867 663 495 365 267 194 122 26 -102 -235 -331 -365 -355 -347 -370 -417 -456 -456 -415 -351 -290 -246 -219 -199 -172 -124 -53 28 83 92 61 27 28 70 130 173 171 125 52 -13 -47 -49 -38 -39 -68 -123 -187 -237 -254 -244 -224 -211 -206 -202 -197 -192 -184 -166 -136 -107 -91 -86 -82 -67 -43 -20 -8 -8 -14 -17 -13 -7 -4 -5 -8 -11 -13 -15 -16 -14 -12 -11 -12 -13 -13 -11 -8 -6 -4 -4 -4 -3 -2 -1 -1 -1 0 +0 8 16 32 56 88 104 104 104 120 152 192 200 152 48 -80 -168 -184 -168 -176 -248 -416 -648 -856 -928 -840 -696 -648 -784 -1000 -1168 -1184 -1040 -832 -640 -568 -624 -728 -784 -760 -648 -432 -168 16 -16 -200 -264 -24 448 920 1224 1416 1616 1872 2240 2776 3448 4096 4536 4776 5008 5392 5896 6408 6792 6960 6880 6680 6568 6680 6848 6776 6264 5336 4192 3072 2088 1256 448 -536 -1824 -3336 -4920 -6400 -7688 -8704 -9488 -10208 -11120 -12256 -13328 -13912 -13888 -13584 -13584 -14232 -15192 -15632 -14944 -13480 -12264 -12128 -12912 -13744 -13840 -13048 -11576 -9816 -8272 -7464 -7512 -7768 -7328 -5888 -4056 -2624 -1968 -1968 -2152 -1776 -112 2912 6224 8392 8880 
8568 8696 9888 11936 14224 16080 16872 16496 15728 15592 16360 17296 17512 16728 15136 13064 10984 9568 9264 9632 9784 9104 7608 5584 3352 1488 648 976 1776 2248 2224 1968 1536 648 -672 -1856 -2168 -1360 240 1856 2512 1544 -800 -3456 -5256 -5704 -5056 -3840 -2632 -1944 -2176 -3336 -4848 -5920 -5848 -4416 -2168 -192 520 -144 -1264 -1616 -528 1656 3944 5352 5376 4368 3320 3320 5000 8176 11760 14168 14296 12448 10128 8944 9320 10624 12008 12864 12632 10880 7944 4944 2760 1096 -952 -3568 -6016 -7712 -9112 -10912 -13176 -15392 -17472 -19744 -21920 -22944 -22096 -20112 -18648 -18728 -20040 -21384 -21568 -19984 -16984 -14032 -12664 -13128 -14096 -13888 -11920 -8792 -5464 -2856 -1848 -2784 -4672 -5568 -4184 -1040 2152 4104 4480 3552 1816 72 -728 -176 1224 2608 3376 3200 1952 80 -1336 -1152 632 2752 3752 3232 1880 720 368 896 2008 3168 3896 3984 3616 3120 2704 2464 2608 3392 4872 6640 8088 8904 9160 9152 9096 9192 9752 10912 12208 12728 11984 10496 9400 9408 10120 10560 10048 8672 6936 5304 3960 2920 2136 1552 976 208 -816 -1880 -2648 -2920 -2840 -2776 -2960 -3336 -3648 -3648 -3320 -2808 -2320 -1968 -1752 -1592 -1376 -992 -424 224 664 736 488 216 224 560 1040 1384 1368 1000 416 -104 -376 -392 -304 -312 -544 -984 -1496 -1896 -2032 -1952 -1792 -1688 -1648 -1616 -1576 -1536 -1472 -1328 -1088 -856 -728 -688 -656 -536 -344 -160 -64 -64 -112 -136 -104 -56 -32 -40 -64 -88 -104 -120 -128 -112 -96 -88 -96 -104 -104 -88 -64 -48 -32 -32 -32 -24 -16 -8 -8 -8 0 +3 +0 -1 -1 0 0 0 0 -1 -3 -4 -6 -6 -5 -4 -4 -5 -8 -12 -16 -18 -15 -8 -1 2 -1 -7 -9 -4 10 26 37 40 34 28 29 47 81 123 157 166 152 129 120 130 155 183 205 210 188 143 92 53 22 -20 -78 -136 -181 -221 -275 -343 -414 -485 -566 -649 -701 -697 -654 -625 -647 -713 -784 -813 -776 -678 -577 -535 -570 -629 -637 -562 -426 -272 -146 -97 -150 -258 -315 -243 -62 131 256 287 233 122 5 -52 -13 90 198 262 254 159 6 -114 -101 57 252 352 310 184 72 38 95 216 349 440 459 427 377 334 311 337 449 660 920 1147 1291 1359 1390 1413 1462 1587 1817 2080 2220 2140 1918 1760 1802 1985 2121 2066 1827 1497 1174 897 677 508 379 245 54 -213 -507 -732 -826 -825 -827 -906 -1050 -1178 -1209 -1131 -983 -834 -727 -665 -621 -552 -410 -178 98 304 345 235 109 116 299 570 779 800 603 262 -65 -249 -269 -215 -232 -417 -785 -1245 -1636 -1830 -1827 -1752 -1722 -1750 -1790 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1133 -1135 -983 -667 -324 -130 -146 -275 -350 -292 -164 -93 -131 -236 -358 -492 -629 -713 -710 -672 -707 -863 -1078 -1229 -1217 -1051 -846 -763 -870 -1066 -1151 -1009 -700 -392 -217 -192 -231 -212 -57 217 514 757 944 1110 1241 1292 1276 1286 1413 1627 1795 1797 1638 1447 1368 1447 1607 1732 1756 1681 1543 1386 1285 1290 1378 1431 1342 1119 879 744 744 812 840 748 529 261 50 -38 -7 89 170 159 31 -141 -236 -208 -134 -130 -221 -330 -374 -343 -281 -234 -223 -245 -280 -300 -292 -259 -213 -176 -173 -224 -304 -356 -351 -316 -307 -346 -418 -496 -558 -581 -553 -498 -463 -466 -483 -479 -444 -393 -328 -253 -183 -135 -106 -78 -43 -5 33 73 111 129 118 94 81 84 96 107 112 102 68 17 -33 -69 -88 -94 -96 -100 -116 -143 -167 -175 -165 -149 -136 -130 -129 -130 -128 -118 -101 -83 -68 -58 -51 -47 -44 -37 -25 -12 -3 0 2 5 8 12 15 16 16 14 12 11 10 10 9 7 6 5 3 2 1 1 0 0 +0 -8 -8 0 0 0 0 -8 -24 -32 -48 -48 -40 -32 -32 -40 -64 -96 -128 -144 -120 -64 -8 16 -8 -56 -72 -32 80 208 296 320 272 224 232 376 648 984 1256 1328 1216 1032 960 1040 1240 1464 1640 1680 1504 1144 736 424 176 -160 -624 -1088 -1448 -1768 -2200 -2744 -3312 -3880 -4528 -5192 -5608 -5576 -5232 -5000 -5176 -5704 -6272 -6504 -6208 -5424 -4616 -4280 
-4560 -5032 -5096 -4496 -3408 -2176 -1168 -776 -1200 -2064 -2520 -1944 -496 1048 2048 2296 1864 976 40 -416 -104 720 1584 2096 2032 1272 48 -912 -808 456 2016 2816 2480 1472 576 304 760 1728 2792 3520 3672 3416 3016 2672 2488 2696 3592 5280 7360 9176 10328 10872 11120 11304 11696 12696 14536 16640 17760 17120 15344 14080 14416 15880 16968 16528 14616 11976 9392 7176 5416 4064 3032 1960 432 -1704 -4056 -5856 -6608 -6600 -6616 -7248 -8400 -9424 -9672 -9048 -7864 -6672 -5816 -5320 -4968 -4416 -3280 -1424 784 2432 2760 1880 872 928 2392 4560 6232 6400 4824 2096 -520 -1992 -2152 -1720 -1856 -3336 -6280 -9960 -13088 -14640 -14616 -14016 -13776 -14000 -14320 -14608 -14936 -15016 -14160 -12200 -10120 -9040 -9064 -9080 -7864 -5336 -2592 -1040 -1168 -2200 -2800 -2336 -1312 -744 -1048 -1888 -2864 -3936 -5032 -5704 -5680 -5376 -5656 -6904 -8624 -9832 -9736 -8408 -6768 -6104 -6960 -8528 -9208 -8072 -5600 -3136 -1736 -1536 -1848 -1696 -456 1736 4112 6056 7552 8880 9928 10336 10208 10288 11304 13016 14360 14376 13104 11576 10944 11576 12856 13856 14048 13448 12344 11088 10280 10320 11024 11448 10736 8952 7032 5952 5952 6496 6720 5984 4232 2088 400 -304 -56 712 1360 1272 248 -1128 -1888 -1664 -1072 -1040 -1768 -2640 -2992 -2744 -2248 -1872 -1784 -1960 -2240 -2400 -2336 -2072 -1704 -1408 -1384 -1792 -2432 -2848 -2808 -2528 -2456 -2768 -3344 -3968 -4464 -4648 -4424 -3984 -3704 -3728 -3864 -3832 -3552 -3144 -2624 -2024 -1464 -1080 -848 -624 -344 -40 264 584 888 1032 944 752 648 672 768 856 896 816 544 136 -264 -552 -704 -752 -768 -800 -928 -1144 -1336 -1400 -1320 -1192 -1088 -1040 -1032 -1040 -1024 -944 -808 -664 -544 -464 -408 -376 -352 -296 -200 -96 -24 0 16 40 64 96 120 128 128 112 96 88 80 80 72 56 48 40 24 16 8 8 0 0 +3 +0 -1 -1 -1 -1 -2 -2 -3 -2 -2 0 2 3 2 1 1 5 11 16 19 15 7 -3 -9 -10 -9 -10 -20 -40 -67 -93 -111 -117 -119 -124 -133 -143 -154 -166 -176 -174 -157 -136 -127 -133 -140 -126 -89 -45 -19 -22 -43 -57 -50 -29 -17 -25 -46 -72 -103 -135 -159 -163 -159 -173 -218 -281 -330 -336 -299 -248 -230 -271 -341 -378 -341 -243 -140 -80 -73 -90 -85 -23 91 221 334 427 515 590 631 638 659 742 876 991 1016 949 859 832 900 1024 1130 1173 1150 1080 994 942 969 1059 1125 1080 921 741 642 656 732 775 706 512 259 51 -39 -7 97 189 181 36 -167 -287 -259 -170 -169 -294 -450 -521 -489 -410 -351 -342 -385 -450 -494 -492 -447 -377 -318 -321 -426 -593 -712 -718 -663 -660 -765 -948 -1153 -1332 -1424 -1390 -1286 -1226 -1270 -1352 -1377 -1314 -1195 -1027 -814 -605 -458 -371 -283 -158 -17 131 300 470 563 535 442 392 421 497 575 622 585 406 107 -211 -456 -603 -678 -716 -780 -944 -1212 -1483 -1626 -1609 -1517 -1452 -1457 -1527 -1621 -1677 -1633 -1485 -1292 -1127 -1011 -950 -937 -929 -834 -605 -311 -83 28 92 193 361 568 783 968 1073 1087 1073 1116 1234 1392 1548 1683 1769 1779 1745 1749 1846 2002 2163 2302 2409 2459 2442 2419 2468 2573 2629 2572 2457 2371 2327 2255 2104 1888 1656 1443 1265 1107 928 685 373 36 -268 -512 -700 -853 -997 -1154 -1334 -1518 -1680 -1805 -1886 -1918 -1899 -1841 -1768 -1691 -1605 -1504 -1396 -1276 -1132 -959 -785 -645 -556 -501 -448 -374 -277 -174 -92 -50 -54 -87 -128 -156 -163 -143 -107 -80 -86 -125 -165 -168 -120 -48 13 49 67 81 97 126 180 254 323 354 343 318 300 291 272 228 154 65 -14 -69 -103 -139 -198 -287 -389 -476 -521 -519 -489 -465 -470 -495 -513 -497 -448 -392 -354 -338 -327 -304 -263 -213 -159 -107 -62 -26 1 32 70 110 141 160 170 179 191 202 209 208 200 190 183 174 161 144 126 111 97 84 75 68 62 53 44 37 30 24 20 17 14 10 8 6 6 4 2 1 1 1 0 0 0 0 +0 -8 -8 -8 -8 -16 -16 -24 -16 -16 0 16 24 16 8 8 
40 88 128 152 120 56 -24 -72 -80 -72 -80 -160 -320 -536 -744 -888 -936 -952 -992 -1064 -1144 -1232 -1328 -1408 -1392 -1256 -1088 -1016 -1064 -1120 -1008 -712 -360 -152 -176 -344 -456 -400 -232 -136 -200 -368 -576 -824 -1080 -1272 -1304 -1272 -1384 -1744 -2248 -2640 -2688 -2392 -1984 -1840 -2168 -2728 -3024 -2728 -1944 -1120 -640 -584 -720 -680 -184 728 1768 2672 3416 4120 4720 5048 5104 5272 5936 7008 7928 8128 7592 6872 6656 7200 8192 9040 9384 9200 8640 7952 7536 7752 8472 9000 8640 7368 5928 5136 5248 5856 6200 5648 4096 2072 408 -312 -56 776 1512 1448 288 -1336 -2296 -2072 -1360 -1352 -2352 -3600 -4168 -3912 -3280 -2808 -2736 -3080 -3600 -3952 -3936 -3576 -3016 -2544 -2568 -3408 -4744 -5696 -5744 -5304 -5280 -6120 -7584 -9224 -10656 -11392 -11120 -10288 -9808 -10160 -10816 -11016 -10512 -9560 -8216 -6512 -4840 -3664 -2968 -2264 -1264 -136 1048 2400 3760 4504 4280 3536 3136 3368 3976 4600 4976 4680 3248 856 -1688 -3648 -4824 -5424 -5728 -6240 -7552 -9696 -11864 -13008 -12872 -12136 -11616 -11656 -12216 -12968 -13416 -13064 -11880 -10336 -9016 -8088 -7600 -7496 -7432 -6672 -4840 -2488 -664 224 736 1544 2888 4544 6264 7744 8584 8696 8584 8928 9872 11136 12384 13464 14152 14232 13960 13992 14768 16016 17304 18416 19272 19672 19536 19352 19744 20584 21032 20576 19656 18968 18616 18040 16832 15104 13248 11544 10120 8856 7424 5480 2984 288 -2144 -4096 -5600 -6824 -7976 -9232 -10672 -12144 -13440 -14440 -15088 -15344 -15192 -14728 -14144 -13528 -12840 -12032 -11168 -10208 -9056 -7672 -6280 -5160 -4448 -4008 -3584 -2992 -2216 -1392 -736 -400 -432 -696 -1024 -1248 -1304 -1144 -856 -640 -688 -1000 -1320 -1344 -960 -384 104 392 536 648 776 1008 1440 2032 2584 2832 2744 2544 2400 2328 2176 1824 1232 520 -112 -552 -824 -1112 -1584 -2296 -3112 -3808 -4168 -4152 -3912 -3720 -3760 -3960 -4104 -3976 -3584 -3136 -2832 -2704 -2616 -2432 -2104 -1704 -1272 -856 -496 -208 8 256 560 880 1128 1280 1360 1432 1528 1616 1672 1664 1600 1520 1464 1392 1288 1152 1008 888 776 672 600 544 496 424 352 296 240 192 160 136 112 80 64 48 48 32 16 8 8 8 0 0 0 0 +3 +0 -1 -1 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -1 1 4 8 10 11 10 10 12 15 19 23 23 17 5 -11 -25 -35 -41 -46 -53 -68 -92 -119 -137 -143 -142 -143 -150 -165 -183 -197 -201 -190 -173 -157 -147 -143 -147 -151 -141 -106 -57 -16 5 18 40 77 126 179 229 262 274 278 299 340 396 453 507 549 568 573 591 641 714 793 867 932 977 996 1013 1060 1135 1189 1193 1169 1157 1164 1156 1105 1017 914 816 732 657 564 426 237 24 -179 -350 -490 -612 -731 -866 -1025 -1194 -1352 -1486 -1589 -1653 -1674 -1661 -1632 -1597 -1551 -1488 -1412 -1321 -1199 -1039 -870 -731 -645 -595 -544 -465 -353 -227 -123 -68 -75 -124 -186 -234 -249 -223 -172 -131 -144 -215 -291 -303 -222 -91 26 98 138 170 209 279 408 593 771 867 865 821 796 793 764 656 456 200 -44 -221 -341 -472 -696 -1039 -1451 -1829 -2064 -2121 -2063 -2026 -2113 -2305 -2468 -2472 -2308 -2091 -1954 -1932 -1938 -1870 -1685 -1414 -1099 -770 -460 -203 15 273 624 1025 1380 1637 1829 2026 2267 2537 2770 2910 2956 2979 3035 3084 3047 2906 2732 2575 2422 2277 2192 2181 2161 2055 1882 1715 1567 1432 1334 1290 1229 1089 960 964 1046 993 740 506 543 857 1198 1214 602 -643 -1953 -2407 -1604 -228 535 221 -572 -1072 -1210 -1384 -1700 -1838 -1644 -1434 -1617 -2186 -2700 -2662 -1875 -664 247 214 -753 -1926 -2454 -2047 -1071 -125 404 506 455 505 615 507 88 -300 -239 210 532 399 59 -7 328 768 990 921 658 320 27 -121 -145 -184 -292 -343 -218 -50 -121 -504 -912 -988 -683 -272 -22 48 60 43 -84 -345 -629 -820 -918 -988 -1035 -1008 -907 -803 -748 -718 -669 -604 -536 -450 
-325 -180 -66 -2 31 53 77 129 239 390 514 548 492 401 328 294 299 340 403 467 509 517 494 456 416 374 324 265 202 143 96 72 83 123 166 190 186 163 125 80 50 53 84 113 119 108 98 94 87 73 53 31 5 -26 -53 -71 -76 -68 -54 -39 -29 -27 -29 -30 -30 -30 -28 -24 -16 -10 -6 -3 -1 0 0 0 0 0 +0 -8 -8 -16 -16 -24 -24 -24 -24 -24 -24 -24 -16 -8 8 32 64 80 88 80 80 96 120 152 184 184 136 40 -88 -200 -280 -328 -368 -424 -544 -736 -952 -1096 -1144 -1136 -1144 -1200 -1320 -1464 -1576 -1608 -1520 -1384 -1256 -1176 -1144 -1176 -1208 -1128 -848 -456 -128 40 144 320 616 1008 1432 1832 2096 2192 2224 2392 2720 3168 3624 4056 4392 4544 4584 4728 5128 5712 6344 6936 7456 7816 7968 8104 8480 9080 9512 9544 9352 9256 9312 9248 8840 8136 7312 6528 5856 5256 4512 3408 1896 192 -1432 -2800 -3920 -4896 -5848 -6928 -8200 -9552 -10816 -11888 -12712 -13224 -13392 -13288 -13056 -12776 -12408 -11904 -11296 -10568 -9592 -8312 -6960 -5848 -5160 -4760 -4352 -3720 -2824 -1816 -984 -544 -600 -992 -1488 -1872 -1992 -1784 -1376 -1048 -1152 -1720 -2328 -2424 -1776 -728 208 784 1104 1360 1672 2232 3264 4744 6168 6936 6920 6568 6368 6344 6112 5248 3648 1600 -352 -1768 -2728 -3776 -5568 -8312 -11608 -14632 -16512 -16968 -16504 -16208 -16904 -18440 -19744 -19776 -18464 -16728 -15632 -15456 -15504 -14960 -13480 -11312 -8792 -6160 -3680 -1624 120 2184 4992 8200 11040 13096 14632 16208 18136 20296 22160 23280 23648 23832 24280 24672 24376 23248 21856 20600 19376 18216 17536 17448 17288 16440 15056 13720 12536 11456 10672 10320 9832 8712 7680 7712 8368 7944 5920 4048 4344 6856 9584 9712 4816 -5144 -15624 -19256 -12832 -1824 4280 1768 -4576 -8576 -9680 -11072 -13600 -14704 -13152 -11472 -12936 -17488 -21600 -21296 -15000 -5312 1976 1712 -6024 -15408 -19632 -16376 -8568 -1000 3232 4048 3640 4040 4920 4056 704 -2400 -1912 1680 4256 3192 472 -56 2624 6144 7920 7368 5264 2560 216 -968 -1160 -1472 -2336 -2744 -1744 -400 -968 -4032 -7296 -7904 -5464 -2176 -176 384 480 344 -672 -2760 -5032 -6560 -7344 -7904 -8280 -8064 -7256 -6424 -5984 -5744 -5352 -4832 -4288 -3600 -2600 -1440 -528 -16 248 424 616 1032 1912 3120 4112 4384 3936 3208 2624 2352 2392 2720 3224 3736 4072 4136 3952 3648 3328 2992 2592 2120 1616 1144 768 576 664 984 1328 1520 1488 1304 1000 640 400 424 672 904 952 864 784 752 696 584 424 248 40 -208 -424 -568 -608 -544 -432 -312 -232 -216 -232 -240 -240 -240 -224 -192 -128 -80 -48 -24 -8 0 0 0 0 0 +3 +0 0 0 0 0 0 -1 -1 -2 -3 -6 -9 -15 -22 -28 -33 -36 -40 -46 -55 -65 -72 -73 -72 -73 -78 -84 -87 -84 -76 -63 -47 -30 -14 1 20 49 86 122 152 179 207 243 285 325 356 377 397 421 446 457 453 443 433 422 411 411 423 434 426 404 380 359 339 325 325 319 291 265 274 306 299 230 161 178 289 416 433 220 -243 -756 -957 -655 -96 230 97 -259 -498 -576 -676 -851 -943 -864 -773 -893 -1236 -1564 -1580 -1140 -413 157 140 -503 -1317 -1719 -1468 -786 -94 310 397 366 415 518 437 78 -271 -220 198 514 395 60 -7 347 832 1097 1044 763 380 32 -150 -185 -239 -388 -467 -304 -72 -176 -755 -1398 -1550 -1097 -447 -37 83 106 79 -156 -657 -1227 -1638 -1880 -2076 -2231 -2227 -2055 -1868 -1786 -1758 -1683 -1560 -1422 -1227 -909 -519 -195 -5 97 172 257 442 841 1415 1921 2108 1951 1641 1385 1283 1350 1586 1943 2328 2625 2759 2731 2613 2471 2308 2080 1767 1403 1036 726 570 683 1050 1485 1769 1821 1673 1346 912 600 673 1118 1583 1764 1700 1640 1671 1650 1470 1156 732 129 -682 -1547 -2236 -2596 -2583 -2245 -1782 -1494 -1555 -1856 -2201 -2553 -2960 -3272 -3194 -2661 -1981 -1445 -976 -348 369 814 778 434 48 -312 -675 -944 -981 -839 -696 -546 -208 303 656 607 377 397 720 950 792 433 
204 80 -225 -728 -1051 -907 -493 -228 -246 -349 -373 -365 -387 -369 -251 -147 -187 -295 -264 -27 282 531 695 771 688 436 161 20 -24 -144 -389 -603 -615 -445 -261 -187 -220 -299 -387 -460 -483 -430 -328 -258 -312 -502 -741 -904 -926 -854 -773 -709 -610 -418 -157 75 186 171 119 145 312 593 891 1106 1181 1135 1050 1017 1064 1124 1102 965 774 610 506 446 420 429 462 486 471 414 328 231 137 62 17 -13 -53 -114 -173 -199 -194 -185 -200 -235 -269 -286 -286 -268 -223 -156 -99 -79 -85 -80 -46 -11 -8 -35 -54 -40 -4 23 25 11 2 3 9 12 11 12 12 9 2 -3 -1 7 13 11 5 0 -2 -3 -4 -5 -5 -3 -2 -1 -1 -1 0 +0 0 0 0 0 0 -8 -8 -16 -24 -48 -72 -120 -176 -224 -264 -288 -320 -368 -440 -520 -576 -584 -576 -584 -624 -672 -696 -672 -608 -504 -376 -240 -112 8 160 392 688 976 1216 1432 1656 1944 2280 2600 2848 3016 3176 3368 3568 3656 3624 3544 3464 3376 3288 3288 3384 3472 3408 3232 3040 2872 2712 2600 2600 2552 2328 2120 2192 2448 2392 1840 1288 1424 2312 3328 3464 1760 -1944 -6048 -7656 -5240 -768 1840 776 -2072 -3984 -4608 -5408 -6808 -7544 -6912 -6184 -7144 -9888 -12512 -12640 -9120 -3304 1256 1120 -4024 -10536 -13752 -11744 -6288 -752 2480 3176 2928 3320 4144 3496 624 -2168 -1760 1584 4112 3160 480 -56 2776 6656 8776 8352 6104 3040 256 -1200 -1480 -1912 -3104 -3736 -2432 -576 -1408 -6040 -11184 -12400 -8776 -3576 -296 664 848 632 -1248 -5256 -9816 -13104 -15040 -16608 -17848 -17816 -16440 -14944 -14288 -14064 -13464 -12480 -11376 -9816 -7272 -4152 -1560 -40 776 1376 2056 3536 6728 11320 15368 16864 15608 13128 11080 10264 10800 12688 15544 18624 21000 22072 21848 20904 19768 18464 16640 14136 11224 8288 5808 4560 5464 8400 11880 14152 14568 13384 10768 7296 4800 5384 8944 12664 14112 13600 13120 13368 13200 11760 9248 5856 1032 -5456 -12376 -17888 -20768 -20664 -17960 -14256 -11952 -12440 -14848 -17608 -20424 -23680 -26176 -25552 -21288 -15848 -11560 -7808 -2784 2952 6512 6224 3472 384 -2496 -5400 -7552 -7848 -6712 -5568 -4368 -1664 2424 5248 4856 3016 3176 5760 7600 6336 3464 1632 640 -1800 -5824 -8408 -7256 -3944 -1824 -1968 -2792 -2984 -2920 -3096 -2952 -2008 -1176 -1496 -2360 -2112 -216 2256 4248 5560 6168 5504 3488 1288 160 -192 -1152 -3112 -4824 -4920 -3560 -2088 -1496 -1760 -2392 -3096 -3680 -3864 -3440 -2624 -2064 -2496 -4016 -5928 -7232 -7408 -6832 -6184 -5672 -4880 -3344 -1256 600 1488 1368 952 1160 2496 4744 7128 8848 9448 9080 8400 8136 8512 8992 8816 7720 6192 4880 4048 3568 3360 3432 3696 3888 3768 3312 2624 1848 1096 496 136 -104 -424 -912 -1384 -1592 -1552 -1480 -1600 -1880 -2152 -2288 -2288 -2144 -1784 -1248 -792 -632 -680 -640 -368 -88 -64 -280 -432 -320 -32 184 200 88 16 24 72 96 88 96 96 72 16 -24 -8 56 104 88 40 0 -16 -24 -32 -40 -40 -24 -16 -8 -8 -8 0 +3 +0 -1 -1 -1 -1 -1 0 0 1 2 6 11 19 24 26 24 23 24 29 37 51 67 82 94 101 104 106 106 103 93 79 62 46 38 49 79 118 149 161 156 131 93 64 75 131 194 225 226 227 241 247 229 187 123 22 -124 -290 -434 -522 -537 -483 -396 -343 -369 -454 -555 -664 -793 -904 -909 -781 -598 -449 -312 -115 124 282 277 159 18 -121 -269 -386 -411 -361 -307 -247 -97 144 320 304 193 209 388 524 447 250 121 48 -140 -464 -686 -606 -338 -160 -177 -256 -280 -280 -304 -297 -206 -124 -161 -260 -238 -25 267 513 687 779 712 461 174 22 -27 -167 -461 -732 -764 -566 -340 -249 -299 -417 -553 -671 -722 -659 -514 -415 -512 -845 -1279 -1599 -1677 -1584 -1469 -1383 -1219 -856 -329 162 412 388 278 347 766 1492 2303 2935 3221 3182 3024 3012 3240 3521 3551 3200 2643 2145 1832 1667 1618 1702 1892 2056 2058 1865 1530 1115 682 321 91 -68 -304 -677 -1063 -1275 -1288 -1277 -1437 -1762 -2098 -2327 
-2434 -2382 -2066 -1515 -1010 -847 -955 -938 -571 -141 -109 -502 -830 -648 -55 444 508 257 48 77 242 354 378 419 482 411 128 -140 -29 496 990 989 517 3 -258 -409 -726 -1191 -1468 -1296 -785 -316 -184 -344 -523 -528 -463 -554 -809 -956 -756 -296 108 261 224 171 190 241 219 21 -351 -752 -1025 -1162 -1268 -1342 -1260 -1028 -909 -1143 -1599 -1886 -1814 -1595 -1544 -1727 -1967 -2101 -2085 -1939 -1692 -1409 -1170 -991 -819 -616 -404 -231 -84 98 368 709 1032 1258 1425 1662 2024 2379 2538 2456 2276 2147 2099 2104 2149 2208 2196 2054 1844 1683 1590 1484 1312 1113 931 751 548 357 229 155 89 24 -16 -34 -81 -198 -367 -534 -644 -678 -657 -639 -660 -690 -677 -618 -572 -573 -573 -505 -376 -259 -203 -193 -186 -169 -155 -152 -152 -139 -93 -7 97 169 175 129 75 44 40 51 67 83 89 85 74 59 39 16 -3 -11 -10 -7 -5 -3 2 6 7 4 -3 -11 -15 -9 3 8 4 -4 -7 -6 -4 -5 -7 -8 -8 -7 -5 -4 -3 -3 -3 -3 -3 -2 -2 -1 0 +0 -8 -8 -8 -8 -8 0 0 8 16 48 88 152 192 208 192 184 192 232 296 408 536 656 752 808 832 848 848 824 744 632 496 368 304 392 632 944 1192 1288 1248 1048 744 512 600 1048 1552 1800 1808 1816 1928 1976 1832 1496 984 176 -992 -2320 -3472 -4176 -4296 -3864 -3168 -2744 -2952 -3632 -4440 -5312 -6344 -7232 -7272 -6248 -4784 -3592 -2496 -920 992 2256 2216 1272 144 -968 -2152 -3088 -3288 -2888 -2456 -1976 -776 1152 2560 2432 1544 1672 3104 4192 3576 2000 968 384 -1120 -3712 -5488 -4848 -2704 -1280 -1416 -2048 -2240 -2240 -2432 -2376 -1648 -992 -1288 -2080 -1904 -200 2136 4104 5496 6232 5696 3688 1392 176 -216 -1336 -3688 -5856 -6112 -4528 -2720 -1992 -2392 -3336 -4424 -5368 -5776 -5272 -4112 -3320 -4096 -6760 -10232 -12792 -13416 -12672 -11752 -11064 -9752 -6848 -2632 1296 3296 3104 2224 2776 6128 11936 18424 23480 25768 25456 24192 24096 25920 28168 28408 25600 21144 17160 14656 13336 12944 13616 15136 16448 16464 14920 12240 8920 5456 2568 728 -544 -2432 -5416 -8504 -10200 -10304 -10216 -11496 -14096 -16784 -18616 -19472 -19056 -16528 -12120 -8080 -6776 -7640 -7504 -4568 -1128 -872 -4016 -6640 -5184 -440 3552 4064 2056 384 616 1936 2832 3024 3352 3856 3288 1024 -1120 -232 3968 7920 7912 4136 24 -2064 -3272 -5808 -9528 -11744 -10368 -6280 -2528 -1472 -2752 -4184 -4224 -3704 -4432 -6472 -7648 -6048 -2368 864 2088 1792 1368 1520 1928 1752 168 -2808 -6016 -8200 -9296 -10144 -10736 -10080 -8224 -7272 -9144 -12792 -15088 -14512 -12760 -12352 -13816 -15736 -16808 -16680 -15512 -13536 -11272 -9360 -7928 -6552 -4928 -3232 -1848 -672 784 2944 5672 8256 10064 11400 13296 16192 19032 20304 19648 18208 17176 16792 16832 17192 17664 17568 16432 14752 13464 12720 11872 10496 8904 7448 6008 4384 2856 1832 1240 712 192 -128 -272 -648 -1584 -2936 -4272 -5152 -5424 -5256 -5112 -5280 -5520 -5416 -4944 -4576 -4584 -4584 -4040 -3008 -2072 -1624 -1544 -1488 -1352 -1240 -1216 -1216 -1112 -744 -56 776 1352 1400 1032 600 352 320 408 536 664 712 680 592 472 312 128 -24 -88 -80 -56 -40 -24 16 48 56 32 -24 -88 -120 -72 24 64 32 -32 -56 -48 -32 -40 -56 -64 -64 -56 -40 -32 -24 -24 -24 -24 -24 -16 -16 -8 0 +3 +0 0 1 2 3 6 10 12 14 15 15 15 16 18 22 28 35 39 40 36 29 19 10 3 -3 -13 -30 -50 -64 -69 -73 -87 -113 -143 -167 -185 -191 -175 -135 -95 -83 -98 -101 -65 -17 -14 -65 -111 -90 -8 66 79 41 8 13 43 66 73 84 100 88 28 -32 -7 121 249 257 138 1 -74 -120 -219 -370 -469 -426 -266 -110 -66 -127 -197 -205 -184 -226 -339 -411 -334 -134 50 124 109 85 97 127 118 11 -198 -436 -608 -707 -789 -856 -822 -687 -622 -800 -1147 -1384 -1362 -1226 -1214 -1389 -1620 -1770 -1797 -1710 -1527 -1301 -1105 -958 -810 -623 -419 -244 -91 108 418 823 1224 1528 1772 2115 2635 3169 
3460 3427 3251 3138 3143 3225 3373 3548 3612 3462 3185 2977 2881 2755 2497 2171 1861 1539 1153 770 506 351 208 58 -38 -86 -210 -525 -999 -1494 -1855 -2006 -1999 -1999 -2124 -2288 -2309 -2172 -2072 -2139 -2203 -2000 -1536 -1090 -885 -866 -865 -814 -771 -780 -808 -764 -532 -38 601 1084 1169 897 541 333 315 416 577 739 835 833 761 639 447 195 -30 -137 -130 -97 -75 -35 37 114 147 101 -56 -275 -389 -240 95 302 177 -128 -303 -266 -214 -321 -519 -662 -721 -735 -686 -549 -442 -549 -887 -1307 -1678 -1980 -2183 -2198 -2045 -1944 -2085 -2398 -2649 -2755 -2847 -3021 -3173 -3131 -2888 -2601 -2409 -2321 -2234 -2012 -1618 -1175 -859 -658 -336 282 1049 1654 2008 2322 2763 3216 3455 3478 3502 3682 3952 4141 4189 4141 4068 3987 3870 3679 3418 3144 2907 2681 2382 1965 1472 986 555 199 -69 -266 -435 -601 -758 -917 -1130 -1417 -1701 -1890 -1998 -2105 -2206 -2175 -1946 -1645 -1475 -1511 -1642 -1707 -1624 -1398 -1096 -806 -585 -416 -233 -10 213 386 514 628 732 809 858 903 952 998 1040 1079 1095 1042 923 799 723 682 625 538 458 402 336 224 90 -11 -54 -69 -92 -125 -149 -160 -173 -198 -228 -259 -290 -316 -322 -305 -274 -249 -237 -231 -230 -236 -245 -247 -233 -214 -198 -192 -189 -189 -191 -191 -186 -172 -154 -136 -122 -110 -100 -93 -86 -77 -66 -56 -46 -35 -23 -14 -6 -1 2 3 3 3 3 2 2 1 0 +0 0 4 8 12 24 40 48 56 60 60 60 64 72 88 112 140 156 160 144 116 76 40 12 -12 -52 -120 -200 -256 -276 -292 -348 -452 -572 -668 -740 -764 -700 -540 -380 -332 -392 -404 -260 -68 -56 -260 -444 -360 -32 264 316 164 32 52 172 264 292 336 400 352 112 -128 -28 484 996 1028 552 4 -296 -480 -876 -1480 -1876 -1704 -1064 -440 -264 -508 -788 -820 -736 -904 -1356 -1644 -1336 -536 200 496 436 340 388 508 472 44 -792 -1744 -2432 -2828 -3156 -3424 -3288 -2748 -2488 -3200 -4588 -5536 -5448 -4904 -4856 -5556 -6480 -7080 -7188 -6840 -6108 -5204 -4420 -3832 -3240 -2492 -1676 -976 -364 432 1672 3292 4896 6112 7088 8460 10540 12676 13840 13708 13004 12552 12572 12900 13492 14192 14448 13848 12740 11908 11524 11020 9988 8684 7444 6156 4612 3080 2024 1404 832 232 -152 -344 -840 -2100 -3996 -5976 -7420 -8024 -7996 -7996 -8496 -9152 -9236 -8688 -8288 -8556 -8812 -8000 -6144 -4360 -3540 -3464 -3460 -3256 -3084 -3120 -3232 -3056 -2128 -152 2404 4336 4676 3588 2164 1332 1260 1664 2308 2956 3340 3332 3044 2556 1788 780 -120 -548 -520 -388 -300 -140 148 456 588 404 -224 -1100 -1556 -960 380 1208 708 -512 -1212 -1064 -856 -1284 -2076 -2648 -2884 -2940 -2744 -2196 -1768 -2196 -3548 -5228 -6712 -7920 -8732 -8792 -8180 -7776 -8340 -9592 -10596 -11020 -11388 -12084 -12692 -12524 -11552 -10404 -9636 -9284 -8936 -8048 -6472 -4700 -3436 -2632 -1344 1128 4196 6616 8032 9288 11052 12864 13820 13912 14008 14728 15808 16564 16756 16564 16272 15948 15480 14716 13672 12576 11628 10724 9528 7860 5888 3944 2220 796 -276 -1064 -1740 -2404 -3032 -3668 -4520 -5668 -6804 -7560 -7992 -8420 -8824 -8700 -7784 -6580 -5900 -6044 -6568 -6828 -6496 -5592 -4384 -3224 -2340 -1664 -932 -40 852 1544 2056 2512 2928 3236 3432 3612 3808 3992 4160 4316 4380 4168 3692 3196 2892 2728 2500 2152 1832 1608 1344 896 360 -44 -216 -276 -368 -500 -596 -640 -692 -792 -912 -1036 -1160 -1264 -1288 -1220 -1096 -996 -948 -924 -920 -944 -980 -988 -932 -856 -792 -768 -756 -756 -764 -764 -744 -688 -616 -544 -488 -440 -400 -372 -344 -308 -264 -224 -184 -140 -92 -56 -24 -4 8 12 12 12 12 8 8 4 0 +2 +0 -1 -1 -2 -3 -5 -6 -8 -11 -14 -16 -18 -22 -26 -27 -24 -19 -18 -19 -21 -22 -23 -25 -28 -29 -22 -2 27 53 62 50 32 21 21 29 43 59 70 73 70 62 45 21 -4 -17 -16 -13 -10 -5 5 17 23 16 -10 -48 -71 -45 18 60 36 -28 -68 -61 -51 
-79 -131 -172 -193 -203 -196 -161 -134 -171 -284 -430 -567 -688 -780 -806 -771 -753 -829 -979 -1110 -1184 -1256 -1367 -1472 -1490 -1410 -1302 -1236 -1220 -1204 -1111 -915 -681 -510 -400 -209 179 684 1105 1373 1626 1981 2359 2594 2672 2753 2963 3253 3490 3610 3651 3670 3681 3655 3555 3380 3179 3009 2839 2580 2178 1670 1144 658 242 -85 -339 -566 -800 -1033 -1279 -1614 -2071 -2546 -2896 -3135 -3383 -3628 -3664 -3359 -2910 -2672 -2804 -3124 -3330 -3244 -2863 -2303 -1737 -1292 -941 -542 -23 523 971 1328 1667 1998 2267 2472 2674 2900 3126 3349 3580 3735 3664 3343 2982 2783 2708 2558 2275 2000 1813 1564 1081 449 -54 -287 -381 -527 -740 -916 -1025 -1152 -1364 -1638 -1940 -2266 -2572 -2744 -2707 -2546 -2425 -2415 -2475 -2591 -2799 -3066 -3251 -3251 -3145 -3097 -3173 -3332 -3546 -3826 -4120 -4293 -4270 -4117 -3952 -3832 -3769 -3786 -3884 -3979 -3964 -3830 -3629 -3352 -2918 -2292 -1540 -777 -89 470 905 1310 1834 2526 3282 3983 4622 5233 5780 6205 6538 6844 7061 7024 6723 6393 6267 6301 6236 5904 5381 4853 4387 3889 3239 2460 1726 1185 773 279 -392 -1066 -1544 -1879 -2321 -2950 -3537 -3813 -3777 -3634 -3535 -3519 -3591 -3731 -3822 -3735 -3490 -3239 -3027 -2716 -2201 -1585 -1039 -596 -193 140 330 456 722 1186 1641 1892 2012 2212 2530 2773 2794 2668 2579 2593 2621 2536 2318 2060 1903 1890 1892 1703 1258 728 336 134 -4 -181 -371 -514 -628 -765 -915 -1012 -1038 -1046 -1087 -1148 -1178 -1154 -1091 -1020 -959 -916 -876 -819 -748 -696 -685 -689 -653 -562 -463 -413 -422 -454 -469 -468 -472 -493 -511 -505 -477 -454 -458 -475 -477 -453 -419 -389 -361 -324 -280 -237 -197 -144 -75 -4 47 73 86 103 131 164 193 215 232 244 246 239 224 205 188 172 154 134 114 98 83 69 57 46 33 21 12 6 2 0 -1 -1 -1 0 +0 -4 -4 -8 -12 -20 -24 -32 -44 -56 -64 -72 -88 -104 -108 -96 -76 -72 -76 -84 -88 -92 -100 -112 -116 -88 -8 108 212 248 200 128 84 84 116 172 236 280 292 280 248 180 84 -16 -68 -64 -52 -40 -20 20 68 92 64 -40 -192 -284 -180 72 240 144 -112 -272 -244 -204 -316 -524 -688 -772 -812 -784 -644 -536 -684 -1136 -1720 -2268 -2752 -3120 -3224 -3084 -3012 -3316 -3916 -4440 -4736 -5024 -5468 -5888 -5960 -5640 -5208 -4944 -4880 -4816 -4444 -3660 -2724 -2040 -1600 -836 716 2736 4420 5492 6504 7924 9436 10376 10688 11012 11852 13012 13960 14440 14604 14680 14724 14620 14220 13520 12716 12036 11356 10320 8712 6680 4576 2632 968 -340 -1356 -2264 -3200 -4132 -5116 -6456 -8284 -10184 -11584 -12540 -13532 -14512 -14656 -13436 -11640 -10688 -11216 -12496 -13320 -12976 -11452 -9212 -6948 -5168 -3764 -2168 -92 2092 3884 5312 6668 7992 9068 9888 10696 11600 12504 13396 14320 14940 14656 13372 11928 11132 10832 10232 9100 8000 7252 6256 4324 1796 -216 -1148 -1524 -2108 -2960 -3664 -4100 -4608 -5456 -6552 -7760 -9064 -10288 -10976 -10828 -10184 -9700 -9660 -9900 -10364 -11196 -12264 -13004 -13004 -12580 -12388 -12692 -13328 -14184 -15304 -16480 -17172 -17080 -16468 -15808 -15328 -15076 -15144 -15536 -15916 -15856 -15320 -14516 -13408 -11672 -9168 -6160 -3108 -356 1880 3620 5240 7336 10104 13128 15932 18488 20932 23120 24820 26152 27376 28244 28096 26892 25572 25068 25204 24944 23616 21524 19412 17548 15556 12956 9840 6904 4740 3092 1116 -1568 -4264 -6176 -7516 -9284 -11800 -14148 -15252 -15108 -14536 -14140 -14076 -14364 -14924 -15288 -14940 -13960 -12956 -12108 -10864 -8804 -6340 -4156 -2384 -772 560 1320 1824 2888 4744 6564 7568 8048 8848 10120 11092 11176 10672 10316 10372 10484 10144 9272 8240 7612 7560 7568 6812 5032 2912 1344 536 -16 -724 -1484 -2056 -2512 -3060 -3660 -4048 -4152 -4184 -4348 -4592 -4712 -4616 -4364 -4080 -3836 -3664 -3504 
-3276 -2992 -2784 -2740 -2756 -2612 -2248 -1852 -1652 -1688 -1816 -1876 -1872 -1888 -1972 -2044 -2020 -1908 -1816 -1832 -1900 -1908 -1812 -1676 -1556 -1444 -1296 -1120 -948 -788 -576 -300 -16 188 292 344 412 524 656 772 860 928 976 984 956 896 820 752 688 616 536 456 392 332 276 228 184 132 84 48 24 8 0 -4 -4 -4 0 +2 +0 0 1 1 3 6 9 12 16 22 26 28 29 32 36 38 39 38 39 37 28 12 -2 -10 -15 -22 -32 -43 -51 -62 -78 -99 -125 -154 -185 -208 -217 -215 -215 -226 -243 -266 -301 -345 -382 -399 -402 -413 -441 -482 -533 -598 -669 -723 -746 -745 -741 -744 -758 -787 -834 -884 -909 -907 -887 -845 -759 -614 -425 -222 -27 142 281 418 602 853 1139 1422 1695 1972 2237 2466 2668 2866 3034 3098 3042 2966 2982 3076 3120 3027 2828 2614 2421 2198 1875 1460 1049 737 492 182 -262 -730 -1081 -1347 -1703 -2215 -2718 -2997 -3039 -2992 -2979 -3033 -3166 -3367 -3528 -3528 -3373 -3203 -3061 -2811 -2331 -1717 -1152 -675 -224 166 401 567 919 1544 2186 2580 2806 3160 3698 4152 4282 4187 4144 4266 4417 4380 4100 3732 3533 3596 3691 3404 2578 1529 725 296 -9 -421 -886 -1260 -1579 -1977 -2428 -2759 -2906 -3011 -3217 -3493 -3686 -3716 -3616 -3478 -3371 -3316 -3270 -3150 -2965 -2846 -2894 -3001 -2939 -2618 -2229 -2053 -2173 -2419 -2592 -2678 -2803 -3037 -3274 -3358 -3295 -3269 -3434 -3712 -3886 -3854 -3724 -3618 -3513 -3304 -2996 -2674 -2327 -1799 -989 -55 705 1148 1426 1821 2472 3307 4169 4996 5803 6575 7202 7588 7756 7833 7914 7993 7982 7829 7556 7277 7084 6975 6808 6411 5682 4650 3438 2241 1249 525 -98 -881 -1896 -2939 -3760 -4320 -4748 -5135 -5475 -5796 -6173 -6566 -6800 -6731 -6394 -5879 -5231 -4531 -3960 -3625 -3362 -2904 -2218 -1537 -1014 -529 101 873 1667 2437 3158 3699 3944 4010 4171 4504 4791 4846 4770 4763 4836 4824 4637 4314 3871 3255 2522 1865 1418 1117 835 553 280 -83 -664 -1364 -1876 -2018 -1945 -1961 -2189 -2502 -2717 -2753 -2627 -2410 -2218 -2162 -2233 -2281 -2163 -1907 -1655 -1475 -1290 -1011 -683 -456 -419 -511 -592 -568 -460 -357 -346 -445 -603 -741 -818 -848 -862 -867 -863 -881 -953 -1062 -1130 -1096 -987 -868 -770 -658 -514 -369 -256 -151 -11 159 302 387 448 533 650 764 854 917 962 984 984 966 938 895 838 778 721 661 593 521 457 399 337 263 183 111 54 12 -24 -56 -83 -104 -116 -126 -134 -141 -145 -146 -139 -126 -108 -90 -74 -61 -50 -40 -30 -22 -15 -10 -5 -2 -1 0 0 0 +0 0 4 4 12 24 36 48 64 88 104 112 116 128 144 152 156 152 156 148 112 48 -8 -40 -60 -88 -128 -172 -204 -248 -312 -396 -500 -616 -740 -832 -868 -860 -860 -904 -972 -1064 -1204 -1380 -1528 -1596 -1608 -1652 -1764 -1928 -2132 -2392 -2676 -2892 -2984 -2980 -2964 -2976 -3032 -3148 -3336 -3536 -3636 -3628 -3548 -3380 -3036 -2456 -1700 -888 -108 568 1124 1672 2408 3412 4556 5688 6780 7888 8948 9864 10672 11464 12136 12392 12168 11864 11928 12304 12480 12108 11312 10456 9684 8792 7500 5840 4196 2948 1968 728 -1048 -2920 -4324 -5388 -6812 -8860 -10872 -11988 -12156 -11968 -11916 -12132 -12664 -13468 -14112 -14112 -13492 -12812 -12244 -11244 -9324 -6868 -4608 -2700 -896 664 1604 2268 3676 6176 8744 10320 11224 12640 14792 16608 17128 16748 16576 17064 17668 17520 16400 14928 14132 14384 14764 13616 10312 6116 2900 1184 -36 -1684 -3544 -5040 -6316 -7908 -9712 -11036 -11624 -12044 -12868 -13972 -14744 -14864 -14464 -13912 -13484 -13264 -13080 -12600 -11860 -11384 -11576 -12004 -11756 -10472 -8916 -8212 -8692 -9676 -10368 -10712 -11212 -12148 -13096 -13432 -13180 -13076 -13736 -14848 -15544 -15416 -14896 -14472 -14052 -13216 -11984 -10696 -9308 -7196 -3956 -220 2820 4592 5704 7284 9888 13228 16676 19984 23212 26300 28808 30352 31024 31332 31656 31972 31928 
31316 30224 29108 28336 27900 27232 25644 22728 18600 13752 8964 4996 2100 -392 -3524 -7584 -11756 -15040 -17280 -18992 -20540 -21900 -23184 -24692 -26264 -27200 -26924 -25576 -23516 -20924 -18124 -15840 -14500 -13448 -11616 -8872 -6148 -4056 -2116 404 3492 6668 9748 12632 14796 15776 16040 16684 18016 19164 19384 19080 19052 19344 19296 18548 17256 15484 13020 10088 7460 5672 4468 3340 2212 1120 -332 -2656 -5456 -7504 -8072 -7780 -7844 -8756 -10008 -10868 -11012 -10508 -9640 -8872 -8648 -8932 -9124 -8652 -7628 -6620 -5900 -5160 -4044 -2732 -1824 -1676 -2044 -2368 -2272 -1840 -1428 -1384 -1780 -2412 -2964 -3272 -3392 -3448 -3468 -3452 -3524 -3812 -4248 -4520 -4384 -3948 -3472 -3080 -2632 -2056 -1476 -1024 -604 -44 636 1208 1548 1792 2132 2600 3056 3416 3668 3848 3936 3936 3864 3752 3580 3352 3112 2884 2644 2372 2084 1828 1596 1348 1052 732 444 216 48 -96 -224 -332 -416 -464 -504 -536 -564 -580 -584 -556 -504 -432 -360 -296 -244 -200 -160 -120 -88 -60 -40 -20 -8 -4 0 0 0 +2 +0 -1 -2 -3 -5 -8 -11 -14 -17 -21 -25 -29 -33 -37 -40 -44 -50 -59 -64 -63 -59 -60 -69 -83 -97 -108 -122 -141 -163 -179 -187 -198 -220 -252 -279 -292 -298 -305 -312 -309 -294 -275 -251 -203 -117 -7 90 153 198 263 371 516 676 841 1013 1189 1350 1472 1559 1626 1699 1775 1830 1853 1846 1835 1841 1868 1879 1824 1665 1402 1068 716 410 177 -34 -315 -696 -1108 -1455 -1717 -1938 -2151 -2353 -2557 -2793 -3047 -3237 -3286 -3200 -3014 -2750 -2441 -2186 -2049 -1947 -1724 -1349 -957 -647 -345 67 597 1167 1746 2316 2777 3030 3152 3356 3708 4037 4177 4205 4298 4465 4557 4480 4266 3914 3369 2670 2020 1572 1267 969 656 341 -103 -845 -1775 -2499 -2751 -2713 -2801 -3200 -3746 -4163 -4320 -4221 -3964 -3737 -3732 -3949 -4132 -4015 -3628 -3227 -2948 -2643 -2123 -1471 -1007 -949 -1189 -1412 -1392 -1156 -922 -916 -1213 -1687 -2133 -2421 -2582 -2697 -2790 -2861 -3004 -3348 -3845 -4216 -4217 -3912 -3553 -3251 -2870 -2314 -1716 -1228 -750 -55 848 1671 2220 2663 3292 4167 5092 5910 6612 7225 7707 8030 8236 8351 8332 8173 7952 7735 7461 7041 6524 6033 5574 4981 4113 3036 1960 1032 242 -511 -1290 -2069 -2774 -3384 -3966 -4614 -5348 -6087 -6726 -7159 -7284 -7070 -6665 -6305 -6084 -5853 -5472 -4992 -4531 -4011 -3229 -2182 -1111 -184 686 1606 2480 3170 3745 4361 4967 5364 5572 5850 6298 6654 6645 6376 6127 5946 5631 5095 4482 3914 3341 2721 2140 1617 1011 250 -480 -985 -1384 -1964 -2703 -3245 -3374 -3309 -3361 -3512 -3548 -3451 -3414 -3507 -3535 -3334 -3003 -2735 -2540 -2288 -1935 -1573 -1278 -1026 -788 -600 -501 -463 -444 -441 -453 -431 -335 -221 -209 -352 -577 -794 -982 -1175 -1355 -1464 -1489 -1504 -1590 -1726 -1808 -1763 -1629 -1492 -1399 -1341 -1283 -1193 -1031 -773 -447 -121 168 440 717 965 1142 1264 1395 1558 1713 1818 1894 1979 2055 2075 2018 1925 1829 1727 1597 1450 1302 1152 995 842 712 592 451 281 109 -34 -152 -265 -373 -459 -518 -569 -621 -656 -655 -624 -589 -566 -544 -512 -468 -417 -355 -284 -214 -157 -110 -64 -16 23 48 64 81 94 100 101 102 103 100 92 85 76 66 54 43 33 24 17 11 6 3 1 0 0 0 +0 -2 -4 -6 -10 -16 -22 -28 -34 -42 -50 -58 -66 -74 -80 -88 -100 -118 -128 -126 -118 -120 -138 -166 -194 -216 -244 -282 -326 -358 -374 -396 -440 -504 -558 -584 -596 -610 -624 -618 -588 -550 -502 -406 -234 -14 180 306 396 526 742 1032 1352 1682 2026 2378 2700 2944 3118 3252 3398 3550 3660 3706 3692 3670 3682 3736 3758 3648 3330 2804 2136 1432 820 354 -68 -630 -1392 -2216 -2910 -3434 -3876 -4302 -4706 -5114 -5586 -6094 -6474 -6572 -6400 -6028 -5500 -4882 -4372 -4098 -3894 -3448 -2698 -1914 -1294 -690 134 1194 2334 3492 4632 5554 6060 6304 6712 7416 8074 8354 
8410 8596 8930 9114 8960 8532 7828 6738 5340 4040 3144 2534 1938 1312 682 -206 -1690 -3550 -4998 -5502 -5426 -5602 -6400 -7492 -8326 -8640 -8442 -7928 -7474 -7464 -7898 -8264 -8030 -7256 -6454 -5896 -5286 -4246 -2942 -2014 -1898 -2378 -2824 -2784 -2312 -1844 -1832 -2426 -3374 -4266 -4842 -5164 -5394 -5580 -5722 -6008 -6696 -7690 -8432 -8434 -7824 -7106 -6502 -5740 -4628 -3432 -2456 -1500 -110 1696 3342 4440 5326 6584 8334 10184 11820 13224 14450 15414 16060 16472 16702 16664 16346 15904 15470 14922 14082 13048 12066 11148 9962 8226 6072 3920 2064 484 -1022 -2580 -4138 -5548 -6768 -7932 -9228 -10696 -12174 -13452 -14318 -14568 -14140 -13330 -12610 -12168 -11706 -10944 -9984 -9062 -8022 -6458 -4364 -2222 -368 1372 3212 4960 6340 7490 8722 9934 10728 11144 11700 12596 13308 13290 12752 12254 11892 11262 10190 8964 7828 6682 5442 4280 3234 2022 500 -960 -1970 -2768 -3928 -5406 -6490 -6748 -6618 -6722 -7024 -7096 -6902 -6828 -7014 -7070 -6668 -6006 -5470 -5080 -4576 -3870 -3146 -2556 -2052 -1576 -1200 -1002 -926 -888 -882 -906 -862 -670 -442 -418 -704 -1154 -1588 -1964 -2350 -2710 -2928 -2978 -3008 -3180 -3452 -3616 -3526 -3258 -2984 -2798 -2682 -2566 -2386 -2062 -1546 -894 -242 336 880 1434 1930 2284 2528 2790 3116 3426 3636 3788 3958 4110 4150 4036 3850 3658 3454 3194 2900 2604 2304 1990 1684 1424 1184 902 562 218 -68 -304 -530 -746 -918 -1036 -1138 -1242 -1312 -1310 -1248 -1178 -1132 -1088 -1024 -936 -834 -710 -568 -428 -314 -220 -128 -32 46 96 128 162 188 200 202 204 206 200 184 170 152 132 108 86 66 48 34 22 12 6 2 0 0 0 +1 +0 -1 -1 -2 -4 -6 -8 -11 -14 -18 -25 -33 -43 -50 -53 -55 -56 -56 -50 -41 -33 -22 -2 29 62 88 115 152 207 270 335 398 461 522 575 624 667 702 724 741 757 765 756 733 707 683 636 548 421 283 155 37 -83 -218 -362 -502 -635 -770 -928 -1111 -1308 -1494 -1642 -1725 -1728 -1681 -1639 -1630 -1616 -1557 -1464 -1367 -1246 -1032 -717 -376 -64 245 589 935 1227 1488 1779 2080 2305 2458 2646 2922 3166 3244 3190 3141 3125 3033 2812 2534 2267 1982 1654 1332 1030 660 167 -329 -690 -992 -1441 -2030 -2493 -2652 -2663 -2767 -2959 -3059 -3042 -3081 -3237 -3339 -3222 -2970 -2766 -2628 -2422 -2096 -1743 -1450 -1191 -935 -729 -623 -589 -578 -587 -617 -601 -478 -323 -313 -538 -905 -1275 -1616 -1979 -2339 -2590 -2697 -2792 -3025 -3365 -3612 -3612 -3422 -3214 -3092 -3039 -2985 -2848 -2526 -1944 -1154 -320 459 1235 2066 2858 3478 3959 4495 5168 5845 6391 6863 7387 7909 8230 8260 8135 7980 7776 7438 6983 6481 5937 5310 4659 4078 3518 2785 1802 731 -231 -1092 -1986 -2918 -3743 -4412 -5060 -5772 -6390 -6687 -6685 -6637 -6709 -6802 -6751 -6526 -6143 -5545 -4703 -3766 -2944 -2213 -1367 -361 587 1302 1895 2564 3272 3842 4273 4752 5329 5822 6109 6312 6543 6664 6468 6025 5586 5214 4726 4032 3322 2784 2321 1717 960 226 -404 -1019 -1684 -2322 -2864 -3353 -3828 -4204 -4368 -4343 -4252 -4161 -4058 -3944 -3848 -3756 -3590 -3327 -3027 -2742 -2428 -2038 -1623 -1285 -1034 -770 -429 -92 89 65 -83 -225 -288 -277 -244 -266 -411 -694 -1048 -1364 -1598 -1794 -2017 -2267 -2490 -2648 -2744 -2775 -2712 -2564 -2405 -2299 -2208 -2029 -1728 -1370 -1019 -669 -292 91 441 782 1175 1616 2008 2279 2457 2620 2807 3004 3183 3330 3409 3394 3309 3224 3179 3122 2977 2726 2430 2155 1911 1651 1328 949 578 276 50 -149 -364 -599 -827 -1029 -1204 -1342 -1435 -1475 -1470 -1424 -1351 -1276 -1213 -1153 -1061 -927 -780 -654 -555 -460 -353 -234 -106 29 153 241 289 323 369 421 458 470 469 463 455 444 431 412 379 328 271 222 183 150 116 86 59 36 14 -5 -18 -28 -36 -42 -47 -49 -48 -43 -37 -31 -25 -20 -16 -13 -9 -6 -4 -2 -1 0 +0 -2 -2 -4 -8 -12 -16 
-22 -28 -36 -50 -66 -86 -100 -106 -110 -112 -112 -100 -82 -66 -44 -4 58 124 176 230 304 414 540 670 796 922 1044 1150 1248 1334 1404 1448 1482 1514 1530 1512 1466 1414 1366 1272 1096 842 566 310 74 -166 -436 -724 -1004 -1270 -1540 -1856 -2222 -2616 -2988 -3284 -3450 -3456 -3362 -3278 -3260 -3232 -3114 -2928 -2734 -2492 -2064 -1434 -752 -128 490 1178 1870 2454 2976 3558 4160 4610 4916 5292 5844 6332 6488 6380 6282 6250 6066 5624 5068 4534 3964 3308 2664 2060 1320 334 -658 -1380 -1984 -2882 -4060 -4986 -5304 -5326 -5534 -5918 -6118 -6084 -6162 -6474 -6678 -6444 -5940 -5532 -5256 -4844 -4192 -3486 -2900 -2382 -1870 -1458 -1246 -1178 -1156 -1174 -1234 -1202 -956 -646 -626 -1076 -1810 -2550 -3232 -3958 -4678 -5180 -5394 -5584 -6050 -6730 -7224 -7224 -6844 -6428 -6184 -6078 -5970 -5696 -5052 -3888 -2308 -640 918 2470 4132 5716 6956 7918 8990 10336 11690 12782 13726 14774 15818 16460 16520 16270 15960 15552 14876 13966 12962 11874 10620 9318 8156 7036 5570 3604 1462 -462 -2184 -3972 -5836 -7486 -8824 -10120 -11544 -12780 -13374 -13370 -13274 -13418 -13604 -13502 -13052 -12286 -11090 -9406 -7532 -5888 -4426 -2734 -722 1174 2604 3790 5128 6544 7684 8546 9504 10658 11644 12218 12624 13086 13328 12936 12050 11172 10428 9452 8064 6644 5568 4642 3434 1920 452 -808 -2038 -3368 -4644 -5728 -6706 -7656 -8408 -8736 -8686 -8504 -8322 -8116 -7888 -7696 -7512 -7180 -6654 -6054 -5484 -4856 -4076 -3246 -2570 -2068 -1540 -858 -184 178 130 -166 -450 -576 -554 -488 -532 -822 -1388 -2096 -2728 -3196 -3588 -4034 -4534 -4980 -5296 -5488 -5550 -5424 -5128 -4810 -4598 -4416 -4058 -3456 -2740 -2038 -1338 -584 182 882 1564 2350 3232 4016 4558 4914 5240 5614 6008 6366 6660 6818 6788 6618 6448 6358 6244 5954 5452 4860 4310 3822 3302 2656 1898 1156 552 100 -298 -728 -1198 -1654 -2058 -2408 -2684 -2870 -2950 -2940 -2848 -2702 -2552 -2426 -2306 -2122 -1854 -1560 -1308 -1110 -920 -706 -468 -212 58 306 482 578 646 738 842 916 940 938 926 910 888 862 824 758 656 542 444 366 300 232 172 118 72 28 -10 -36 -56 -72 -84 -94 -98 -96 -86 -74 -62 -50 -40 -32 -26 -18 -12 -8 -4 -2 0 +1 +0 0 0 1 3 7 11 16 24 34 46 58 73 92 110 125 139 154 168 177 183 187 187 181 173 163 152 129 89 38 -14 -66 -127 -198 -269 -335 -405 -487 -567 -624 -655 -681 -722 -765 -793 -800 -785 -739 -653 -545 -443 -346 -222 -61 102 235 355 497 657 798 917 1055 1221 1378 1492 1591 1700 1785 1785 1714 1637 1572 1467 1288 1091 940 806 612 352 85 -157 -405 -687 -973 -1231 -1480 -1732 -1951 -2079 -2120 -2128 -2133 -2133 -2125 -2124 -2124 -2080 -1975 -1840 -1707 -1547 -1330 -1084 -879 -724 -552 -315 -69 69 51 -67 -186 -243 -239 -216 -240 -379 -656 -1012 -1349 -1616 -1857 -2136 -2455 -2759 -3003 -3183 -3293 -3294 -3186 -3059 -2993 -2941 -2766 -2411 -1956 -1489 -1002 -447 143 708 1286 1981 2790 3552 4129 4561 4986 5476 6003 6521 6997 7349 7503 7502 7503 7591 7652 7492 7044 6448 5876 5355 4754 3931 2890 1810 891 167 -506 -1279 -2168 -3084 -3959 -4775 -5494 -6058 -6430 -6617 -6629 -6505 -6345 -6246 -6148 -5865 -5309 -4628 -4031 -3554 -3058 -2440 -1685 -795 227 1250 2055 2574 3011 3598 4309 4921 5312 5566 5797 6017 6202 6362 6443 6283 5793 5110 4473 3962 3472 2911 2315 1740 1152 506 -159 -752 -1273 -1813 -2415 -3034 -3586 -4013 -4272 -4325 -4222 -4098 -4070 -4130 -4197 -4214 -4149 -3925 -3463 -2834 -2254 -1875 -1621 -1305 -874 -442 -117 98 211 160 -55 -252 -214 30 184 -8 -519 -1138 -1711 -2202 -2574 -2785 -2893 -3066 -3385 -3737 -3929 -3923 -3836 -3775 -3737 -3671 -3542 -3310 -2914 -2339 -1686 -1098 -615 -150 383 969 1527 2029 2521 3044 3569 4056 4491 4882 5192 5355 5354 5265 
5192 5151 5067 4880 4613 4317 3984 3575 3107 2649 2239 1830 1357 818 273 -227 -659 -1014 -1298 -1556 -1843 -2159 -2434 -2602 -2661 -2662 -2640 -2605 -2552 -2481 -2361 -2155 -1865 -1550 -1270 -1040 -833 -623 -409 -194 24 243 442 603 726 830 924 1010 1072 1093 1074 1027 977 936 898 847 781 709 632 544 440 327 221 134 64 2 -58 -115 -166 -204 -230 -245 -255 -260 -257 -246 -228 -207 -187 -168 -150 -131 -111 -91 -72 -55 -40 -28 -19 -10 -3 2 4 3 1 0 1 0 0 -1 -2 -2 -2 -2 -2 -1 0 +0 0 0 4 12 28 44 64 96 136 184 232 292 368 440 500 556 616 672 708 732 748 748 724 692 652 608 516 356 152 -56 -264 -508 -792 -1076 -1340 -1620 -1948 -2268 -2496 -2620 -2724 -2888 -3060 -3172 -3200 -3140 -2956 -2612 -2180 -1772 -1384 -888 -244 408 940 1420 1988 2628 3192 3668 4220 4884 5512 5968 6364 6800 7140 7140 6856 6548 6288 5868 5152 4364 3760 3224 2448 1408 340 -628 -1620 -2748 -3892 -4924 -5920 -6928 -7804 -8316 -8480 -8512 -8532 -8532 -8500 -8496 -8496 -8320 -7900 -7360 -6828 -6188 -5320 -4336 -3516 -2896 -2208 -1260 -276 276 204 -268 -744 -972 -956 -864 -960 -1516 -2624 -4048 -5396 -6464 -7428 -8544 -9820 -11036 -12012 -12732 -13172 -13176 -12744 -12236 -11972 -11764 -11064 -9644 -7824 -5956 -4008 -1788 572 2832 5144 7924 11160 14208 16516 18244 19944 21904 24012 26084 27988 29396 30012 30008 30012 30364 30608 29968 28176 25792 23504 21420 19016 15724 11560 7240 3564 668 -2024 -5116 -8672 -12336 -15836 -19100 -21976 -24232 -25720 -26468 -26516 -26020 -25380 -24984 -24592 -23460 -21236 -18512 -16124 -14216 -12232 -9760 -6740 -3180 908 5000 8220 10296 12044 14392 17236 19684 21248 22264 23188 24068 24808 25448 25772 25132 23172 20440 17892 15848 13888 11644 9260 6960 4608 2024 -636 -3008 -5092 -7252 -9660 -12136 -14344 -16052 -17088 -17300 -16888 -16392 -16280 -16520 -16788 -16856 -16596 -15700 -13852 -11336 -9016 -7500 -6484 -5220 -3496 -1768 -468 392 844 640 -220 -1008 -856 120 736 -32 -2076 -4552 -6844 -8808 -10296 -11140 -11572 -12264 -13540 -14948 -15716 -15692 -15344 -15100 -14948 -14684 -14168 -13240 -11656 -9356 -6744 -4392 -2460 -600 1532 3876 6108 8116 10084 12176 14276 16224 17964 19528 20768 21420 21416 21060 20768 20604 20268 19520 18452 17268 15936 14300 12428 10596 8956 7320 5428 3272 1092 -908 -2636 -4056 -5192 -6224 -7372 -8636 -9736 -10408 -10644 -10648 -10560 -10420 -10208 -9924 -9444 -8620 -7460 -6200 -5080 -4160 -3332 -2492 -1636 -776 96 972 1768 2412 2904 3320 3696 4040 4288 4372 4296 4108 3908 3744 3592 3388 3124 2836 2528 2176 1760 1308 884 536 256 8 -232 -460 -664 -816 -920 -980 -1020 -1040 -1028 -984 -912 -828 -748 -672 -600 -524 -444 -364 -288 -220 -160 -112 -76 -40 -12 8 16 12 4 0 4 0 0 -4 -8 -8 -8 -8 -8 -4 0 +2 +0 1 2 3 5 6 5 3 0 -4 -10 -19 -31 -47 -65 -84 -105 -125 -143 -158 -171 -184 -198 -211 -218 -213 -200 -187 -177 -163 -139 -102 -51 15 89 155 205 253 318 401 481 544 598 651 706 760 813 858 872 837 767 698 642 584 508 418 326 223 101 -33 -162 -283 -416 -572 -742 -905 -1043 -1145 -1194 -1202 -1202 -1228 -1283 -1341 -1384 -1402 -1364 -1237 -1040 -850 -726 -645 -533 -366 -190 -52 44 98 76 -27 -127 -110 15 99 -5 -294 -659 -1016 -1339 -1602 -1775 -1888 -2048 -2316 -2616 -2817 -2878 -2880 -2900 -2938 -2954 -2916 -2789 -2512 -2063 -1522 -1014 -581 -145 379 980 1580 2148 2731 3373 4048 4705 5330 5930 6454 6813 6969 7013 7078 7186 7236 7134 6907 6616 6253 5745 5112 4464 3866 3236 2459 1520 520 -443 -1317 -2076 -2726 -3352 -4073 -4894 -5664 -6211 -6521 -6696 -6821 -6911 -6959 -6948 -6797 -6379 -5678 -4850 -4088 -3447 -2842 -2189 -1479 -724 94 965 1811 2548 3168 3737 4306 4863 5333 5635 5734 
5683 5600 5563 5537 5430 5206 4907 4553 4088 3449 2672 1887 1196 596 24 -585 -1233 -1866 -2420 -2870 -3234 -3551 -3825 -4014 -4071 -4005 -3881 -3752 -3626 -3470 -3251 -2963 -2631 -2271 -1897 -1522 -1168 -838 -510 -167 138 306 293 182 115 139 156 36 -243 -587 -903 -1213 -1612 -2151 -2766 -3318 -3710 -3963 -4183 -4454 -4739 -4919 -4928 -4828 -4738 -4685 -4578 -4306 -3856 -3294 -2688 -2068 -1447 -838 -213 483 1263 2074 2842 3559 4250 4904 5465 5900 6249 6558 6825 7005 7062 7011 6867 6642 6355 6032 5679 5263 4758 4170 3545 2920 2288 1611 880 147 -507 -1032 -1477 -1927 -2414 -2881 -3257 -3521 -3704 -3827 -3898 -3922 -3906 -3837 -3694 -3478 -3222 -2934 -2604 -2239 -1883 -1563 -1247 -895 -515 -151 184 511 838 1132 1368 1559 1723 1844 1906 1930 1951 1963 1925 1830 1715 1614 1501 1346 1159 971 791 606 425 275 153 23 -133 -276 -361 -397 -432 -488 -537 -548 -529 -507 -488 -457 -417 -377 -344 -310 -271 -234 -204 -174 -136 -96 -68 -56 -50 -41 -30 -20 -14 -13 -19 -34 -52 -66 -75 -82 -89 -94 -96 -95 -96 -98 -97 -92 -84 -76 -67 -56 -47 -39 -30 -22 -15 -10 -6 -3 -2 -1 -1 0 +0 4 8 12 20 24 20 12 0 -16 -40 -76 -124 -188 -260 -336 -420 -500 -572 -632 -684 -736 -792 -844 -872 -852 -800 -748 -708 -652 -556 -408 -204 60 356 620 820 1012 1272 1604 1924 2176 2392 2604 2824 3040 3252 3432 3488 3348 3068 2792 2568 2336 2032 1672 1304 892 404 -132 -648 -1132 -1664 -2288 -2968 -3620 -4172 -4580 -4776 -4808 -4808 -4912 -5132 -5364 -5536 -5608 -5456 -4948 -4160 -3400 -2904 -2580 -2132 -1464 -760 -208 176 392 304 -108 -508 -440 60 396 -20 -1176 -2636 -4064 -5356 -6408 -7100 -7552 -8192 -9264 -10464 -11268 -11512 -11520 -11600 -11752 -11816 -11664 -11156 -10048 -8252 -6088 -4056 -2324 -580 1516 3920 6320 8592 10924 13492 16192 18820 21320 23720 25816 27252 27876 28052 28312 28744 28944 28536 27628 26464 25012 22980 20448 17856 15464 12944 9836 6080 2080 -1772 -5268 -8304 -10904 -13408 -16292 -19576 -22656 -24844 -26084 -26784 -27284 -27644 -27836 -27792 -27188 -25516 -22712 -19400 -16352 -13788 -11368 -8756 -5916 -2896 376 3860 7244 10192 12672 14948 17224 19452 21332 22540 22936 22732 22400 22252 22148 21720 20824 19628 18212 16352 13796 10688 7548 4784 2384 96 -2340 -4932 -7464 -9680 -11480 -12936 -14204 -15300 -16056 -16284 -16020 -15524 -15008 -14504 -13880 -13004 -11852 -10524 -9084 -7588 -6088 -4672 -3352 -2040 -668 552 1224 1172 728 460 556 624 144 -972 -2348 -3612 -4852 -6448 -8604 -11064 -13272 -14840 -15852 -16732 -17816 -18956 -19676 -19712 -19312 -18952 -18740 -18312 -17224 -15424 -13176 -10752 -8272 -5788 -3352 -852 1932 5052 8296 11368 14236 17000 19616 21860 23600 24996 26232 27300 28020 28248 28044 27468 26568 25420 24128 22716 21052 19032 16680 14180 11680 9152 6444 3520 588 -2028 -4128 -5908 -7708 -9656 -11524 -13028 -14084 -14816 -15308 -15592 -15688 -15624 -15348 -14776 -13912 -12888 -11736 -10416 -8956 -7532 -6252 -4988 -3580 -2060 -604 736 2044 3352 4528 5472 6236 6892 7376 7624 7720 7804 7852 7700 7320 6860 6456 6004 5384 4636 3884 3164 2424 1700 1100 612 92 -532 -1104 -1444 -1588 -1728 -1952 -2148 -2192 -2116 -2028 -1952 -1828 -1668 -1508 -1376 -1240 -1084 -936 -816 -696 -544 -384 -272 -224 -200 -164 -120 -80 -56 -52 -76 -136 -208 -264 -300 -328 -356 -376 -384 -380 -384 -392 -388 -368 -336 -304 -268 -224 -188 -156 -120 -88 -60 -40 -24 -12 -8 -4 -4 0 +2 +0 -2 -4 -6 -9 -12 -14 -15 -17 -17 -16 -13 -8 1 13 27 43 61 80 102 127 154 178 196 211 224 240 256 270 276 278 274 261 233 191 143 95 50 2 -55 -121 -192 -261 -323 -380 -436 -489 -535 -565 -580 -583 -586 -588 -585 -568 -536 -494 -441 -382 -317 -251 
-187 -117 -40 33 77 76 48 31 39 45 11 -76 -188 -297 -410 -560 -768 -1015 -1251 -1436 -1575 -1707 -1866 -2037 -2170 -2230 -2240 -2255 -2287 -2291 -2208 -2027 -1775 -1484 -1169 -838 -498 -130 300 805 1353 1898 2434 2976 3515 4008 4429 4801 5155 5492 5766 5951 6043 6055 5993 5867 5697 5487 5205 4812 4316 3753 3163 2535 1828 1021 174 -615 -1282 -1879 -2508 -3215 -3928 -4543 -5029 -5415 -5729 -5973 -6155 -6276 -6312 -6224 -6005 -5699 -5316 -4834 -4259 -3672 -3122 -2555 -1881 -1110 -333 417 1189 2001 2775 3443 4029 4571 5028 5339 5557 5777 5977 6027 5892 5687 5506 5278 4877 4326 3737 3140 2482 1798 1201 691 109 -637 -1370 -1856 -2114 -2388 -2795 -3184 -3374 -3388 -3373 -3370 -3293 -3125 -2947 -2806 -2641 -2410 -2174 -1984 -1775 -1458 -1083 -805 -692 -651 -567 -435 -309 -228 -222 -357 -675 -1105 -1510 -1851 -2191 -2580 -2964 -3292 -3619 -4033 -4525 -4975 -5299 -5499 -5614 -5643 -5582 -5466 -5308 -5034 -4559 -3914 -3230 -2605 -2023 -1418 -758 -40 752 1624 2515 3331 4048 4744 5498 6289 6998 7515 7820 7975 8065 8144 8204 8173 7997 7687 7311 6892 6393 5778 5094 4413 3716 2918 2007 1103 334 -326 -1020 -1809 -2584 -3195 -3636 -4026 -4439 -4826 -5105 -5257 -5328 -5341 -5267 -5087 -4813 -4478 -4115 -3761 -3410 -3008 -2506 -1933 -1387 -920 -476 32 595 1119 1531 1864 2177 2473 2694 2821 2882 2926 2966 2990 2980 2910 2759 2527 2262 2010 1777 1538 1281 1024 783 550 313 79 -134 -316 -477 -624 -753 -859 -939 -989 -1004 -979 -931 -887 -858 -836 -803 -751 -684 -611 -534 -459 -390 -327 -271 -220 -178 -146 -125 -111 -108 -118 -134 -148 -151 -151 -161 -187 -224 -260 -291 -313 -327 -334 -335 -337 -339 -334 -320 -299 -278 -257 -234 -206 -174 -141 -110 -80 -52 -27 -4 16 32 44 53 58 61 62 59 54 49 43 35 28 23 17 10 6 4 2 0 +0 -4 -8 -12 -18 -24 -28 -30 -34 -34 -32 -26 -16 2 26 54 86 122 160 204 254 308 356 392 422 448 480 512 540 552 556 548 522 466 382 286 190 100 4 -110 -242 -384 -522 -646 -760 -872 -978 -1070 -1130 -1160 -1166 -1172 -1176 -1170 -1136 -1072 -988 -882 -764 -634 -502 -374 -234 -80 66 154 152 96 62 78 90 22 -152 -376 -594 -820 -1120 -1536 -2030 -2502 -2872 -3150 -3414 -3732 -4074 -4340 -4460 -4480 -4510 -4574 -4582 -4416 -4054 -3550 -2968 -2338 -1676 -996 -260 600 1610 2706 3796 4868 5952 7030 8016 8858 9602 10310 10984 11532 11902 12086 12110 11986 11734 11394 10974 10410 9624 8632 7506 6326 5070 3656 2042 348 -1230 -2564 -3758 -5016 -6430 -7856 -9086 -10058 -10830 -11458 -11946 -12310 -12552 -12624 -12448 -12010 -11398 -10632 -9668 -8518 -7344 -6244 -5110 -3762 -2220 -666 834 2378 4002 5550 6886 8058 9142 10056 10678 11114 11554 11954 12054 11784 11374 11012 10556 9754 8652 7474 6280 4964 3596 2402 1382 218 -1274 -2740 -3712 -4228 -4776 -5590 -6368 -6748 -6776 -6746 -6740 -6586 -6250 -5894 -5612 -5282 -4820 -4348 -3968 -3550 -2916 -2166 -1610 -1384 -1302 -1134 -870 -618 -456 -444 -714 -1350 -2210 -3020 -3702 -4382 -5160 -5928 -6584 -7238 -8066 -9050 -9950 -10598 -10998 -11228 -11286 -11164 -10932 -10616 -10068 -9118 -7828 -6460 -5210 -4046 -2836 -1516 -80 1504 3248 5030 6662 8096 9488 10996 12578 13996 15030 15640 15950 16130 16288 16408 16346 15994 15374 14622 13784 12786 11556 10188 8826 7432 5836 4014 2206 668 -652 -2040 -3618 -5168 -6390 -7272 -8052 -8878 -9652 -10210 -10514 -10656 -10682 -10534 -10174 -9626 -8956 -8230 -7522 -6820 -6016 -5012 -3866 -2774 -1840 -952 64 1190 2238 3062 3728 4354 4946 5388 5642 5764 5852 5932 5980 5960 5820 5518 5054 4524 4020 3554 3076 2562 2048 1566 1100 626 158 -268 -632 -954 -1248 -1506 -1718 -1878 -1978 -2008 -1958 -1862 -1774 -1716 -1672 -1606 -1502 -1368 -1222 
-1068 -918 -780 -654 -542 -440 -356 -292 -250 -222 -216 -236 -268 -296 -302 -302 -322 -374 -448 -520 -582 -626 -654 -668 -670 -674 -678 -668 -640 -598 -556 -514 -468 -412 -348 -282 -220 -160 -104 -54 -8 32 64 88 106 116 122 124 118 108 98 86 70 56 46 34 20 12 8 4 0 +1 +0 1 2 4 7 12 17 21 26 32 38 41 43 43 42 37 30 23 14 2 -17 -40 -59 -73 -89 -112 -138 -157 -169 -180 -192 -199 -200 -200 -202 -201 -193 -184 -176 -166 -143 -112 -87 -78 -77 -70 -56 -42 -32 -33 -54 -106 -180 -255 -324 -397 -484 -576 -662 -752 -866 -1005 -1141 -1255 -1344 -1416 -1467 -1496 -1509 -1511 -1476 -1375 -1216 -1032 -856 -684 -493 -271 -15 283 628 999 1359 1695 2038 2425 2845 3246 3576 3817 3991 4135 4280 4419 4511 4521 4452 4339 4189 3979 3682 3324 2947 2542 2043 1438 809 251 -250 -802 -1456 -2127 -2692 -3134 -3550 -4005 -4456 -4821 -5080 -5269 -5401 -5451 -5386 -5213 -4963 -4668 -4363 -4046 -3653 -3114 -2459 -1805 -1225 -649 44 851 1635 2293 2856 3417 3973 4432 4753 4977 5177 5374 5551 5670 5677 5513 5176 4752 4333 3929 3487 2981 2444 1919 1385 810 210 -364 -885 -1373 -1845 -2291 -2687 -3024 -3280 -3424 -3439 -3373 -3311 -3301 -3314 -3284 -3170 -2982 -2748 -2484 -2209 -1939 -1685 -1445 -1215 -1016 -866 -765 -712 -719 -810 -965 -1106 -1180 -1232 -1370 -1664 -2078 -2530 -2963 -3355 -3687 -3954 -4187 -4445 -4718 -4923 -4992 -4954 -4888 -4824 -4698 -4433 -4024 -3515 -2946 -2319 -1638 -905 -128 683 1503 2308 3088 3849 4592 5289 5913 6453 6918 7317 7643 7881 8032 8122 8165 8144 8006 7722 7314 6850 6368 5846 5248 4571 3852 3128 2401 1652 882 116 -627 -1348 -2048 -2708 -3286 -3754 -4124 -4417 -4656 -4837 -4957 -5028 -5041 -4973 -4807 -4568 -4303 -4042 -3753 -3368 -2859 -2271 -1709 -1236 -818 -366 162 703 1154 1493 1794 2139 2512 2819 2986 3035 3030 3007 2956 2847 2681 2486 2295 2101 1871 1582 1261 960 698 453 196 -61 -283 -460 -611 -757 -886 -976 -1019 -1028 -1036 -1061 -1102 -1131 -1122 -1074 -1005 -929 -845 -755 -670 -602 -537 -461 -383 -332 -311 -290 -250 -210 -207 -240 -267 -261 -243 -256 -317 -397 -455 -483 -501 -534 -584 -632 -654 -646 -620 -596 -582 -571 -549 -514 -472 -432 -391 -340 -272 -199 -133 -81 -32 23 80 131 171 203 229 251 266 277 286 293 293 285 270 253 236 218 197 176 154 131 109 90 74 59 45 33 23 15 8 3 0 -1 -2 -2 -2 -1 0 +0 4 8 16 28 48 68 84 104 128 152 164 172 172 168 148 120 92 56 8 -68 -160 -236 -292 -356 -448 -552 -628 -676 -720 -768 -796 -800 -800 -808 -804 -772 -736 -704 -664 -572 -448 -348 -312 -308 -280 -224 -168 -128 -132 -216 -424 -720 -1020 -1296 -1588 -1936 -2304 -2648 -3008 -3464 -4020 -4564 -5020 -5376 -5664 -5868 -5984 -6036 -6044 -5904 -5500 -4864 -4128 -3424 -2736 -1972 -1084 -60 1132 2512 3996 5436 6780 8152 9700 11380 12984 14304 15268 15964 16540 17120 17676 18044 18084 17808 17356 16756 15916 14728 13296 11788 10168 8172 5752 3236 1004 -1000 -3208 -5824 -8508 -10768 -12536 -14200 -16020 -17824 -19284 -20320 -21076 -21604 -21804 -21544 -20852 -19852 -18672 -17452 -16184 -14612 -12456 -9836 -7220 -4900 -2596 176 3404 6540 9172 11424 13668 15892 17728 19012 19908 20708 21496 22204 22680 22708 22052 20704 19008 17332 15716 13948 11924 9776 7676 5540 3240 840 -1456 -3540 -5492 -7380 -9164 -10748 -12096 -13120 -13696 -13756 -13492 -13244 -13204 -13256 -13136 -12680 -11928 -10992 -9936 -8836 -7756 -6740 -5780 -4860 -4064 -3464 -3060 -2848 -2876 -3240 -3860 -4424 -4720 -4928 -5480 -6656 -8312 -10120 -11852 -13420 -14748 -15816 -16748 -17780 -18872 -19692 -19968 -19816 -19552 -19296 -18792 -17732 -16096 -14060 -11784 -9276 -6552 -3620 -512 2732 6012 9232 12352 15396 18368 21156 23652 25812 
27672 29268 30572 31524 32128 32488 32660 32576 32024 30888 29256 27400 25472 23384 20992 18284 15408 12512 9604 6608 3528 464 -2508 -5392 -8192 -10832 -13144 -15016 -16496 -17668 -18624 -19348 -19828 -20112 -20164 -19892 -19228 -18272 -17212 -16168 -15012 -13472 -11436 -9084 -6836 -4944 -3272 -1464 648 2812 4616 5972 7176 8556 10048 11276 11944 12140 12120 12028 11824 11388 10724 9944 9180 8404 7484 6328 5044 3840 2792 1812 784 -244 -1132 -1840 -2444 -3028 -3544 -3904 -4076 -4112 -4144 -4244 -4408 -4524 -4488 -4296 -4020 -3716 -3380 -3020 -2680 -2408 -2148 -1844 -1532 -1328 -1244 -1160 -1000 -840 -828 -960 -1068 -1044 -972 -1024 -1268 -1588 -1820 -1932 -2004 -2136 -2336 -2528 -2616 -2584 -2480 -2384 -2328 -2284 -2196 -2056 -1888 -1728 -1564 -1360 -1088 -796 -532 -324 -128 92 320 524 684 812 916 1004 1064 1108 1144 1172 1172 1140 1080 1012 944 872 788 704 616 524 436 360 296 236 180 132 92 60 32 12 0 -4 -8 -8 -8 -4 0 +2 +0 -1 -1 -2 -3 -5 -8 -12 -16 -21 -25 -29 -34 -39 -45 -50 -55 -58 -60 -60 -58 -57 -54 -50 -46 -41 -38 -36 -36 -39 -46 -59 -71 -81 -89 -104 -133 -176 -225 -277 -329 -379 -425 -471 -522 -579 -630 -666 -688 -707 -725 -734 -719 -678 -614 -533 -435 -318 -182 -27 146 333 529 731 940 1158 1374 1584 1781 1968 2145 2304 2447 2565 2667 2758 2828 2858 2831 2757 2651 2531 2385 2198 1964 1699 1415 1114 786 430 58 -322 -709 -1103 -1495 -1858 -2175 -2448 -2686 -2898 -3082 -3235 -3359 -3449 -3482 -3446 -3351 -3231 -3106 -2950 -2710 -2353 -1914 -1473 -1089 -738 -338 153 679 1141 1509 1856 2264 2721 3124 3387 3521 3595 3653 3674 3622 3490 3312 3128 2931 2672 2313 1888 1471 1096 728 323 -102 -488 -812 -1106 -1404 -1686 -1904 -2035 -2106 -2176 -2286 -2435 -2563 -2610 -2563 -2461 -2336 -2183 -2001 -1827 -1686 -1546 -1363 -1167 -1039 -1001 -961 -852 -737 -749 -894 -1027 -1035 -994 -1082 -1382 -1785 -2119 -2324 -2492 -2747 -3113 -3490 -3743 -3831 -3819 -3815 -3872 -3946 -3953 -3853 -3691 -3524 -3332 -3020 -2529 -1931 -1357 -859 -350 278 1011 1739 2393 2995 3591 4167 4699 5218 5769 6332 6802 7113 7284 7394 7488 7550 7532 7395 7136 6779 6371 5960 5551 5101 4572 3954 3269 2538 1774 1001 260 -430 -1081 -1712 -2307 -2817 -3212 -3534 -3840 -4145 -4403 -4566 -4638 -4666 -4664 -4612 -4475 -4264 -4007 -3725 -3414 -3058 -2640 -2181 -1739 -1363 -1040 -703 -300 141 545 881 1184 1489 1773 1999 2163 2285 2358 2348 2281 2221 2209 2202 2129 1976 1787 1603 1410 1191 955 734 552 415 306 194 51 -115 -263 -364 -437 -528 -642 -738 -778 -773 -760 -757 -753 -739 -723 -719 -730 -752 -776 -793 -794 -780 -762 -748 -730 -704 -680 -678 -700 -722 -728 -725 -730 -749 -767 -768 -758 -749 -742 -732 -717 -700 -682 -658 -619 -569 -516 -472 -429 -378 -315 -247 -182 -124 -70 -15 46 111 175 233 283 329 369 401 423 441 458 473 482 485 484 480 468 447 418 391 368 345 315 280 242 207 179 152 125 97 71 50 33 19 5 -7 -16 -21 -24 -25 -26 -25 -23 -21 -18 -16 -13 -11 -9 -6 -4 -3 -2 -1 0 +0 -4 -4 -8 -12 -20 -32 -48 -64 -84 -100 -116 -136 -156 -180 -200 -220 -232 -240 -240 -232 -228 -216 -200 -184 -164 -152 -144 -144 -156 -184 -236 -284 -324 -356 -416 -532 -704 -900 -1108 -1316 -1516 -1700 -1884 -2088 -2316 -2520 -2664 -2752 -2828 -2900 -2936 -2876 -2712 -2456 -2132 -1740 -1272 -728 -108 584 1332 2116 2924 3760 4632 5496 6336 7124 7872 8580 9216 9788 10260 10668 11032 11312 11432 11324 11028 10604 10124 9540 8792 7856 6796 5660 4456 3144 1720 232 -1288 -2836 -4412 -5980 -7432 -8700 -9792 -10744 -11592 -12328 -12940 -13436 -13796 -13928 -13784 -13404 -12924 -12424 -11800 -10840 -9412 -7656 -5892 -4356 -2952 -1352 612 2716 4564 6036 7424 9056 
10884 12496 13548 14084 14380 14612 14696 14488 13960 13248 12512 11724 10688 9252 7552 5884 4384 2912 1292 -408 -1952 -3248 -4424 -5616 -6744 -7616 -8140 -8424 -8704 -9144 -9740 -10252 -10440 -10252 -9844 -9344 -8732 -8004 -7308 -6744 -6184 -5452 -4668 -4156 -4004 -3844 -3408 -2948 -2996 -3576 -4108 -4140 -3976 -4328 -5528 -7140 -8476 -9296 -9968 -10988 -12452 -13960 -14972 -15324 -15276 -15260 -15488 -15784 -15812 -15412 -14764 -14096 -13328 -12080 -10116 -7724 -5428 -3436 -1400 1112 4044 6956 9572 11980 14364 16668 18796 20872 23076 25328 27208 28452 29136 29576 29952 30200 30128 29580 28544 27116 25484 23840 22204 20404 18288 15816 13076 10152 7096 4004 1040 -1720 -4324 -6848 -9228 -11268 -12848 -14136 -15360 -16580 -17612 -18264 -18552 -18664 -18656 -18448 -17900 -17056 -16028 -14900 -13656 -12232 -10560 -8724 -6956 -5452 -4160 -2812 -1200 564 2180 3524 4736 5956 7092 7996 8652 9140 9432 9392 9124 8884 8836 8808 8516 7904 7148 6412 5640 4764 3820 2936 2208 1660 1224 776 204 -460 -1052 -1456 -1748 -2112 -2568 -2952 -3112 -3092 -3040 -3028 -3012 -2956 -2892 -2876 -2920 -3008 -3104 -3172 -3176 -3120 -3048 -2992 -2920 -2816 -2720 -2712 -2800 -2888 -2912 -2900 -2920 -2996 -3068 -3072 -3032 -2996 -2968 -2928 -2868 -2800 -2728 -2632 -2476 -2276 -2064 -1888 -1716 -1512 -1260 -988 -728 -496 -280 -60 184 444 700 932 1132 1316 1476 1604 1692 1764 1832 1892 1928 1940 1936 1920 1872 1788 1672 1564 1472 1380 1260 1120 968 828 716 608 500 388 284 200 132 76 20 -28 -64 -84 -96 -100 -104 -100 -92 -84 -72 -64 -52 -44 -36 -24 -16 -12 -8 -4 0 +2 +0 -1 -1 -2 -2 -3 -3 -4 -5 -6 -6 -7 -9 -13 -14 -16 -19 -27 -39 -51 -61 -73 -87 -107 -130 -150 -166 -177 -190 -206 -224 -239 -247 -251 -253 -253 -242 -214 -172 -127 -85 -36 29 113 204 293 382 478 578 679 783 900 1027 1145 1242 1317 1385 1453 1517 1564 1588 1584 1554 1508 1456 1399 1325 1224 1091 930 744 535 311 83 -142 -366 -595 -824 -1033 -1211 -1368 -1526 -1692 -1844 -1963 -2047 -2111 -2164 -2195 -2185 -2134 -2055 -1958 -1839 -1688 -1493 -1263 -1032 -829 -647 -448 -196 94 373 617 849 1092 1331 1536 1700 1839 1941 1978 1966 1958 1994 2033 2011 1909 1767 1621 1459 1261 1034 813 626 482 363 235 63 -146 -342 -484 -596 -736 -917 -1079 -1165 -1185 -1193 -1216 -1239 -1246 -1248 -1272 -1323 -1395 -1475 -1546 -1586 -1597 -1601 -1612 -1612 -1594 -1582 -1619 -1714 -1816 -1881 -1924 -1991 -2099 -2207 -2273 -2308 -2343 -2389 -2426 -2446 -2459 -2470 -2455 -2382 -2254 -2112 -1991 -1869 -1700 -1467 -1189 -905 -637 -371 -78 267 664 1083 1494 1888 2277 2660 3010 3315 3604 3908 4215 4492 4731 4951 5154 5285 5304 5238 5167 5134 5090 4942 4650 4277 3915 3602 3289 2902 2422 1915 1461 1072 680 226 -267 -719 -1070 -1351 -1621 -1896 -2126 -2271 -2361 -2466 -2615 -2757 -2830 -2813 -2752 -2692 -2648 -2592 -2488 -2314 -2099 -1900 -1753 -1616 -1423 -1156 -885 -663 -468 -238 32 275 453 595 748 912 1054 1155 1224 1267 1281 1278 1274 1267 1241 1200 1165 1132 1066 946 810 710 647 582 476 343 213 98 -12 -132 -269 -408 -521 -588 -632 -698 -798 -881 -899 -871 -872 -936 -1024 -1074 -1076 -1059 -1048 -1040 -1029 -1026 -1038 -1051 -1048 -1023 -990 -953 -914 -874 -833 -784 -716 -634 -555 -496 -451 -409 -365 -321 -282 -247 -214 -182 -145 -101 -53 -13 19 50 89 131 166 191 215 249 288 322 345 362 382 404 424 433 432 428 428 434 441 439 427 411 395 381 366 346 322 295 271 249 227 204 176 146 121 101 84 67 49 30 13 -2 -14 -22 -28 -33 -38 -42 -43 -43 -42 -40 -39 -37 -34 -29 -25 -22 -19 -16 -14 -11 -9 -7 -5 -4 -2 -1 -1 -1 -1 0 +0 -4 -4 -8 -8 -12 -12 -16 -20 -24 -24 -28 -36 -52 -56 -64 -76 -108 -156 -204 -244 
-292 -348 -428 -520 -600 -664 -708 -760 -824 -896 -956 -988 -1004 -1012 -1012 -968 -856 -688 -508 -340 -144 116 452 816 1172 1528 1912 2312 2716 3132 3600 4108 4580 4968 5268 5540 5812 6068 6256 6352 6336 6216 6032 5824 5596 5300 4896 4364 3720 2976 2140 1244 332 -568 -1464 -2380 -3296 -4132 -4844 -5472 -6104 -6768 -7376 -7852 -8188 -8444 -8656 -8780 -8740 -8536 -8220 -7832 -7356 -6752 -5972 -5052 -4128 -3316 -2588 -1792 -784 376 1492 2468 3396 4368 5324 6144 6800 7356 7764 7912 7864 7832 7976 8132 8044 7636 7068 6484 5836 5044 4136 3252 2504 1928 1452 940 252 -584 -1368 -1936 -2384 -2944 -3668 -4316 -4660 -4740 -4772 -4864 -4956 -4984 -4992 -5088 -5292 -5580 -5900 -6184 -6344 -6388 -6404 -6448 -6448 -6376 -6328 -6476 -6856 -7264 -7524 -7696 -7964 -8396 -8828 -9092 -9232 -9372 -9556 -9704 -9784 -9836 -9880 -9820 -9528 -9016 -8448 -7964 -7476 -6800 -5868 -4756 -3620 -2548 -1484 -312 1068 2656 4332 5976 7552 9108 10640 12040 13260 14416 15632 16860 17968 18924 19804 20616 21140 21216 20952 20668 20536 20360 19768 18600 17108 15660 14408 13156 11608 9688 7660 5844 4288 2720 904 -1068 -2876 -4280 -5404 -6484 -7584 -8504 -9084 -9444 -9864 -10460 -11028 -11320 -11252 -11008 -10768 -10592 -10368 -9952 -9256 -8396 -7600 -7012 -6464 -5692 -4624 -3540 -2652 -1872 -952 128 1100 1812 2380 2992 3648 4216 4620 4896 5068 5124 5112 5096 5068 4964 4800 4660 4528 4264 3784 3240 2840 2588 2328 1904 1372 852 392 -48 -528 -1076 -1632 -2084 -2352 -2528 -2792 -3192 -3524 -3596 -3484 -3488 -3744 -4096 -4296 -4304 -4236 -4192 -4160 -4116 -4104 -4152 -4204 -4192 -4092 -3960 -3812 -3656 -3496 -3332 -3136 -2864 -2536 -2220 -1984 -1804 -1636 -1460 -1284 -1128 -988 -856 -728 -580 -404 -212 -52 76 200 356 524 664 764 860 996 1152 1288 1380 1448 1528 1616 1696 1732 1728 1712 1712 1736 1764 1756 1708 1644 1580 1524 1464 1384 1288 1180 1084 996 908 816 704 584 484 404 336 268 196 120 52 -8 -56 -88 -112 -132 -152 -168 -172 -172 -168 -160 -156 -148 -136 -116 -100 -88 -76 -64 -56 -44 -36 -28 -20 -16 -8 -4 -4 -4 -4 0 +2 +0 -1 -2 -2 -4 -5 -7 -9 -12 -15 -18 -21 -25 -28 -31 -33 -35 -37 -37 -35 -32 -27 -21 -13 -3 10 28 50 74 100 129 160 192 224 258 296 336 378 419 461 504 542 570 588 606 629 650 658 645 618 588 562 533 488 422 346 273 208 136 47 -58 -160 -246 -320 -396 -479 -553 -609 -652 -702 -767 -832 -879 -899 -904 -910 -920 -926 -913 -873 -813 -756 -716 -677 -612 -510 -401 -308 -223 -116 16 141 238 321 412 516 610 685 744 788 816 834 851 866 869 860 854 850 819 744 651 584 545 501 420 310 197 93 -12 -131 -272 -422 -552 -637 -701 -792 -926 -1046 -1093 -1083 -1109 -1218 -1363 -1464 -1500 -1512 -1532 -1557 -1577 -1611 -1668 -1729 -1765 -1766 -1750 -1727 -1697 -1663 -1625 -1567 -1466 -1331 -1196 -1096 -1022 -951 -870 -785 -708 -637 -568 -495 -405 -289 -156 -39 59 161 295 449 586 693 806 959 1145 1319 1460 1581 1720 1884 2041 2158 2227 2284 2367 2489 2619 2709 2741 2737 2734 2745 2749 2711 2630 2520 2414 2320 2222 2084 1888 1657 1443 1271 1119 946 730 480 218 -35 -259 -441 -594 -754 -938 -1119 -1255 -1344 -1422 -1522 -1633 -1706 -1714 -1678 -1632 -1608 -1602 -1588 -1539 -1455 -1365 -1299 -1236 -1125 -940 -718 -528 -395 -290 -171 -37 87 195 304 430 564 682 784 874 950 1001 1015 989 941 898 889 919 963 971 915 807 688 578 460 311 136 -31 -158 -252 -354 -499 -681 -853 -976 -1054 -1131 -1231 -1328 -1374 -1360 -1333 -1333 -1358 -1371 -1343 -1289 -1232 -1179 -1112 -1021 -914 -811 -724 -645 -557 -453 -340 -235 -144 -58 28 110 179 235 286 337 383 417 437 444 445 441 432 418 402 388 378 365 342 305 265 231 204 182 164 151 140 126 110 97 87 
75 60 52 60 78 92 94 93 98 112 125 131 131 129 128 130 137 144 145 136 123 114 109 101 86 69 56 48 41 30 19 11 4 -5 -16 -25 -29 -30 -29 -30 -31 -32 -31 -29 -27 -25 -24 -24 -22 -19 -15 -12 -8 -6 -4 -2 -1 0 1 1 1 1 1 1 0 0 0 0 0 +0 -8 -16 -16 -32 -40 -56 -72 -96 -120 -144 -168 -200 -224 -248 -264 -280 -296 -296 -280 -256 -216 -168 -104 -24 80 224 400 592 800 1032 1280 1536 1792 2064 2368 2688 3024 3352 3688 4032 4336 4560 4704 4848 5032 5200 5264 5160 4944 4704 4496 4264 3904 3376 2768 2184 1664 1088 376 -464 -1280 -1968 -2560 -3168 -3832 -4424 -4872 -5216 -5616 -6136 -6656 -7032 -7192 -7232 -7280 -7360 -7408 -7304 -6984 -6504 -6048 -5728 -5416 -4896 -4080 -3208 -2464 -1784 -928 128 1128 1904 2568 3296 4128 4880 5480 5952 6304 6528 6672 6808 6928 6952 6880 6832 6800 6552 5952 5208 4672 4360 4008 3360 2480 1576 744 -96 -1048 -2176 -3376 -4416 -5096 -5608 -6336 -7408 -8368 -8744 -8664 -8872 -9744 -10904 -11712 -12000 -12096 -12256 -12456 -12616 -12888 -13344 -13832 -14120 -14128 -14000 -13816 -13576 -13304 -13000 -12536 -11728 -10648 -9568 -8768 -8176 -7608 -6960 -6280 -5664 -5096 -4544 -3960 -3240 -2312 -1248 -312 472 1288 2360 3592 4688 5544 6448 7672 9160 10552 11680 12648 13760 15072 16328 17264 17816 18272 18936 19912 20952 21672 21928 21896 21872 21960 21992 21688 21040 20160 19312 18560 17776 16672 15104 13256 11544 10168 8952 7568 5840 3840 1744 -280 -2072 -3528 -4752 -6032 -7504 -8952 -10040 -10752 -11376 -12176 -13064 -13648 -13712 -13424 -13056 -12864 -12816 -12704 -12312 -11640 -10920 -10392 -9888 -9000 -7520 -5744 -4224 -3160 -2320 -1368 -296 696 1560 2432 3440 4512 5456 6272 6992 7600 8008 8120 7912 7528 7184 7112 7352 7704 7768 7320 6456 5504 4624 3680 2488 1088 -248 -1264 -2016 -2832 -3992 -5448 -6824 -7808 -8432 -9048 -9848 -10624 -10992 -10880 -10664 -10664 -10864 -10968 -10744 -10312 -9856 -9432 -8896 -8168 -7312 -6488 -5792 -5160 -4456 -3624 -2720 -1880 -1152 -464 224 880 1432 1880 2288 2696 3064 3336 3496 3552 3560 3528 3456 3344 3216 3104 3024 2920 2736 2440 2120 1848 1632 1456 1312 1208 1120 1008 880 776 696 600 480 416 480 624 736 752 744 784 896 1000 1048 1048 1032 1024 1040 1096 1152 1160 1088 984 912 872 808 688 552 448 384 328 240 152 88 32 -40 -128 -200 -232 -240 -232 -240 -248 -256 -248 -232 -216 -200 -192 -192 -176 -152 -120 -96 -64 -48 -32 -16 -8 0 8 8 8 8 8 8 0 0 0 0 0 +3 +0 -1 -1 -1 -1 -1 0 0 1 2 4 5 8 11 15 20 25 30 37 44 53 62 70 78 88 99 113 125 136 145 155 165 175 183 188 191 192 195 197 194 184 170 155 142 131 115 93 64 30 -6 -39 -69 -97 -127 -164 -203 -236 -261 -286 -317 -351 -379 -393 -398 -399 -406 -417 -426 -425 -414 -400 -392 -384 -360 -309 -243 -184 -141 -107 -65 -15 34 79 127 185 248 308 364 416 463 501 520 520 507 496 502 532 571 590 569 514 449 386 314 218 97 -23 -118 -194 -278 -402 -560 -719 -841 -929 -1021 -1137 -1254 -1328 -1345 -1348 -1379 -1438 -1484 -1489 -1462 -1429 -1399 -1351 -1269 -1162 -1056 -964 -879 -776 -647 -497 -352 -220 -91 45 181 301 406 507 611 712 794 852 889 913 927 931 924 911 903 902 896 861 790 705 631 573 524 487 462 441 407 365 332 305 271 226 201 238 320 389 412 419 460 540 623 678 704 717 736 777 846 926 968 944 889 857 855 829 741 616 524 473 420 326 217 133 58 -56 -218 -369 -454 -483 -507 -556 -622 -678 -705 -705 -700 -713 -755 -803 -817 -778 -689 -575 -456 -343 -243 -144 -31 90 190 258 320 410 522 619 678 718 771 839 909 959 982 974 944 896 832 739 604 443 276 111 -62 -251 -449 -637 -807 -965 -1127 -1301 -1472 -1607 -1692 -1755 -1823 -1889 -1905 -1847 -1751 -1666 -1597 -1510 -1392 -1260 -1130 -988 -826 -659 -501 
[Elided: ~20 lines of regenerated integer waveform test data. Each removed line of samples is replaced by three added lines — the samples at unit scale, the same samples multiplied by a power of two, and that scale exponent (observed values 2 through 6) on a line of its own.]
0 0 0 0 0 0 0 +5 +0 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -2 -3 -4 -6 -7 -7 -8 -9 -10 -11 -14 -17 -18 -18 -15 -13 -12 -10 -5 2 8 12 16 21 30 42 52 58 61 65 74 83 86 80 72 68 70 72 68 57 44 32 22 14 7 0 -13 -29 -43 -51 -59 -72 -87 -93 -93 -93 -100 -108 -105 -94 -85 -80 -71 -51 -26 -6 9 20 32 39 40 41 45 49 43 29 15 11 18 30 41 47 54 68 84 97 101 104 112 127 144 161 179 199 208 197 173 157 156 159 153 136 117 100 77 46 15 -14 -45 -88 -137 -182 -217 -243 -264 -284 -304 -322 -331 -330 -317 -303 -295 -296 -309 -328 -332 -313 -276 -251 -246 -237 -202 -152 -121 -123 -135 -131 -125 -142 -174 -193 -185 -160 -142 -136 -125 -101 -67 -25 26 83 137 173 194 224 275 341 399 427 433 445 469 475 435 358 289 251 228 194 144 105 80 58 20 -40 -100 -137 -145 -134 -130 -150 -169 -159 -127 -103 -109 -123 -113 -86 -72 -77 -74 -45 -9 12 39 94 166 211 214 206 219 248 275 299 328 348 336 292 247 218 199 165 122 93 84 81 69 43 14 -7 -9 7 24 20 -8 -38 -53 -50 -44 -50 -71 -97 -124 -145 -158 -163 -168 -182 -208 -241 -273 -285 -277 -263 -259 -268 -276 -271 -251 -220 -177 -127 -84 -57 -41 -12 34 78 96 95 93 104 118 125 126 121 108 87 64 52 52 60 67 64 45 16 -9 -20 -24 -34 -54 -67 -62 -49 -46 -58 -71 -72 -62 -48 -36 -29 -25 -24 -25 -22 -15 -6 2 6 7 5 4 7 10 11 9 6 3 -2 -8 -11 -7 0 4 0 -6 -5 3 12 18 18 19 22 25 27 28 30 31 31 31 32 33 32 28 24 22 22 22 21 18 15 12 9 8 7 4 0 -3 -4 -4 -3 -2 -2 -1 -2 -3 -3 -3 -2 -2 -2 -3 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -64 -64 -64 -64 -64 -64 -128 -128 -128 -128 -192 -192 -192 -192 -192 -192 -192 -128 -192 -256 -384 -448 -448 -512 -576 -640 -704 -896 -1088 -1152 -1152 -960 -832 -768 -640 -320 128 512 768 1024 1344 1920 2688 3328 3712 3904 4160 4736 5312 5504 5120 4608 4352 4480 4608 4352 3648 2816 2048 1408 896 448 0 -832 -1856 -2752 -3264 -3776 -4608 -5568 -5952 -5952 -5952 -6400 -6912 -6720 -6016 -5440 -5120 -4544 -3264 -1664 -384 576 1280 2048 2496 2560 2624 2880 3136 2752 1856 960 704 1152 1920 2624 3008 3456 4352 5376 6208 6464 6656 7168 8128 9216 10304 11456 12736 13312 12608 11072 10048 9984 10176 9792 8704 7488 6400 4928 2944 960 -896 -2880 -5632 -8768 -11648 -13888 -15552 -16896 -18176 -19456 -20608 -21184 -21120 -20288 -19392 -18880 -18944 -19776 -20992 -21248 -20032 -17664 -16064 -15744 -15168 -12928 -9728 -7744 -7872 -8640 -8384 -8000 -9088 -11136 -12352 -11840 -10240 -9088 -8704 -8000 -6464 -4288 -1600 1664 5312 8768 11072 12416 14336 17600 21824 25536 27328 27712 28480 30016 30400 27840 22912 18496 16064 14592 12416 9216 6720 5120 3712 1280 -2560 -6400 -8768 -9280 -8576 -8320 -9600 -10816 -10176 -8128 -6592 -6976 -7872 -7232 -5504 -4608 -4928 -4736 -2880 -576 768 2496 6016 10624 13504 13696 13184 14016 15872 17600 19136 20992 22272 21504 18688 15808 13952 12736 10560 7808 5952 5376 5184 4416 2752 896 -448 -576 448 1536 1280 -512 -2432 -3392 -3200 -2816 -3200 -4544 -6208 -7936 -9280 -10112 -10432 -10752 -11648 -13312 -15424 -17472 -18240 -17728 -16832 -16576 -17152 -17664 -17344 -16064 -14080 -11328 -8128 -5376 -3648 -2624 -768 2176 4992 6144 6080 5952 6656 7552 8000 8064 7744 6912 5568 4096 3328 3328 3840 4288 4096 2880 1024 -576 -1280 -1536 -2176 -3456 -4288 -3968 -3136 -2944 -3712 -4544 -4608 -3968 -3072 -2304 -1856 -1600 -1536 -1600 -1408 -960 -384 128 384 448 320 256 448 640 704 576 384 192 -128 -512 -704 -448 0 256 0 -384 -320 192 768 1152 1152 1216 1408 1600 1728 1792 1920 1984 1984 1984 2048 2112 2048 1792 1536 1408 1408 1408 1344 1152 960 768 576 512 448 256 0 -192 -256 -256 -192 -128 -128 -64 -128 -192 -192 -192 -128 -128 -128 -192 -128 
-128 -64 -64 -128 -128 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 0 0 0 1 1 2 3 5 6 8 9 10 12 13 13 12 10 10 9 8 7 5 4 3 1 -3 -8 -11 -12 -12 -12 -14 -17 -17 -14 -12 -13 -16 -15 -12 -10 -12 -12 -7 -2 2 6 17 31 41 43 42 47 55 63 70 80 87 87 78 68 62 58 49 38 29 27 27 23 15 5 -3 -4 2 9 8 -4 -17 -24 -23 -21 -25 -36 -50 -65 -78 -87 -92 -97 -108 -126 -150 -174 -186 -185 -180 -181 -192 -203 -203 -193 -173 -143 -105 -71 -50 -36 -11 32 73 93 94 94 108 125 136 140 137 125 103 78 65 66 78 90 87 63 22 -14 -30 -36 -53 -87 -111 -105 -85 -80 -104 -131 -137 -121 -96 -74 -60 -53 -53 -55 -51 -36 -14 7 17 19 14 13 20 30 34 29 20 12 -4 -25 -39 -26 1 17 1 -23 -22 14 60 86 93 103 120 139 156 172 188 202 209 217 233 250 251 230 206 199 210 222 221 202 169 142 122 112 97 62 10 -35 -58 -59 -52 -43 -27 -18 -33 -65 -90 -81 -58 -57 -86 -111 -102 -70 -52 -72 -106 -128 -133 -141 -164 -187 -194 -181 -162 -153 -156 -165 -168 -166 -157 -149 -141 -132 -119 -98 -74 -50 -30 -14 0 15 34 51 66 82 97 105 104 97 94 98 99 94 82 66 43 16 -14 -42 -69 -92 -104 -96 -80 -72 -75 -80 -78 -69 -63 -60 -58 -59 -63 -60 -44 -24 -16 -25 -35 -32 -23 -18 -17 -9 7 20 22 19 25 40 55 65 70 74 75 74 74 76 75 66 55 51 56 61 61 57 55 59 62 58 46 33 24 22 22 20 18 17 17 14 6 -6 -16 -22 -25 -25 -24 -23 -23 -24 -24 -22 -20 -21 -21 -18 -13 -11 -10 -7 -1 5 6 4 2 2 3 4 5 7 8 7 5 4 4 4 4 4 3 2 1 2 2 1 0 0 0 1 1 1 1 1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 0 0 0 128 128 256 384 640 768 1024 1152 1280 1536 1664 1664 1536 1280 1280 1152 1024 896 640 512 384 128 -384 -1024 -1408 -1536 -1536 -1536 -1792 -2176 -2176 -1792 -1536 -1664 -2048 -1920 -1536 -1280 -1536 -1536 -896 -256 256 768 2176 3968 5248 5504 5376 6016 7040 8064 8960 10240 11136 11136 9984 8704 7936 7424 6272 4864 3712 3456 3456 2944 1920 640 -384 -512 256 1152 1024 -512 -2176 -3072 -2944 -2688 -3200 -4608 -6400 -8320 -9984 -11136 -11776 -12416 -13824 -16128 -19200 -22272 -23808 -23680 -23040 -23168 -24576 -25984 -25984 -24704 -22144 -18304 -13440 -9088 -6400 -4608 -1408 4096 9344 11904 12032 12032 13824 16000 17408 17920 17536 16000 13184 9984 8320 8448 9984 11520 11136 8064 2816 -1792 -3840 -4608 -6784 -11136 -14208 -13440 -10880 -10240 -13312 -16768 -17536 -15488 -12288 -9472 -7680 -6784 -6784 -7040 -6528 -4608 -1792 896 2176 2432 1792 1664 2560 3840 4352 3712 2560 1536 -512 -3200 -4992 -3328 128 2176 128 -2944 -2816 1792 7680 11008 11904 13184 15360 17792 19968 22016 24064 25856 26752 27776 29824 32000 32128 29440 26368 25472 26880 28416 28288 25856 21632 18176 15616 14336 12416 7936 1280 -4480 -7424 -7552 -6656 -5504 -3456 -2304 -4224 -8320 -11520 -10368 -7424 -7296 -11008 -14208 -13056 -8960 -6656 -9216 -13568 -16384 -17024 -18048 -20992 -23936 -24832 -23168 -20736 -19584 -19968 -21120 -21504 -21248 -20096 -19072 -18048 -16896 -15232 -12544 -9472 -6400 -3840 -1792 0 1920 4352 6528 8448 10496 12416 13440 13312 12416 12032 12544 12672 12032 10496 8448 5504 2048 -1792 -5376 -8832 -11776 -13312 -12288 -10240 -9216 -9600 -10240 -9984 -8832 -8064 -7680 -7424 -7552 -8064 -7680 -5632 -3072 -2048 -3200 -4480 -4096 -2944 -2304 -2176 -1152 896 2560 2816 2432 3200 5120 7040 8320 8960 9472 9600 9472 9472 9728 9600 8448 7040 6528 7168 7808 7808 7296 7040 7552 7936 7424 5888 4224 3072 2816 2816 2560 2304 2176 2176 1792 768 -768 -2048 -2816 -3200 -3200 -3072 -2944 -2944 -3072 -3072 -2816 -2560 -2688 -2688 -2304 -1664 -1408 -1280 -896 -128 640 768 512 256 256 384 512 640 896 1024 896 640 512 512 512 512 512 384 256 128 256 256 128 0 0 0 128 128 
128 128 128 0 0 0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 0 0 -1 -1 0 1 2 2 3 4 5 6 7 8 10 11 12 14 15 17 16 15 15 17 19 20 19 17 15 13 13 11 8 1 -5 -9 -9 -9 -7 -5 -4 -6 -13 -18 -17 -12 -13 -19 -26 -25 -17 -14 -19 -29 -36 -38 -42 -50 -58 -62 -60 -55 -54 -56 -61 -64 -64 -63 -61 -59 -57 -53 -45 -35 -24 -15 -7 0 8 18 28 37 47 57 63 64 61 61 65 68 66 59 48 32 12 -11 -34 -57 -78 -89 -84 -72 -66 -71 -78 -77 -70 -65 -64 -63 -66 -71 -69 -52 -29 -20 -32 -45 -43 -31 -25 -24 -14 10 31 35 31 42 68 95 115 127 139 144 145 148 156 157 143 122 115 130 147 150 144 144 157 170 164 135 98 74 68 72 69 64 60 62 54 24 -22 -62 -89 -106 -112 -111 -108 -112 -121 -124 -119 -115 -120 -124 -111 -87 -74 -70 -48 -1 43 53 36 18 20 31 45 63 86 102 98 81 68 65 72 84 89 79 55 41 50 64 52 20 0 15 43 61 68 75 71 48 17 5 13 6 -29 -65 -81 -97 -143 -205 -237 -223 -204 -214 -244 -256 -247 -246 -266 -284 -269 -230 -201 -201 -217 -226 -211 -180 -143 -107 -80 -64 -58 -48 -27 0 24 46 74 110 141 154 155 163 180 192 186 172 168 177 183 178 160 140 126 115 109 106 103 101 100 98 89 73 61 60 65 63 48 32 25 29 29 17 -5 -25 -36 -38 -34 -25 -14 -6 -8 -16 -19 -12 0 7 10 12 16 16 10 4 4 6 4 -3 -10 -15 -17 -19 -19 -20 -22 -25 -26 -24 -25 -30 -36 -35 -30 -27 -25 -24 -21 -18 -15 -13 -11 -11 -12 -13 -12 -10 -9 -7 -4 1 7 10 11 11 10 8 7 7 7 5 3 0 -2 -2 -2 -3 -4 -5 -6 -5 -5 -4 -3 -3 -3 -3 -3 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 0 0 0 0 -64 -64 -64 -64 0 0 0 -64 -64 0 64 128 128 192 256 320 384 448 512 640 704 768 896 960 1088 1024 960 960 1088 1216 1280 1216 1088 960 832 832 704 512 64 -320 -576 -576 -576 -448 -320 -256 -384 -832 -1152 -1088 -768 -832 -1216 -1664 -1600 -1088 -896 -1216 -1856 -2304 -2432 -2688 -3200 -3712 -3968 -3840 -3520 -3456 -3584 -3904 -4096 -4096 -4032 -3904 -3776 -3648 -3392 -2880 -2240 -1536 -960 -448 0 512 1152 1792 2368 3008 3648 4032 4096 3904 3904 4160 4352 4224 3776 3072 2048 768 -704 -2176 -3648 -4992 -5696 -5376 -4608 -4224 -4544 -4992 -4928 -4480 -4160 -4096 -4032 -4224 -4544 -4416 -3328 -1856 -1280 -2048 -2880 -2752 -1984 -1600 -1536 -896 640 1984 2240 1984 2688 4352 6080 7360 8128 8896 9216 9280 9472 9984 10048 9152 7808 7360 8320 9408 9600 9216 9216 10048 10880 10496 8640 6272 4736 4352 4608 4416 4096 3840 3968 3456 1536 -1408 -3968 -5696 -6784 -7168 -7104 -6912 -7168 -7744 -7936 -7616 -7360 -7680 -7936 -7104 -5568 -4736 -4480 -3072 -64 2752 3392 2304 1152 1280 1984 2880 4032 5504 6528 6272 5184 4352 4160 4608 5376 5696 5056 3520 2624 3200 4096 3328 1280 0 960 2752 3904 4352 4800 4544 3072 1088 320 832 384 -1856 -4160 -5184 -6208 -9152 -13120 -15168 -14272 -13056 -13696 -15616 -16384 -15808 -15744 -17024 -18176 -17216 -14720 -12864 -12864 -13888 -14464 -13504 -11520 -9152 -6848 -5120 -4096 -3712 -3072 -1728 0 1536 2944 4736 7040 9024 9856 9920 10432 11520 12288 11904 11008 10752 11328 11712 11392 10240 8960 8064 7360 6976 6784 6592 6464 6400 6272 5696 4672 3904 3840 4160 4032 3072 2048 1600 1856 1856 1088 -320 -1600 -2304 -2432 -2176 -1600 -896 -384 -512 -1024 -1216 -768 0 448 640 768 1024 1024 640 256 256 384 256 -192 -640 -960 -1088 -1216 -1216 -1280 -1408 -1600 -1664 -1536 -1600 -1920 -2304 -2240 -1920 -1728 -1600 -1536 -1344 -1152 -960 -832 -704 -704 -768 -832 -768 -640 -576 -448 -256 64 448 640 704 704 640 512 448 448 448 320 192 0 -128 -128 -128 -192 -256 -320 -384 -320 -320 -256 -192 -192 -192 -192 -192 -128 -128 -128 -128 -128 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 0 +6 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -2 -3 -3 -3 -3 -4 -4 -5 -5 
-5 -6 -6 -6 -5 -5 -5 -4 -1 3 4 2 1 1 2 4 6 9 11 11 10 8 8 10 12 13 12 8 6 8 11 9 3 0 3 9 13 15 17 17 12 4 1 3 1 -9 -20 -26 -31 -47 -70 -83 -80 -75 -81 -95 -102 -101 -103 -115 -126 -122 -107 -96 -98 -109 -116 -111 -97 -79 -61 -47 -38 -35 -30 -18 0 16 31 52 79 103 116 119 128 145 158 157 148 148 160 169 168 155 139 127 119 115 114 114 114 116 117 108 91 78 78 87 86 68 45 37 43 45 26 -8 -41 -60 -65 -60 -44 -25 -11 -15 -32 -38 -24 1 17 22 29 39 39 26 11 11 17 11 -7 -29 -43 -53 -59 -62 -67 -77 -91 -95 -90 -97 -122 -148 -153 -136 -122 -120 -118 -107 -93 -82 -73 -64 -65 -76 -86 -83 -72 -61 -52 -28 15 63 98 114 117 108 92 84 87 93 82 46 2 -24 -31 -31 -46 -77 -110 -131 -135 -123 -105 -92 -90 -102 -115 -114 -98 -86 -88 -100 -105 -100 -90 -78 -69 -58 -47 -37 -30 -31 -38 -36 -12 16 24 5 -13 -2 26 41 35 33 49 69 84 104 144 187 199 182 170 180 194 186 165 163 182 195 181 154 137 132 120 94 67 48 35 20 2 -11 -15 -19 -37 -60 -71 -64 -55 -59 -76 -87 -84 -76 -70 -67 -58 -47 -41 -37 -36 -38 -43 -44 -31 -11 -6 -19 -31 -25 -10 -6 -18 -31 -29 -15 2 13 16 18 26 44 62 64 54 48 56 66 60 41 27 25 23 13 1 -1 3 -2 -19 -37 -45 -44 -47 -53 -57 -55 -49 -45 -44 -45 -44 -44 -43 -43 -44 -42 -37 -32 -29 -28 -27 -22 -15 -10 -6 -3 2 7 10 10 8 8 10 12 14 14 13 12 11 11 10 9 7 6 5 5 4 3 3 3 3 2 1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -128 -128 -256 -384 -384 -384 -384 -512 -512 -640 -640 -640 -768 -768 -768 -640 -640 -640 -512 -128 384 512 256 128 128 256 512 768 1152 1408 1408 1280 1024 1024 1280 1536 1664 1536 1024 768 1024 1408 1152 384 0 384 1152 1664 1920 2176 2176 1536 512 128 384 128 -1152 -2560 -3328 -3968 -6016 -8960 -10624 -10240 -9600 -10368 -12160 -13056 -12928 -13184 -14720 -16128 -15616 -13696 -12288 -12544 -13952 -14848 -14208 -12416 -10112 -7808 -6016 -4864 -4480 -3840 -2304 0 2048 3968 6656 10112 13184 14848 15232 16384 18560 20224 20096 18944 18944 20480 21632 21504 19840 17792 16256 15232 14720 14592 14592 14592 14848 14976 13824 11648 9984 9984 11136 11008 8704 5760 4736 5504 5760 3328 -1024 -5248 -7680 -8320 -7680 -5632 -3200 -1408 -1920 -4096 -4864 -3072 128 2176 2816 3712 4992 4992 3328 1408 1408 2176 1408 -896 -3712 -5504 -6784 -7552 -7936 -8576 -9856 -11648 -12160 -11520 -12416 -15616 -18944 -19584 -17408 -15616 -15360 -15104 -13696 -11904 -10496 -9344 -8192 -8320 -9728 -11008 -10624 -9216 -7808 -6656 -3584 1920 8064 12544 14592 14976 13824 11776 10752 11136 11904 10496 5888 256 -3072 -3968 -3968 -5888 -9856 -14080 -16768 -17280 -15744 -13440 -11776 -11520 -13056 -14720 -14592 -12544 -11008 -11264 -12800 -13440 -12800 -11520 -9984 -8832 -7424 -6016 -4736 -3840 -3968 -4864 -4608 -1536 2048 3072 640 -1664 -256 3328 5248 4480 4224 6272 8832 10752 13312 18432 23936 25472 23296 21760 23040 24832 23808 21120 20864 23296 24960 23168 19712 17536 16896 15360 12032 8576 6144 4480 2560 256 -1408 -1920 -2432 -4736 -7680 -9088 -8192 -7040 -7552 -9728 -11136 -10752 -9728 -8960 -8576 -7424 -6016 -5248 -4736 -4608 -4864 -5504 -5632 -3968 -1408 -768 -2432 -3968 -3200 -1280 -768 -2304 -3968 -3712 -1920 256 1664 2048 2304 3328 5632 7936 8192 6912 6144 7168 8448 7680 5248 3456 3200 2944 1664 128 -128 384 -256 -2432 -4736 -5760 -5632 -6016 -6784 -7296 -7040 -6272 -5760 -5632 -5760 -5632 -5632 -5504 -5504 -5632 -5376 -4736 -4096 -3712 -3584 -3456 -2816 -1920 -1280 -768 -384 256 896 1280 1280 1024 1024 1280 1536 1792 1792 1664 1536 1408 1408 1280 1152 896 768 640 640 512 384 384 384 384 256 128 0 0 0 0 0 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -3 -3 -4 -4 -4 
-4 -4 -3 -3 -3 -4 -5 -5 -5 -4 -4 -3 1 5 8 10 10 10 9 9 9 11 10 5 0 -4 -5 -5 -8 -13 -19 -23 -25 -24 -21 -19 -19 -22 -26 -26 -24 -21 -23 -26 -29 -28 -26 -23 -21 -18 -15 -12 -10 -11 -14 -14 -5 6 9 2 -6 -1 11 18 16 15 24 34 43 54 77 103 112 105 101 109 121 118 107 109 125 137 130 113 103 101 94 76 55 40 30 18 1 -11 -14 -19 -36 -61 -74 -68 -60 -66 -87 -101 -100 -92 -87 -85 -76 -63 -55 -52 -52 -55 -64 -67 -48 -17 -10 -31 -53 -43 -17 -11 -33 -60 -58 -30 5 28 37 42 62 106 152 163 141 128 154 185 173 122 82 78 76 44 6 -1 12 -7 -73 -145 -181 -186 -203 -236 -263 -262 -244 -232 -234 -245 -252 -257 -262 -275 -289 -287 -266 -240 -226 -228 -223 -189 -135 -88 -58 -30 22 88 136 140 123 125 161 212 250 264 268 270 273 281 281 266 236 212 210 221 214 192 184 210 243 235 170 85 29 12 13 7 -10 -30 -48 -62 -79 -101 -129 -152 -156 -133 -106 -99 -115 -125 -103 -52 -3 22 27 36 57 81 92 92 96 119 158 190 196 176 155 149 147 129 91 58 51 69 77 54 15 -14 -23 -23 -31 -43 -56 -72 -97 -127 -144 -137 -122 -120 -135 -147 -145 -134 -129 -132 -136 -135 -131 -125 -114 -87 -50 -19 -11 -21 -31 -25 -3 21 31 26 15 12 20 37 50 50 38 28 31 43 52 47 36 27 22 14 7 10 18 18 -3 -31 -45 -37 -18 -7 -12 -27 -41 -43 -30 -7 11 15 8 -1 -4 -2 3 11 17 18 14 8 5 6 9 10 9 6 4 4 5 4 1 -2 -3 -1 -1 -4 -6 -6 -5 -4 -3 -2 -3 -3 -3 -3 -2 -2 -2 -1 0 0 0 -1 -1 -1 -1 -1 0 0 0 -1 0 0 0 0 0 +0 0 0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -128 -192 -192 -192 -192 -256 -256 -256 -256 -256 -192 -192 -192 -256 -320 -320 -320 -256 -256 -192 64 320 512 640 640 640 576 576 576 704 640 320 0 -256 -320 -320 -512 -832 -1216 -1472 -1600 -1536 -1344 -1216 -1216 -1408 -1664 -1664 -1536 -1344 -1472 -1664 -1856 -1792 -1664 -1472 -1344 -1152 -960 -768 -640 -704 -896 -896 -320 384 576 128 -384 -64 704 1152 1024 960 1536 2176 2752 3456 4928 6592 7168 6720 6464 6976 7744 7552 6848 6976 8000 8768 8320 7232 6592 6464 6016 4864 3520 2560 1920 1152 64 -704 -896 -1216 -2304 -3904 -4736 -4352 -3840 -4224 -5568 -6464 -6400 -5888 -5568 -5440 -4864 -4032 -3520 -3328 -3328 -3520 -4096 -4288 -3072 -1088 -640 -1984 -3392 -2752 -1088 -704 -2112 -3840 -3712 -1920 320 1792 2368 2688 3968 6784 9728 10432 9024 8192 9856 11840 11072 7808 5248 4992 4864 2816 384 -64 768 -448 -4672 -9280 -11584 -11904 -12992 -15104 -16832 -16768 -15616 -14848 -14976 -15680 -16128 -16448 -16768 -17600 -18496 -18368 -17024 -15360 -14464 -14592 -14272 -12096 -8640 -5632 -3712 -1920 1408 5632 8704 8960 7872 8000 10304 13568 16000 16896 17152 17280 17472 17984 17984 17024 15104 13568 13440 14144 13696 12288 11776 13440 15552 15040 10880 5440 1856 768 832 448 -640 -1920 -3072 -3968 -5056 -6464 -8256 -9728 -9984 -8512 -6784 -6336 -7360 -8000 -6592 -3328 -192 1408 1728 2304 3648 5184 5888 5888 6144 7616 10112 12160 12544 11264 9920 9536 9408 8256 5824 3712 3264 4416 4928 3456 960 -896 -1472 -1472 -1984 -2752 -3584 -4608 -6208 -8128 -9216 -8768 -7808 -7680 -8640 -9408 -9280 -8576 -8256 -8448 -8704 -8640 -8384 -8000 -7296 -5568 -3200 -1216 -704 -1344 -1984 -1600 -192 1344 1984 1664 960 768 1280 2368 3200 3200 2432 1792 1984 2752 3328 3008 2304 1728 1408 896 448 640 1152 1152 -192 -1984 -2880 -2368 -1152 -448 -768 -1728 -2624 -2752 -1920 -448 704 960 512 -64 -256 -128 192 704 1088 1152 896 512 320 384 576 640 576 384 256 256 320 256 64 -128 -192 -64 -64 -256 -384 -384 -320 -256 -192 -128 -192 -192 -192 -192 -128 -128 -128 -64 0 0 0 -64 -64 -64 -64 -64 0 0 0 -64 0 0 0 0 0 +6 +0 0 0 0 0 0 0 0 0 0 -1 0 -1 -1 -2 -3 -4 -4 -6 -7 -7 -8 -8 -8 -10 -11 -12 -13 -14 -16 -17 -17 -16 -16 -17 -17 -16 -12 -8 -6 -3 2 9 15 16 15 
16 21 29 36 39 41 43 46 49 50 49 45 42 43 47 47 44 43 51 61 61 45 23 8 3 3 2 -3 -10 -16 -22 -29 -37 -49 -59 -62 -55 -45 -43 -51 -57 -48 -25 -2 11 13 18 30 45 52 53 57 72 98 121 127 117 106 104 105 95 68 44 40 55 63 46 13 -13 -20 -21 -29 -42 -56 -73 -100 -134 -156 -152 -138 -140 -160 -179 -181 -171 -167 -175 -185 -188 -186 -183 -170 -133 -78 -31 -17 -35 -54 -44 -5 40 60 50 30 24 43 81 110 114 89 68 76 110 134 125 98 77 63 43 23 31 60 61 -8 -108 -162 -135 -68 -25 -46 -111 -177 -194 -137 -31 58 79 43 -3 -23 -12 22 72 116 129 103 62 43 54 79 93 84 60 41 46 60 52 14 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -87 -93 -74 -53 -50 -50 -27 12 27 7 -16 -15 -3 -3 -6 0 9 0 -11 6 49 85 89 83 87 90 76 57 60 80 83 58 31 23 28 29 31 45 55 41 6 -12 8 46 63 47 20 13 32 53 46 14 -17 -17 4 17 11 2 12 27 23 -1 -18 -6 21 35 22 -6 -34 -57 -73 -81 -75 -58 -43 -48 -71 -95 -92 -63 -32 -26 -44 -60 -58 -44 -41 -52 -60 -52 -34 -23 -21 -16 -2 13 14 1 -8 3 29 51 59 56 49 41 34 35 43 48 39 20 8 15 31 37 27 14 13 24 38 45 43 34 24 17 14 11 5 -3 -10 -16 -22 -30 -34 -31 -24 -21 -21 -21 -16 -9 -5 -5 -8 -8 -8 -7 -7 -8 -9 -10 -9 -8 -7 -8 -9 -8 -6 -3 -1 -1 -2 -1 0 2 2 1 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 -128 0 -128 -128 -256 -384 -512 -512 -768 -896 -896 -1024 -1024 -1024 -1280 -1408 -1536 -1664 -1792 -2048 -2176 -2176 -2048 -2048 -2176 -2176 -2048 -1536 -1024 -768 -384 256 1152 1920 2048 1920 2048 2688 3712 4608 4992 5248 5504 5888 6272 6400 6272 5760 5376 5504 6016 6016 5632 5504 6528 7808 7808 5760 2944 1024 384 384 256 -384 -1280 -2048 -2816 -3712 -4736 -6272 -7552 -7936 -7040 -5760 -5504 -6528 -7296 -6144 -3200 -256 1408 1664 2304 3840 5760 6656 6784 7296 9216 12544 15488 16256 14976 13568 13312 13440 12160 8704 5632 5120 7040 8064 5888 1664 -1664 -2560 -2688 -3712 -5376 -7168 -9344 -12800 -17152 -19968 -19456 -17664 -17920 -20480 -22912 -23168 -21888 -21376 -22400 -23680 -24064 -23808 -23424 -21760 -17024 -9984 -3968 -2176 -4480 -6912 -5632 -640 5120 7680 6400 3840 3072 5504 10368 14080 14592 11392 8704 9728 14080 17152 16000 12544 9856 8064 5504 2944 3968 7680 7808 -1024 -13824 -20736 -17280 -8704 -3200 -5888 -14208 -22656 -24832 -17536 -3968 7424 10112 5504 -384 -2944 -1536 2816 9216 14848 16512 13184 7936 5504 6912 10112 11904 10752 7680 5248 5888 7680 6656 1792 -3072 -3840 -1664 -1792 -6656 -12160 -14336 -12672 -9344 -6784 -6272 -8192 -11136 -11904 -9472 -6784 -6400 -6400 -3456 1536 3456 896 -2048 -1920 -384 -384 -768 0 1152 0 -1408 768 6272 10880 11392 10624 11136 11520 9728 7296 7680 10240 10624 7424 3968 2944 3584 3712 3968 5760 7040 5248 768 -1536 1024 5888 8064 6016 2560 1664 4096 6784 5888 1792 -2176 -2176 512 2176 1408 256 1536 3456 2944 -128 -2304 -768 2688 4480 2816 -768 -4352 -7296 -9344 -10368 -9600 -7424 -5504 -6144 -9088 -12160 -11776 -8064 -4096 -3328 -5632 -7680 -7424 -5632 -5248 -6656 -7680 -6656 -4352 -2944 -2688 -2048 -256 1664 1792 128 -1024 384 3712 6528 7552 7168 6272 5248 4352 4480 5504 6144 4992 2560 1024 1920 3968 4736 3456 1792 1664 3072 4864 5760 5504 4352 3072 2176 1792 1408 640 -384 -1280 -2048 -2816 -3840 -4352 -3968 -3072 -2688 -2688 -2688 -2048 -1152 -640 -640 -1024 -1024 -1024 -896 -896 -1024 -1152 -1280 -1152 -1024 -896 -1024 -1152 -1024 -768 -384 -128 -128 -256 -128 0 256 256 128 0 0 0 128 0 0 0 0 0 0 0 0 0 0 0 0 +7 +0 0 0 0 0 0 0 0 0 -1 -1 -2 -2 -1 -1 -1 -2 -4 -5 -4 -1 1 2 1 -1 -1 -1 1 3 6 7 6 4 2 3 6 7 7 5 3 4 6 5 1 -3 -4 -2 -2 -8 -14 -17 -16 -12 -9 -9 -12 -17 -18 -15 -11 -11 -12 -6 2 6 1 -4 -4 -1 -1 -2 0 2 0 -4 2 17 30 32 31 33 36 31 24 26 35 37 27 14 11 14 
15 16 24 30 23 3 -7 4 28 40 30 13 8 22 38 34 10 -14 -14 3 14 9 2 11 24 21 -1 -17 -6 21 36 23 -7 -38 -64 -85 -96 -92 -72 -55 -62 -95 -129 -129 -89 -46 -39 -67 -95 -92 -73 -69 -89 -105 -93 -63 -43 -40 -32 -4 28 30 3 -17 7 69 125 149 146 131 112 96 101 128 147 124 64 28 53 111 134 102 57 53 101 163 199 196 161 119 87 72 59 30 -16 -60 -97 -141 -196 -230 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -157 -188 -184 -136 -68 -23 -18 -38 -35 19 99 128 73 0 0 72 120 74 4 27 145 241 220 127 85 136 221 261 245 214 201 206 209 201 179 150 120 97 81 63 36 3 -17 -13 7 24 25 17 3 -17 -44 -53 -29 7 8 -43 -111 -143 -133 -119 -137 -176 -204 -212 -214 -224 -236 -234 -214 -194 -182 -169 -144 -113 -97 -97 -89 -50 -1 17 -4 -23 -1 46 65 42 11 8 24 35 39 54 75 78 60 53 75 99 87 46 22 37 61 57 29 17 29 40 27 7 6 20 21 -1 -25 -24 -7 3 3 5 16 22 13 -3 -5 9 20 16 2 -7 -9 -10 -14 -11 -1 8 6 -2 -6 -1 6 8 9 11 13 11 5 2 3 2 -3 -8 -7 -2 -1 -5 -7 -6 -1 1 0 -1 1 3 4 3 2 1 1 0 0 0 0 0 0 0 0 0 0 -1 -1 0 0 0 0 0 -1 -1 0 +0 0 0 0 0 0 0 0 0 -64 -64 -128 -128 -64 -64 -64 -128 -256 -320 -256 -64 64 128 64 -64 -64 -64 64 192 384 448 384 256 128 192 384 448 448 320 192 256 384 320 64 -192 -256 -128 -128 -512 -896 -1088 -1024 -768 -576 -576 -768 -1088 -1152 -960 -704 -704 -768 -384 128 384 64 -256 -256 -64 -64 -128 0 128 0 -256 128 1088 1920 2048 1984 2112 2304 1984 1536 1664 2240 2368 1728 896 704 896 960 1024 1536 1920 1472 192 -448 256 1792 2560 1920 832 512 1408 2432 2176 640 -896 -896 192 896 576 128 704 1536 1344 -64 -1088 -384 1344 2304 1472 -448 -2432 -4096 -5440 -6144 -5888 -4608 -3520 -3968 -6080 -8256 -8256 -5696 -2944 -2496 -4288 -6080 -5888 -4672 -4416 -5696 -6720 -5952 -4032 -2752 -2560 -2048 -256 1792 1920 192 -1088 448 4416 8000 9536 9344 8384 7168 6144 6464 8192 9408 7936 4096 1792 3392 7104 8576 6528 3648 3392 6464 10432 12736 12544 10304 7616 5568 4608 3776 1920 -1024 -3840 -6208 -9024 -12544 -14720 -14016 -11456 -10048 -10816 -11200 -8960 -5184 -2816 -3264 -4864 -5632 -5376 -5120 -5376 -6528 -8128 -9152 -8960 -8064 -8128 -10048 -12032 -11776 -8704 -4352 -1472 -1152 -2432 -2240 1216 6336 8192 4672 0 0 4608 7680 4736 256 1728 9280 15424 14080 8128 5440 8704 14144 16704 15680 13696 12864 13184 13376 12864 11456 9600 7680 6208 5184 4032 2304 192 -1088 -832 448 1536 1600 1088 192 -1088 -2816 -3392 -1856 448 512 -2752 -7104 -9152 -8512 -7616 -8768 -11264 -13056 -13568 -13696 -14336 -15104 -14976 -13696 -12416 -11648 -10816 -9216 -7232 -6208 -6208 -5696 -3200 -64 1088 -256 -1472 -64 2944 4160 2688 704 512 1536 2240 2496 3456 4800 4992 3840 3392 4800 6336 5568 2944 1408 2368 3904 3648 1856 1088 1856 2560 1728 448 384 1280 1344 -64 -1600 -1536 -448 192 192 320 1024 1408 832 -192 -320 576 1280 1024 128 -448 -576 -640 -896 -704 -64 512 384 -128 -384 -64 384 512 576 704 832 704 320 128 192 128 -192 -512 -448 -128 -64 -320 -448 -384 -64 64 0 -64 64 192 256 192 128 64 64 0 0 0 0 0 0 0 0 0 0 -64 -64 0 0 0 0 0 -64 -64 0 +6 +0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 1 2 3 4 3 3 2 2 2 1 -1 -3 -5 -8 -11 -14 -14 -12 -11 -13 -14 -12 -7 -4 -5 -8 -10 -10 -9 -10 -13 -17 -19 -20 -19 -19 -25 -31 -31 -24 -13 -5 -4 -8 -8 4 22 29 17 0 0 18 32 20 1 8 43 75 70 41 28 47 79 95 92 83 80 84 87 86 79 68 55 46 39 31 18 1 -9 -7 4 14 15 10 2 -11 -29 -35 -20 5 5 -32 -83 -110 -104 -96 -113 -148 -176 -187 -194 -207 -223 -226 -212 -197 -188 -179 -156 -126 -110 -112 -105 -61 -1 22 -5 -30 -1 64 93 62 17 13 38 56 65 92 130 138 110 100 144 194 175 95 47 80 136 129 69 41 73 101 71 19 17 57 62 -3 -74 -74 -23 13 10 17 57 84 50 
-11 -21 38 91 74 11 -34 -43 -52 -70 -59 -1 52 42 -9 -35 -3 43 62 71 95 116 97 49 28 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 18 0 -8 36 99 124 104 78 64 52 27 8 8 19 23 19 18 20 16 3 -9 -9 5 31 57 66 42 -11 -60 -71 -44 -8 -5 -38 -74 -76 -52 -42 -66 -99 -103 -77 -56 -64 -83 -89 -83 -91 -117 -130 -113 -83 -71 -77 -74 -45 -9 7 -5 -23 -20 10 43 59 55 52 70 98 105 87 73 84 101 86 44 20 42 77 76 40 14 18 26 13 -10 -11 5 9 -6 -18 -8 5 -3 -28 -40 -30 -17 -20 -37 -48 -41 -26 -16 -13 -9 0 10 10 -2 -14 -12 3 19 25 26 27 27 20 10 5 8 11 8 2 -2 2 13 28 39 35 16 -2 -4 8 15 6 -11 -17 -9 2 1 -9 -17 -17 -13 -11 -14 -17 -19 -17 -16 -15 -12 -9 -8 -10 -11 -10 -7 -7 -9 -10 -7 -3 -1 -2 -2 0 2 2 2 2 2 2 2 1 1 1 1 0 0 -1 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 128 128 0 0 128 256 384 512 384 384 256 256 256 128 -128 -384 -640 -1024 -1408 -1792 -1792 -1536 -1408 -1664 -1792 -1536 -896 -512 -640 -1024 -1280 -1280 -1152 -1280 -1664 -2176 -2432 -2560 -2432 -2432 -3200 -3968 -3968 -3072 -1664 -640 -512 -1024 -1024 512 2816 3712 2176 0 0 2304 4096 2560 128 1024 5504 9600 8960 5248 3584 6016 10112 12160 11776 10624 10240 10752 11136 11008 10112 8704 7040 5888 4992 3968 2304 128 -1152 -896 512 1792 1920 1280 256 -1408 -3712 -4480 -2560 640 640 -4096 -10624 -14080 -13312 -12288 -14464 -18944 -22528 -23936 -24832 -26496 -28544 -28928 -27136 -25216 -24064 -22912 -19968 -16128 -14080 -14336 -13440 -7808 -128 2816 -640 -3840 -128 8192 11904 7936 2176 1664 4864 7168 8320 11776 16640 17664 14080 12800 18432 24832 22400 12160 6016 10240 17408 16512 8832 5248 9344 12928 9088 2432 2176 7296 7936 -384 -9472 -9472 -2944 1664 1280 2176 7296 10752 6400 -1408 -2688 4864 11648 9472 1408 -4352 -5504 -6656 -8960 -7552 -128 6656 5376 -1152 -4480 -384 5504 7936 9088 12160 14848 12416 6272 3584 4992 3584 -4224 -11648 -10240 -3072 -1024 -7680 -13952 -10880 -2048 2304 0 -1024 4608 12672 15872 13312 9984 8192 6656 3456 1024 1024 2432 2944 2432 2304 2560 2048 384 -1152 -1152 640 3968 7296 8448 5376 -1408 -7680 -9088 -5632 -1024 -640 -4864 -9472 -9728 -6656 -5376 -8448 -12672 -13184 -9856 -7168 -8192 -10624 -11392 -10624 -11648 -14976 -16640 -14464 -10624 -9088 -9856 -9472 -5760 -1152 896 -640 -2944 -2560 1280 5504 7552 7040 6656 8960 12544 13440 11136 9344 10752 12928 11008 5632 2560 5376 9856 9728 5120 1792 2304 3328 1664 -1280 -1408 640 1152 -768 -2304 -1024 640 -384 -3584 -5120 -3840 -2176 -2560 -4736 -6144 -5248 -3328 -2048 -1664 -1152 0 1280 1280 -256 -1792 -1536 384 2432 3200 3328 3456 3456 2560 1280 640 1024 1408 1024 256 -256 256 1664 3584 4992 4480 2048 -256 -512 1024 1920 768 -1408 -2176 -1152 256 128 -1152 -2176 -2176 -1664 -1408 -1792 -2176 -2432 -2176 -2048 -1920 -1536 -1152 -1024 -1280 -1408 -1280 -896 -896 -1152 -1280 -896 -384 -128 -256 -256 0 256 256 256 256 256 256 256 128 128 128 128 0 0 -128 0 0 0 0 0 0 0 0 +7 +0 0 0 0 -1 -1 -1 -1 0 0 0 0 0 0 -1 -1 0 1 1 0 -1 -2 -2 -3 -3 -1 2 1 -1 -2 -1 2 4 4 6 8 7 4 2 3 2 -4 -10 -9 -3 -1 -8 -15 -12 -3 2 0 -2 6 17 22 19 15 12 10 5 1 1 4 5 4 4 5 4 1 -3 -3 1 10 18 22 14 -4 -22 -27 -17 -3 -2 -16 -32 -34 -24 -20 -32 -49 -52 -40 -30 -35 -46 -51 -48 -54 -71 -81 -72 -54 -47 -53 -52 -33 -7 5 -4 -18 -16 8 37 51 49 47 65 92 102 86 74 87 107 93 48 22 49 91 92 49 18 23 35 18 -13 -15 7 14 -9 -27 -13 8 -5 -48 -70 -54 -31 -38 -72 -95 -84 -55 -34 -28 -21 1 25 25 -4 -35 -31 10 54 73 77 84 86 66 33 17 28 41 32 9 -5 8 56 125 179 166 79 -10 -19 46 86 36 -63 -105 -54 13 11 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -163 -187 -136 -56 -24 -43 -43 10 72 96 93 109 147 
161 137 115 131 161 153 89 13 -16 3 35 46 40 41 52 53 40 30 45 72 83 74 64 57 34 -15 -60 -64 -41 -36 -71 -106 -108 -78 -56 -56 -62 -50 -24 2 17 27 40 54 64 66 56 39 21 3 -5 0 15 22 11 -10 -23 -25 -24 -18 -4 14 12 -11 -27 -6 34 47 14 -25 -28 1 22 15 2 5 19 19 -1 -21 -20 -4 9 13 13 19 25 28 25 22 20 17 11 7 6 7 6 5 6 8 11 14 17 19 13 0 -11 -11 -2 6 7 -1 -10 -15 -14 -12 -12 -16 -19 -18 -12 -8 -8 -9 -8 -5 -2 0 1 0 -2 -4 -4 -2 -1 -3 -5 -6 -5 -4 -1 2 4 4 2 2 3 3 3 2 3 3 2 0 0 0 0 -1 -2 -2 -1 -1 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 -128 -128 -128 -128 0 0 0 0 0 0 -128 -128 0 128 128 0 -128 -256 -256 -384 -384 -128 256 128 -128 -256 -128 256 512 512 768 1024 896 512 256 384 256 -512 -1280 -1152 -384 -128 -1024 -1920 -1536 -384 256 0 -256 768 2176 2816 2432 1920 1536 1280 640 128 128 512 640 512 512 640 512 128 -384 -384 128 1280 2304 2816 1792 -512 -2816 -3456 -2176 -384 -256 -2048 -4096 -4352 -3072 -2560 -4096 -6272 -6656 -5120 -3840 -4480 -5888 -6528 -6144 -6912 -9088 -10368 -9216 -6912 -6016 -6784 -6656 -4224 -896 640 -512 -2304 -2048 1024 4736 6528 6272 6016 8320 11776 13056 11008 9472 11136 13696 11904 6144 2816 6272 11648 11776 6272 2304 2944 4480 2304 -1664 -1920 896 1792 -1152 -3456 -1664 1024 -640 -6144 -8960 -6912 -3968 -4864 -9216 -12160 -10752 -7040 -4352 -3584 -2688 128 3200 3200 -512 -4480 -3968 1280 6912 9344 9856 10752 11008 8448 4224 2176 3584 5248 4096 1152 -640 1024 7168 16000 22912 21248 10112 -1280 -2432 5888 11008 4608 -8064 -13440 -6912 1664 1408 -7680 -16000 -16896 -13056 -11520 -15104 -20224 -22528 -22016 -21760 -20864 -17536 -12928 -12032 -16384 -20480 -18688 -14080 -14720 -20864 -23936 -17408 -7168 -3072 -5504 -5504 1280 9216 12288 11904 13952 18816 20608 17536 14720 16768 20608 19584 11392 1664 -2048 384 4480 5888 5120 5248 6656 6784 5120 3840 5760 9216 10624 9472 8192 7296 4352 -1920 -7680 -8192 -5248 -4608 -9088 -13568 -13824 -9984 -7168 -7168 -7936 -6400 -3072 256 2176 3456 5120 6912 8192 8448 7168 4992 2688 384 -640 0 1920 2816 1408 -1280 -2944 -3200 -3072 -2304 -512 1792 1536 -1408 -3456 -768 4352 6016 1792 -3200 -3584 128 2816 1920 256 640 2432 2432 -128 -2688 -2560 -512 1152 1664 1664 2432 3200 3584 3200 2816 2560 2176 1408 896 768 896 768 640 768 1024 1408 1792 2176 2432 1664 0 -1408 -1408 -256 768 896 -128 -1280 -1920 -1792 -1536 -1536 -2048 -2432 -2304 -1536 -1024 -1024 -1152 -1024 -640 -256 0 128 0 -256 -512 -512 -256 -128 -384 -640 -768 -640 -512 -128 256 512 512 256 256 384 384 384 256 384 384 256 0 0 0 0 -128 -256 -256 -128 -128 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 0 0 2 3 3 2 -1 -1 1 3 1 -3 -5 -3 0 0 -4 -8 -9 -8 -7 -10 -14 -16 -17 -17 -17 -15 -12 -12 -16 -21 -20 -16 -17 -25 -30 -23 -10 -5 -8 -8 2 14 20 20 24 33 38 33 29 34 43 42 25 3 -5 1 11 15 13 14 18 19 15 11 18 29 34 31 28 26 16 -7 -30 -32 -21 -19 -38 -59 -61 -45 -33 -34 -39 -32 -16 1 12 19 28 39 48 50 44 32 17 2 -5 0 14 20 10 -10 -23 -26 -25 -19 -4 15 14 -13 -32 -8 42 60 19 -33 -38 2 32 23 3 8 30 31 -2 -35 -35 -7 17 25 26 37 51 58 54 49 46 39 27 17 16 19 18 14 16 23 33 43 53 59 43 1 -35 -38 -7 24 28 -2 -41 -62 -61 -50 -55 -76 -94 -91 -63 -44 -44 -52 -48 -30 -12 1 9 4 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 35 60 61 45 41 53 62 60 63 81 87 61 22 3 11 11 -17 -49 -58 -50 -59 -98 -131 -120 -68 -20 -8 -32 -58 -60 -42 -29 -38 -54 -52 -30 -6 2 4 18 45 59 52 45 62 90 104 94 80 75 72 57 41 44 61 64 35 -7 -32 -34 -38 -57 -76 -78 -61 -49 -61 -79 -73 -43 -20 -29 -60 -77 -65 -45 -50 -73 -81 -55 -12 12 8 -4 -4 12 27 27 21 23 41 
57 49 19 -4 4 28 34 9 -21 -27 -11 5 3 -9 -14 -8 -3 -5 -13 -14 -5 8 10 -2 -18 -20 -8 5 9 7 13 25 32 26 14 10 17 26 28 23 20 23 29 31 26 18 12 9 5 1 1 4 7 5 -1 -5 -5 -5 -8 -10 -8 -5 -5 -6 -8 -7 -4 -4 -4 -4 -1 3 5 6 5 4 3 3 3 3 4 3 2 1 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -128 0 0 256 384 384 256 -128 -128 128 384 128 -384 -640 -384 0 0 -512 -1024 -1152 -1024 -896 -1280 -1792 -2048 -2176 -2176 -2176 -1920 -1536 -1536 -2048 -2688 -2560 -2048 -2176 -3200 -3840 -2944 -1280 -640 -1024 -1024 256 1792 2560 2560 3072 4224 4864 4224 3712 4352 5504 5376 3200 384 -640 128 1408 1920 1664 1792 2304 2432 1920 1408 2304 3712 4352 3968 3584 3328 2048 -896 -3840 -4096 -2688 -2432 -4864 -7552 -7808 -5760 -4224 -4352 -4992 -4096 -2048 128 1536 2432 3584 4992 6144 6400 5632 4096 2176 256 -640 0 1792 2560 1280 -1280 -2944 -3328 -3200 -2432 -512 1920 1792 -1664 -4096 -1024 5376 7680 2432 -4224 -4864 256 4096 2944 384 1024 3840 3968 -256 -4480 -4480 -896 2176 3200 3328 4736 6528 7424 6912 6272 5888 4992 3456 2176 2048 2432 2304 1792 2048 2944 4224 5504 6784 7552 5504 128 -4480 -4864 -896 3072 3584 -256 -5248 -7936 -7808 -6400 -7040 -9728 -12032 -11648 -8064 -5632 -5632 -6656 -6144 -3840 -1536 128 1152 512 -1664 -3840 -3584 -1664 -896 -3328 -6528 -7936 -7168 -4608 -384 4480 7680 7808 5760 5248 6784 7936 7680 8064 10368 11136 7808 2816 384 1408 1408 -2176 -6272 -7424 -6400 -7552 -12544 -16768 -15360 -8704 -2560 -1024 -4096 -7424 -7680 -5376 -3712 -4864 -6912 -6656 -3840 -768 256 512 2304 5760 7552 6656 5760 7936 11520 13312 12032 10240 9600 9216 7296 5248 5632 7808 8192 4480 -896 -4096 -4352 -4864 -7296 -9728 -9984 -7808 -6272 -7808 -10112 -9344 -5504 -2560 -3712 -7680 -9856 -8320 -5760 -6400 -9344 -10368 -7040 -1536 1536 1024 -512 -512 1536 3456 3456 2688 2944 5248 7296 6272 2432 -512 512 3584 4352 1152 -2688 -3456 -1408 640 384 -1152 -1792 -1024 -384 -640 -1664 -1792 -640 1024 1280 -256 -2304 -2560 -1024 640 1152 896 1664 3200 4096 3328 1792 1280 2176 3328 3584 2944 2560 2944 3712 3968 3328 2304 1536 1152 640 128 128 512 896 640 -128 -640 -640 -640 -1024 -1280 -1024 -640 -640 -768 -1024 -896 -512 -512 -512 -512 -128 384 640 768 640 512 384 384 384 384 512 384 256 128 0 0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 -1 -1 -1 0 0 -1 -1 -2 -2 -2 -2 -2 -3 -3 -3 -2 -2 -3 -3 -2 -1 0 0 0 -1 -3 -3 -2 -1 -3 -5 -7 -6 -4 -1 4 7 7 6 5 7 9 9 10 13 15 11 4 0 2 2 -4 -11 -14 -12 -15 -25 -34 -32 -19 -6 -3 -10 -18 -20 -14 -10 -14 -19 -20 -12 -3 1 1 7 19 26 23 21 29 44 52 48 42 40 39 32 24 26 37 40 22 -5 -21 -24 -27 -41 -56 -59 -47 -39 -49 -65 -61 -37 -18 -27 -56 -73 -63 -45 -51 -76 -86 -60 -14 13 9 -5 -5 15 35 36 28 31 57 81 72 29 -5 6 45 56 15 -36 -48 -19 10 6 -17 -27 -15 -5 -11 -28 -31 -10 20 25 -5 -45 -51 -21 16 26 23 39 80 103 88 49 35 64 100 108 91 81 97 128 140 121 87 64 48 29 9 8 29 47 33 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 45 86 100 91 76 68 70 82 97 112 115 92 54 27 18 18 5 -25 -50 -50 -34 -19 -22 -45 -70 -85 -86 -81 -83 -90 -93 -89 -88 -97 -111 -114 -111 -110 -103 -80 -49 -42 -66 -96 -99 -76 -57 -60 -73 -79 -77 -79 -86 -88 -78 -57 -32 -11 -4 -7 -4 14 41 59 65 63 58 61 72 95 114 105 67 31 32 63 85 75 50 39 42 38 21 11 17 30 32 27 29 39 36 16 -3 0 16 24 15 0 -5 -4 -6 -12 -12 -8 -10 -21 -29 -26 -14 -7 -6 -5 4 15 19 19 19 21 20 18 21 27 24 8 -11 -17 -7 4 4 -4 -8 -4 1 -4 -15 -21 -16 -5 3 3 0 -3 -4 -5 -5 -7 -8 -10 -13 -15 -15 -14 -14 -15 -16 -17 -15 -12 -10 -8 -6 -4 -3 -3 -4 -3 -1 1 1 1 
0 1 2 2 2 1 1 1 1 0 0 -1 0 0 0 -1 -1 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 -128 -128 -128 0 0 -128 -128 -256 -256 -256 -256 -256 -384 -384 -384 -256 -256 -384 -384 -256 -128 0 0 0 -128 -384 -384 -256 -128 -384 -640 -896 -768 -512 -128 512 896 896 768 640 896 1152 1152 1280 1664 1920 1408 512 0 256 256 -512 -1408 -1792 -1536 -1920 -3200 -4352 -4096 -2432 -768 -384 -1280 -2304 -2560 -1792 -1280 -1792 -2432 -2560 -1536 -384 128 128 896 2432 3328 2944 2688 3712 5632 6656 6144 5376 5120 4992 4096 3072 3328 4736 5120 2816 -640 -2688 -3072 -3456 -5248 -7168 -7552 -6016 -4992 -6272 -8320 -7808 -4736 -2304 -3456 -7168 -9344 -8064 -5760 -6528 -9728 -11008 -7680 -1792 1664 1152 -640 -640 1920 4480 4608 3584 3968 7296 10368 9216 3712 -640 768 5760 7168 1920 -4608 -6144 -2432 1280 768 -2176 -3456 -1920 -640 -1408 -3584 -3968 -1280 2560 3200 -640 -5760 -6528 -2688 2048 3328 2944 4992 10240 13184 11264 6272 4480 8192 12800 13824 11648 10368 12416 16384 17920 15488 11136 8192 6144 3712 1152 1024 3712 6016 4224 -384 -3584 -3968 -4352 -7040 -9472 -8576 -5632 -4992 -7424 -9600 -8448 -5632 -4864 -5760 -5376 -896 5760 11008 12800 11648 9728 8704 8960 10496 12416 14336 14720 11776 6912 3456 2304 2304 640 -3200 -6400 -6400 -4352 -2432 -2816 -5760 -8960 -10880 -11008 -10368 -10624 -11520 -11904 -11392 -11264 -12416 -14208 -14592 -14208 -14080 -13184 -10240 -6272 -5376 -8448 -12288 -12672 -9728 -7296 -7680 -9344 -10112 -9856 -10112 -11008 -11264 -9984 -7296 -4096 -1408 -512 -896 -512 1792 5248 7552 8320 8064 7424 7808 9216 12160 14592 13440 8576 3968 4096 8064 10880 9600 6400 4992 5376 4864 2688 1408 2176 3840 4096 3456 3712 4992 4608 2048 -384 0 2048 3072 1920 0 -640 -512 -768 -1536 -1536 -1024 -1280 -2688 -3712 -3328 -1792 -896 -768 -640 512 1920 2432 2432 2432 2688 2560 2304 2688 3456 3072 1024 -1408 -2176 -896 512 512 -512 -1024 -512 128 -512 -1920 -2688 -2048 -640 384 384 0 -384 -512 -640 -640 -896 -1024 -1280 -1664 -1920 -1920 -1792 -1792 -1920 -2048 -2176 -1920 -1536 -1280 -1024 -768 -512 -384 -384 -512 -384 -128 128 128 128 0 128 256 256 256 128 128 128 128 0 0 -128 0 0 0 -128 -128 0 0 0 0 0 0 0 +7 +0 -1 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 2 3 2 2 1 1 1 0 0 1 2 1 -1 -2 -2 -3 -4 -6 -6 -4 -4 -6 -7 -7 -5 -5 -6 -5 -1 5 11 13 13 11 10 11 13 17 20 21 17 11 5 3 4 1 -6 -13 -13 -9 -5 -7 -13 -21 -26 -27 -26 -28 -31 -33 -32 -33 -37 -43 -46 -46 -46 -45 -36 -23 -20 -32 -47 -50 -39 -30 -32 -41 -45 -45 -47 -53 -55 -50 -37 -21 -8 -3 -5 -3 10 31 47 52 51 49 53 64 86 105 99 64 31 32 65 90 81 55 45 49 46 26 14 22 39 43 36 41 55 53 24 -4 0 26 40 25 0 -8 -6 -11 -21 -23 -16 -19 -43 -63 -56 -31 -15 -14 -11 11 38 51 52 55 61 60 55 66 87 81 30 -37 -60 -25 17 17 -15 -31 -14 4 -17 -69 -100 -78 -23 18 22 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 30 49 33 24 52 97 111 87 65 70 86 77 35 0 -7 7 17 9 -8 -11 13 53 86 95 88 78 70 59 53 69 102 122 100 42 -16 -46 -41 -9 24 31 3 -39 -55 -37 -7 13 24 34 38 30 24 39 58 47 -1 -54 -70 -56 -45 -49 -52 -43 -31 -30 -34 -23 4 24 23 15 20 33 36 25 14 14 11 -8 -33 -41 -31 -23 -30 -41 -39 -30 -27 -30 -28 -21 -18 -25 -30 -25 -17 -17 -16 -6 12 20 12 5 15 37 53 51 40 33 35 39 43 46 49 44 29 9 0 4 12 11 3 -4 -2 3 4 0 -6 -10 -10 -7 -4 -4 -8 -11 -7 0 2 -3 -8 -9 -7 -8 -9 -9 -5 -1 0 1 2 3 3 0 -1 0 2 3 4 5 5 5 5 4 3 2 0 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 0 0 0 0 0 0 0 0 0 0 0 128 128 128 128 256 384 256 256 128 128 128 0 0 128 256 128 -128 -256 -256 -384 -512 -768 -768 -512 -512 -768 -896 -896 -640 -640 -768 
-640 -128 640 1408 1664 1664 1408 1280 1408 1664 2176 2560 2688 2176 1408 640 384 512 128 -768 -1664 -1664 -1152 -640 -896 -1664 -2688 -3328 -3456 -3328 -3584 -3968 -4224 -4096 -4224 -4736 -5504 -5888 -5888 -5888 -5760 -4608 -2944 -2560 -4096 -6016 -6400 -4992 -3840 -4096 -5248 -5760 -5760 -6016 -6784 -7040 -6400 -4736 -2688 -1024 -384 -640 -384 1280 3968 6016 6656 6528 6272 6784 8192 11008 13440 12672 8192 3968 4096 8320 11520 10368 7040 5760 6272 5888 3328 1792 2816 4992 5504 4608 5248 7040 6784 3072 -512 0 3328 5120 3200 0 -1024 -768 -1408 -2688 -2944 -2048 -2432 -5504 -8064 -7168 -3968 -1920 -1792 -1408 1408 4864 6528 6656 7040 7808 7680 7040 8448 11136 10368 3840 -4736 -7680 -3200 2176 2176 -1920 -3968 -1792 512 -2176 -8832 -12800 -9984 -2944 2304 2816 0 -2304 -3072 -3584 -4352 -6016 -7680 -9984 -12800 -15360 -16384 -16384 -17152 -19200 -21888 -23168 -21760 -18816 -15616 -13056 -10368 -7168 -5120 -5632 -7680 -7168 -2304 3840 6272 4224 3072 6656 12416 14208 11136 8320 8960 11008 9856 4480 0 -896 896 2176 1152 -1024 -1408 1664 6784 11008 12160 11264 9984 8960 7552 6784 8832 13056 15616 12800 5376 -2048 -5888 -5248 -1152 3072 3968 384 -4992 -7040 -4736 -896 1664 3072 4352 4864 3840 3072 4992 7424 6016 -128 -6912 -8960 -7168 -5760 -6272 -6656 -5504 -3968 -3840 -4352 -2944 512 3072 2944 1920 2560 4224 4608 3200 1792 1792 1408 -1024 -4224 -5248 -3968 -2944 -3840 -5248 -4992 -3840 -3456 -3840 -3584 -2688 -2304 -3200 -3840 -3200 -2176 -2176 -2048 -768 1536 2560 1536 640 1920 4736 6784 6528 5120 4224 4480 4992 5504 5888 6272 5632 3712 1152 0 512 1536 1408 384 -512 -256 384 512 0 -768 -1280 -1280 -896 -512 -512 -1024 -1408 -896 0 256 -384 -1024 -1152 -896 -1024 -1152 -1152 -640 -128 0 128 256 384 384 0 -128 0 256 384 512 640 640 640 640 512 384 256 0 -128 -128 -128 -256 -256 -256 -256 -256 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 0 -1 -1 -1 0 0 -1 -1 -1 0 -1 -2 -3 -3 -1 0 0 0 -1 -2 -2 -2 -3 -4 -6 -8 -10 -11 -11 -12 -14 -17 -19 -19 -17 -15 -13 -11 -8 -6 -7 -9 -9 -3 5 8 6 4 10 19 23 18 14 16 20 18 8 0 -2 2 5 2 -3 -4 4 17 29 33 31 28 26 23 21 28 43 52 44 19 -8 -22 -20 -5 12 16 1 -22 -31 -22 -4 8 15 22 25 20 16 27 41 34 -1 -41 -55 -45 -37 -41 -45 -38 -28 -28 -32 -22 4 24 23 16 21 37 41 29 17 17 14 -10 -43 -55 -42 -32 -43 -59 -59 -46 -42 -48 -45 -35 -31 -43 -53 -46 -32 -33 -31 -11 26 43 27 13 36 90 130 128 104 89 95 110 124 138 149 138 93 31 0 17 44 43 12 -15 -8 15 21 1 -28 -45 -47 -34 -18 -21 -46 -63 -43 0 14 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 30 46 37 9 -8 6 36 61 79 96 112 120 125 123 104 60 12 -21 -32 -36 -49 -66 -86 -114 -146 -167 -158 -122 -88 -75 -81 -89 -88 -86 -94 -105 -104 -76 -38 -15 -22 -50 -70 -68 -52 -37 -30 -28 -22 -5 27 62 95 115 125 131 136 143 152 152 131 93 53 31 26 29 26 15 -3 -18 -25 -18 -12 -24 -51 -69 -64 -46 -37 -42 -42 -31 -19 -17 -22 -25 -23 -19 -15 -10 -5 -5 -8 -8 -3 2 3 1 -4 -8 -8 2 21 34 32 20 14 18 20 11 0 -1 5 7 0 -8 -11 -14 -21 -25 -19 -10 -8 -16 -20 -13 -7 -15 -34 -46 -43 -31 -23 -20 -16 -10 -8 -12 -17 -16 -7 4 11 13 12 10 12 16 18 17 14 13 15 17 17 16 14 12 10 10 10 10 11 12 13 13 12 9 6 5 4 3 1 -1 -2 -3 -3 -3 -3 -3 -4 -5 -5 -4 -4 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 0 0 0 0 0 -128 -128 -128 0 0 -128 -128 -128 0 -128 -256 -384 -384 -128 0 0 0 -128 -256 -256 -256 -384 -512 -768 -1024 -1280 -1408 -1408 -1536 -1792 -2176 -2432 -2432 -2176 -1920 -1664 -1408 -1024 -768 -896 -1152 -1152 -384 640 1024 768 512 1280 2432 2944 2304 1792 2048 2560 2304 1024 0 -256 256 640 256 -384 -512 512 2176 3712 4224 3968 
3584 3328 2944 2688 3584 5504 6656 5632 2432 -1024 -2816 -2560 -640 1536 2048 128 -2816 -3968 -2816 -512 1024 1920 2816 3200 2560 2048 3456 5248 4352 -128 -5248 -7040 -5760 -4736 -5248 -5760 -4864 -3584 -3584 -4096 -2816 512 3072 2944 2048 2688 4736 5248 3712 2176 2176 1792 -1280 -5504 -7040 -5376 -4096 -5504 -7552 -7552 -5888 -5376 -6144 -5760 -4480 -3968 -5504 -6784 -5888 -4096 -4224 -3968 -1408 3328 5504 3456 1664 4608 11520 16640 16384 13312 11392 12160 14080 15872 17664 19072 17664 11904 3968 0 2176 5632 5504 1536 -1920 -1024 1920 2688 128 -3584 -5760 -6016 -4352 -2304 -2688 -5888 -8064 -5504 0 1792 -1920 -6912 -8192 -6912 -7424 -9600 -9344 -4992 -384 1280 1792 3840 5888 4736 1152 -1024 768 4608 7808 10112 12288 14336 15360 16000 15744 13312 7680 1536 -2688 -4096 -4608 -6272 -8448 -11008 -14592 -18688 -21376 -20224 -15616 -11264 -9600 -10368 -11392 -11264 -11008 -12032 -13440 -13312 -9728 -4864 -1920 -2816 -6400 -8960 -8704 -6656 -4736 -3840 -3584 -2816 -640 3456 7936 12160 14720 16000 16768 17408 18304 19456 19456 16768 11904 6784 3968 3328 3712 3328 1920 -384 -2304 -3200 -2304 -1536 -3072 -6528 -8832 -8192 -5888 -4736 -5376 -5376 -3968 -2432 -2176 -2816 -3200 -2944 -2432 -1920 -1280 -640 -640 -1024 -1024 -384 256 384 128 -512 -1024 -1024 256 2688 4352 4096 2560 1792 2304 2560 1408 0 -128 640 896 0 -1024 -1408 -1792 -2688 -3200 -2432 -1280 -1024 -2048 -2560 -1664 -896 -1920 -4352 -5888 -5504 -3968 -2944 -2560 -2048 -1280 -1024 -1536 -2176 -2048 -896 512 1408 1664 1536 1280 1536 2048 2304 2176 1792 1664 1920 2176 2176 2048 1792 1536 1280 1280 1280 1280 1408 1536 1664 1664 1536 1152 768 640 512 384 128 -128 -256 -384 -384 -384 -384 -384 -512 -640 -640 -512 -512 -384 -384 -384 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 0 0 0 -1 -2 -2 -2 -1 -1 -2 -3 -2 0 0 -1 -4 -5 -4 -5 -6 -6 -4 -1 0 1 3 5 4 1 -1 0 4 8 11 14 17 19 21 21 18 11 2 -5 -7 -8 -11 -16 -21 -28 -37 -44 -43 -34 -25 -22 -25 -28 -28 -29 -32 -37 -37 -28 -15 -6 -9 -21 -30 -29 -23 -17 -14 -14 -11 -3 13 32 51 63 71 76 80 87 94 97 85 62 36 21 19 21 19 11 -2 -14 -21 -15 -10 -21 -46 -63 -60 -44 -37 -42 -43 -33 -20 -18 -25 -29 -27 -23 -19 -13 -6 -6 -10 -10 -4 3 5 1 -6 -13 -13 4 36 61 58 38 28 36 40 23 1 -1 11 16 0 -18 -27 -36 -54 -65 -51 -26 -24 -47 -59 -41 -22 -48 -114 -162 -155 -115 -86 -76 -63 -42 -34 -54 -79 -75 -35 23 63 76 69 62 76 104 123 118 102 100 118 140 149 143 131 117 110 109 115 127 141 163 187 201 188 153 115 95 85 70 38 -7 -46 -70 -83 -90 -102 -126 -166 -211 -240 -241 -228 -231 -255 -270 -250 -214 -204 -229 -253 -239 -201 -184 -195 -202 -176 -133 -102 -83 -54 -2 59 116 162 197 219 233 252 282 310 323 329 341 356 352 321 280 247 213 168 119 89 81 71 42 0 -31 -48 -67 -97 -129 -146 -150 -157 -172 -189 -195 -192 -191 -201 -217 -223 -213 -189 -166 -149 -139 -125 -99 -62 -21 16 44 64 75 80 86 103 125 144 153 158 169 185 194 187 169 146 127 115 111 107 95 72 45 23 9 -4 -21 -42 -60 -72 -78 -78 -80 -87 -98 -105 -104 -95 -88 -87 -89 -85 -78 -73 -73 -72 -63 -50 -40 -37 -36 -28 -14 -2 0 -4 -5 2 11 14 12 10 13 17 19 19 18 18 19 18 17 16 16 17 16 15 13 11 10 9 7 5 4 4 3 2 1 1 1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 -64 -64 0 0 0 -64 -128 -128 -128 -64 -64 -128 -192 -128 0 0 -64 -256 -320 -256 -320 -384 -384 -256 -64 0 64 192 320 256 64 -64 0 256 512 704 896 1088 1216 1344 1344 1152 704 128 -320 -448 -512 -704 -1024 -1344 -1792 -2368 -2816 -2752 -2176 -1600 -1408 -1600 -1792 -1792 -1856 -2048 -2368 -2368 -1792 -960 -384 -576 -1344 -1920 -1856 -1472 -1088 -896 
-896 -704 -192 832 2048 3264 4032 4544 4864 5120 5568 6016 6208 5440 3968 2304 1344 1216 1344 1216 704 -128 -896 -1344 -960 -640 -1344 -2944 -4032 -3840 -2816 -2368 -2688 -2752 -2112 -1280 -1152 -1600 -1856 -1728 -1472 -1216 -832 -384 -384 -640 -640 -256 192 320 64 -384 -832 -832 256 2304 3904 3712 2432 1792 2304 2560 1472 64 -64 704 1024 0 -1152 -1728 -2304 -3456 -4160 -3264 -1664 -1536 -3008 -3776 -2624 -1408 -3072 -7296 -10368 -9920 -7360 -5504 -4864 -4032 -2688 -2176 -3456 -5056 -4800 -2240 1472 4032 4864 4416 3968 4864 6656 7872 7552 6528 6400 7552 8960 9536 9152 8384 7488 7040 6976 7360 8128 9024 10432 11968 12864 12032 9792 7360 6080 5440 4480 2432 -448 -2944 -4480 -5312 -5760 -6528 -8064 -10624 -13504 -15360 -15424 -14592 -14784 -16320 -17280 -16000 -13696 -13056 -14656 -16192 -15296 -12864 -11776 -12480 -12928 -11264 -8512 -6528 -5312 -3456 -128 3776 7424 10368 12608 14016 14912 16128 18048 19840 20672 21056 21824 22784 22528 20544 17920 15808 13632 10752 7616 5696 5184 4544 2688 0 -1984 -3072 -4288 -6208 -8256 -9344 -9600 -10048 -11008 -12096 -12480 -12288 -12224 -12864 -13888 -14272 -13632 -12096 -10624 -9536 -8896 -8000 -6336 -3968 -1344 1024 2816 4096 4800 5120 5504 6592 8000 9216 9792 10112 10816 11840 12416 11968 10816 9344 8128 7360 7104 6848 6080 4608 2880 1472 576 -256 -1344 -2688 -3840 -4608 -4992 -4992 -5120 -5568 -6272 -6720 -6656 -6080 -5632 -5568 -5696 -5440 -4992 -4672 -4672 -4608 -4032 -3200 -2560 -2368 -2304 -1792 -896 -128 0 -256 -320 128 704 896 768 640 832 1088 1216 1216 1152 1152 1216 1152 1088 1024 1024 1088 1024 960 832 704 640 576 448 320 256 256 192 128 64 64 64 0 0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -1 -2 -1 -1 -1 -2 -2 -2 -1 0 2 2 2 2 3 5 6 6 6 6 8 10 11 11 11 10 10 10 11 13 15 19 22 25 25 21 16 14 13 11 6 -2 -9 -14 -16 -18 -22 -27 -37 -49 -57 -59 -58 -60 -69 -75 -71 -63 -62 -71 -81 -79 -68 -64 -70 -74 -67 -52 -41 -34 -23 -1 26 52 75 93 107 116 129 148 167 178 186 197 211 214 199 178 161 142 115 83 64 59 53 32 0 -25 -39 -57 -84 -114 -132 -139 -149 -166 -187 -197 -199 -202 -218 -241 -253 -247 -225 -201 -186 -177 -163 -132 -84 -29 22 65 97 116 125 139 170 212 249 271 287 315 352 378 375 346 306 274 255 251 249 228 177 113 60 25 -10 -58 -119 -175 -219 -242 -250 -263 -295 -343 -381 -385 -363 -348 -356 -373 -369 -348 -340 -350 -355 -323 -262 -216 -209 -209 -168 -84 -11 1 -28 -34 17 90 120 107 96 126 181 214 217 217 231 253 263 261 263 280 303 311 302 287 275 266 246 210 172 154 154 149 123 87 66 66 62 33 -8 -38 -53 -77 -120 -162 -181 -178 -169 -167 -175 -183 -189 -189 -180 -163 -145 -129 -111 -88 -59 -32 -12 -1 1 -7 -19 -15 13 53 80 84 80 85 98 94 61 25 23 63 107 118 90 55 30 14 -3 -16 -18 -22 -42 -78 -107 -112 -95 -76 -71 -87 -110 -127 -129 -120 -107 -89 -65 -39 -23 -20 -19 -16 -13 -14 -9 13 42 57 50 36 30 31 29 27 31 43 53 55 51 50 52 54 56 58 56 47 32 19 14 14 15 13 9 6 5 4 -1 -9 -15 -18 -19 -21 -24 -26 -27 -29 -31 -31 -28 -25 -19 -12 -7 -7 -11 -12 -9 -5 -3 1 6 12 12 8 7 8 10 8 7 9 13 14 13 12 11 9 6 4 3 4 3 1 0 -1 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 -1 0 +0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -128 -64 -128 -64 -64 -64 -128 -128 -128 -64 0 128 128 128 128 192 320 384 384 384 384 512 640 704 704 704 640 640 640 704 832 960 1216 1408 1600 1600 1344 1024 896 832 704 384 -128 -576 -896 -1024 -1152 -1408 -1728 -2368 -3136 -3648 -3776 -3712 -3840 -4416 -4800 -4544 -4032 -3968 -4544 -5184 -5056 -4352 -4096 -4480 -4736 -4288 -3328 -2624 -2176 -1472 -64 1664 3328 4800 5952 6848 
7424 8256 9472 10688 11392 11904 12608 13504 13696 12736 11392 10304 9088 7360 5312 4096 3776 3392 2048 0 -1600 -2496 -3648 -5376 -7296 -8448 -8896 -9536 -10624 -11968 -12608 -12736 -12928 -13952 -15424 -16192 -15808 -14400 -12864 -11904 -11328 -10432 -8448 -5376 -1856 1408 4160 6208 7424 8000 8896 10880 13568 15936 17344 18368 20160 22528 24192 24000 22144 19584 17536 16320 16064 15936 14592 11328 7232 3840 1600 -640 -3712 -7616 -11200 -14016 -15488 -16000 -16832 -18880 -21952 -24384 -24640 -23232 -22272 -22784 -23872 -23616 -22272 -21760 -22400 -22720 -20672 -16768 -13824 -13376 -13376 -10752 -5376 -704 64 -1792 -2176 1088 5760 7680 6848 6144 8064 11584 13696 13888 13888 14784 16192 16832 16704 16832 17920 19392 19904 19328 18368 17600 17024 15744 13440 11008 9856 9856 9536 7872 5568 4224 4224 3968 2112 -512 -2432 -3392 -4928 -7680 -10368 -11584 -11392 -10816 -10688 -11200 -11712 -12096 -12096 -11520 -10432 -9280 -8256 -7104 -5632 -3776 -2048 -768 -64 64 -448 -1216 -960 832 3392 5120 5376 5120 5440 6272 6016 3904 1600 1472 4032 6848 7552 5760 3520 1920 896 -192 -1024 -1152 -1408 -2688 -4992 -6848 -7168 -6080 -4864 -4544 -5568 -7040 -8128 -8256 -7680 -6848 -5696 -4160 -2496 -1472 -1280 -1216 -1024 -832 -896 -576 832 2688 3648 3200 2304 1920 1984 1856 1728 1984 2752 3392 3520 3264 3200 3328 3456 3584 3712 3584 3008 2048 1216 896 896 960 832 576 384 320 256 -64 -576 -960 -1152 -1216 -1344 -1536 -1664 -1728 -1856 -1984 -1984 -1792 -1600 -1216 -768 -448 -448 -704 -768 -576 -320 -192 64 384 768 768 512 448 512 640 512 448 576 832 896 832 768 704 576 384 256 192 256 192 64 0 -64 0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 0 -64 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -3 -4 -4 -5 -5 -6 -7 -8 -8 -9 -10 -11 -11 -9 -9 -9 -10 -8 -5 -1 0 -2 -3 1 6 9 8 8 11 16 20 22 23 26 29 32 33 35 38 43 46 47 46 46 46 44 39 33 31 32 32 27 19 15 16 15 8 -3 -11 -15 -23 -36 -51 -58 -59 -57 -58 -63 -67 -72 -73 -72 -67 -61 -56 -49 -40 -27 -16 -6 -1 0 -4 -10 -8 7 30 47 51 50 54 64 63 42 17 16 46 80 90 71 44 25 12 -2 -14 -17 -21 -40 -75 -106 -114 -98 -80 -77 -96 -124 -147 -153 -146 -133 -113 -84 -51 -31 -27 -27 -23 -19 -22 -13 21 69 97 86 64 54 58 57 53 63 89 112 119 114 114 121 129 138 147 147 125 88 55 41 43 46 41 29 21 19 16 -2 -32 -57 -70 -78 -88 -103 -114 -124 -138 -151 -157 -149 -135 -109 -72 -42 -44 -68 -79 -59 -31 -16 8 57 109 117 86 74 96 115 102 87 119 183 220 215 200 195 179 134 92 87 105 96 50 6 -3 5 -1 -25 -45 -46 -37 -33 -40 -53 -62 -60 -52 -47 -57 -68 -64 -38 -8 4 -3 -12 -9 9 26 24 -2 -37 -58 -51 -23 5 17 14 0 -26 -53 -68 -51 -16 12 15 7 5 13 15 7 -2 -10 -26 -49 -66 -67 -68 -83 -109 -121 -110 -95 -96 -106 -114 -112 -104 -97 -87 -74 -64 -58 -50 -38 -31 -26 -18 -4 6 5 -5 -9 -5 4 12 20 34 50 62 65 60 53 48 47 42 34 27 28 37 42 37 27 17 8 -4 -14 -16 -11 -9 -12 -14 -8 -2 -6 -16 -21 -16 -10 -13 -20 -19 -8 2 3 -2 -3 5 16 22 22 19 20 25 30 29 23 16 13 15 18 17 14 12 12 10 6 1 0 1 1 -1 -1 0 1 -1 -3 -3 0 1 0 -1 -2 -1 0 0 -1 -1 0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -256 -256 -384 -512 -512 -640 -640 -768 -896 -1024 -1024 -1152 -1280 -1408 -1408 -1152 -1152 -1152 -1280 -1024 -640 -128 0 -256 -384 128 768 1152 1024 1024 1408 2048 2560 2816 2944 3328 3712 4096 4224 4480 4864 5504 5888 6016 5888 5888 5888 5632 4992 4224 3968 4096 4096 3456 2432 1920 2048 1920 1024 -384 -1408 -1920 -2944 -4608 -6528 -7424 -7552 -7296 -7424 -8064 -8576 -9216 -9344 -9216 -8576 -7808 -7168 -6272 -5120 -3456 -2048 -768 -128 0 -512 -1280 -1024 896 3840 6016 6528 6400 6912 8192 
8064 5376 2176 2048 5888 10240 11520 9088 5632 3200 1536 -256 -1792 -2176 -2688 -5120 -9600 -13568 -14592 -12544 -10240 -9856 -12288 -15872 -18816 -19584 -18688 -17024 -14464 -10752 -6528 -3968 -3456 -3456 -2944 -2432 -2816 -1664 2688 8832 12416 11008 8192 6912 7424 7296 6784 8064 11392 14336 15232 14592 14592 15488 16512 17664 18816 18816 16000 11264 7040 5248 5504 5888 5248 3712 2688 2432 2048 -256 -4096 -7296 -8960 -9984 -11264 -13184 -14592 -15872 -17664 -19328 -20096 -19072 -17280 -13952 -9216 -5376 -5632 -8704 -10112 -7552 -3968 -2048 1024 7296 13952 14976 11008 9472 12288 14720 13056 11136 15232 23424 28160 27520 25600 24960 22912 17152 11776 11136 13440 12288 6400 768 -384 640 -128 -3200 -5760 -5888 -4736 -4224 -5120 -6784 -7936 -7680 -6656 -6016 -7296 -8704 -8192 -4864 -1024 512 -384 -1536 -1152 1152 3328 3072 -256 -4736 -7424 -6528 -2944 640 2176 1792 0 -3328 -6784 -8704 -6528 -2048 1536 1920 896 640 1664 1920 896 -256 -1280 -3328 -6272 -8448 -8576 -8704 -10624 -13952 -15488 -14080 -12160 -12288 -13568 -14592 -14336 -13312 -12416 -11136 -9472 -8192 -7424 -6400 -4864 -3968 -3328 -2304 -512 768 640 -640 -1152 -640 512 1536 2560 4352 6400 7936 8320 7680 6784 6144 6016 5376 4352 3456 3584 4736 5376 4736 3456 2176 1024 -512 -1792 -2048 -1408 -1152 -1536 -1792 -1024 -256 -768 -2048 -2688 -2048 -1280 -1664 -2560 -2432 -1024 256 384 -256 -384 640 2048 2816 2816 2432 2560 3200 3840 3712 2944 2048 1664 1920 2304 2176 1792 1536 1536 1280 768 128 0 128 128 -128 -128 0 128 -128 -384 -384 0 128 0 -128 -256 -128 0 0 -128 -128 0 0 0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -5 -5 -6 -6 -5 -4 -2 -3 -4 -5 -4 -2 -2 0 4 8 9 7 6 9 11 11 9 14 22 28 28 27 28 26 21 14 14 18 17 9 1 -1 1 -1 -6 -11 -11 -9 -9 -11 -15 -18 -17 -16 -15 -18 -22 -21 -13 -3 1 -2 -5 -4 3 10 10 -1 -17 -27 -24 -11 2 8 7 0 -14 -30 -39 -30 -10 7 9 4 3 8 10 5 -2 -8 -20 -38 -52 -54 -56 -70 -94 -107 -99 -88 -91 -103 -112 -113 -108 -103 -94 -82 -73 -67 -59 -46 -38 -33 -23 -6 9 7 -7 -13 -7 7 18 33 56 85 108 115 109 99 93 92 85 70 57 61 82 95 87 65 43 22 -10 -36 -42 -31 -25 -36 -41 -25 -6 -17 -53 -72 -56 -37 -47 -76 -76 -31 12 15 -9 -12 29 87 121 123 114 123 158 196 199 162 115 99 122 148 146 126 114 117 105 65 20 6 14 14 -3 -9 6 16 -6 -42 -41 0 33 16 -26 -39 -11 13 4 -16 -9 22 45 40 23 10 -1 -20 -40 -55 -78 -116 -157 -170 -151 -116 -93 -91 -105 -125 -141 -142 -133 -131 -144 -151 -134 -94 -59 -42 -37 -38 -41 -45 -43 -27 -7 0 -7 -8 10 45 75 88 96 108 126 132 115 90 74 68 58 38 26 31 47 53 40 21 9 0 -14 -31 -37 -28 -15 -13 -21 -33 -36 -32 -25 -24 -30 -44 -61 -81 -98 -102 -93 -76 -62 -52 -38 -19 -3 4 6 14 27 35 37 40 50 63 72 79 87 95 95 87 78 75 75 74 70 65 57 46 34 24 18 16 16 12 2 -12 -23 -28 -28 -30 -37 -41 -40 -33 -26 -22 -21 -22 -22 -18 -14 -10 -10 -10 -9 -7 -7 -9 -9 -7 -3 0 1 2 4 4 3 3 3 3 2 1 1 2 2 1 0 0 0 0 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 0 0 0 0 0 0 -128 -128 -128 -128 -256 -256 -256 -384 -384 -512 -640 -640 -768 -768 -640 -512 -256 -384 -512 -640 -512 -256 -256 0 512 1024 1152 896 768 1152 1408 1408 1152 1792 2816 3584 3584 3456 3584 3328 2688 1792 1792 2304 2176 1152 128 -128 128 -128 -768 -1408 -1408 -1152 -1152 -1408 -1920 -2304 -2176 -2048 -1920 -2304 -2816 -2688 -1664 -384 128 -256 -640 -512 384 1280 1280 -128 -2176 -3456 -3072 -1408 256 1024 896 0 -1792 -3840 -4992 -3840 -1280 896 1152 512 384 1024 1280 640 -256 -1024 -2560 -4864 -6656 -6912 -7168 -8960 -12032 -13696 -12672 -11264 -11648 -13184 -14336 -14464 -13824 -13184 
-12032 -10496 -9344 -8576 -7552 -5888 -4864 -4224 -2944 -768 1152 896 -896 -1664 -896 896 2304 4224 7168 10880 13824 14720 13952 12672 11904 11776 10880 8960 7296 7808 10496 12160 11136 8320 5504 2816 -1280 -4608 -5376 -3968 -3200 -4608 -5248 -3200 -768 -2176 -6784 -9216 -7168 -4736 -6016 -9728 -9728 -3968 1536 1920 -1152 -1536 3712 11136 15488 15744 14592 15744 20224 25088 25472 20736 14720 12672 15616 18944 18688 16128 14592 14976 13440 8320 2560 768 1792 1792 -384 -1152 768 2048 -768 -5376 -5248 0 4224 2048 -3328 -4992 -1408 1664 512 -2048 -1152 2816 5760 5120 2944 1280 -128 -2560 -5120 -7040 -9984 -14848 -20096 -21760 -19328 -14848 -11904 -11648 -13440 -16000 -18048 -18176 -17024 -16768 -18432 -19328 -17152 -12032 -7552 -5376 -4736 -4864 -5248 -5760 -5504 -3456 -896 0 -896 -1024 1280 5760 9600 11264 12288 13824 16128 16896 14720 11520 9472 8704 7424 4864 3328 3968 6016 6784 5120 2688 1152 0 -1792 -3968 -4736 -3584 -1920 -1664 -2688 -4224 -4608 -4096 -3200 -3072 -3840 -5632 -7808 -10368 -12544 -13056 -11904 -9728 -7936 -6656 -4864 -2432 -384 512 768 1792 3456 4480 4736 5120 6400 8064 9216 10112 11136 12160 12160 11136 9984 9600 9600 9472 8960 8320 7296 5888 4352 3072 2304 2048 2048 1536 256 -1536 -2944 -3584 -3584 -3840 -4736 -5248 -5120 -4224 -3328 -2816 -2688 -2816 -2816 -2304 -1792 -1280 -1280 -1280 -1152 -896 -896 -1152 -1152 -896 -384 0 128 256 512 512 384 384 384 384 256 128 128 256 256 128 0 0 0 0 -128 -128 -128 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 0 0 -1 -1 0 2 4 4 4 5 7 9 10 9 6 6 8 10 11 10 9 10 9 6 2 0 1 1 -1 -2 0 2 -1 -7 -7 0 5 2 -5 -8 -3 2 1 -4 -2 5 10 9 5 2 -1 -6 -12 -17 -24 -36 -51 -56 -51 -40 -34 -34 -40 -49 -57 -58 -56 -57 -64 -68 -62 -45 -29 -21 -19 -20 -22 -25 -25 -16 -4 0 -4 -6 6 30 51 62 68 79 95 101 90 72 61 57 50 34 23 28 45 51 39 21 9 0 -15 -34 -42 -33 -18 -16 -27 -41 -47 -42 -34 -33 -43 -65 -92 -124 -153 -163 -152 -128 -107 -92 -69 -35 -5 8 13 29 56 77 82 91 116 150 177 200 226 252 259 245 227 222 229 233 228 217 196 164 126 91 70 65 65 52 11 -50 -107 -131 -138 -155 -193 -226 -227 -195 -157 -137 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 0 22 41 63 73 63 57 67 78 63 40 42 73 89 64 24 8 13 6 -19 -28 -4 21 3 -45 -76 -74 -68 -89 -117 -119 -98 -84 -95 -111 -108 -89 -75 -69 -66 -58 -54 -55 -51 -30 -2 9 -6 -31 -43 -36 -20 -5 9 24 35 32 16 -2 -13 -16 -19 -27 -41 -62 -76 -71 -50 -32 -31 -30 -12 17 32 19 3 8 29 38 28 21 36 62 78 74 69 75 87 93 92 93 103 115 120 115 102 87 74 68 68 70 67 56 41 34 32 32 24 9 -3 -6 1 8 10 6 -1 -6 -6 -2 -1 -6 -15 -23 -30 -37 -44 -48 -46 -44 -46 -53 -58 -56 -50 -43 -37 -32 -27 -24 -21 -18 -16 -15 -17 -19 -19 -16 -9 -4 -1 -2 -3 -3 0 3 6 8 10 9 7 5 5 5 6 5 4 4 4 3 2 0 -2 -3 -3 -3 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 +0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -64 0 0 -64 -64 0 128 256 256 256 320 448 576 640 576 384 384 512 640 704 640 576 640 576 384 128 0 64 64 -64 -128 0 128 -64 -448 -448 0 320 128 -320 -512 -192 128 64 -256 -128 320 640 576 320 128 -64 -384 -768 -1088 -1536 -2304 -3264 -3584 -3264 -2560 -2176 -2176 -2560 -3136 -3648 -3712 -3584 -3648 -4096 -4352 -3968 -2880 -1856 -1344 -1216 -1280 -1408 -1600 -1600 -1024 -256 0 -256 -384 384 1920 3264 3968 4352 5056 6080 6464 5760 4608 3904 3648 3200 2176 1472 1792 2880 3264 2496 1344 576 0 -960 -2176 -2688 -2112 -1152 -1024 -1728 -2624 -3008 -2688 -2176 -2112 -2752 -4160 -5888 -7936 -9792 -10432 -9728 -8192 -6848 -5888 -4416 -2240 -320 512 832 
1856 3584 4928 5248 5824 7424 9600 11328 12800 14464 16128 16576 15680 14528 14208 14656 14912 14592 13888 12544 10496 8064 5824 4480 4160 4160 3328 704 -3200 -6848 -8384 -8832 -9920 -12352 -14464 -14528 -12480 -10048 -8768 -8960 -9664 -9792 -8640 -6592 -5120 -4928 -5312 -4992 -4096 -4288 -5632 -6400 -4992 -2304 0 1408 2624 4032 4672 4032 3648 4288 4992 4032 2560 2688 4672 5696 4096 1536 512 832 384 -1216 -1792 -256 1344 192 -2880 -4864 -4736 -4352 -5696 -7488 -7616 -6272 -5376 -6080 -7104 -6912 -5696 -4800 -4416 -4224 -3712 -3456 -3520 -3264 -1920 -128 576 -384 -1984 -2752 -2304 -1280 -320 576 1536 2240 2048 1024 -128 -832 -1024 -1216 -1728 -2624 -3968 -4864 -4544 -3200 -2048 -1984 -1920 -768 1088 2048 1216 192 512 1856 2432 1792 1344 2304 3968 4992 4736 4416 4800 5568 5952 5888 5952 6592 7360 7680 7360 6528 5568 4736 4352 4352 4480 4288 3584 2624 2176 2048 2048 1536 576 -192 -384 64 512 640 384 -64 -384 -384 -128 -64 -384 -960 -1472 -1920 -2368 -2816 -3072 -2944 -2816 -2944 -3392 -3712 -3584 -3200 -2752 -2368 -2048 -1728 -1536 -1344 -1152 -1024 -960 -1088 -1216 -1216 -1024 -576 -256 -64 -128 -192 -192 0 192 384 512 640 576 448 320 320 320 384 320 256 256 256 192 128 0 -128 -192 -192 -192 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -64 -64 -64 -64 -64 -64 -64 -64 0 0 0 +6 +0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 -2 -3 -4 -4 -5 -7 -9 -10 -9 -8 -7 -8 -9 -10 -9 -7 -6 -6 -7 -7 -6 -7 -9 -11 -9 -5 0 2 5 8 10 9 8 10 12 10 7 7 13 17 12 5 1 3 1 -5 -7 -1 5 1 -13 -22 -22 -21 -28 -38 -40 -34 -30 -34 -41 -41 -35 -30 -29 -28 -25 -24 -25 -24 -14 -1 4 -3 -16 -23 -20 -12 -3 5 15 22 20 10 -2 -9 -11 -14 -20 -31 -48 -60 -57 -41 -27 -27 -26 -11 15 30 19 3 8 30 40 31 24 41 73 92 90 86 96 113 125 126 131 147 168 179 177 161 140 122 114 118 124 121 104 79 66 65 65 51 20 -6 -12 2 20 25 17 -1 -14 -14 -5 -3 -18 -44 -69 -94 -121 -150 -166 -165 -162 -175 -208 -235 -236 -219 -192 -169 -150 -133 -119 -108 -97 -89 -89 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 4 36 68 96 114 116 98 75 69 84 99 96 84 80 82 78 53 12 -30 -60 -80 -85 -77 -65 -59 -68 -91 -124 -146 -146 -130 -121 -122 -109 -70 -30 -23 -46 -54 -24 14 16 -9 -13 20 56 54 25 14 33 58 67 65 79 118 162 196 211 218 230 245 254 247 222 195 175 157 137 122 123 133 129 102 65 42 30 13 -16 -44 -59 -67 -75 -81 -81 -83 -99 -121 -129 -119 -113 -124 -142 -148 -140 -134 -128 -109 -71 -35 -21 -23 -16 7 34 49 50 48 46 44 42 44 52 57 56 51 48 45 40 33 26 20 18 21 25 24 15 3 -3 1 8 9 -2 -17 -27 -29 -28 -33 -43 -47 -41 -29 -22 -21 -22 -21 -18 -15 -14 -14 -13 -11 -7 -4 -5 -9 -12 -13 -12 -12 -15 -18 -17 -13 -10 -8 -9 -9 -9 -7 -5 -2 0 1 1 1 2 2 1 1 1 0 0 1 1 2 2 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 128 128 128 128 0 0 0 0 0 0 -256 -384 -512 -512 -640 -896 -1152 -1280 -1152 -1024 -896 -1024 -1152 -1280 -1152 -896 -768 -768 -896 -896 -768 -896 -1152 -1408 -1152 -640 0 256 640 1024 1280 1152 1024 1280 1536 1280 896 896 1664 2176 1536 640 128 384 128 -640 -896 -128 640 128 -1664 -2816 -2816 -2688 -3584 -4864 -5120 -4352 -3840 -4352 -5248 -5248 -4480 -3840 -3712 -3584 -3200 -3072 -3200 -3072 -1792 -128 512 -384 -2048 -2944 -2560 -1536 -384 640 1920 2816 2560 1280 -256 -1152 -1408 -1792 -2560 -3968 -6144 -7680 -7296 -5248 -3456 -3456 -3328 -1408 1920 3840 2432 384 1024 3840 5120 3968 3072 5248 9344 11776 11520 11008 12288 14464 16000 16128 16768 18816 21504 22912 22656 20608 17920 15616 14592 15104 15872 15488 13312 10112 8448 8320 8320 6528 2560 -768 -1536 256 2560 3200 2176 -128 -1792 -1792 -640 -384 -2304 -5632 -8832 -12032 -15488 -19200 -21248 -21120 -20736 -22400 
-26624 -30080 -30208 -28032 -24576 -21632 -19200 -17024 -15232 -13824 -12416 -11392 -11392 -13056 -15232 -15872 -13440 -8320 -2944 -640 -1408 -3072 -2560 512 4608 8704 12288 14592 14848 12544 9600 8832 10752 12672 12288 10752 10240 10496 9984 6784 1536 -3840 -7680 -10240 -10880 -9856 -8320 -7552 -8704 -11648 -15872 -18688 -18688 -16640 -15488 -15616 -13952 -8960 -3840 -2944 -5888 -6912 -3072 1792 2048 -1152 -1664 2560 7168 6912 3200 1792 4224 7424 8576 8320 10112 15104 20736 25088 27008 27904 29440 31360 32512 31616 28416 24960 22400 20096 17536 15616 15744 17024 16512 13056 8320 5376 3840 1664 -2048 -5632 -7552 -8576 -9600 -10368 -10368 -10624 -12672 -15488 -16512 -15232 -14464 -15872 -18176 -18944 -17920 -17152 -16384 -13952 -9088 -4480 -2688 -2944 -2048 896 4352 6272 6400 6144 5888 5632 5376 5632 6656 7296 7168 6528 6144 5760 5120 4224 3328 2560 2304 2688 3200 3072 1920 384 -384 128 1024 1152 -256 -2176 -3456 -3712 -3584 -4224 -5504 -6016 -5248 -3712 -2816 -2688 -2816 -2688 -2304 -1920 -1792 -1792 -1664 -1408 -896 -512 -640 -1152 -1536 -1664 -1536 -1536 -1920 -2304 -2176 -1664 -1280 -1024 -1152 -1152 -1152 -896 -640 -256 0 128 128 128 256 256 128 128 128 0 0 128 128 256 256 128 128 128 128 128 128 128 128 128 0 0 0 0 0 0 0 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -5 -5 -5 -5 -4 -4 -4 -4 -4 -4 -4 -5 -6 -7 -6 -4 -2 -1 -1 -2 -2 0 3 6 9 11 12 11 8 8 10 13 13 12 12 12 12 8 2 -6 -12 -16 -17 -16 -14 -13 -16 -22 -31 -37 -38 -35 -34 -35 -32 -21 -10 -8 -15 -19 -8 5 6 -4 -5 8 22 22 11 6 15 27 31 32 40 60 85 105 116 123 133 145 154 153 141 127 116 107 96 87 90 100 99 80 52 34 25 11 -14 -40 -55 -63 -73 -80 -81 -86 -105 -131 -143 -135 -131 -147 -172 -184 -178 -174 -171 -148 -99 -49 -31 -34 -25 11 55 81 85 84 82 80 78 85 101 114 114 108 104 100 92 78 62 49 46 56 68 66 42 10 -7 4 27 29 -5 -58 -95 -102 -102 -127 -168 -191 -172 -126 -98 -98 -106 -103 -89 -78 -75 -77 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 3 22 32 39 46 51 48 40 29 26 33 50 75 95 104 107 111 124 136 140 138 151 186 221 225 199 180 186 201 188 153 126 129 140 129 99 75 65 62 58 50 42 24 -2 -25 -28 -20 -11 -13 -28 -55 -91 -118 -124 -110 -95 -88 -86 -79 -67 -59 -61 -71 -77 -73 -59 -48 -45 -44 -33 -10 9 14 10 9 16 24 26 29 31 30 26 30 41 48 42 34 34 37 23 -8 -33 -33 -20 -21 -37 -50 -49 -44 -51 -70 -89 -99 -101 -101 -100 -98 -97 -96 -92 -85 -79 -72 -63 -51 -43 -41 -40 -33 -19 -4 7 13 17 21 26 33 39 43 46 49 51 52 49 44 40 37 35 36 38 40 38 33 28 27 28 27 22 16 12 13 13 13 10 8 6 5 2 -1 -4 -6 -6 -6 -6 -7 -7 -6 -5 -4 -5 -5 -5 -4 -4 -4 -4 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -384 -384 -512 -640 -640 -640 -640 -512 -512 -512 -512 -512 -512 -512 -640 -768 -896 -768 -512 -256 -128 -128 -256 -256 0 384 768 1152 1408 1536 1408 1024 1024 1280 1664 1664 1536 1536 1536 1536 1024 256 -768 -1536 -2048 -2176 -2048 -1792 -1664 -2048 -2816 -3968 -4736 -4864 -4480 -4352 -4480 -4096 -2688 -1280 -1024 -1920 -2432 -1024 640 768 -512 -640 1024 2816 2816 1408 768 1920 3456 3968 4096 5120 7680 10880 13440 14848 15744 17024 18560 19712 19584 18048 16256 14848 13696 12288 11136 11520 12800 12672 10240 6656 4352 3200 1408 -1792 -5120 -7040 -8064 -9344 -10240 -10368 -11008 -13440 -16768 -18304 -17280 -16768 -18816 -22016 -23552 -22784 -22272 -21888 -18944 -12672 -6272 -3968 -4352 -3200 1408 7040 10368 10880 10752 10496 10240 9984 10880 12928 14592 14592 13824 13312 12800 11776 9984 7936 6272 5888 7168 8704 8448 5376 1280 -896 512 
3456 3712 -640 -7424 -12160 -13056 -13056 -16256 -21504 -24448 -22016 -16128 -12544 -12544 -13568 -13184 -11392 -9984 -9600 -9856 -9472 -8064 -5376 -3328 -4096 -7680 -11520 -12544 -11776 -12800 -16768 -20608 -20608 -16896 -12544 -11264 -12288 -13952 -14208 -12032 -8192 -3456 384 2816 4096 4992 5888 6528 6144 5120 3712 3328 4224 6400 9600 12160 13312 13696 14208 15872 17408 17920 17664 19328 23808 28288 28800 25472 23040 23808 25728 24064 19584 16128 16512 17920 16512 12672 9600 8320 7936 7424 6400 5376 3072 -256 -3200 -3584 -2560 -1408 -1664 -3584 -7040 -11648 -15104 -15872 -14080 -12160 -11264 -11008 -10112 -8576 -7552 -7808 -9088 -9856 -9344 -7552 -6144 -5760 -5632 -4224 -1280 1152 1792 1280 1152 2048 3072 3328 3712 3968 3840 3328 3840 5248 6144 5376 4352 4352 4736 2944 -1024 -4224 -4224 -2560 -2688 -4736 -6400 -6272 -5632 -6528 -8960 -11392 -12672 -12928 -12928 -12800 -12544 -12416 -12288 -11776 -10880 -10112 -9216 -8064 -6528 -5504 -5248 -5120 -4224 -2432 -512 896 1664 2176 2688 3328 4224 4992 5504 5888 6272 6528 6656 6272 5632 5120 4736 4480 4608 4864 5120 4864 4224 3584 3456 3584 3456 2816 2048 1536 1664 1664 1664 1280 1024 768 640 256 -128 -512 -768 -768 -768 -768 -896 -896 -768 -640 -512 -640 -640 -640 -512 -512 -512 -512 -384 -384 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 -1 0 0 0 -1 -1 -1 -1 -2 -2 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -4 -4 -3 -3 -2 -2 -4 -6 -7 -7 -8 -11 -14 -15 -13 -10 -10 -11 -13 -14 -12 -9 -4 0 3 4 6 7 8 8 7 5 5 6 10 16 21 23 25 27 31 35 37 38 43 54 66 70 63 59 63 69 67 56 47 50 56 52 41 32 29 28 27 24 20 12 -1 -13 -15 -11 -7 -8 -17 -34 -57 -76 -81 -74 -65 -62 -62 -58 -51 -45 -48 -58 -64 -61 -51 -42 -41 -41 -31 -10 9 14 10 10 18 26 30 33 36 36 33 38 53 64 58 47 49 55 35 -12 -51 -52 -33 -35 -63 -88 -89 -81 -96 -136 -177 -203 -213 -216 -220 -221 -226 -228 -224 -213 -203 -191 -170 -142 -124 -120 -121 -103 -61 -12 26 48 62 79 102 131 160 184 204 223 240 252 245 229 214 204 205 215 236 256 258 233 208 206 222 222 188 143 120 127 142 141 123 104 86 66 34 -6 -52 -88 -106 -112 -119 -131 -140 -128 -110 -108 -130 -156 -164 -158 -160 -176 -196 -197 -176 -151 -138 -137 -127 -104 -80 -77 -94 -105 -86 -50 -22 -11 0 24 46 49 44 45 50 45 27 13 14 22 22 23 35 55 66 61 52 51 47 33 16 13 17 10 -16 -38 -38 -23 -10 -11 -19 -22 -20 -23 -29 -32 -33 -36 -42 -48 -47 -45 -45 -55 -67 -75 -75 -63 -43 -23 -12 -9 -9 -5 8 21 28 33 41 52 56 54 52 56 63 66 65 62 58 48 33 19 15 23 33 36 32 29 29 29 22 7 -7 -13 -10 -5 -4 -10 -18 -20 -15 -11 -12 -17 -21 -15 -4 4 4 -2 -6 -6 -2 0 0 -2 -3 -2 0 1 0 -4 -6 -7 -6 -4 -1 2 3 3 2 2 3 6 7 6 6 5 5 5 4 4 3 3 2 1 1 0 -1 -1 -2 -1 -1 -1 -1 -1 0 0 0 -1 -1 -1 -1 0 0 0 0 -1 0 +0 0 0 0 -64 0 0 0 -64 -64 -64 -64 -128 -128 -192 -192 -192 -192 -192 -192 -192 -192 -192 -192 -192 -256 -256 -192 -192 -128 -128 -256 -384 -448 -448 -512 -704 -896 -960 -832 -640 -640 -704 -832 -896 -768 -576 -256 0 192 256 384 448 512 512 448 320 320 384 640 1024 1344 1472 1600 1728 1984 2240 2368 2432 2752 3456 4224 4480 4032 3776 4032 4416 4288 3584 3008 3200 3584 3328 2624 2048 1856 1792 1728 1536 1280 768 -64 -832 -960 -704 -448 -512 -1088 -2176 -3648 -4864 -5184 -4736 -4160 -3968 -3968 -3712 -3264 -2880 -3072 -3712 -4096 -3904 -3264 -2688 -2624 -2624 -1984 -640 576 896 640 640 1152 1664 1920 2112 2304 2304 2112 2432 3392 4096 3712 3008 3136 3520 2240 -768 -3264 -3328 -2112 -2240 -4032 -5632 -5696 -5184 -6144 -8704 -11328 -12992 -13632 -13824 -14080 -14144 -14464 -14592 -14336 -13632 -12992 -12224 -10880 -9088 -7936 -7680 -7744 -6592 -3904 -768 1664 3072 
3968 5056 6528 8384 10240 11776 13056 14272 15360 16128 15680 14656 13696 13056 13120 13760 15104 16384 16512 14912 13312 13184 14208 14208 12032 9152 7680 8128 9088 9024 7872 6656 5504 4224 2176 -384 -3328 -5632 -6784 -7168 -7616 -8384 -8960 -8192 -7040 -6912 -8320 -9984 -10496 -10112 -10240 -11264 -12544 -12608 -11264 -9664 -8832 -8768 -8128 -6656 -5120 -4928 -6016 -6720 -5504 -3200 -1408 -704 0 1536 2944 3136 2816 2880 3200 2880 1728 832 896 1408 1408 1472 2240 3520 4224 3904 3328 3264 3008 2112 1024 832 1088 640 -1024 -2432 -2432 -1472 -640 -704 -1216 -1408 -1280 -1472 -1856 -2048 -2112 -2304 -2688 -3072 -3008 -2880 -2880 -3520 -4288 -4800 -4800 -4032 -2752 -1472 -768 -576 -576 -320 512 1344 1792 2112 2624 3328 3584 3456 3328 3584 4032 4224 4160 3968 3712 3072 2112 1216 960 1472 2112 2304 2048 1856 1856 1856 1408 448 -448 -832 -640 -320 -256 -640 -1152 -1280 -960 -704 -768 -1088 -1344 -960 -256 256 256 -128 -384 -384 -128 0 0 -128 -192 -128 0 64 0 -256 -384 -448 -384 -256 -64 128 192 192 128 128 192 384 448 384 384 320 320 320 256 256 192 192 128 64 64 0 -64 -64 -128 -64 -64 -64 -64 -64 0 0 0 -64 -64 -64 -64 0 0 0 0 -64 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 1 2 3 3 4 5 6 7 7 7 7 8 9 10 12 13 13 12 13 15 15 14 11 10 11 13 13 12 11 9 7 4 -1 -7 -13 -16 -17 -19 -22 -24 -23 -20 -21 -26 -32 -34 -34 -36 -41 -47 -49 -45 -40 -37 -38 -37 -31 -25 -24 -30 -35 -30 -18 -8 -5 0 9 18 20 18 19 22 20 12 6 7 11 11 12 18 30 37 35 31 31 29 21 10 8 12 7 -11 -28 -29 -17 -8 -9 -16 -18 -17 -20 -26 -30 -32 -35 -42 -49 -49 -48 -49 -61 -76 -87 -88 -76 -53 -30 -15 -12 -13 -7 12 30 42 51 65 83 92 92 90 100 114 124 125 121 117 100 69 41 34 53 77 86 79 73 76 77 60 21 -20 -38 -30 -14 -12 -33 -61 -70 -55 -40 -44 -68 -82 -63 -18 19 19 -8 -30 -28 -9 2 0 -10 -13 -7 3 9 0 -23 -45 -53 -43 -27 -5 19 40 42 29 25 46 83 105 102 94 96 102 102 95 91 88 76 57 43 37 23 -9 -41 -48 -28 -8 -8 -19 -18 0 13 10 -3 -12 -11 -2 6 11 8 0 -7 -11 -18 -33 -50 -59 -56 -50 -47 -45 -40 -34 -35 -44 -66 -91 -116 -129 -123 -103 -85 -79 -78 -67 -57 -61 -72 -66 -31 6 12 -12 -34 -26 4 25 22 9 4 10 19 23 20 19 21 22 19 14 12 15 16 10 0 -7 -6 -5 -10 -18 -23 -19 -9 1 5 2 -3 -4 2 11 16 15 13 13 12 7 -3 -14 -23 -21 -10 5 11 3 -8 -11 -2 6 4 -4 -8 -5 3 8 7 1 -6 -9 -9 -9 -12 -15 -13 -7 -2 -5 -10 -11 -8 -7 -7 -7 -5 -1 1 2 3 4 4 3 4 7 9 8 6 7 8 9 9 7 7 8 8 7 7 6 5 3 1 0 0 -1 0 0 1 1 0 0 -1 -1 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 128 128 256 384 384 512 640 768 896 896 896 896 1024 1152 1280 1536 1664 1664 1536 1664 1920 1920 1792 1408 1280 1408 1664 1664 1536 1408 1152 896 512 -128 -896 -1664 -2048 -2176 -2432 -2816 -3072 -2944 -2560 -2688 -3328 -4096 -4352 -4352 -4608 -5248 -6016 -6272 -5760 -5120 -4736 -4864 -4736 -3968 -3200 -3072 -3840 -4480 -3840 -2304 -1024 -640 0 1152 2304 2560 2304 2432 2816 2560 1536 768 896 1408 1408 1536 2304 3840 4736 4480 3968 3968 3712 2688 1280 1024 1536 896 -1408 -3584 -3712 -2176 -1024 -1152 -2048 -2304 -2176 -2560 -3328 -3840 -4096 -4480 -5376 -6272 -6272 -6144 -6272 -7808 -9728 -11136 -11264 -9728 -6784 -3840 -1920 -1536 -1664 -896 1536 3840 5376 6528 8320 10624 11776 11776 11520 12800 14592 15872 16000 15488 14976 12800 8832 5248 4352 6784 9856 11008 10112 9344 9728 9856 7680 2688 -2560 -4864 -3840 -1792 -1536 -4224 -7808 -8960 -7040 -5120 -5632 -8704 -10496 -8064 -2304 2432 2432 -1024 -3840 -3584 -1152 256 0 -1280 -1664 -896 384 1152 0 -2944 -5760 -6784 -5504 -3456 -640 2432 5120 5376 3712 3200 5888 10624 13440 13056 12032 12288 13056 13056 12160 11648 11264 9728 7296 5504 
4736 2944 -1152 -5248 -6144 -3584 -1024 -1024 -2432 -2304 0 1664 1280 -384 -1536 -1408 -256 768 1408 1024 0 -896 -1408 -2304 -4224 -6400 -7552 -7168 -6400 -6016 -5760 -5120 -4352 -4480 -5632 -8448 -11648 -14848 -16512 -15744 -13184 -10880 -10112 -9984 -8576 -7296 -7808 -9216 -8448 -3968 768 1536 -1536 -4352 -3328 512 3200 2816 1152 512 1280 2432 2944 2560 2432 2688 2816 2432 1792 1536 1920 2048 1280 0 -896 -768 -640 -1280 -2304 -2944 -2432 -1152 128 640 256 -384 -512 256 1408 2048 1920 1664 1664 1536 896 -384 -1792 -2944 -2688 -1280 640 1408 384 -1024 -1408 -256 768 512 -512 -1024 -640 384 1024 896 128 -768 -1152 -1152 -1152 -1536 -1920 -1664 -896 -256 -640 -1280 -1408 -1024 -896 -896 -896 -640 -128 128 256 384 512 512 384 512 896 1152 1024 768 896 1024 1152 1152 896 896 1024 1024 896 896 768 640 384 128 0 0 -128 0 0 128 128 0 0 -128 -128 0 0 -128 -128 -128 0 0 0 0 -128 -128 -128 -128 0 +7 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 0 0 -1 -1 -1 -1 0 0 -1 -1 -1 0 0 0 -2 -4 -4 -4 -3 -1 1 3 4 3 2 5 9 12 13 12 13 14 15 14 14 14 13 10 8 7 4 -2 -9 -11 -7 -2 -2 -5 -5 0 3 2 -1 -4 -4 -1 2 3 2 0 -3 -5 -7 -13 -21 -25 -24 -22 -21 -21 -19 -17 -18 -23 -35 -49 -64 -73 -71 -62 -52 -50 -50 -44 -38 -42 -51 -47 -23 5 9 -10 -27 -21 4 22 20 8 3 9 18 22 21 20 22 24 21 15 14 18 20 13 0 -9 -8 -7 -13 -26 -33 -28 -14 1 8 4 -5 -7 4 21 30 29 26 26 25 16 -5 -31 -51 -49 -23 13 27 9 -20 -28 -5 18 14 -10 -25 -14 12 29 25 4 -22 -35 -34 -34 -48 -64 -58 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 12 18 27 37 37 31 39 66 88 87 74 78 104 123 118 107 114 131 141 139 135 133 119 86 44 17 4 -3 0 22 54 68 49 12 -9 -3 5 1 -12 -18 -2 24 39 30 7 -11 -13 -13 -21 -34 -44 -44 -42 -40 -26 1 22 16 -9 -28 -17 16 36 22 -19 -58 -65 -39 -9 -11 -38 -61 -60 -52 -56 -71 -81 -82 -90 -103 -108 -93 -64 -42 -39 -49 -59 -53 -28 -1 8 -3 -11 4 32 47 44 33 30 36 40 41 40 36 30 26 18 6 -6 -5 11 26 29 25 32 43 47 38 32 41 56 63 57 52 52 53 50 43 39 36 33 26 16 7 0 -5 -9 -12 -11 -7 -4 -7 -14 -21 -23 -23 -26 -28 -26 -18 -13 -12 -14 -15 -15 -15 -16 -16 -17 -18 -17 -11 -5 -1 -2 -3 -3 -1 2 5 7 8 8 7 8 9 9 8 6 4 3 2 1 0 -1 -1 -2 -3 -4 -4 -4 -4 -3 -3 -2 -2 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -128 0 0 -128 -128 -128 -128 0 0 -128 -128 -128 0 0 0 -256 -512 -512 -512 -384 -128 128 384 512 384 256 640 1152 1536 1664 1536 1664 1792 1920 1792 1792 1792 1664 1280 1024 896 512 -256 -1152 -1408 -896 -256 -256 -640 -640 0 384 256 -128 -512 -512 -128 256 384 256 0 -384 -640 -896 -1664 -2688 -3200 -3072 -2816 -2688 -2688 -2432 -2176 -2304 -2944 -4480 -6272 -8192 -9344 -9088 -7936 -6656 -6400 -6400 -5632 -4864 -5376 -6528 -6016 -2944 640 1152 -1280 -3456 -2688 512 2816 2560 1024 384 1152 2304 2816 2688 2560 2816 3072 2688 1920 1792 2304 2560 1664 0 -1152 -1024 -896 -1664 -3328 -4224 -3584 -1792 128 1024 512 -640 -896 512 2688 3840 3712 3328 3328 3200 2048 -640 -3968 -6528 -6272 -2944 1664 3456 1152 -2560 -3584 -640 2304 1792 -1280 -3200 -1792 1536 3712 3200 512 -2816 -4480 -4352 -4352 -6144 -8192 -7424 -3712 -1024 -2688 -6272 -7424 -5632 -4480 -5120 -5504 -3328 -128 1536 2304 3456 4736 4736 3968 4992 8448 11264 11136 9472 9984 13312 15744 15104 13696 14592 16768 18048 17792 17280 17024 15232 11008 5632 2176 512 -384 0 2816 6912 8704 6272 1536 -1152 -384 640 128 -1536 -2304 -256 3072 4992 3840 896 -1408 -1664 -1664 -2688 -4352 -5632 -5632 -5376 -5120 -3328 128 2816 2048 -1152 -3584 -2176 2048 4608 2816 -2432 -7424 -8320 -4992 -1152 -1408 -4864 -7808 -7680 -6656 -7168 -9088 -10368 -10496 
-11520 -13184 -13824 -11904 -8192 -5376 -4992 -6272 -7552 -6784 -3584 -128 1024 -384 -1408 512 4096 6016 5632 4224 3840 4608 5120 5248 5120 4608 3840 3328 2304 768 -768 -640 1408 3328 3712 3200 4096 5504 6016 4864 4096 5248 7168 8064 7296 6656 6656 6784 6400 5504 4992 4608 4224 3328 2048 896 0 -640 -1152 -1536 -1408 -896 -512 -896 -1792 -2688 -2944 -2944 -3328 -3584 -3328 -2304 -1664 -1536 -1792 -1920 -1920 -1920 -2048 -2048 -2176 -2304 -2176 -1408 -640 -128 -256 -384 -384 -128 256 640 896 1024 1024 896 1024 1152 1152 1024 768 512 384 256 128 0 -128 -128 -256 -384 -512 -512 -512 -512 -384 -384 -256 -256 -384 -384 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 -1 -1 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -1 0 1 1 2 2 2 3 5 7 8 7 8 11 13 13 13 14 17 19 20 20 20 19 14 7 3 0 -1 0 4 11 15 11 2 -3 -1 1 0 -4 -5 -1 7 12 9 2 -4 -5 -5 -8 -13 -17 -18 -18 -17 -12 0 10 7 -5 -14 -9 8 18 12 -11 -33 -38 -23 -6 -7 -24 -40 -41 -36 -40 -51 -59 -62 -69 -81 -87 -77 -54 -36 -34 -45 -55 -50 -28 -1 8 -3 -11 4 35 54 51 39 37 44 51 53 53 49 43 37 26 10 -8 -7 19 44 50 44 56 79 88 73 63 82 116 133 124 116 118 125 120 107 99 94 87 71 46 22 0 -15 -28 -36 -36 -24 -14 -22 -51 -78 -88 -93 -107 -120 -113 -83 -59 -56 -68 -76 -80 -84 -91 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 26 56 82 96 97 96 106 129 142 126 100 81 72 55 26 1 -4 -10 -36 -81 -116 -130 -134 -145 -155 -147 -131 -136 -176 -221 -229 -196 -159 -149 -156 -155 -143 -131 -131 -133 -118 -78 -31 2 16 30 47 64 71 67 68 81 93 90 74 65 72 86 92 87 77 62 49 42 49 54 35 -4 -37 -45 -39 -42 -58 -71 -72 -74 -89 -104 -104 -87 -73 -71 -77 -80 -78 -75 -71 -62 -46 -24 5 38 66 75 73 73 84 99 104 105 114 126 129 120 112 118 127 124 110 101 100 99 85 66 56 53 46 26 7 -1 -2 -4 -12 -24 -36 -46 -55 -65 -75 -83 -84 -75 -60 -47 -42 -44 -47 -46 -42 -35 -26 -20 -16 -15 -13 -10 -4 3 10 14 13 11 11 15 18 19 17 14 13 11 10 11 11 11 9 7 5 3 2 2 2 2 1 0 -1 -2 -3 -3 -3 -4 -4 -4 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 +0 -128 -128 0 0 -128 -128 -128 0 0 0 0 -128 -128 -128 -128 -128 -256 -256 -128 -128 -128 -256 -256 -256 -256 -256 -256 -256 -128 0 128 128 256 256 256 384 640 896 1024 896 1024 1408 1664 1664 1664 1792 2176 2432 2560 2560 2560 2432 1792 896 384 0 -128 0 512 1408 1920 1408 256 -384 -128 128 0 -512 -640 -128 896 1536 1152 256 -512 -640 -640 -1024 -1664 -2176 -2304 -2304 -2176 -1536 0 1280 896 -640 -1792 -1152 1024 2304 1536 -1408 -4224 -4864 -2944 -768 -896 -3072 -5120 -5248 -4608 -5120 -6528 -7552 -7936 -8832 -10368 -11136 -9856 -6912 -4608 -4352 -5760 -7040 -6400 -3584 -128 1024 -384 -1408 512 4480 6912 6528 4992 4736 5632 6528 6784 6784 6272 5504 4736 3328 1280 -1024 -896 2432 5632 6400 5632 7168 10112 11264 9344 8064 10496 14848 17024 15872 14848 15104 16000 15360 13696 12672 12032 11136 9088 5888 2816 0 -1920 -3584 -4608 -4608 -3072 -1792 -2816 -6528 -9984 -11264 -11904 -13696 -15360 -14464 -10624 -7552 -7168 -8704 -9728 -10240 -10752 -11648 -12672 -13952 -15104 -14336 -10112 -4224 -896 -1408 -3200 -3072 -384 3328 7168 10496 12288 12416 12288 13568 16512 18176 16128 12800 10368 9216 7040 3328 128 -512 -1280 -4608 -10368 -14848 -16640 -17152 -18560 -19840 -18816 -16768 -17408 -22528 -28288 -29312 -25088 -20352 -19072 -19968 -19840 -18304 -16768 -16768 -17024 -15104 -9984 -3968 256 2048 3840 6016 8192 9088 8576 8704 10368 11904 11520 9472 8320 9216 11008 11776 11136 9856 7936 6272 5376 6272 6912 4480 -512 -4736 -5760 -4992 -5376 -7424 -9088 -9216 -9472 -11392 -13312 -13312 -11136 -9344 -9088 -9856 -10240 -9984 -9600 
-9088 -7936 -5888 -3072 640 4864 8448 9600 9344 9344 10752 12672 13312 13440 14592 16128 16512 15360 14336 15104 16256 15872 14080 12928 12800 12672 10880 8448 7168 6784 5888 3328 896 -128 -256 -512 -1536 -3072 -4608 -5888 -7040 -8320 -9600 -10624 -10752 -9600 -7680 -6016 -5376 -5632 -6016 -5888 -5376 -4480 -3328 -2560 -2048 -1920 -1664 -1280 -512 384 1280 1792 1664 1408 1408 1920 2304 2432 2176 1792 1664 1408 1280 1408 1408 1408 1152 896 640 384 256 256 256 256 128 0 -128 -256 -384 -384 -384 -512 -512 -512 -384 -384 -384 -384 -256 -256 -128 -128 -128 -128 -128 0 0 0 0 0 0 0 0 0 +7 +0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -2 -2 -2 -3 -3 -3 -4 -4 -5 -6 -7 -7 -5 -3 -1 -1 -2 -2 -1 2 5 8 9 10 10 12 15 18 16 13 11 10 8 4 0 -1 -2 -7 -16 -24 -27 -29 -33 -36 -35 -32 -35 -46 -60 -63 -56 -47 -45 -49 -50 -47 -45 -46 -48 -44 -30 -12 1 6 12 20 28 32 31 32 39 46 46 39 35 39 48 53 51 46 38 31 27 33 37 25 -3 -28 -34 -30 -33 -47 -59 -61 -64 -78 -94 -96 -83 -71 -70 -78 -82 -83 -81 -79 -71 -54 -29 7 48 84 98 97 99 118 142 153 158 174 199 208 198 190 203 225 225 205 192 196 199 175 138 121 118 104 62 18 -1 -3 -9 -32 -65 -99 -130 -163 -195 -234 -266 -278 -255 -209 -168 -156 -168 -183 -186 -175 -151 -117 -89 -76 -73 -67 -50 -19 22 64 89 87 75 80 109 141 150 138 124 115 110 106 113 125 127 117 96 70 47 33 32 41 46 36 11 -16 -39 -54 -63 -75 -98 -120 -127 -117 -111 -123 -137 -123 -86 -57 -55 -60 -46 -13 18 36 52 67 73 74 84 111 132 123 91 68 71 88 96 88 77 70 74 77 62 28 -2 -6 11 22 12 -5 -10 -2 5 4 -8 -25 -46 -59 -56 -44 -40 -50 -56 -43 -17 4 2 -18 -43 -56 -48 -26 -8 -6 -10 -9 -1 7 16 31 46 47 28 11 14 33 44 33 7 -12 -16 -9 -4 -8 -18 -26 -26 -17 -2 14 22 13 -12 -33 -35 -21 -8 -11 -21 -25 -20 -15 -16 -23 -32 -39 -40 -37 -32 -32 -37 -44 -49 -49 -48 -45 -44 -46 -51 -49 -36 -20 -11 -12 -16 -14 -6 3 8 12 14 17 20 20 17 12 8 8 9 11 10 9 7 5 4 4 5 6 7 7 7 7 6 5 4 3 1 -1 -1 -1 0 0 0 1 1 0 0 -1 -1 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -128 -192 -192 -128 -128 -128 -192 -192 -192 -256 -256 -320 -384 -448 -448 -320 -192 -64 -64 -128 -128 -64 128 320 512 576 640 640 768 960 1152 1024 832 704 640 512 256 0 -64 -128 -448 -1024 -1536 -1728 -1856 -2112 -2304 -2240 -2048 -2240 -2944 -3840 -4032 -3584 -3008 -2880 -3136 -3200 -3008 -2880 -2944 -3072 -2816 -1920 -768 64 384 768 1280 1792 2048 1984 2048 2496 2944 2944 2496 2240 2496 3072 3392 3264 2944 2432 1984 1728 2112 2368 1600 -192 -1792 -2176 -1920 -2112 -3008 -3776 -3904 -4096 -4992 -6016 -6144 -5312 -4544 -4480 -4992 -5248 -5312 -5184 -5056 -4544 -3456 -1856 448 3072 5376 6272 6208 6336 7552 9088 9792 10112 11136 12736 13312 12672 12160 12992 14400 14400 13120 12288 12544 12736 11200 8832 7744 7552 6656 3968 1152 -64 -192 -576 -2048 -4160 -6336 -8320 -10432 -12480 -14976 -17024 -17792 -16320 -13376 -10752 -9984 -10752 -11712 -11904 -11200 -9664 -7488 -5696 -4864 -4672 -4288 -3200 -1216 1408 4096 5696 5568 4800 5120 6976 9024 9600 8832 7936 7360 7040 6784 7232 8000 8128 7488 6144 4480 3008 2112 2048 2624 2944 2304 704 -1024 -2496 -3456 -4032 -4800 -6272 -7680 -8128 -7488 -7104 -7872 -8768 -7872 -5504 -3648 -3520 -3840 -2944 -832 1152 2304 3328 4288 4672 4736 5376 7104 8448 7872 5824 4352 4544 5632 6144 5632 4928 4480 4736 4928 3968 1792 -128 -384 704 1408 768 -320 -640 -128 320 256 -512 -1600 -2944 -3776 -3584 -2816 -2560 -3200 -3584 -2752 -1088 256 128 -1152 -2752 -3584 -3072 -1664 -512 -384 -640 -576 -64 448 1024 1984 2944 3008 1792 704 896 2112 2816 2112 448 -768 -1024 -576 -256 -512 -1152 -1664 -1664 -1088 
-128 896 1408 832 -768 -2112 -2240 -1344 -512 -704 -1344 -1600 -1280 -960 -1024 -1472 -2048 -2496 -2560 -2368 -2048 -2048 -2368 -2816 -3136 -3136 -3072 -2880 -2816 -2944 -3264 -3136 -2304 -1280 -704 -768 -1024 -896 -384 192 512 768 896 1088 1280 1280 1088 768 512 512 576 704 640 576 448 320 256 256 320 384 448 448 448 448 384 320 256 192 64 -64 -64 -64 0 0 0 64 64 0 0 -64 -64 -64 -64 -64 0 0 -64 -64 -64 -64 -64 -64 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -3 -3 -4 -3 -3 -3 -2 -3 -3 -2 -1 0 2 4 4 4 4 6 9 10 9 9 9 9 9 10 12 13 12 10 8 5 4 4 5 6 5 1 -3 -7 -10 -12 -15 -19 -25 -27 -26 -25 -28 -33 -30 -22 -15 -15 -17 -13 -4 5 11 16 22 24 25 30 40 49 47 36 28 30 37 42 39 35 33 36 38 32 14 -1 -4 6 13 7 -3 -6 -2 3 3 -6 -18 -33 -43 -42 -34 -32 -40 -47 -36 -14 3 2 -16 -41 -54 -47 -26 -8 -6 -11 -10 -1 9 19 37 57 60 37 14 19 46 64 48 11 -19 -24 -14 -6 -13 -30 -46 -46 -31 -4 27 45 28 -24 -70 -77 -46 -18 -24 -50 -62 -51 -38 -42 -65 -93 -114 -121 -113 -101 -104 -126 -153 -174 -183 -182 -177 -177 -194 -221 -219 -168 -94 -53 -62 -85 -77 -33 18 55 77 98 122 146 154 135 99 71 72 92 107 109 101 88 70 53 52 69 95 114 126 133 132 123 108 97 76 37 -8 -32 -24 3 27 44 58 67 61 37 -1 -42 -71 -68 -33 11 19 -21 -72 -86 -56 -21 -21 -51 -76 -75 -57 -42 -38 -29 -1 32 51 49 38 35 42 54 53 46 40 42 44 43 39 32 31 36 49 59 50 22 -3 -2 19 32 23 9 10 19 22 24 34 46 39 18 10 27 40 28 1 -9 4 16 8 -2 5 18 14 -7 -18 -6 14 19 2 -21 -34 -26 -10 -1 -6 -14 -8 11 25 23 9 -5 -9 -8 -7 -9 -12 -15 -16 -14 -11 -12 -21 -34 -45 -51 -49 -43 -38 -38 -42 -43 -38 -29 -20 -11 -1 8 11 10 8 12 19 22 18 13 10 14 19 21 19 16 14 15 15 15 13 10 7 5 5 5 5 3 0 -1 -2 -2 -2 -3 -4 -5 -6 -6 -5 -4 -4 -4 -4 -4 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -256 -256 -256 -384 -384 -512 -384 -384 -384 -256 -384 -384 -256 -128 0 256 512 512 512 512 768 1152 1280 1152 1152 1152 1152 1152 1280 1536 1664 1536 1280 1024 640 512 512 640 768 640 128 -384 -896 -1280 -1536 -1920 -2432 -3200 -3456 -3328 -3200 -3584 -4224 -3840 -2816 -1920 -1920 -2176 -1664 -512 640 1408 2048 2816 3072 3200 3840 5120 6272 6016 4608 3584 3840 4736 5376 4992 4480 4224 4608 4864 4096 1792 -128 -512 768 1664 896 -384 -768 -256 384 384 -768 -2304 -4224 -5504 -5376 -4352 -4096 -5120 -6016 -4608 -1792 384 256 -2048 -5248 -6912 -6016 -3328 -1024 -768 -1408 -1280 -128 1152 2432 4736 7296 7680 4736 1792 2432 5888 8192 6144 1408 -2432 -3072 -1792 -768 -1664 -3840 -5888 -5888 -3968 -512 3456 5760 3584 -3072 -8960 -9856 -5888 -2304 -3072 -6400 -7936 -6528 -4864 -5376 -8320 -11904 -14592 -15488 -14464 -12928 -13312 -16128 -19584 -22272 -23424 -23296 -22656 -22656 -24832 -28288 -28032 -21504 -12032 -6784 -7936 -10880 -9856 -4224 2304 7040 9856 12544 15616 18688 19712 17280 12672 9088 9216 11776 13696 13952 12928 11264 8960 6784 6656 8832 12160 14592 16128 17024 16896 15744 13824 12416 9728 4736 -1024 -4096 -3072 384 3456 5632 7424 8576 7808 4736 -128 -5376 -9088 -8704 -4224 1408 2432 -2688 -9216 -11008 -7168 -2688 -2688 -6528 -9728 -9600 -7296 -5376 -4864 -3712 -128 4096 6528 6272 4864 4480 5376 6912 6784 5888 5120 5376 5632 5504 4992 4096 3968 4608 6272 7552 6400 2816 -384 -256 2432 4096 2944 1152 1280 2432 2816 3072 4352 5888 4992 2304 1280 3456 5120 3584 128 -1152 512 2048 1024 -256 640 2304 1792 -896 -2304 -768 1792 2432 256 -2688 -4352 -3328 -1280 -128 -768 -1792 -1024 1408 3200 2944 1152 -640 -1152 -1024 -896 -1152 -1536 -1920 -2048 -1792 -1408 -1536 -2688 -4352 -5760 -6528 -6272 -5504 -4864 -4864 -5376 -5504 -4864 -3712 -2560 -1408 
-128 1024 1408 1280 1024 1536 2432 2816 2304 1664 1280 1792 2432 2688 2432 2048 1792 1920 1920 1920 1664 1280 896 640 640 640 640 384 0 -128 -256 -256 -256 -384 -512 -640 -768 -768 -640 -512 -512 -512 -512 -512 -384 -256 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -3 -4 -5 -5 -4 -3 -2 -2 -3 -3 -2 0 2 3 5 6 8 9 9 7 5 5 7 9 10 9 9 7 6 6 8 12 15 17 19 19 19 17 16 13 6 -2 -7 -5 0 5 9 13 16 15 9 -1 -11 -20 -20 -10 3 6 -7 -24 -29 -20 -8 -8 -20 -30 -30 -23 -18 -17 -13 -1 14 24 24 19 18 22 29 29 26 23 24 26 27 25 21 20 24 34 42 36 16 -2 -2 15 26 19 8 9 17 20 22 33 45 40 19 11 29 45 32 1 -11 5 20 10 -2 6 24 19 -10 -26 -9 22 30 3 -35 -56 -45 -17 -2 -11 -27 -16 22 53 49 20 -10 -21 -19 -16 -20 -29 -38 -41 -38 -31 -35 -62 -103 -141 -162 -160 -144 -131 -136 -156 -165 -148 -115 -82 -47 -3 38 56 50 46 68 107 128 111 81 69 94 133 154 145 127 120 128 141 144 133 109 83 64 63 71 66 41 10 -11 -18 -20 -29 -48 -75 -110 -142 -153 -143 -124 -119 -128 -143 -147 -131 -105 -83 -79 -90 -95 -74 -39 -19 -25 -45 -52 -36 -6 23 33 33 34 40 42 33 26 37 60 70 59 55 77 108 111 83 60 65 79 75 52 31 18 6 -1 3 6 -11 -51 -78 -72 -50 -46 -59 -66 -57 -49 -62 -89 -100 -81 -40 -9 -8 -30 -42 -24 9 23 9 -6 7 41 62 56 39 30 28 24 16 14 16 14 1 -16 -29 -31 -23 -8 3 4 -7 -20 -27 -28 -27 -27 -27 -25 -22 -22 -23 -25 -26 -22 -14 -4 1 -2 -8 -13 -12 -4 5 11 12 10 9 12 14 14 13 12 13 13 11 11 14 18 20 19 16 14 11 9 7 5 4 5 6 6 5 3 1 -1 -2 -3 -3 -5 -6 -8 -10 -11 -11 -10 -9 -7 -6 -7 -8 -8 -7 -5 -3 -1 -1 -1 -1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -384 -384 -384 -512 -640 -640 -512 -384 -256 -256 -384 -384 -256 0 256 384 640 768 1024 1152 1152 896 640 640 896 1152 1280 1152 1152 896 768 768 1024 1536 1920 2176 2432 2432 2432 2176 2048 1664 768 -256 -896 -640 0 640 1152 1664 2048 1920 1152 -128 -1408 -2560 -2560 -1280 384 768 -896 -3072 -3712 -2560 -1024 -1024 -2560 -3840 -3840 -2944 -2304 -2176 -1664 -128 1792 3072 3072 2432 2304 2816 3712 3712 3328 2944 3072 3328 3456 3200 2688 2560 3072 4352 5376 4608 2048 -256 -256 1920 3328 2432 1024 1152 2176 2560 2816 4224 5760 5120 2432 1408 3712 5760 4096 128 -1408 640 2560 1280 -256 768 3072 2432 -1280 -3328 -1152 2816 3840 384 -4480 -7168 -5760 -2176 -256 -1408 -3456 -2048 2816 6784 6272 2560 -1280 -2688 -2432 -2048 -2560 -3712 -4864 -5248 -4864 -3968 -4480 -7936 -13184 -18048 -20736 -20480 -18432 -16768 -17408 -19968 -21120 -18944 -14720 -10496 -6016 -384 4864 7168 6400 5888 8704 13696 16384 14208 10368 8832 12032 17024 19712 18560 16256 15360 16384 18048 18432 17024 13952 10624 8192 8064 9088 8448 5248 1280 -1408 -2304 -2560 -3712 -6144 -9600 -14080 -18176 -19584 -18304 -15872 -15232 -16384 -18304 -18816 -16768 -13440 -10624 -10112 -11520 -12160 -9472 -4992 -2432 -3200 -5760 -6656 -4608 -768 2944 4224 4224 4352 5120 5376 4224 3328 4736 7680 8960 7552 7040 9856 13824 14208 10624 7680 8320 10112 9600 6656 3968 2304 768 -128 384 768 -1408 -6528 -9984 -9216 -6400 -5888 -7552 -8448 -7296 -6272 -7936 -11392 -12800 -10368 -5120 -1152 -1024 -3840 -5376 -3072 1152 2944 1152 -768 896 5248 7936 7168 4992 3840 3584 3072 2048 1792 2048 1792 128 -2048 -3712 -3968 -2944 -1024 384 512 -896 -2560 -3456 -3584 -3456 -3456 -3456 -3200 -2816 -2816 -2944 -3200 -3328 -2816 -1792 -512 128 -256 -1024 -1664 -1536 -512 640 1408 1536 1280 1152 1536 1792 1792 1664 1536 1664 1664 1408 1408 1792 2304 2560 2432 2048 1792 1408 1152 896 640 512 640 768 768 640 384 128 -128 -256 -384 -384 -640 -768 -1024 -1280 
-1408 -1408 -1280 -1152 -896 -768 -896 -1024 -1024 -896 -640 -384 -128 -128 -128 -128 0 128 128 0 0 0 0 0 0 0 0 0 0 0 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -1 -1 0 1 1 1 2 3 5 4 3 3 5 7 9 9 8 8 9 11 12 11 10 8 6 6 8 7 5 1 -2 -3 -3 -5 -8 -13 -19 -25 -28 -27 -24 -24 -27 -31 -33 -30 -25 -21 -20 -24 -26 -21 -12 -6 -8 -14 -17 -12 -2 7 12 12 12 15 16 13 11 15 26 31 27 26 37 54 57 44 32 36 45 43 31 19 11 4 -1 2 4 -8 -36 -57 -54 -38 -36 -48 -54 -48 -42 -55 -81 -92 -76 -39 -9 -8 -31 -45 -26 10 27 11 -8 9 51 79 73 52 42 40 34 24 21 25 22 2 -26 -48 -53 -40 -15 6 8 -13 -40 -54 -57 -58 -60 -60 -57 -53 -54 -58 -64 -68 -60 -37 -11 4 -4 -25 -40 -37 -14 17 41 46 39 38 49 61 64 60 58 63 65 59 61 79 105 120 118 106 94 82 68 52 40 38 48 58 61 53 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -181 -200 -197 -174 -150 -148 -175 -214 -236 -219 -160 -87 -42 -39 -46 -25 34 88 101 79 60 69 109 154 184 192 180 174 191 225 246 240 213 184 169 170 184 198 189 152 107 79 70 58 35 9 -9 -27 -52 -73 -74 -61 -47 -50 -61 -66 -61 -53 -55 -69 -81 -76 -51 -35 -43 -68 -80 -67 -40 -17 -2 6 2 -19 -43 -46 -17 22 41 28 6 3 20 37 32 16 5 6 6 -7 -31 -51 -61 -63 -65 -70 -78 -84 -85 -79 -68 -61 -55 -46 -32 -22 -19 -21 -20 -13 -3 6 14 16 15 11 10 13 16 18 18 18 17 15 15 18 19 13 5 3 8 16 18 16 14 13 14 14 14 15 17 17 15 14 14 14 14 12 8 4 2 1 -1 -3 -5 -6 -6 -7 -10 -11 -10 -8 -6 -6 -7 -6 -5 -5 -5 -5 -4 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -256 -256 -256 -128 -128 0 128 128 128 256 384 640 512 384 384 640 896 1152 1152 1024 1024 1152 1408 1536 1408 1280 1024 768 768 1024 896 640 128 -256 -384 -384 -640 -1024 -1664 -2432 -3200 -3584 -3456 -3072 -3072 -3456 -3968 -4224 -3840 -3200 -2688 -2560 -3072 -3328 -2688 -1536 -768 -1024 -1792 -2176 -1536 -256 896 1536 1536 1536 1920 2048 1664 1408 1920 3328 3968 3456 3328 4736 6912 7296 5632 4096 4608 5760 5504 3968 2432 1408 512 -128 256 512 -1024 -4608 -7296 -6912 -4864 -4608 -6144 -6912 -6144 -5376 -7040 -10368 -11776 -9728 -4992 -1152 -1024 -3968 -5760 -3328 1280 3456 1408 -1024 1152 6528 10112 9344 6656 5376 5120 4352 3072 2688 3200 2816 256 -3328 -6144 -6784 -5120 -1920 768 1024 -1664 -5120 -6912 -7296 -7424 -7680 -7680 -7296 -6784 -6912 -7424 -8192 -8704 -7680 -4736 -1408 512 -512 -3200 -5120 -4736 -1792 2176 5248 5888 4992 4864 6272 7808 8192 7680 7424 8064 8320 7552 7808 10112 13440 15360 15104 13568 12032 10496 8704 6656 5120 4864 6144 7424 7808 6784 4608 1920 -640 -2432 -3584 -4992 -7424 -11008 -15232 -19584 -23168 -25600 -25216 -22272 -19200 -18944 -22400 -27392 -30208 -28032 -20480 -11136 -5376 -4992 -5888 -3200 4352 11264 12928 10112 7680 8832 13952 19712 23552 24576 23040 22272 24448 28800 31488 30720 27264 23552 21632 21760 23552 25344 24192 19456 13696 10112 8960 7424 4480 1152 -1152 -3456 -6656 -9344 -9472 -7808 -6016 -6400 -7808 -8448 -7808 -6784 -7040 -8832 -10368 -9728 -6528 -4480 -5504 -8704 -10240 -8576 -5120 -2176 -256 768 256 -2432 -5504 -5888 -2176 2816 5248 3584 768 384 2560 4736 4096 2048 640 768 768 -896 -3968 -6528 -7808 -8064 -8320 -8960 -9984 -10752 -10880 -10112 -8704 -7808 -7040 -5888 -4096 -2816 -2432 -2688 -2560 -1664 -384 768 1792 2048 1920 1408 1280 1664 2048 2304 2304 2304 2176 1920 1920 2304 2432 1664 640 384 1024 2048 2304 2048 1792 1664 1792 1792 1792 1920 2176 2176 1920 1792 1792 1792 1792 1536 1024 512 256 128 -128 -384 -640 -768 -768 -896 -1280 -1408 -1280 -1024 -768 -768 -896 -768 -640 -640 -640 -640 -512 -384 -384 -384 -256 -256 -128 -128 -128 -128 -128 -128 -128 
0 0 0 0 0 0 0 0 +7 +0 -1 -1 -1 0 -1 -1 -1 -1 -1 0 0 0 0 0 0 1 1 1 1 1 1 1 2 2 4 5 5 5 5 4 4 3 2 2 3 4 5 4 3 1 -1 -3 -4 -5 -8 -11 -16 -22 -27 -30 -31 -29 -26 -26 -32 -40 -46 -44 -34 -19 -10 -9 -11 -6 8 22 27 22 17 20 32 47 58 63 60 60 68 82 92 93 84 75 71 73 81 90 88 72 52 39 35 30 19 5 -5 -16 -31 -44 -46 -39 -31 -34 -42 -47 -44 -39 -41 -53 -64 -61 -42 -29 -37 -60 -72 -61 -37 -17 -2 6 2 -20 -47 -51 -19 25 48 34 8 4 27 49 44 23 7 9 10 -11 -48 -82 -99 -106 -111 -123 -140 -156 -161 -153 -136 -125 -114 -98 -71 -50 -44 -50 -49 -33 -7 18 38 46 43 35 33 43 54 60 64 66 63 58 61 74 79 57 24 13 39 77 93 85 74 72 80 86 92 102 114 118 114 107 109 117 121 107 76 45 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -128 -142 -143 -135 -135 -146 -147 -134 -118 -112 -114 -109 -87 -55 -31 -26 -36 -41 -30 -7 11 18 29 56 87 92 61 25 19 47 81 97 100 107 118 124 114 104 105 117 127 118 91 55 26 10 8 19 35 39 22 -14 -46 -54 -37 -14 -3 -7 -12 -11 -10 -19 -39 -58 -59 -45 -31 -28 -34 -38 -35 -33 -31 -28 -22 -19 -22 -26 -27 -23 -19 -19 -26 -40 -59 -72 -70 -57 -49 -55 -64 -63 -52 -45 -47 -48 -42 -29 -13 4 19 25 22 22 33 50 58 56 56 64 68 57 38 27 27 31 31 29 29 26 19 15 18 24 27 25 23 19 12 7 8 14 13 3 -3 1 9 7 -4 -11 -5 2 0 -8 -11 -7 -4 -5 -8 -6 -3 -4 -7 -7 -4 -2 -3 -5 -5 -4 -3 -3 -3 -2 -1 0 0 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 +0 -128 -128 -128 0 -128 -128 -128 -128 -128 0 0 0 0 0 0 128 128 128 128 128 128 128 256 256 512 640 640 640 640 512 512 384 256 256 384 512 640 512 384 128 -128 -384 -512 -640 -1024 -1408 -2048 -2816 -3456 -3840 -3968 -3712 -3328 -3328 -4096 -5120 -5888 -5632 -4352 -2432 -1280 -1152 -1408 -768 1024 2816 3456 2816 2176 2560 4096 6016 7424 8064 7680 7680 8704 10496 11776 11904 10752 9600 9088 9344 10368 11520 11264 9216 6656 4992 4480 3840 2432 640 -640 -2048 -3968 -5632 -5888 -4992 -3968 -4352 -5376 -6016 -5632 -4992 -5248 -6784 -8192 -7808 -5376 -3712 -4736 -7680 -9216 -7808 -4736 -2176 -256 768 256 -2560 -6016 -6528 -2432 3200 6144 4352 1024 512 3456 6272 5632 2944 896 1152 1280 -1408 -6144 -10496 -12672 -13568 -14208 -15744 -17920 -19968 -20608 -19584 -17408 -16000 -14592 -12544 -9088 -6400 -5632 -6400 -6272 -4224 -896 2304 4864 5888 5504 4480 4224 5504 6912 7680 8192 8448 8064 7424 7808 9472 10112 7296 3072 1664 4992 9856 11904 10880 9472 9216 10240 11008 11776 13056 14592 15104 14592 13696 13952 14976 15488 13696 9728 5760 3712 2304 -256 -4480 -7808 -8576 -8960 -12672 -18432 -22272 -20864 -17024 -15232 -16384 -18176 -18304 -17280 -17280 -18688 -18816 -17152 -15104 -14336 -14592 -13952 -11136 -7040 -3968 -3328 -4608 -5248 -3840 -896 1408 2304 3712 7168 11136 11776 7808 3200 2432 6016 10368 12416 12800 13696 15104 15872 14592 13312 13440 14976 16256 15104 11648 7040 3328 1280 1024 2432 4480 4992 2816 -1792 -5888 -6912 -4736 -1792 -384 -896 -1536 -1408 -1280 -2432 -4992 -7424 -7552 -5760 -3968 -3584 -4352 -4864 -4480 -4224 -3968 -3584 -2816 -2432 -2816 -3328 -3456 -2944 -2432 -2432 -3328 -5120 -7552 -9216 -8960 -7296 -6272 -7040 -8192 -8064 -6656 -5760 -6016 -6144 -5376 -3712 -1664 512 2432 3200 2816 2816 4224 6400 7424 7168 7168 8192 8704 7296 4864 3456 3456 3968 3968 3712 3712 3328 2432 1920 2304 3072 3456 3200 2944 2432 1536 896 1024 1792 1664 384 -384 128 1152 896 -512 -1408 -640 256 0 -1024 -1408 -896 -512 -640 -1024 -768 -384 -512 -896 -896 -512 -256 -384 -640 -640 -512 -384 -384 -384 -256 -128 0 0 -128 0 0 128 0 0 0 0 0 0 0 0 0 0 0 0 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 2 2 2 2 2 3 3 4 5 6 6 6 6 7 8 9 8 6 4 2 1 -1 -4 -7 -8 -9 -13 -20 -25 
-24 -20 -19 -21 -24 -25 -25 -26 -29 -30 -28 -26 -25 -26 -26 -22 -14 -8 -7 -10 -12 -9 -2 3 5 9 19 30 33 22 9 7 18 33 40 43 47 53 57 54 50 52 60 67 63 50 31 15 6 5 12 22 26 15 -9 -32 -39 -28 -11 -2 -6 -10 -9 -8 -17 -35 -52 -55 -42 -30 -28 -34 -39 -38 -36 -34 -32 -25 -22 -27 -32 -34 -29 -25 -25 -36 -57 -86 -108 -106 -89 -78 -90 -108 -109 -92 -81 -86 -90 -81 -57 -25 8 41 56 50 51 79 123 147 146 148 174 191 165 114 82 86 101 103 101 103 96 74 59 72 101 116 113 103 88 60 36 45 75 74 22 -17 9 60 48 -27 -74 -35 20 6 -61 -92 -61 -32 -50 -76 -62 -33 -42 -83 -95 -59 -25 -36 -71 -86 -73 -62 -68 -67 -43 -8 6 0 -5 7 34 53 41 14 8 35 64 58 30 19 46 70 52 4 -29 -33 -36 -62 -80 -57 0 37 20 -36 -87 -108 -105 -97 -86 -69 -47 -36 -45 -64 -68 -48 -21 -10 -16 -10 15 45 49 26 2 -6 1 8 8 7 8 14 19 16 4 -7 -4 17 39 42 29 20 24 32 33 25 19 18 15 12 14 22 27 16 3 -2 4 7 3 0 8 20 20 8 -1 4 14 18 14 7 4 4 2 1 -2 -8 -15 -20 -19 -15 -14 -17 -19 -18 -17 -18 -21 -24 -24 -22 -21 -20 -19 -15 -11 -11 -12 -12 -10 -8 -8 -9 -9 -9 -11 -13 -9 -1 4 3 1 3 7 7 4 2 5 10 12 9 7 8 9 9 7 5 4 3 3 2 1 0 -1 -2 -1 0 0 0 -1 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 128 128 0 0 0 0 256 256 256 256 256 384 384 512 640 768 768 768 768 896 1024 1152 1024 768 512 256 128 -128 -512 -896 -1024 -1152 -1664 -2560 -3200 -3072 -2560 -2432 -2688 -3072 -3200 -3200 -3328 -3712 -3840 -3584 -3328 -3200 -3328 -3328 -2816 -1792 -1024 -896 -1280 -1536 -1152 -256 384 640 1152 2432 3840 4224 2816 1152 896 2304 4224 5120 5504 6016 6784 7296 6912 6400 6656 7680 8576 8064 6400 3968 1920 768 640 1536 2816 3328 1920 -1152 -4096 -4992 -3584 -1408 -256 -768 -1280 -1152 -1024 -2176 -4480 -6656 -7040 -5376 -3840 -3584 -4352 -4992 -4864 -4608 -4352 -4096 -3200 -2816 -3456 -4096 -4352 -3712 -3200 -3200 -4608 -7296 -11008 -13824 -13568 -11392 -9984 -11520 -13824 -13952 -11776 -10368 -11008 -11520 -10368 -7296 -3200 1024 5248 7168 6400 6528 10112 15744 18816 18688 18944 22272 24448 21120 14592 10496 11008 12928 13184 12928 13184 12288 9472 7552 9216 12928 14848 14464 13184 11264 7680 4608 5760 9600 9472 2816 -2176 1152 7680 6144 -3456 -9472 -4480 2560 768 -7808 -11776 -7808 -4096 -6400 -9728 -7936 -4224 -5376 -10624 -12160 -7552 -3200 -4608 -9088 -11008 -9344 -7936 -8704 -8576 -5504 -1024 768 0 -640 896 4352 6784 5248 1792 1024 4480 8192 7424 3840 2432 5888 8960 6656 512 -3712 -4224 -4608 -7936 -10240 -7296 0 4736 2560 -4608 -11136 -13824 -13440 -12416 -11008 -8832 -6016 -4608 -5760 -8192 -8704 -6144 -2688 -1280 -2048 -1280 1920 5760 6272 3328 256 -768 128 1024 1024 896 1024 1792 2432 2048 512 -896 -512 2176 4992 5376 3712 2560 3072 4096 4224 3200 2432 2304 1920 1536 1792 2816 3456 2048 384 -256 512 896 384 0 1024 2560 2560 1024 -128 512 1792 2304 1792 896 512 512 256 128 -256 -1024 -1920 -2560 -2432 -1920 -1792 -2176 -2432 -2304 -2176 -2304 -2688 -3072 -3072 -2816 -2688 -2560 -2432 -1920 -1408 -1408 -1536 -1536 -1280 -1024 -1024 -1152 -1152 -1152 -1408 -1664 -1152 -128 512 384 128 384 896 896 512 256 640 1280 1536 1152 896 1024 1152 1152 896 640 512 384 384 256 128 0 -128 -256 -128 0 0 0 -128 -256 -256 -256 -256 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 +7 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 2 2 2 1 1 1 2 2 0 -1 0 3 2 -2 -5 -3 1 0 -5 -8 -6 -3 -5 -8 -7 -4 -5 -10 -12 -8 -4 -5 -11 -13 -12 -10 -12 -12 -8 -2 1 0 -2 1 7 12 9 3 2 9 17 16 8 5 13 21 16 1 -10 -12 -13 -23 -31 -22 0 15 8 -16 -38 -49 -49 -46 -42 -35 -24 -19 -24 -36 -39 -28 -13 -6 -10 -7 10 30 33 18 1 -4 1 6 6 5 7 12 16 14 4 -7 -4 17 39 42 30 21 26 36 37 
29 22 22 19 16 19 29 36 23 4 -3 6 11 4 0 13 34 35 15 -1 7 27 36 28 16 9 8 5 2 -4 -18 -37 -49 -47 -39 -38 -46 -53 -52 -51 -55 -68 -78 -82 -77 -74 -74 -70 -57 -45 -43 -52 -54 -45 -35 -38 -43 -44 -48 -63 -75 -54 -5 30 26 9 22 56 64 39 21 52 103 124 105 89 103 123 120 97 76 67 64 60 55 38 9 -22 -35 -24 0 11 3 -20 -48 -72 -86 -91 -92 -91 -91 -96 -102 -103 -92 -74 -59 -52 -42 -24 0 19 31 45 61 78 93 110 125 122 97 64 42 43 55 59 50 36 31 33 32 23 16 19 23 8 -22 -37 -17 15 15 -19 -47 -36 -6 -3 -38 -70 -67 -42 -32 -40 -47 -35 -19 -13 -16 -10 6 17 13 4 12 37 64 81 80 71 62 58 56 50 42 38 38 39 34 25 20 21 20 12 2 -3 -1 -2 -9 -16 -16 -11 -7 -7 -8 -9 -8 -5 1 6 5 0 -5 -4 -1 -1 -3 -4 -1 -1 -7 -17 -26 -32 -34 -33 -29 -28 -31 -35 -38 -38 -34 -29 -23 -20 -21 -22 -21 -19 -17 -17 -16 -13 -9 -6 -5 -4 -3 -2 -2 -2 -2 -3 -3 0 3 4 4 4 4 5 5 4 4 4 4 4 3 2 2 2 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 256 512 512 512 512 256 256 256 512 512 0 -256 0 768 512 -512 -1280 -768 256 0 -1280 -2048 -1536 -768 -1280 -2048 -1792 -1024 -1280 -2560 -3072 -2048 -1024 -1280 -2816 -3328 -3072 -2560 -3072 -3072 -2048 -512 256 0 -512 256 1792 3072 2304 768 512 2304 4352 4096 2048 1280 3328 5376 4096 256 -2560 -3072 -3328 -5888 -7936 -5632 0 3840 2048 -4096 -9728 -12544 -12544 -11776 -10752 -8960 -6144 -4864 -6144 -9216 -9984 -7168 -3328 -1536 -2560 -1792 2560 7680 8448 4608 256 -1024 256 1536 1536 1280 1792 3072 4096 3584 1024 -1792 -1024 4352 9984 10752 7680 5376 6656 9216 9472 7424 5632 5632 4864 4096 4864 7424 9216 5888 1024 -768 1536 2816 1024 0 3328 8704 8960 3840 -256 1792 6912 9216 7168 4096 2304 2048 1280 512 -1024 -4608 -9472 -12544 -12032 -9984 -9728 -11776 -13568 -13312 -13056 -14080 -17408 -19968 -20992 -19712 -18944 -18944 -17920 -14592 -11520 -11008 -13312 -13824 -11520 -8960 -9728 -11008 -11264 -12288 -16128 -19200 -13824 -1280 7680 6656 2304 5632 14336 16384 9984 5376 13312 26368 31744 26880 22784 26368 31488 30720 24832 19456 17152 16384 15360 14080 9728 2304 -5632 -8960 -6144 0 2816 768 -5120 -12288 -18432 -22016 -23296 -23552 -23296 -23296 -24576 -26112 -26368 -23552 -18944 -15104 -13312 -10752 -6144 0 4864 7936 11520 15616 19968 23808 28160 32000 31232 24832 16384 10752 11008 14080 15104 12800 9216 7936 8448 8192 5888 4096 4864 5888 2048 -5632 -9472 -4352 3840 3840 -4864 -12032 -9216 -1536 -768 -9728 -17920 -17152 -10752 -8192 -10240 -12032 -8960 -4864 -3328 -4096 -2560 1536 4352 3328 1024 3072 9472 16384 20736 20480 18176 15872 14848 14336 12800 10752 9728 9728 9984 8704 6400 5120 5376 5120 3072 512 -768 -256 -512 -2304 -4096 -4096 -2816 -1792 -1792 -2048 -2304 -2048 -1280 256 1536 1280 0 -1280 -1024 -256 -256 -768 -1024 -256 -256 -1792 -4352 -6656 -8192 -8704 -8448 -7424 -7168 -7936 -8960 -9728 -9728 -8704 -7424 -5888 -5120 -5376 -5632 -5376 -4864 -4352 -4352 -4096 -3328 -2304 -1536 -1280 -1024 -768 -512 -512 -512 -512 -768 -768 0 768 1024 1024 1024 1024 1280 1280 1024 1024 1024 1024 1024 768 512 512 512 256 256 256 256 256 0 0 0 0 0 0 0 0 0 +8 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -2 -2 -2 -2 -3 -4 -3 -1 1 1 0 1 3 4 3 1 4 9 11 10 9 11 13 14 11 9 9 8 8 8 6 1 -4 -7 -5 0 2 0 -5 -11 -16 -20 -22 -23 -23 -24 -26 -29 -30 -27 -23 -19 -17 -14 -8 0 6 11 17 23 31 38 46 53 54 44 29 20 21 27 30 26 19 17 19 18 13 9 12 15 5 -15 -25 -12 11 11 -14 -36 -28 -5 -3 -32 -60 -59 -38 -29 -38 -46 -35 -19 -14 -17 -11 7 19 15 5 15 46 82 105 107 96 87 84 82 76 65 60 62 65 58 43 36 38 38 24 4 -5 -1 -4 -18 -35 -36 -24 -16 -17 -20 -21 -20 -11 4 18 16 0 -14 -12 -2 -3 -11 -12 -2 -3 -25 -66 -108 -139 
[Regenerated testdata hunk elided: several thousand whitespace-separated sample
values, hard-wrapped in extraction. The removed (`-`) lines carried the previous
single-array fixtures; the added (`+`) lines store each fixture as a triple — an
integer sample array, the same array left-shifted toward the full int16 range,
and the shift count on a line of its own (shift counts appearing in this hunk:
7, 2, 1, 3, 4). For every triple in the hunk, scaled[i] == input[i] << shift,
and the scaled values stay within int16 bounds.]
-170 -176 -211 -246 -268 -305 -385 -486 -560 -599 -647 -739 -867 -999 -1129 -1274 -1435 -1582 -1699 -1795 -1880 -1928 -1907 -1816 -1681 -1512 -1296 -1025 -722 -408 -60 367 868 1379 1844 2271 2703 3155 3587 3952 4250 4502 4712 4860 4931 4940 4897 4792 4597 4289 3865 3341 2763 2174 1589 976 302 -416 -1099 -1673 -2141 -2567 -3001 -3434 -3817 -4132 -4379 -4543 -4605 -4563 -4455 -4286 -4030 -3659 -3200 -2699 -2167 -1582 -952 -325 273 871 1489 2083 2572 2928 3185 3370 3476 3501 3478 3416 3289 3079 2833 2613 2403 2124 1738 1306 922 616 348 78 -197 -457 -686 -868 -985 -1027 -999 -921 -820 -713 -597 -451 -255 -37 132 188 142 70 25 -8 -78 -186 -271 -303 -334 -433 -597 -744 -834 -924 -1085 -1294 -1466 -1572 -1670 -1815 -1984 -2136 -2292 -2512 -2784 -3022 -3177 -3312 -3541 -3888 -4273 -4611 -4890 -5153 -5410 -5601 -5657 -5560 -5344 -5045 -4648 -4104 -3390 -2542 -1622 -680 274 1243 2225 3193 4090 4844 5448 5974 6499 7002 7378 7569 7629 7641 7593 7406 7042 6566 6048 5468 4744 3867 2926 2026 1200 404 -404 -1204 -1921 -2479 -2862 -3130 -3355 -3551 -3656 -3613 -3447 -3250 -3091 -2949 -2746 -2440 -2066 -1683 -1307 -905 -469 -31 363 714 1062 1420 1746 1976 2086 2111 2095 2041 1925 1735 1494 1242 1005 780 560 352 179 57 -17 -64 -103 -143 -172 -176 -157 -142 -155 -185 -200 -176 -131 -90 -57 -13 47 108 157 199 242 268 252 193 116 43 -27 -105 -190 -268 -329 -374 -408 -434 -452 -460 -454 -437 -421 -417 -420 -414 -396 -380 -391 -424 -457 -467 -458 -449 -458 -483 -512 -541 -566 -590 -606 -613 -604 -579 -544 -496 -433 -357 -270 -182 -100 -28 39 102 159 204 234 251 259 262 261 252 237 217 195 175 154 132 109 89 71 54 38 24 14 7 2 -2 -3 -4 -4 -3 -2 -1 0 +0 -8 -12 -12 -20 -24 -28 -28 -24 -16 -4 8 28 56 84 112 144 180 220 264 296 328 360 396 432 460 480 500 508 488 436 384 336 284 192 64 -68 -176 -276 -372 -440 -480 -528 -612 -704 -736 -708 -708 -804 -936 -1004 -1000 -1004 -1044 -1060 -1004 -936 -936 -964 -916 -796 -716 -744 -792 -756 -680 -704 -844 -984 -1072 -1220 -1540 -1944 -2240 -2396 -2588 -2956 -3468 -3996 -4516 -5096 -5740 -6328 -6796 -7180 -7520 -7712 -7628 -7264 -6724 -6048 -5184 -4100 -2888 -1632 -240 1468 3472 5516 7376 9084 10812 12620 14348 15808 17000 18008 18848 19440 19724 19760 19588 19168 18388 17156 15460 13364 11052 8696 6356 3904 1208 -1664 -4396 -6692 -8564 -10268 -12004 -13736 -15268 -16528 -17516 -18172 -18420 -18252 -17820 -17144 -16120 -14636 -12800 -10796 -8668 -6328 -3808 -1300 1092 3484 5956 8332 10288 11712 12740 13480 13904 14004 13912 13664 13156 12316 11332 10452 9612 8496 6952 5224 3688 2464 1392 312 -788 -1828 -2744 -3472 -3940 -4108 -3996 -3684 -3280 -2852 -2388 -1804 -1020 -148 528 752 568 280 100 -32 -312 -744 -1084 -1212 -1336 -1732 -2388 -2976 -3336 -3696 -4340 -5176 -5864 -6288 -6680 -7260 -7936 -8544 -9168 -10048 -11136 -12088 -12708 -13248 -14164 -15552 -17092 -18444 -19560 -20612 -21640 -22404 -22628 -22240 -21376 -20180 -18592 -16416 -13560 -10168 -6488 -2720 1096 4972 8900 12772 16360 19376 21792 23896 25996 28008 29512 30276 30516 30564 30372 29624 28168 26264 24192 21872 18976 15468 11704 8104 4800 1616 -1616 -4816 -7684 -9916 -11448 -12520 -13420 -14204 -14624 -14452 -13788 -13000 -12364 -11796 -10984 -9760 -8264 -6732 -5228 -3620 -1876 -124 1452 2856 4248 5680 6984 7904 8344 8444 8380 8164 7700 6940 5976 4968 4020 3120 2240 1408 716 228 -68 -256 -412 -572 -688 -704 -628 -568 -620 -740 -800 -704 -524 -360 -228 -52 188 432 628 796 968 1072 1008 772 464 172 -108 -420 -760 -1072 -1316 -1496 -1632 -1736 -1808 -1840 -1816 -1748 -1684 -1668 -1680 -1656 -1584 -1520 -1564 -1696 
-1828 -1868 -1832 -1796 -1832 -1932 -2048 -2164 -2264 -2360 -2424 -2452 -2416 -2316 -2176 -1984 -1732 -1428 -1080 -728 -400 -112 156 408 636 816 936 1004 1036 1048 1044 1008 948 868 780 700 616 528 436 356 284 216 152 96 56 28 8 -8 -12 -16 -16 -12 -8 -4 0 +2 +0 0 1 1 1 1 1 1 0 -2 -4 -6 -9 -12 -14 -16 -16 -16 -16 -15 -12 -8 -2 4 6 5 3 1 -1 -5 -11 -17 -20 -23 -31 -46 -60 -71 -82 -102 -127 -151 -169 -188 -213 -244 -273 -306 -349 -403 -454 -496 -538 -597 -679 -773 -865 -949 -1036 -1124 -1203 -1257 -1275 -1266 -1233 -1172 -1067 -909 -702 -462 -200 82 386 710 1048 1381 1682 1945 2191 2450 2709 2932 3088 3195 3284 3350 3350 3267 3124 2952 2736 2432 2032 1576 1118 678 234 -240 -732 -1196 -1580 -1868 -2091 -2296 -2486 -2621 -2650 -2588 -2497 -2430 -2373 -2261 -2056 -1781 -1484 -1179 -836 -443 -30 359 722 1099 1504 1892 2189 2366 2449 2486 2479 2392 2207 1944 1654 1370 1088 800 515 269 87 -26 -102 -168 -240 -297 -310 -284 -264 -294 -361 -399 -360 -274 -193 -125 -29 110 259 385 502 626 712 688 541 334 129 -80 -329 -610 -886 -1120 -1313 -1477 -1617 -1737 -1824 -1859 -1845 -1833 -1874 -1954 -1993 -1967 -1958 -2081 -2343 -2613 -2771 -2819 -2877 -3047 -3336 -3688 -4056 -4428 -4807 -5165 -5449 -5613 -5644 -5548 -5310 -4884 -4230 -3370 -2390 -1389 -402 614 1699 2815 3856 4724 5416 6002 6546 7026 7364 7517 7527 7465 7356 7161 6835 6390 5875 5306 4641 3844 2942 2033 1197 440 -286 -1015 -1718 -2309 -2724 -2962 -3084 -3151 -3170 -3096 -2900 -2616 -2292 -1955 -1596 -1227 -880 -572 -274 64 443 818 1137 1383 1584 1772 1959 2114 2169 2067 1821 1517 1236 991 737 443 131 -155 -387 -544 -608 -573 -469 -346 -226 -96 63 246 415 551 643 695 717 728 745 757 734 669 588 523 486 473 478 487 464 369 199 7 -157 -295 -462 -685 -926 -1115 -1220 -1265 -1285 -1284 -1246 -1169 -1076 -988 -904 -805 -694 -598 -552 -563 -604 -639 -656 -669 -701 -760 -841 -934 -1031 -1133 -1227 -1300 -1349 -1368 -1357 -1306 -1214 -1083 -925 -747 -557 -361 -160 40 236 422 586 719 816 881 921 942 950 941 908 849 778 708 642 576 497 409 324 248 180 116 55 3 -39 -71 -98 -119 -131 -133 -125 -113 -100 -89 -75 -59 -41 -26 -14 -7 -1 4 8 10 11 10 9 8 7 6 4 2 1 1 0 0 +0 0 4 4 4 4 4 4 0 -8 -16 -24 -36 -48 -56 -64 -64 -64 -64 -60 -48 -32 -8 16 24 20 12 4 -4 -20 -44 -68 -80 -92 -124 -184 -240 -284 -328 -408 -508 -604 -676 -752 -852 -976 -1092 -1224 -1396 -1612 -1816 -1984 -2152 -2388 -2716 -3092 -3460 -3796 -4144 -4496 -4812 -5028 -5100 -5064 -4932 -4688 -4268 -3636 -2808 -1848 -800 328 1544 2840 4192 5524 6728 7780 8764 9800 10836 11728 12352 12780 13136 13400 13400 13068 12496 11808 10944 9728 8128 6304 4472 2712 936 -960 -2928 -4784 -6320 -7472 -8364 -9184 -9944 -10484 -10600 -10352 -9988 -9720 -9492 -9044 -8224 -7124 -5936 -4716 -3344 -1772 -120 1436 2888 4396 6016 7568 8756 9464 9796 9944 9916 9568 8828 7776 6616 5480 4352 3200 2060 1076 348 -104 -408 -672 -960 -1188 -1240 -1136 -1056 -1176 -1444 -1596 -1440 -1096 -772 -500 -116 440 1036 1540 2008 2504 2848 2752 2164 1336 516 -320 -1316 -2440 -3544 -4480 -5252 -5908 -6468 -6948 -7296 -7436 -7380 -7332 -7496 -7816 -7972 -7868 -7832 -8324 -9372 -10452 -11084 -11276 -11508 -12188 -13344 -14752 -16224 -17712 -19228 -20660 -21796 -22452 -22576 -22192 -21240 -19536 -16920 -13480 -9560 -5556 -1608 2456 6796 11260 15424 18896 21664 24008 26184 28104 29456 30068 30108 29860 29424 28644 27340 25560 23500 21224 18564 15376 11768 8132 4788 1760 -1144 -4060 -6872 -9236 -10896 -11848 -12336 -12604 -12680 -12384 -11600 -10464 -9168 -7820 -6384 -4908 -3520 -2288 -1096 256 1772 3272 4548 5532 6336 7088 7836 8456 8676 8268 
7284 6068 4944 3964 2948 1772 524 -620 -1548 -2176 -2432 -2292 -1876 -1384 -904 -384 252 984 1660 2204 2572 2780 2868 2912 2980 3028 2936 2676 2352 2092 1944 1892 1912 1948 1856 1476 796 28 -628 -1180 -1848 -2740 -3704 -4460 -4880 -5060 -5140 -5136 -4984 -4676 -4304 -3952 -3616 -3220 -2776 -2392 -2208 -2252 -2416 -2556 -2624 -2676 -2804 -3040 -3364 -3736 -4124 -4532 -4908 -5200 -5396 -5472 -5428 -5224 -4856 -4332 -3700 -2988 -2228 -1444 -640 160 944 1688 2344 2876 3264 3524 3684 3768 3800 3764 3632 3396 3112 2832 2568 2304 1988 1636 1296 992 720 464 220 12 -156 -284 -392 -476 -524 -532 -500 -452 -400 -356 -300 -236 -164 -104 -56 -28 -4 16 32 40 44 40 36 32 28 24 16 8 4 4 0 0 +2 +0 0 0 0 0 -1 -1 -3 -5 -7 -10 -13 -17 -21 -25 -29 -32 -36 -41 -47 -53 -57 -62 -72 -88 -105 -120 -131 -144 -162 -190 -223 -260 -301 -345 -392 -436 -473 -501 -518 -520 -501 -455 -379 -281 -171 -52 81 235 407 579 737 878 1010 1143 1270 1380 1459 1513 1550 1579 1590 1567 1512 1435 1338 1206 1029 812 578 351 132 -89 -325 -565 -781 -946 -1058 -1131 -1188 -1227 -1231 -1184 -1096 -985 -863 -723 -569 -419 -280 -138 32 232 440 627 782 917 1052 1190 1316 1382 1348 1216 1037 865 710 541 333 100 -122 -311 -448 -512 -494 -414 -312 -209 -91 61 243 420 570 681 753 795 826 864 898 892 832 748 680 648 645 666 695 679 553 305 11 -252 -486 -778 -1182 -1637 -2019 -2265 -2407 -2505 -2565 -2552 -2455 -2318 -2184 -2048 -1873 -1656 -1464 -1388 -1454 -1602 -1742 -1838 -1925 -2073 -2312 -2632 -3006 -3419 -3865 -4311 -4709 -5033 -5264 -5382 -5346 -5128 -4723 -4163 -3476 -2682 -1792 -823 215 1307 2417 3482 4434 5231 5868 6375 6794 7135 7366 7412 7244 6928 6587 6267 5885 5335 4621 3851 3110 2387 1624 822 52 -634 -1248 -1836 -2384 -2819 -3061 -3104 -3025 -2914 -2791 -2586 -2225 -1712 -1166 -706 -356 -33 339 745 1097 1334 1488 1639 1827 2014 2139 2185 2186 2162 2076 1871 1545 1158 767 372 -67 -545 -984 -1275 -1366 -1304 -1178 -1037 -843 -539 -126 311 677 948 1163 1346 1476 1517 1469 1364 1220 1040 838 640 477 368 318 326 356 355 287 163 7 -178 -407 -678 -949 -1176 -1343 -1467 -1564 -1631 -1645 -1587 -1455 -1274 -1081 -899 -725 -559 -428 -369 -392 -470 -573 -702 -878 -1112 -1390 -1686 -1977 -2244 -2462 -2609 -2682 -2699 -2676 -2608 -2474 -2262 -1991 -1684 -1354 -991 -591 -182 203 557 904 1255 1571 1801 1938 2022 2092 2141 2131 2033 1871 1694 1530 1378 1215 1033 842 659 491 336 193 62 -57 -162 -256 -334 -388 -413 -412 -397 -379 -362 -334 -281 -205 -126 -66 -30 -2 33 76 112 132 136 136 140 143 139 128 115 104 91 72 47 26 14 6 -4 -15 -24 -26 -23 -18 -15 -12 -8 -3 2 5 6 6 6 5 4 2 1 0 0 0 +0 0 0 0 0 -4 -4 -12 -20 -28 -40 -52 -68 -84 -100 -116 -128 -144 -164 -188 -212 -228 -248 -288 -352 -420 -480 -524 -576 -648 -760 -892 -1040 -1204 -1380 -1568 -1744 -1892 -2004 -2072 -2080 -2004 -1820 -1516 -1124 -684 -208 324 940 1628 2316 2948 3512 4040 4572 5080 5520 5836 6052 6200 6316 6360 6268 6048 5740 5352 4824 4116 3248 2312 1404 528 -356 -1300 -2260 -3124 -3784 -4232 -4524 -4752 -4908 -4924 -4736 -4384 -3940 -3452 -2892 -2276 -1676 -1120 -552 128 928 1760 2508 3128 3668 4208 4760 5264 5528 5392 4864 4148 3460 2840 2164 1332 400 -488 -1244 -1792 -2048 -1976 -1656 -1248 -836 -364 244 972 1680 2280 2724 3012 3180 3304 3456 3592 3568 3328 2992 2720 2592 2580 2664 2780 2716 2212 1220 44 -1008 -1944 -3112 -4728 -6548 -8076 -9060 -9628 -10020 -10260 -10208 -9820 -9272 -8736 -8192 -7492 -6624 -5856 -5552 -5816 -6408 -6968 -7352 -7700 -8292 -9248 -10528 -12024 -13676 -15460 -17244 -18836 -20132 -21056 -21528 -21384 -20512 -18892 -16652 -13904 -10728 -7168 -3292 860 
5228 9668 13928 17736 20924 23472 25500 27176 28540 29464 29648 28976 27712 26348 25068 23540 21340 18484 15404 12440 9548 6496 3288 208 -2536 -4992 -7344 -9536 -11276 -12244 -12416 -12100 -11656 -11164 -10344 -8900 -6848 -4664 -2824 -1424 -132 1356 2980 4388 5336 5952 6556 7308 8056 8556 8740 8744 8648 8304 7484 6180 4632 3068 1488 -268 -2180 -3936 -5100 -5464 -5216 -4712 -4148 -3372 -2156 -504 1244 2708 3792 4652 5384 5904 6068 5876 5456 4880 4160 3352 2560 1908 1472 1272 1304 1424 1420 1148 652 28 -712 -1628 -2712 -3796 -4704 -5372 -5868 -6256 -6524 -6580 -6348 -5820 -5096 -4324 -3596 -2900 -2236 -1712 -1476 -1568 -1880 -2292 -2808 -3512 -4448 -5560 -6744 -7908 -8976 -9848 -10436 -10728 -10796 -10704 -10432 -9896 -9048 -7964 -6736 -5416 -3964 -2364 -728 812 2228 3616 5020 6284 7204 7752 8088 8368 8564 8524 8132 7484 6776 6120 5512 4860 4132 3368 2636 1964 1344 772 248 -228 -648 -1024 -1336 -1552 -1652 -1648 -1588 -1516 -1448 -1336 -1124 -820 -504 -264 -120 -8 132 304 448 528 544 544 560 572 556 512 460 416 364 288 188 104 56 24 -16 -60 -96 -104 -92 -72 -60 -48 -32 -12 8 20 24 24 24 20 16 8 4 0 0 0 +2 +0 -1 -1 -2 -3 -5 -8 -11 -16 -23 -32 -40 -51 -62 -73 -82 -89 -92 -90 -83 -71 -52 -26 7 48 96 150 205 260 311 361 409 456 499 531 548 553 555 555 548 522 474 413 349 280 199 105 7 -88 -181 -276 -372 -458 -516 -542 -547 -547 -542 -520 -462 -368 -259 -162 -85 -9 85 193 293 368 423 480 551 625 683 717 738 751 741 686 582 448 304 151 -28 -235 -435 -577 -634 -621 -575 -519 -433 -284 -68 171 382 549 690 818 918 966 958 911 834 728 600 470 358 282 250 262 293 299 248 144 6 -164 -384 -655 -938 -1189 -1390 -1553 -1694 -1807 -1865 -1841 -1727 -1548 -1343 -1144 -944 -745 -583 -515 -559 -687 -858 -1076 -1378 -1787 -2286 -2840 -3413 -3969 -4460 -4843 -5103 -5264 -5347 -5341 -5198 -4876 -4399 -3817 -3150 -2366 -1447 -456 525 1478 2467 3516 4523 5332 5901 6330 6738 7100 7270 7148 6780 6324 5890 5467 4973 4365 3676 2971 2286 1620 963 320 -299 -895 -1463 -1978 -2390 -2646 -2743 -2742 -2725 -2711 -2606 -2289 -1744 -1121 -613 -285 -11 363 860 1340 1658 1806 1911 2066 2237 2312 2265 2176 2102 1976 1669 1193 726 411 191 -116 -568 -995 -1198 -1155 -1035 -966 -890 -653 -206 322 753 1052 1334 1678 1998 2110 1956 1682 1457 1294 1053 677 284 39 -43 -85 -173 -254 -253 -214 -268 -469 -730 -938 -1082 -1250 -1490 -1730 -1871 -1894 -1863 -1817 -1724 -1523 -1214 -859 -519 -203 87 325 452 421 243 -35 -389 -830 -1391 -2047 -2707 -3273 -3713 -4068 -4361 -4554 -4576 -4414 -4120 -3761 -3359 -2906 -2410 -1886 -1343 -768 -167 432 1001 1532 2026 2466 2823 3085 3267 3388 3437 3391 3246 3039 2810 2562 2264 1897 1494 1116 794 509 226 -54 -294 -460 -560 -632 -701 -754 -760 -711 -637 -570 -506 -414 -287 -149 -36 49 131 225 320 391 430 450 466 478 477 451 402 342 284 231 177 115 45 -19 -71 -110 -142 -167 -177 -172 -155 -132 -104 -68 -26 15 50 77 98 113 120 122 118 107 92 74 58 44 32 19 8 1 -1 0 0 -1 -4 -5 -6 -6 -7 -8 -8 -8 -7 -6 -5 -4 -3 -2 -1 -1 0 +0 -4 -4 -8 -12 -20 -32 -44 -64 -92 -128 -160 -204 -248 -292 -328 -356 -368 -360 -332 -284 -208 -104 28 192 384 600 820 1040 1244 1444 1636 1824 1996 2124 2192 2212 2220 2220 2192 2088 1896 1652 1396 1120 796 420 28 -352 -724 -1104 -1488 -1832 -2064 -2168 -2188 -2188 -2168 -2080 -1848 -1472 -1036 -648 -340 -36 340 772 1172 1472 1692 1920 2204 2500 2732 2868 2952 3004 2964 2744 2328 1792 1216 604 -112 -940 -1740 -2308 -2536 -2484 -2300 -2076 -1732 -1136 -272 684 1528 2196 2760 3272 3672 3864 3832 3644 3336 2912 2400 1880 1432 1128 1000 1048 1172 1196 992 576 24 -656 -1536 -2620 -3752 -4756 
-5560 -6212 -6776 -7228 -7460 -7364 -6908 -6192 -5372 -4576 -3776 -2980 -2332 -2060 -2236 -2748 -3432 -4304 -5512 -7148 -9144 -11360 -13652 -15876 -17840 -19372 -20412 -21056 -21388 -21364 -20792 -19504 -17596 -15268 -12600 -9464 -5788 -1824 2100 5912 9868 14064 18092 21328 23604 25320 26952 28400 29080 28592 27120 25296 23560 21868 19892 17460 14704 11884 9144 6480 3852 1280 -1196 -3580 -5852 -7912 -9560 -10584 -10972 -10968 -10900 -10844 -10424 -9156 -6976 -4484 -2452 -1140 -44 1452 3440 5360 6632 7224 7644 8264 8948 9248 9060 8704 8408 7904 6676 4772 2904 1644 764 -464 -2272 -3980 -4792 -4620 -4140 -3864 -3560 -2612 -824 1288 3012 4208 5336 6712 7992 8440 7824 6728 5828 5176 4212 2708 1136 156 -172 -340 -692 -1016 -1012 -856 -1072 -1876 -2920 -3752 -4328 -5000 -5960 -6920 -7484 -7576 -7452 -7268 -6896 -6092 -4856 -3436 -2076 -812 348 1300 1808 1684 972 -140 -1556 -3320 -5564 -8188 -10828 -13092 -14852 -16272 -17444 -18216 -18304 -17656 -16480 -15044 -13436 -11624 -9640 -7544 -5372 -3072 -668 1728 4004 6128 8104 9864 11292 12340 13068 13552 13748 13564 12984 12156 11240 10248 9056 7588 5976 4464 3176 2036 904 -216 -1176 -1840 -2240 -2528 -2804 -3016 -3040 -2844 -2548 -2280 -2024 -1656 -1148 -596 -144 196 524 900 1280 1564 1720 1800 1864 1912 1908 1804 1608 1368 1136 924 708 460 180 -76 -284 -440 -568 -668 -708 -688 -620 -528 -416 -272 -104 60 200 308 392 452 480 488 472 428 368 296 232 176 128 76 32 4 -4 0 0 -4 -16 -20 -24 -24 -28 -32 -32 -32 -28 -24 -20 -16 -12 -8 -4 -4 0 +2 +0 0 1 3 7 12 18 24 33 43 51 57 63 69 73 75 75 71 64 54 42 27 10 -11 -34 -59 -86 -111 -132 -146 -156 -165 -174 -177 -165 -133 -90 -52 -26 -2 35 88 144 186 211 234 264 298 321 327 326 328 320 281 208 131 77 37 -24 -118 -214 -266 -265 -246 -236 -225 -170 -55 88 214 308 402 521 638 693 661 584 520 474 397 262 113 16 -18 -37 -76 -115 -118 -102 -131 -235 -374 -493 -583 -690 -842 -1002 -1111 -1152 -1159 -1158 -1125 -1018 -831 -602 -372 -149 65 250 355 338 200 -30 -335 -732 -1255 -1890 -2557 -3163 -3671 -4114 -4513 -4821 -4957 -4892 -4673 -4364 -3986 -3530 -2995 -2399 -1748 -1023 -227 603 1429 2240 3033 3779 4430 4957 5375 5709 5934 5998 5882 5642 5347 4997 4525 3887 3140 2406 1756 1154 527 -127 -720 -1157 -1447 -1677 -1912 -2112 -2187 -2102 -1939 -1784 -1627 -1373 -977 -523 -128 186 504 895 1312 1654 1877 2029 2171 2304 2374 2325 2149 1894 1626 1372 1093 738 305 -132 -508 -824 -1111 -1357 -1508 -1525 -1438 -1283 -1054 -720 -285 187 633 1027 1374 1667 1889 2030 2083 2032 1865 1614 1345 1104 865 582 276 46 -34 0 22 -46 -185 -320 -418 -511 -648 -845 -1073 -1288 -1456 -1566 -1625 -1643 -1611 -1507 -1316 -1052 -737 -395 -67 179 277 213 40 -206 -560 -1113 -1912 -2869 -3796 -4539 -5076 -5498 -5872 -6145 -6178 -5883 -5329 -4659 -3956 -3198 -2334 -1404 -511 295 1062 1840 2564 3112 3443 3662 3910 4199 4395 4379 4188 3974 3839 3735 3509 3087 2538 1995 1516 1048 517 -64 -596 -1004 -1295 -1527 -1710 -1793 -1735 -1565 -1362 -1175 -975 -709 -375 -47 200 353 478 637 822 955 991 961 945 974 1002 965 854 725 622 541 443 317 185 75 -12 -99 -192 -264 -287 -263 -223 -187 -140 -56 58 164 233 273 313 358 386 379 348 315 288 257 214 169 135 113 93 66 39 21 10 -8 -38 -75 -105 -126 -143 -163 -180 -188 -185 -176 -167 -158 -144 -123 -100 -79 -62 -47 -35 -27 -25 -28 -34 -41 -51 -64 -77 -84 -86 -84 -83 -78 -69 -56 -43 -30 -20 -13 -7 -2 0 1 1 1 0 +0 0 4 12 28 48 72 96 132 172 204 228 252 276 292 300 300 284 256 216 168 108 40 -44 -136 -236 -344 -444 -528 -584 -624 -660 -696 -708 -660 -532 -360 -208 -104 -8 140 352 576 744 844 936 1056 1192 1284 1308 
1304 1312 1280 1124 832 524 308 148 -96 -472 -856 -1064 -1060 -984 -944 -900 -680 -220 352 856 1232 1608 2084 2552 2772 2644 2336 2080 1896 1588 1048 452 64 -72 -148 -304 -460 -472 -408 -524 -940 -1496 -1972 -2332 -2760 -3368 -4008 -4444 -4608 -4636 -4632 -4500 -4072 -3324 -2408 -1488 -596 260 1000 1420 1352 800 -120 -1340 -2928 -5020 -7560 -10228 -12652 -14684 -16456 -18052 -19284 -19828 -19568 -18692 -17456 -15944 -14120 -11980 -9596 -6992 -4092 -908 2412 5716 8960 12132 15116 17720 19828 21500 22836 23736 23992 23528 22568 21388 19988 18100 15548 12560 9624 7024 4616 2108 -508 -2880 -4628 -5788 -6708 -7648 -8448 -8748 -8408 -7756 -7136 -6508 -5492 -3908 -2092 -512 744 2016 3580 5248 6616 7508 8116 8684 9216 9496 9300 8596 7576 6504 5488 4372 2952 1220 -528 -2032 -3296 -4444 -5428 -6032 -6100 -5752 -5132 -4216 -2880 -1140 748 2532 4108 5496 6668 7556 8120 8332 8128 7460 6456 5380 4416 3460 2328 1104 184 -136 0 88 -184 -740 -1280 -1672 -2044 -2592 -3380 -4292 -5152 -5824 -6264 -6500 -6572 -6444 -6028 -5264 -4208 -2948 -1580 -268 716 1108 852 160 -824 -2240 -4452 -7648 -11476 -15184 -18156 -20304 -21992 -23488 -24580 -24712 -23532 -21316 -18636 -15824 -12792 -9336 -5616 -2044 1180 4248 7360 10256 12448 13772 14648 15640 16796 17580 17516 16752 15896 15356 14940 14036 12348 10152 7980 6064 4192 2068 -256 -2384 -4016 -5180 -6108 -6840 -7172 -6940 -6260 -5448 -4700 -3900 -2836 -1500 -188 800 1412 1912 2548 3288 3820 3964 3844 3780 3896 4008 3860 3416 2900 2488 2164 1772 1268 740 300 -48 -396 -768 -1056 -1148 -1052 -892 -748 -560 -224 232 656 932 1092 1252 1432 1544 1516 1392 1260 1152 1028 856 676 540 452 372 264 156 84 40 -32 -152 -300 -420 -504 -572 -652 -720 -752 -740 -704 -668 -632 -576 -492 -400 -316 -248 -188 -140 -108 -100 -112 -136 -164 -204 -256 -308 -336 -344 -336 -332 -312 -276 -224 -172 -120 -80 -52 -28 -8 0 4 4 4 0 +2 +0 -1 -2 -2 -3 -5 -6 -6 -7 -6 -4 -2 1 5 12 19 28 36 43 51 60 68 73 73 70 65 59 50 36 16 -8 -31 -53 -76 -98 -115 -122 -122 -114 -99 -71 -30 20 71 120 168 213 251 281 301 305 291 261 226 192 156 109 53 9 -7 0 4 -11 -44 -79 -106 -133 -174 -234 -306 -378 -440 -487 -519 -540 -544 -524 -470 -386 -278 -153 -27 73 116 91 17 -93 -260 -530 -934 -1436 -1947 -2386 -2734 -3034 -3320 -3559 -3666 -3577 -3317 -2969 -2582 -2136 -1596 -983 -366 216 798 1413 2016 2503 2834 3086 3371 3702 3966 4042 3956 3840 3797 3777 3631 3269 2749 2211 1719 1216 614 -78 -741 -1277 -1685 -2034 -2331 -2501 -2478 -2287 -2039 -1800 -1529 -1139 -617 -78 345 625 866 1184 1564 1864 1980 1970 1987 2101 2216 2188 1988 1731 1526 1361 1146 841 504 212 -35 -294 -583 -826 -922 -870 -761 -657 -505 -208 224 652 955 1156 1365 1612 1798 1829 1734 1623 1539 1423 1228 1003 832 729 621 458 284 163 80 -62 -323 -659 -970 -1220 -1459 -1738 -2021 -2221 -2307 -2322 -2330 -2323 -2238 -2035 -1756 -1481 -1240 -1015 -800 -662 -667 -817 -1062 -1404 -1926 -2673 -3538 -4329 -4960 -5521 -6111 -6636 -6839 -6547 -5862 -5037 -4211 -3303 -2151 -760 659 1877 2853 3740 4662 5518 6058 6168 6025 5890 5792 5499 4840 3984 3298 2942 2666 2141 1357 646 262 49 -370 -1135 -1962 -2479 -2664 -2848 -3264 -3747 -3922 -3663 -3220 -2881 -2604 -2100 -1234 -236 555 1075 1561 2201 2851 3201 3142 2902 2759 2719 2556 2144 1642 1300 1152 999 696 340 140 138 165 76 -81 -151 -83 11 23 -22 -7 115 275 377 416 462 569 701 788 805 791 791 788 732 609 466 351 262 153 9 -138 -241 -299 -347 -405 -457 -482 -483 -491 -521 -560 -582 -578 -565 -564 -573 -565 -518 -440 -364 -307 -254 -183 -94 -22 9 3 -14 -28 -53 -103 -175 -246 -301 -346 -392 -439 -466 -464 -438 -405 -373 
-333 -277 -208 -141 -85 -36 11 58 98 126 142 150 154 153 147 136 121 107 94 79 64 51 40 31 23 15 8 4 0 -2 -3 -4 -3 -2 -2 -1 0 +0 -4 -8 -8 -12 -20 -24 -24 -28 -24 -16 -8 4 20 48 76 112 144 172 204 240 272 292 292 280 260 236 200 144 64 -32 -124 -212 -304 -392 -460 -488 -488 -456 -396 -284 -120 80 284 480 672 852 1004 1124 1204 1220 1164 1044 904 768 624 436 212 36 -28 0 16 -44 -176 -316 -424 -532 -696 -936 -1224 -1512 -1760 -1948 -2076 -2160 -2176 -2096 -1880 -1544 -1112 -612 -108 292 464 364 68 -372 -1040 -2120 -3736 -5744 -7788 -9544 -10936 -12136 -13280 -14236 -14664 -14308 -13268 -11876 -10328 -8544 -6384 -3932 -1464 864 3192 5652 8064 10012 11336 12344 13484 14808 15864 16168 15824 15360 15188 15108 14524 13076 10996 8844 6876 4864 2456 -312 -2964 -5108 -6740 -8136 -9324 -10004 -9912 -9148 -8156 -7200 -6116 -4556 -2468 -312 1380 2500 3464 4736 6256 7456 7920 7880 7948 8404 8864 8752 7952 6924 6104 5444 4584 3364 2016 848 -140 -1176 -2332 -3304 -3688 -3480 -3044 -2628 -2020 -832 896 2608 3820 4624 5460 6448 7192 7316 6936 6492 6156 5692 4912 4012 3328 2916 2484 1832 1136 652 320 -248 -1292 -2636 -3880 -4880 -5836 -6952 -8084 -8884 -9228 -9288 -9320 -9292 -8952 -8140 -7024 -5924 -4960 -4060 -3200 -2648 -2668 -3268 -4248 -5616 -7704 -10692 -14152 -17316 -19840 -22084 -24444 -26544 -27356 -26188 -23448 -20148 -16844 -13212 -8604 -3040 2636 7508 11412 14960 18648 22072 24232 24672 24100 23560 23168 21996 19360 15936 13192 11768 10664 8564 5428 2584 1048 196 -1480 -4540 -7848 -9916 -10656 -11392 -13056 -14988 -15688 -14652 -12880 -11524 -10416 -8400 -4936 -944 2220 4300 6244 8804 11404 12804 12568 11608 11036 10876 10224 8576 6568 5200 4608 3996 2784 1360 560 552 660 304 -324 -604 -332 44 92 -88 -28 460 1100 1508 1664 1848 2276 2804 3152 3220 3164 3164 3152 2928 2436 1864 1404 1048 612 36 -552 -964 -1196 -1388 -1620 -1828 -1928 -1932 -1964 -2084 -2240 -2328 -2312 -2260 -2256 -2292 -2260 -2072 -1760 -1456 -1228 -1016 -732 -376 -88 36 12 -56 -112 -212 -412 -700 -984 -1204 -1384 -1568 -1756 -1864 -1856 -1752 -1620 -1492 -1332 -1108 -832 -564 -340 -144 44 232 392 504 568 600 616 612 588 544 484 428 376 316 256 204 160 124 92 60 32 16 0 -8 -12 -16 -12 -8 -8 -4 0 +2 +0 0 0 -1 -1 -2 -3 -4 -5 -5 -5 -5 -3 2 8 14 19 26 34 42 48 50 51 52 52 49 43 38 36 33 25 17 10 5 -5 -25 -53 -82 -109 -137 -171 -208 -239 -260 -273 -286 -297 -299 -283 -254 -223 -194 -165 -135 -116 -121 -153 -207 -283 -400 -574 -786 -993 -1174 -1349 -1541 -1725 -1832 -1808 -1668 -1477 -1270 -1026 -688 -250 222 651 1018 1371 1757 2135 2408 2517 2523 2531 2555 2488 2245 1896 1610 1472 1367 1125 731 356 148 28 -220 -690 -1221 -1580 -1739 -1903 -2233 -2624 -2812 -2687 -2417 -2213 -2047 -1690 -1016 -199 478 948 1409 2032 2693 3093 3107 2935 2855 2878 2768 2376 1862 1508 1368 1213 865 433 182 184 226 106 -116 -220 -124 17 36 -35 -11 195 475 667 753 859 1083 1368 1576 1649 1663 1705 1742 1660 1418 1113 861 659 397 24 -374 -674 -861 -1026 -1231 -1430 -1552 -1602 -1674 -1831 -2027 -2172 -2221 -2239 -2306 -2418 -2461 -2329 -2047 -1751 -1527 -1308 -972 -520 -121 57 21 -87 -186 -361 -739 -1311 -1922 -2455 -2943 -3488 -4077 -4539 -4734 -4691 -4565 -4423 -4165 -3653 -2901 -2077 -1316 -593 201 1097 1988 2736 3301 3750 4151 4486 4682 4709 4625 4506 4357 4123 3776 3383 3032 2708 2318 1810 1245 711 212 -309 -874 -1432 -1932 -2363 -2718 -2955 -3030 -2957 -2784 -2523 -2139 -1614 -1015 -443 58 535 1036 1541 1943 2151 2188 2163 2144 2075 1860 1498 1116 840 671 521 359 257 285 407 515 553 547 551 578 597 586 548 502 462 436 426 435 477 565 692 825 922 977 
1021 1070 1100 1061 931 749 561 374 157 -107 -387 -631 -819 -971 -1107 -1223 -1300 -1337 -1349 -1339 -1300 -1232 -1159 -1105 -1069 -1023 -951 -863 -777 -697 -612 -518 -427 -346 -276 -221 -204 -254 -370 -513 -626 -678 -677 -653 -619 -562 -466 -338 -211 -112 -36 49 156 266 346 393 429 468 486 458 386 308 255 220 178 123 80 74 98 121 125 118 114 116 110 85 51 20 -6 -34 -68 -101 -121 -127 -127 -129 -126 -112 -87 -59 -36 -16 2 19 32 38 39 37 36 33 29 23 18 15 11 8 5 3 2 1 0 0 0 0 +0 0 0 -4 -4 -8 -12 -16 -20 -20 -20 -20 -12 8 32 56 76 104 136 168 192 200 204 208 208 196 172 152 144 132 100 68 40 20 -20 -100 -212 -328 -436 -548 -684 -832 -956 -1040 -1092 -1144 -1188 -1196 -1132 -1016 -892 -776 -660 -540 -464 -484 -612 -828 -1132 -1600 -2296 -3144 -3972 -4696 -5396 -6164 -6900 -7328 -7232 -6672 -5908 -5080 -4104 -2752 -1000 888 2604 4072 5484 7028 8540 9632 10068 10092 10124 10220 9952 8980 7584 6440 5888 5468 4500 2924 1424 592 112 -880 -2760 -4884 -6320 -6956 -7612 -8932 -10496 -11248 -10748 -9668 -8852 -8188 -6760 -4064 -796 1912 3792 5636 8128 10772 12372 12428 11740 11420 11512 11072 9504 7448 6032 5472 4852 3460 1732 728 736 904 424 -464 -880 -496 68 144 -140 -44 780 1900 2668 3012 3436 4332 5472 6304 6596 6652 6820 6968 6640 5672 4452 3444 2636 1588 96 -1496 -2696 -3444 -4104 -4924 -5720 -6208 -6408 -6696 -7324 -8108 -8688 -8884 -8956 -9224 -9672 -9844 -9316 -8188 -7004 -6108 -5232 -3888 -2080 -484 228 84 -348 -744 -1444 -2956 -5244 -7688 -9820 -11772 -13952 -16308 -18156 -18936 -18764 -18260 -17692 -16660 -14612 -11604 -8308 -5264 -2372 804 4388 7952 10944 13204 15000 16604 17944 18728 18836 18500 18024 17428 16492 15104 13532 12128 10832 9272 7240 4980 2844 848 -1236 -3496 -5728 -7728 -9452 -10872 -11820 -12120 -11828 -11136 -10092 -8556 -6456 -4060 -1772 232 2140 4144 6164 7772 8604 8752 8652 8576 8300 7440 5992 4464 3360 2684 2084 1436 1028 1140 1628 2060 2212 2188 2204 2312 2388 2344 2192 2008 1848 1744 1704 1740 1908 2260 2768 3300 3688 3908 4084 4280 4400 4244 3724 2996 2244 1496 628 -428 -1548 -2524 -3276 -3884 -4428 -4892 -5200 -5348 -5396 -5356 -5200 -4928 -4636 -4420 -4276 -4092 -3804 -3452 -3108 -2788 -2448 -2072 -1708 -1384 -1104 -884 -816 -1016 -1480 -2052 -2504 -2712 -2708 -2612 -2476 -2248 -1864 -1352 -844 -448 -144 196 624 1064 1384 1572 1716 1872 1944 1832 1544 1232 1020 880 712 492 320 296 392 484 500 472 456 464 440 340 204 80 -24 -136 -272 -404 -484 -508 -508 -516 -504 -448 -348 -236 -144 -64 8 76 128 152 156 148 144 132 116 92 72 60 44 32 20 12 8 4 0 0 0 0 +2 +0 -1 -1 -1 -2 -3 -5 -6 -8 -10 -14 -18 -22 -27 -31 -36 -42 -48 -51 -49 -46 -45 -42 -34 -20 -5 2 1 -5 -10 -21 -45 -84 -131 -176 -223 -279 -344 -403 -442 -460 -469 -476 -469 -429 -356 -266 -176 -83 29 164 310 443 555 654 750 840 908 946 960 967 967 945 893 826 764 703 621 499 354 208 64 -96 -280 -471 -653 -821 -970 -1084 -1142 -1145 -1107 -1030 -896 -694 -448 -201 27 254 506 771 996 1130 1179 1194 1212 1202 1104 910 695 535 438 348 245 180 204 298 387 425 430 444 476 503 505 483 453 427 412 412 430 482 585 733 894 1021 1108 1184 1270 1336 1318 1184 975 747 510 219 -153 -565 -944 -1255 -1524 -1778 -2011 -2190 -2309 -2385 -2426 -2413 -2343 -2260 -2208 -2189 -2149 -2049 -1906 -1760 -1620 -1460 -1270 -1073 -894 -731 -601 -572 -731 -1095 -1560 -1958 -2181 -2243 -2227 -2174 -2036 -1739 -1299 -834 -457 -150 214 706 1239 1667 1958 2215 2498 2691 2625 2295 1904 1639 1470 1234 887 605 584 801 1033 1118 1098 1116 1191 1180 966 610 254 -67 -463 -999 -1570 -1996 -2227 -2388 -2579 -2708 -2592 -2170 -1587 -1022 -502 69 729 1357 
1800 2041 2204 2376 2509 2497 2346 2179 2085 1993 1781 1450 1156 1014 965 870 699 569 583 695 768 755 741 813 935 985 913 810 790 851 884 808 682 616 619 586 436 235 125 146 199 171 73 -4 -20 -48 -168 -366 -561 -725 -903 -1141 -1404 -1614 -1745 -1837 -1933 -2010 -2011 -1926 -1800 -1683 -1568 -1419 -1227 -1022 -829 -649 -468 -303 -171 -73 14 103 176 212 223 243 286 328 332 289 231 189 161 122 63 4 -34 -49 -60 -74 -76 -56 -28 -16 -28 -44 -40 -23 -14 -27 -47 -56 -46 -28 -13 1 25 60 96 121 129 125 114 99 80 56 28 0 -22 -36 -42 -43 -39 -27 -4 28 60 87 110 134 159 176 178 168 152 138 123 102 74 47 27 14 4 -4 -9 -7 -2 2 4 6 9 13 14 13 12 12 13 13 12 10 8 8 9 8 7 6 5 4 3 2 1 0 0 0 -1 -1 0 +0 -8 -8 -8 -16 -24 -40 -48 -64 -80 -112 -144 -176 -216 -248 -288 -336 -384 -408 -392 -368 -360 -336 -272 -160 -40 16 8 -40 -80 -168 -360 -672 -1048 -1408 -1784 -2232 -2752 -3224 -3536 -3680 -3752 -3808 -3752 -3432 -2848 -2128 -1408 -664 232 1312 2480 3544 4440 5232 6000 6720 7264 7568 7680 7736 7736 7560 7144 6608 6112 5624 4968 3992 2832 1664 512 -768 -2240 -3768 -5224 -6568 -7760 -8672 -9136 -9160 -8856 -8240 -7168 -5552 -3584 -1608 216 2032 4048 6168 7968 9040 9432 9552 9696 9616 8832 7280 5560 4280 3504 2784 1960 1440 1632 2384 3096 3400 3440 3552 3808 4024 4040 3864 3624 3416 3296 3296 3440 3856 4680 5864 7152 8168 8864 9472 10160 10688 10544 9472 7800 5976 4080 1752 -1224 -4520 -7552 -10040 -12192 -14224 -16088 -17520 -18472 -19080 -19408 -19304 -18744 -18080 -17664 -17512 -17192 -16392 -15248 -14080 -12960 -11680 -10160 -8584 -7152 -5848 -4808 -4576 -5848 -8760 -12480 -15664 -17448 -17944 -17816 -17392 -16288 -13912 -10392 -6672 -3656 -1200 1712 5648 9912 13336 15664 17720 19984 21528 21000 18360 15232 13112 11760 9872 7096 4840 4672 6408 8264 8944 8784 8928 9528 9440 7728 4880 2032 -536 -3704 -7992 -12560 -15968 -17816 -19104 -20632 -21664 -20736 -17360 -12696 -8176 -4016 552 5832 10856 14400 16328 17632 19008 20072 19976 18768 17432 16680 15944 14248 11600 9248 8112 7720 6960 5592 4552 4664 5560 6144 6040 5928 6504 7480 7880 7304 6480 6320 6808 7072 6464 5456 4928 4952 4688 3488 1880 1000 1168 1592 1368 584 -32 -160 -384 -1344 -2928 -4488 -5800 -7224 -9128 -11232 -12912 -13960 -14696 -15464 -16080 -16088 -15408 -14400 -13464 -12544 -11352 -9816 -8176 -6632 -5192 -3744 -2424 -1368 -584 112 824 1408 1696 1784 1944 2288 2624 2656 2312 1848 1512 1288 976 504 32 -272 -392 -480 -592 -608 -448 -224 -128 -224 -352 -320 -184 -112 -216 -376 -448 -368 -224 -104 8 200 480 768 968 1032 1000 912 792 640 448 224 0 -176 -288 -336 -344 -312 -216 -32 224 480 696 880 1072 1272 1408 1424 1344 1216 1104 984 816 592 376 216 112 32 -32 -72 -56 -16 16 32 48 72 104 112 104 96 96 104 104 96 80 64 64 72 64 56 48 40 32 24 16 8 0 0 0 -8 -8 0 +3 +0 -1 -1 -1 -2 -4 -6 -8 -11 -14 -16 -18 -18 -16 -12 -7 -3 4 15 29 43 56 70 85 100 105 99 88 81 78 69 53 38 39 57 78 89 92 98 111 115 99 65 28 -8 -57 -128 -210 -277 -322 -359 -403 -440 -437 -379 -287 -192 -98 13 151 291 399 468 521 580 632 649 628 601 593 584 537 450 369 333 326 302 249 208 220 269 305 308 310 349 412 445 423 385 385 426 453 424 367 340 350 339 258 143 77 93 129 114 50 -3 -15 -35 -126 -281 -441 -583 -744 -962 -1210 -1423 -1574 -1696 -1826 -1943 -1988 -1947 -1863 -1782 -1699 -1572 -1391 -1186 -984 -788 -582 -385 -223 -97 20 144 251 310 334 372 449 528 546 487 398 335 292 227 121 8 -67 -99 -125 -158 -168 -127 -64 -38 -68 -109 -102 -59 -38 -75 -136 -164 -139 -87 -41 6 87 213 350 453 498 497 470 422 350 252 131 3 -106 -182 -222 -235 -222 -157 -20 181 402 604 796 1011 1245 1440 
1525 1497 1422 1350 1264 1097 842 567 346 191 60 -59 -127 -108 -30 42 88 141 230 329 391 401 403 440 515 575 578 533 511 561 669 753 754 713 697 724 724 634 488 363 280 176 -4 -220 -392 -509 -639 -828 -1041 -1197 -1283 -1354 -1465 -1594 -1666 -1645 -1563 -1482 -1422 -1349 -1213 -1013 -804 -642 -531 -417 -264 -96 33 110 173 258 340 374 356 337 363 406 409 352 278 238 232 220 175 106 47 14 -9 -53 -129 -212 -258 -255 -229 -214 -212 -196 -149 -91 -44 -1 52 107 138 130 108 99 91 52 -27 -127 -216 -283 -338 -392 -444 -476 -470 -426 -357 -275 -184 -91 -2 76 146 215 278 319 332 329 323 315 299 277 255 238 223 213 205 198 184 167 148 131 111 92 76 62 48 34 26 23 18 8 3 7 18 29 44 61 72 65 43 21 15 21 30 38 48 54 52 38 17 -7 -26 -37 -38 -32 -25 -19 -16 -17 -22 -28 -32 -31 -28 -23 -17 -10 -4 -2 -1 0 0 -1 -1 0 0 0 0 0 0 +0 -16 -16 -16 -32 -64 -96 -128 -176 -224 -256 -288 -288 -256 -192 -112 -48 64 240 464 688 896 1120 1360 1600 1680 1584 1408 1296 1248 1104 848 608 624 912 1248 1424 1472 1568 1776 1840 1584 1040 448 -128 -912 -2048 -3360 -4432 -5152 -5744 -6448 -7040 -6992 -6064 -4592 -3072 -1568 208 2416 4656 6384 7488 8336 9280 10112 10384 10048 9616 9488 9344 8592 7200 5904 5328 5216 4832 3984 3328 3520 4304 4880 4928 4960 5584 6592 7120 6768 6160 6160 6816 7248 6784 5872 5440 5600 5424 4128 2288 1232 1488 2064 1824 800 -48 -240 -560 -2016 -4496 -7056 -9328 -11904 -15392 -19360 -22768 -25184 -27136 -29216 -31088 -31808 -31152 -29808 -28512 -27184 -25152 -22256 -18976 -15744 -12608 -9312 -6160 -3568 -1552 320 2304 4016 4960 5344 5952 7184 8448 8736 7792 6368 5360 4672 3632 1936 128 -1072 -1584 -2000 -2528 -2688 -2032 -1024 -608 -1088 -1744 -1632 -944 -608 -1200 -2176 -2624 -2224 -1392 -656 96 1392 3408 5600 7248 7968 7952 7520 6752 5600 4032 2096 48 -1696 -2912 -3552 -3760 -3552 -2512 -320 2896 6432 9664 12736 16176 19920 23040 24400 23952 22752 21600 20224 17552 13472 9072 5536 3056 960 -944 -2032 -1728 -480 672 1408 2256 3680 5264 6256 6416 6448 7040 8240 9200 9248 8528 8176 8976 10704 12048 12064 11408 11152 11584 11584 10144 7808 5808 4480 2816 -64 -3520 -6272 -8144 -10224 -13248 -16656 -19152 -20528 -21664 -23440 -25504 -26656 -26320 -25008 -23712 -22752 -21584 -19408 -16208 -12864 -10272 -8496 -6672 -4224 -1536 528 1760 2768 4128 5440 5984 5696 5392 5808 6496 6544 5632 4448 3808 3712 3520 2800 1696 752 224 -144 -848 -2064 -3392 -4128 -4080 -3664 -3424 -3392 -3136 -2384 -1456 -704 -16 832 1712 2208 2080 1728 1584 1456 832 -432 -2032 -3456 -4528 -5408 -6272 -7104 -7616 -7520 -6816 -5712 -4400 -2944 -1456 -32 1216 2336 3440 4448 5104 5312 5264 5168 5040 4784 4432 4080 3808 3568 3408 3280 3168 2944 2672 2368 2096 1776 1472 1216 992 768 544 416 368 288 128 48 112 288 464 704 976 1152 1040 688 336 240 336 480 608 768 864 832 608 272 -112 -416 -592 -608 -512 -400 -304 -256 -272 -352 -448 -512 -496 -448 -368 -272 -160 -64 -32 -16 0 0 -16 -16 0 0 0 0 0 0 +4 +0 -1 -1 -1 -1 -1 -1 -1 0 0 1 2 4 5 6 7 7 6 5 3 0 -4 -6 -8 -9 -9 -7 -1 9 21 34 48 64 84 103 115 119 119 119 117 107 86 61 38 22 7 -8 -17 -15 -5 6 13 22 38 57 70 75 78 88 107 123 128 122 121 137 168 195 202 196 198 212 218 197 156 119 94 61 -2 -81 -148 -197 -254 -338 -436 -515 -566 -613 -680 -759 -813 -824 -802 -779 -766 -745 -686 -587 -477 -391 -330 -266 -172 -64 23 77 124 189 255 287 280 271 299 342 352 310 251 219 219 212 173 107 49 15 -9 -58 -146 -246 -306 -310 -284 -272 -276 -261 -203 -127 -62 -1 78 165 216 209 179 167 157 93 -49 -236 -411 -551 -674 -802 -932 -1025 -1039 -966 -830 -656 -451 -228 -3 202 399 603 801 944 1011 1030 1041 1046 1022 975 
926 889 861 846 842 836 805 752 692 631 557 474 406 346 277 207 166 151 120 58 21 57 144 244 375 547 678 641 440 234 172 249 377 514 670 805 821 646 304 -123 -523 -790 -870 -797 -666 -550 -503 -580 -820 -1151 -1438 -1577 -1574 -1450 -1189 -796 -395 -123 -15 5 1 -16 -27 21 146 305 457 607 748 830 812 730 647 570 482 387 309 243 153 38 -75 -185 -339 -550 -763 -912 -983 -991 -948 -864 -763 -659 -547 -427 -347 -345 -387 -404 -372 -328 -300 -264 -195 -121 -85 -86 -93 -98 -120 -155 -167 -126 -33 84 197 291 359 412 454 476 461 421 386 379 380 360 317 276 252 232 198 155 122 108 98 78 52 35 34 31 4 -45 -91 -110 -109 -113 -130 -147 -144 -119 -88 -65 -45 -18 25 77 122 149 160 168 178 183 169 142 114 97 85 68 38 0 -37 -63 -80 -95 -110 -121 -121 -112 -99 -87 -75 -60 -41 -25 -12 -2 6 11 11 7 4 6 10 14 18 20 22 22 22 22 21 19 17 16 16 15 12 8 4 2 0 -1 -2 -3 -2 -1 -1 -1 0 0 0 0 0 0 0 -1 -1 -1 0 +0 -16 -16 -16 -16 -16 -16 -16 0 0 16 32 64 80 96 112 112 96 80 48 0 -64 -96 -128 -144 -144 -112 -16 144 336 544 768 1024 1344 1648 1840 1904 1904 1904 1872 1712 1376 976 608 352 112 -128 -272 -240 -80 96 208 352 608 912 1120 1200 1248 1408 1712 1968 2048 1952 1936 2192 2688 3120 3232 3136 3168 3392 3488 3152 2496 1904 1504 976 -32 -1296 -2368 -3152 -4064 -5408 -6976 -8240 -9056 -9808 -10880 -12144 -13008 -13184 -12832 -12464 -12256 -11920 -10976 -9392 -7632 -6256 -5280 -4256 -2752 -1024 368 1232 1984 3024 4080 4592 4480 4336 4784 5472 5632 4960 4016 3504 3504 3392 2768 1712 784 240 -144 -928 -2336 -3936 -4896 -4960 -4544 -4352 -4416 -4176 -3248 -2032 -992 -16 1248 2640 3456 3344 2864 2672 2512 1488 -784 -3776 -6576 -8816 -10784 -12832 -14912 -16400 -16624 -15456 -13280 -10496 -7216 -3648 -48 3232 6384 9648 12816 15104 16176 16480 16656 16736 16352 15600 14816 14224 13776 13536 13472 13376 12880 12032 11072 10096 8912 7584 6496 5536 4432 3312 2656 2416 1920 928 336 912 2304 3904 6000 8752 10848 10256 7040 3744 2752 3984 6032 8224 10720 12880 13136 10336 4864 -1968 -8368 -12640 -13920 -12752 -10656 -8800 -8048 -9280 -13120 -18416 -23008 -25232 -25184 -23200 -19024 -12736 -6320 -1968 -240 80 16 -256 -432 336 2336 4880 7312 9712 11968 13280 12992 11680 10352 9120 7712 6192 4944 3888 2448 608 -1200 -2960 -5424 -8800 -12208 -14592 -15728 -15856 -15168 -13824 -12208 -10544 -8752 -6832 -5552 -5520 -6192 -6464 -5952 -5248 -4800 -4224 -3120 -1936 -1360 -1376 -1488 -1568 -1920 -2480 -2672 -2016 -528 1344 3152 4656 5744 6592 7264 7616 7376 6736 6176 6064 6080 5760 5072 4416 4032 3712 3168 2480 1952 1728 1568 1248 832 560 544 496 64 -720 -1456 -1760 -1744 -1808 -2080 -2352 -2304 -1904 -1408 -1040 -720 -288 400 1232 1952 2384 2560 2688 2848 2928 2704 2272 1824 1552 1360 1088 608 0 -592 -1008 -1280 -1520 -1760 -1936 -1936 -1792 -1584 -1392 -1200 -960 -656 -400 -192 -32 96 176 176 112 64 96 160 224 288 320 352 352 352 352 336 304 272 256 256 240 192 128 64 32 0 -16 -32 -48 -32 -16 -16 -16 0 0 0 0 0 0 0 -16 -16 -16 0 +4 +0 0 0 0 1 2 2 3 4 6 7 7 8 10 11 12 14 15 16 16 16 16 15 13 12 11 8 7 7 6 3 1 3 9 17 28 43 57 56 41 22 17 26 42 60 82 102 109 89 44 -19 -82 -129 -147 -140 -121 -104 -98 -117 -171 -248 -320 -362 -373 -355 -300 -207 -106 -34 -5 1 0 -5 -9 7 49 106 163 222 282 321 322 297 271 245 212 175 143 115 74 19 -39 -98 -183 -304 -432 -529 -583 -603 -590 -551 -498 -441 -374 -299 -249 -253 -291 -310 -293 -264 -247 -222 -168 -107 -77 -79 -88 -95 -119 -156 -173 -134 -36 93 224 337 426 500 564 605 601 561 527 528 543 527 476 424 396 373 326 261 212 192 178 145 99 70 68 65 10 -97 -200 -250 -254 -269 -318 -370 -370 -314 -239 -180 -129 
-51 77 241 394 495 549 593 648 683 653 563 468 410 375 308 178 0 -180 -321 -427 -524 -627 -714 -746 -717 -658 -602 -540 -445 -321 -199 -101 -16 61 112 113 79 55 72 125 189 254 308 346 376 403 426 430 413 398 412 446 457 408 305 189 97 33 -33 -101 -144 -134 -83 -35 -11 9 50 91 97 67 32 7 -30 -102 -196 -276 -329 -377 -439 -509 -564 -588 -591 -600 -625 -644 -617 -537 -438 -357 -309 -273 -231 -182 -131 -71 -10 37 63 91 141 211 266 294 309 328 355 375 382 384 383 378 365 341 318 298 279 254 219 190 184 197 205 182 130 68 9 -44 -88 -117 -133 -147 -164 -170 -156 -128 -104 -89 -80 -75 -80 -100 -124 -146 -167 -187 -195 -191 -183 -183 -193 -200 -191 -164 -127 -92 -63 -36 -7 28 57 63 44 27 32 55 72 67 56 60 81 98 92 70 49 44 56 75 91 98 91 73 50 26 5 -14 -27 -30 -21 -7 5 9 7 4 3 4 4 3 2 3 7 9 9 5 0 -4 -7 -10 -12 -12 -11 -9 -6 -5 -4 -4 -3 -3 -3 -3 -3 -3 -2 -2 -1 -1 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 32 64 64 96 128 192 224 224 256 320 352 384 448 480 512 512 512 512 480 416 384 352 256 224 224 192 96 32 96 288 544 896 1376 1824 1792 1312 704 544 832 1344 1920 2624 3264 3488 2848 1408 -608 -2624 -4128 -4704 -4480 -3872 -3328 -3136 -3744 -5472 -7936 -10240 -11584 -11936 -11360 -9600 -6624 -3392 -1088 -160 32 0 -160 -288 224 1568 3392 5216 7104 9024 10272 10304 9504 8672 7840 6784 5600 4576 3680 2368 608 -1248 -3136 -5856 -9728 -13824 -16928 -18656 -19296 -18880 -17632 -15936 -14112 -11968 -9568 -7968 -8096 -9312 -9920 -9376 -8448 -7904 -7104 -5376 -3424 -2464 -2528 -2816 -3040 -3808 -4992 -5536 -4288 -1152 2976 7168 10784 13632 16000 18048 19360 19232 17952 16864 16896 17376 16864 15232 13568 12672 11936 10432 8352 6784 6144 5696 4640 3168 2240 2176 2080 320 -3104 -6400 -8000 -8128 -8608 -10176 -11840 -11840 -10048 -7648 -5760 -4128 -1632 2464 7712 12608 15840 17568 18976 20736 21856 20896 18016 14976 13120 12000 9856 5696 0 -5760 -10272 -13664 -16768 -20064 -22848 -23872 -22944 -21056 -19264 -17280 -14240 -10272 -6368 -3232 -512 1952 3584 3616 2528 1760 2304 4000 6048 8128 9856 11072 12032 12896 13632 13760 13216 12736 13184 14272 14624 13056 9760 6048 3104 1056 -1056 -3232 -4608 -4288 -2656 -1120 -352 288 1600 2912 3104 2144 1024 224 -960 -3264 -6272 -8832 -10528 -12064 -14048 -16288 -18048 -18816 -18912 -19200 -20000 -20608 -19744 -17184 -14016 -11424 -9888 -8736 -7392 -5824 -4192 -2272 -320 1184 2016 2912 4512 6752 8512 9408 9888 10496 11360 12000 12224 12288 12256 12096 11680 10912 10176 9536 8928 8128 7008 6080 5888 6304 6560 5824 4160 2176 288 -1408 -2816 -3744 -4256 -4704 -5248 -5440 -4992 -4096 -3328 -2848 -2560 -2400 -2560 -3200 -3968 -4672 -5344 -5984 -6240 -6112 -5856 -5856 -6176 -6400 -6112 -5248 -4064 -2944 -2016 -1152 -224 896 1824 2016 1408 864 1024 1760 2304 2144 1792 1920 2592 3136 2944 2240 1568 1408 1792 2400 2912 3136 2912 2336 1600 832 160 -448 -864 -960 -672 -224 160 288 224 128 96 128 128 96 64 96 224 288 288 160 0 -128 -224 -320 -384 -384 -352 -288 -192 -160 -128 -128 -96 -96 -96 -96 -96 -96 -64 -64 -32 -32 0 0 0 0 0 0 0 0 0 0 +5 +0 -1 -1 -1 -1 0 0 1 2 3 4 5 6 7 7 7 7 7 6 4 0 -6 -11 -15 -20 -26 -31 -35 -36 -35 -35 -33 -29 -22 -15 -8 -2 5 10 10 7 5 7 14 22 31 39 46 52 58 63 67 66 67 72 80 85 79 61 39 20 7 -8 -24 -36 -34 -22 -10 -3 2 14 27 30 21 10 2 -11 -37 -72 -104 -128 -150 -179 -214 -243 -260 -268 -279 -298 -314 -309 -276 -230 -193 -171 -155 -134 -108 -80 -45 -7 24 42 62 99 151 195 221 237 258 286 309 322 331 338 341 337 322 307 294 283 262 232 206 204 224 238 217 159 85 11 -57 -117 -160 -185 -210 -240 -254 -239 -201 -167 -146 -134 -129 -142 -181 -229 -277 -326 -372 -399 -401 -393 
-404 -438 -465 -454 -400 -320 -238 -166 -98 -18 82 170 192 140 87 106 189 253 245 210 232 323 401 392 307 223 209 271 373 470 523 506 423 300 164 33 -88 -180 -210 -158 -51 44 83 68 40 33 48 53 40 29 46 94 138 139 87 10 -65 -129 -189 -245 -277 -267 -223 -172 -137 -124 -118 -111 -103 -107 -123 -147 -158 -151 -124 -82 -33 10 31 23 15 38 82 115 110 85 77 101 136 149 131 94 53 17 -10 -30 -42 -47 -35 -10 20 42 57 70 74 55 19 -14 -23 -17 -13 -26 -51 -73 -80 -68 -46 -32 -35 -48 -57 -54 -44 -35 -34 -50 -84 -127 -161 -185 -206 -230 -249 -246 -214 -170 -134 -115 -110 -104 -87 -54 -14 23 47 57 57 58 64 74 81 78 73 79 100 128 151 166 178 192 203 202 186 159 129 102 79 58 35 10 -14 -35 -54 -68 -77 -83 -87 -87 -83 -72 -61 -55 -57 -60 -58 -53 -49 -48 -49 -46 -36 -22 -8 3 11 16 18 20 24 31 37 39 38 37 37 37 35 32 29 26 23 19 16 15 13 11 9 7 4 1 1 1 0 -1 -2 -1 -1 0 1 1 2 2 2 1 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -32 -32 -32 -32 0 0 32 64 96 128 160 192 224 224 224 224 224 192 128 0 -192 -352 -480 -640 -832 -992 -1120 -1152 -1120 -1120 -1056 -928 -704 -480 -256 -64 160 320 320 224 160 224 448 704 992 1248 1472 1664 1856 2016 2144 2112 2144 2304 2560 2720 2528 1952 1248 640 224 -256 -768 -1152 -1088 -704 -320 -96 64 448 864 960 672 320 64 -352 -1184 -2304 -3328 -4096 -4800 -5728 -6848 -7776 -8320 -8576 -8928 -9536 -10048 -9888 -8832 -7360 -6176 -5472 -4960 -4288 -3456 -2560 -1440 -224 768 1344 1984 3168 4832 6240 7072 7584 8256 9152 9888 10304 10592 10816 10912 10784 10304 9824 9408 9056 8384 7424 6592 6528 7168 7616 6944 5088 2720 352 -1824 -3744 -5120 -5920 -6720 -7680 -8128 -7648 -6432 -5344 -4672 -4288 -4128 -4544 -5792 -7328 -8864 -10432 -11904 -12768 -12832 -12576 -12928 -14016 -14880 -14528 -12800 -10240 -7616 -5312 -3136 -576 2624 5440 6144 4480 2784 3392 6048 8096 7840 6720 7424 10336 12832 12544 9824 7136 6688 8672 11936 15040 16736 16192 13536 9600 5248 1056 -2816 -5760 -6720 -5056 -1632 1408 2656 2176 1280 1056 1536 1696 1280 928 1472 3008 4416 4448 2784 320 -2080 -4128 -6048 -7840 -8864 -8544 -7136 -5504 -4384 -3968 -3776 -3552 -3296 -3424 -3936 -4704 -5056 -4832 -3968 -2624 -1056 320 992 736 480 1216 2624 3680 3520 2720 2464 3232 4352 4768 4192 3008 1696 544 -320 -960 -1344 -1504 -1120 -320 640 1344 1824 2240 2368 1760 608 -448 -736 -544 -416 -832 -1632 -2336 -2560 -2176 -1472 -1024 -1120 -1536 -1824 -1728 -1408 -1120 -1088 -1600 -2688 -4064 -5152 -5920 -6592 -7360 -7968 -7872 -6848 -5440 -4288 -3680 -3520 -3328 -2784 -1728 -448 736 1504 1824 1824 1856 2048 2368 2592 2496 2336 2528 3200 4096 4832 5312 5696 6144 6496 6464 5952 5088 4128 3264 2528 1856 1120 320 -448 -1120 -1728 -2176 -2464 -2656 -2784 -2784 -2656 -2304 -1952 -1760 -1824 -1920 -1856 -1696 -1568 -1536 -1568 -1472 -1152 -704 -256 96 352 512 576 640 768 992 1184 1248 1216 1184 1184 1184 1120 1024 928 832 736 608 512 480 416 352 288 224 128 32 32 32 0 -32 -64 -32 -32 0 32 32 64 64 64 32 0 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 0 +5 +0 -1 -1 0 0 0 0 0 0 1 1 2 2 2 4 6 6 5 4 4 7 10 14 17 18 16 12 7 1 -5 -11 -13 -11 -4 3 6 5 3 3 4 5 4 3 5 11 17 17 11 1 -10 -20 -30 -40 -47 -47 -41 -33 -27 -25 -25 -24 -23 -25 -30 -36 -40 -40 -33 -23 -10 3 9 7 5 12 27 40 39 31 29 39 54 60 55 40 23 7 -5 -14 -21 -24 -18 -5 10 23 32 41 44 33 12 -9 -15 -11 -9 -19 -37 -54 -60 -52 -36 -26 -29 -41 -50 -48 -40 -33 -32 -49 -83 -128 -167 -196 -223 -255 -283 -285 -254 -207 -166 -147 -143 -138 -118 -76 -20 33 71 87 90 94 106 125 140 138 134 148 190 250 302 340 374 415 449 459 433 381 316 258 206 155 95 28 -39 -103 -162 -211 -248 -273 -294 -306 -298 -268 -232 
-[elided: one long removed line of int16 sample values, the old test waveform]
+[elided: regenerated test vectors, added as repeating groups of three lines per test case: an int16 sample series, the same series left-shifted by the case's scale exponent (values scaled by 2^5, 2^6, or 2^7), and that scale exponent on its own line (5, 6, or 7)]
-2880 -4864 -4736 -4352 -5696 -7488 -7616 -6272 -5376 -6080 -7104 -6912 -5696 -4800 -4416 -4224 -3712 -3456 -3520 -3264 -1920 -128 576 -384 -1984 -2752 -2304 -1280 -320 576 1536 2240 2048 1024 -128 -832 -1024 -1216 -1728 -2624 -3968 -4864 -4544 -3200 -2048 -1984 -1920 -768 1088 2048 1216 192 512 1856 2432 1792 1344 2304 3968 4992 4736 4416 4800 5568 5952 5888 5952 6592 7360 7680 7360 6528 5568 4736 4352 4352 4480 4288 3584 2624 2176 2048 2048 1536 576 -192 -384 64 512 640 384 -64 -384 -384 -128 -64 -384 -960 -1472 -1920 -2368 -2816 -3072 -2944 -2816 -2944 -3392 -3712 -3584 -3200 -2752 -2368 -2048 -1728 -1536 -1344 -1152 -1024 -960 -1088 -1216 -1216 -1024 -576 -256 -64 -128 -192 -192 0 192 384 512 640 576 448 320 320 320 384 320 256 256 256 192 128 0 -128 -192 -192 -192 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -64 -64 -64 -64 -64 -64 -64 -64 0 0 0 +6 +0 0 0 0 0 0 0 0 1 1 1 1 0 0 0 0 0 0 -2 -3 -4 -4 -5 -7 -9 -10 -9 -8 -7 -8 -9 -10 -9 -7 -6 -6 -7 -7 -6 -7 -9 -11 -9 -5 0 2 5 8 10 9 8 10 12 10 7 7 13 17 12 5 1 3 1 -5 -7 -1 5 1 -13 -22 -22 -21 -28 -38 -40 -34 -30 -34 -41 -41 -35 -30 -29 -28 -25 -24 -25 -24 -14 -1 4 -3 -16 -23 -20 -12 -3 5 15 22 20 10 -2 -9 -11 -14 -20 -31 -48 -60 -57 -41 -27 -27 -26 -11 15 30 19 3 8 30 40 31 24 41 73 92 90 86 96 113 125 126 131 147 168 179 177 161 140 122 114 118 124 121 104 79 66 65 65 51 20 -6 -12 2 20 25 17 -1 -14 -14 -5 -3 -18 -44 -69 -94 -121 -150 -166 -165 -162 -175 -208 -235 -236 -219 -192 -169 -150 -133 -119 -108 -97 -89 -89 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 4 36 68 96 114 116 98 75 69 84 99 96 84 80 82 78 53 12 -30 -60 -80 -85 -77 -65 -59 -68 -91 -124 -146 -146 -130 -121 -122 -109 -70 -30 -23 -46 -54 -24 14 16 -9 -13 20 56 54 25 14 33 58 67 65 79 118 162 196 211 218 230 245 254 247 222 195 175 157 137 122 123 133 129 102 65 42 30 13 -16 -44 -59 -67 -75 -81 -81 -83 -99 -121 -129 -119 -113 -124 -142 -148 -140 -134 -128 -109 -71 -35 -21 -23 -16 7 34 49 50 48 46 44 42 44 52 57 56 51 48 45 40 33 26 20 18 21 25 24 15 3 -3 1 8 9 -2 -17 -27 -29 -28 -33 -43 -47 -41 -29 -22 -21 -22 -21 -18 -15 -14 -14 -13 -11 -7 -4 -5 -9 -12 -13 -12 -12 -15 -18 -17 -13 -10 -8 -9 -9 -9 -7 -5 -2 0 1 1 1 2 2 1 1 1 0 0 1 1 2 2 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 128 128 128 128 0 0 0 0 0 0 -256 -384 -512 -512 -640 -896 -1152 -1280 -1152 -1024 -896 -1024 -1152 -1280 -1152 -896 -768 -768 -896 -896 -768 -896 -1152 -1408 -1152 -640 0 256 640 1024 1280 1152 1024 1280 1536 1280 896 896 1664 2176 1536 640 128 384 128 -640 -896 -128 640 128 -1664 -2816 -2816 -2688 -3584 -4864 -5120 -4352 -3840 -4352 -5248 -5248 -4480 -3840 -3712 -3584 -3200 -3072 -3200 -3072 -1792 -128 512 -384 -2048 -2944 -2560 -1536 -384 640 1920 2816 2560 1280 -256 -1152 -1408 -1792 -2560 -3968 -6144 -7680 -7296 -5248 -3456 -3456 -3328 -1408 1920 3840 2432 384 1024 3840 5120 3968 3072 5248 9344 11776 11520 11008 12288 14464 16000 16128 16768 18816 21504 22912 22656 20608 17920 15616 14592 15104 15872 15488 13312 10112 8448 8320 8320 6528 2560 -768 -1536 256 2560 3200 2176 -128 -1792 -1792 -640 -384 -2304 -5632 -8832 -12032 -15488 -19200 -21248 -21120 -20736 -22400 -26624 -30080 -30208 -28032 -24576 -21632 -19200 -17024 -15232 -13824 -12416 -11392 -11392 -13056 -15232 -15872 -13440 -8320 -2944 -640 -1408 -3072 -2560 512 4608 8704 12288 14592 14848 12544 9600 8832 10752 12672 12288 10752 10240 10496 9984 6784 1536 -3840 -7680 -10240 -10880 -9856 -8320 -7552 -8704 -11648 -15872 -18688 -18688 -16640 -15488 -15616 -13952 -8960 -3840 -2944 -5888 -6912 -3072 1792 2048 -1152 -1664 2560 7168 6912 
3200 1792 4224 7424 8576 8320 10112 15104 20736 25088 27008 27904 29440 31360 32512 31616 28416 24960 22400 20096 17536 15616 15744 17024 16512 13056 8320 5376 3840 1664 -2048 -5632 -7552 -8576 -9600 -10368 -10368 -10624 -12672 -15488 -16512 -15232 -14464 -15872 -18176 -18944 -17920 -17152 -16384 -13952 -9088 -4480 -2688 -2944 -2048 896 4352 6272 6400 6144 5888 5632 5376 5632 6656 7296 7168 6528 6144 5760 5120 4224 3328 2560 2304 2688 3200 3072 1920 384 -384 128 1024 1152 -256 -2176 -3456 -3712 -3584 -4224 -5504 -6016 -5248 -3712 -2816 -2688 -2816 -2688 -2304 -1920 -1792 -1792 -1664 -1408 -896 -512 -640 -1152 -1536 -1664 -1536 -1536 -1920 -2304 -2176 -1664 -1280 -1024 -1152 -1152 -1152 -896 -640 -256 0 128 128 128 256 256 128 128 128 0 0 128 128 256 256 128 128 128 128 128 128 128 128 128 0 0 0 0 0 0 0 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -4 -5 -5 -5 -5 -4 -4 -4 -4 -4 -4 -4 -5 -6 -7 -6 -4 -2 -1 -1 -2 -2 0 3 6 9 11 12 11 8 8 10 13 13 12 12 12 12 8 2 -6 -12 -16 -17 -16 -14 -13 -16 -22 -31 -37 -38 -35 -34 -35 -32 -21 -10 -8 -15 -19 -8 5 6 -4 -5 8 22 22 11 6 15 27 31 32 40 60 85 105 116 123 133 145 154 153 141 127 116 107 96 87 90 100 99 80 52 34 25 11 -14 -40 -55 -63 -73 -80 -81 -86 -105 -131 -143 -135 -131 -147 -172 -184 -178 -174 -171 -148 -99 -49 -31 -34 -25 11 55 81 85 84 82 80 78 85 101 114 114 108 104 100 92 78 62 49 46 56 68 66 42 10 -7 4 27 29 -5 -58 -95 -102 -102 -127 -168 -191 -172 -126 -98 -98 -106 -103 -89 -78 -75 -77 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 3 22 32 39 46 51 48 40 29 26 33 50 75 95 104 107 111 124 136 140 138 151 186 221 225 199 180 186 201 188 153 126 129 140 129 99 75 65 62 58 50 42 24 -2 -25 -28 -20 -11 -13 -28 -55 -91 -118 -124 -110 -95 -88 -86 -79 -67 -59 -61 -71 -77 -73 -59 -48 -45 -44 -33 -10 9 14 10 9 16 24 26 29 31 30 26 30 41 48 42 34 34 37 23 -8 -33 -33 -20 -21 -37 -50 -49 -44 -51 -70 -89 -99 -101 -101 -100 -98 -97 -96 -92 -85 -79 -72 -63 -51 -43 -41 -40 -33 -19 -4 7 13 17 21 26 33 39 43 46 49 51 52 49 44 40 37 35 36 38 40 38 33 28 27 28 27 22 16 12 13 13 13 10 8 6 5 2 -1 -4 -6 -6 -6 -6 -7 -7 -6 -5 -4 -5 -5 -5 -4 -4 -4 -4 -3 -3 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -384 -384 -512 -640 -640 -640 -640 -512 -512 -512 -512 -512 -512 -512 -640 -768 -896 -768 -512 -256 -128 -128 -256 -256 0 384 768 1152 1408 1536 1408 1024 1024 1280 1664 1664 1536 1536 1536 1536 1024 256 -768 -1536 -2048 -2176 -2048 -1792 -1664 -2048 -2816 -3968 -4736 -4864 -4480 -4352 -4480 -4096 -2688 -1280 -1024 -1920 -2432 -1024 640 768 -512 -640 1024 2816 2816 1408 768 1920 3456 3968 4096 5120 7680 10880 13440 14848 15744 17024 18560 19712 19584 18048 16256 14848 13696 12288 11136 11520 12800 12672 10240 6656 4352 3200 1408 -1792 -5120 -7040 -8064 -9344 -10240 -10368 -11008 -13440 -16768 -18304 -17280 -16768 -18816 -22016 -23552 -22784 -22272 -21888 -18944 -12672 -6272 -3968 -4352 -3200 1408 7040 10368 10880 10752 10496 10240 9984 10880 12928 14592 14592 13824 13312 12800 11776 9984 7936 6272 5888 7168 8704 8448 5376 1280 -896 512 3456 3712 -640 -7424 -12160 -13056 -13056 -16256 -21504 -24448 -22016 -16128 -12544 -12544 -13568 -13184 -11392 -9984 -9600 -9856 -9472 -8064 -5376 -3328 -4096 -7680 -11520 -12544 -11776 -12800 -16768 -20608 -20608 -16896 -12544 -11264 -12288 -13952 -14208 -12032 -8192 -3456 384 2816 4096 4992 5888 6528 6144 5120 3712 3328 4224 6400 9600 12160 13312 13696 14208 15872 17408 17920 17664 19328 23808 28288 28800 25472 23040 23808 
25728 24064 19584 16128 16512 17920 16512 12672 9600 8320 7936 7424 6400 5376 3072 -256 -3200 -3584 -2560 -1408 -1664 -3584 -7040 -11648 -15104 -15872 -14080 -12160 -11264 -11008 -10112 -8576 -7552 -7808 -9088 -9856 -9344 -7552 -6144 -5760 -5632 -4224 -1280 1152 1792 1280 1152 2048 3072 3328 3712 3968 3840 3328 3840 5248 6144 5376 4352 4352 4736 2944 -1024 -4224 -4224 -2560 -2688 -4736 -6400 -6272 -5632 -6528 -8960 -11392 -12672 -12928 -12928 -12800 -12544 -12416 -12288 -11776 -10880 -10112 -9216 -8064 -6528 -5504 -5248 -5120 -4224 -2432 -512 896 1664 2176 2688 3328 4224 4992 5504 5888 6272 6528 6656 6272 5632 5120 4736 4480 4608 4864 5120 4864 4224 3584 3456 3584 3456 2816 2048 1536 1664 1664 1664 1280 1024 768 640 256 -128 -512 -768 -768 -768 -768 -896 -896 -768 -640 -512 -640 -640 -640 -512 -512 -512 -512 -384 -384 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 0 0 0 -1 0 0 0 -1 -1 -1 -1 -2 -2 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -4 -4 -3 -3 -2 -2 -4 -6 -7 -7 -8 -11 -14 -15 -13 -10 -10 -11 -13 -14 -12 -9 -4 0 3 4 6 7 8 8 7 5 5 6 10 16 21 23 25 27 31 35 37 38 43 54 66 70 63 59 63 69 67 56 47 50 56 52 41 32 29 28 27 24 20 12 -1 -13 -15 -11 -7 -8 -17 -34 -57 -76 -81 -74 -65 -62 -62 -58 -51 -45 -48 -58 -64 -61 -51 -42 -41 -41 -31 -10 9 14 10 10 18 26 30 33 36 36 33 38 53 64 58 47 49 55 35 -12 -51 -52 -33 -35 -63 -88 -89 -81 -96 -136 -177 -203 -213 -216 -220 -221 -226 -228 -224 -213 -203 -191 -170 -142 -124 -120 -121 -103 -61 -12 26 48 62 79 102 131 160 184 204 223 240 252 245 229 214 204 205 215 236 256 258 233 208 206 222 222 188 143 120 127 142 141 123 104 86 66 34 -6 -52 -88 -106 -112 -119 -131 -140 -128 -110 -108 -130 -156 -164 -158 -160 -176 -196 -197 -176 -151 -138 -137 -127 -104 -80 -77 -94 -105 -86 -50 -22 -11 0 24 46 49 44 45 50 45 27 13 14 22 22 23 35 55 66 61 52 51 47 33 16 13 17 10 -16 -38 -38 -23 -10 -11 -19 -22 -20 -23 -29 -32 -33 -36 -42 -48 -47 -45 -45 -55 -67 -75 -75 -63 -43 -23 -12 -9 -9 -5 8 21 28 33 41 52 56 54 52 56 63 66 65 62 58 48 33 19 15 23 33 36 32 29 29 29 22 7 -7 -13 -10 -5 -4 -10 -18 -20 -15 -11 -12 -17 -21 -15 -4 4 4 -2 -6 -6 -2 0 0 -2 -3 -2 0 1 0 -4 -6 -7 -6 -4 -1 2 3 3 2 2 3 6 7 6 6 5 5 5 4 4 3 3 2 1 1 0 -1 -1 -2 -1 -1 -1 -1 -1 0 0 0 -1 -1 -1 -1 0 0 0 0 -1 0 +0 0 0 0 -64 0 0 0 -64 -64 -64 -64 -128 -128 -192 -192 -192 -192 -192 -192 -192 -192 -192 -192 -192 -256 -256 -192 -192 -128 -128 -256 -384 -448 -448 -512 -704 -896 -960 -832 -640 -640 -704 -832 -896 -768 -576 -256 0 192 256 384 448 512 512 448 320 320 384 640 1024 1344 1472 1600 1728 1984 2240 2368 2432 2752 3456 4224 4480 4032 3776 4032 4416 4288 3584 3008 3200 3584 3328 2624 2048 1856 1792 1728 1536 1280 768 -64 -832 -960 -704 -448 -512 -1088 -2176 -3648 -4864 -5184 -4736 -4160 -3968 -3968 -3712 -3264 -2880 -3072 -3712 -4096 -3904 -3264 -2688 -2624 -2624 -1984 -640 576 896 640 640 1152 1664 1920 2112 2304 2304 2112 2432 3392 4096 3712 3008 3136 3520 2240 -768 -3264 -3328 -2112 -2240 -4032 -5632 -5696 -5184 -6144 -8704 -11328 -12992 -13632 -13824 -14080 -14144 -14464 -14592 -14336 -13632 -12992 -12224 -10880 -9088 -7936 -7680 -7744 -6592 -3904 -768 1664 3072 3968 5056 6528 8384 10240 11776 13056 14272 15360 16128 15680 14656 13696 13056 13120 13760 15104 16384 16512 14912 13312 13184 14208 14208 12032 9152 7680 8128 9088 9024 7872 6656 5504 4224 2176 -384 -3328 -5632 -6784 -7168 -7616 -8384 -8960 -8192 -7040 -6912 -8320 -9984 -10496 -10112 -10240 -11264 -12544 -12608 -11264 -9664 -8832 -8768 -8128 -6656 -5120 -4928 -6016 -6720 -5504 -3200 -1408 -704 0 1536 2944 3136 2816 2880 3200 
2880 1728 832 896 1408 1408 1472 2240 3520 4224 3904 3328 3264 3008 2112 1024 832 1088 640 -1024 -2432 -2432 -1472 -640 -704 -1216 -1408 -1280 -1472 -1856 -2048 -2112 -2304 -2688 -3072 -3008 -2880 -2880 -3520 -4288 -4800 -4800 -4032 -2752 -1472 -768 -576 -576 -320 512 1344 1792 2112 2624 3328 3584 3456 3328 3584 4032 4224 4160 3968 3712 3072 2112 1216 960 1472 2112 2304 2048 1856 1856 1856 1408 448 -448 -832 -640 -320 -256 -640 -1152 -1280 -960 -704 -768 -1088 -1344 -960 -256 256 256 -128 -384 -384 -128 0 0 -128 -192 -128 0 64 0 -256 -384 -448 -384 -256 -64 128 192 192 128 128 192 384 448 384 384 320 320 320 256 256 192 192 128 64 64 0 -64 -64 -128 -64 -64 -64 -64 -64 0 0 0 -64 -64 -64 -64 0 0 0 0 -64 0 +6 +0 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 1 2 3 3 4 5 6 7 7 7 7 8 9 10 12 13 13 12 13 15 15 14 11 10 11 13 13 12 11 9 7 4 -1 -7 -13 -16 -17 -19 -22 -24 -23 -20 -21 -26 -32 -34 -34 -36 -41 -47 -49 -45 -40 -37 -38 -37 -31 -25 -24 -30 -35 -30 -18 -8 -5 0 9 18 20 18 19 22 20 12 6 7 11 11 12 18 30 37 35 31 31 29 21 10 8 12 7 -11 -28 -29 -17 -8 -9 -16 -18 -17 -20 -26 -30 -32 -35 -42 -49 -49 -48 -49 -61 -76 -87 -88 -76 -53 -30 -15 -12 -13 -7 12 30 42 51 65 83 92 92 90 100 114 124 125 121 117 100 69 41 34 53 77 86 79 73 76 77 60 21 -20 -38 -30 -14 -12 -33 -61 -70 -55 -40 -44 -68 -82 -63 -18 19 19 -8 -30 -28 -9 2 0 -10 -13 -7 3 9 0 -23 -45 -53 -43 -27 -5 19 40 42 29 25 46 83 105 102 94 96 102 102 95 91 88 76 57 43 37 23 -9 -41 -48 -28 -8 -8 -19 -18 0 13 10 -3 -12 -11 -2 6 11 8 0 -7 -11 -18 -33 -50 -59 -56 -50 -47 -45 -40 -34 -35 -44 -66 -91 -116 -129 -123 -103 -85 -79 -78 -67 -57 -61 -72 -66 -31 6 12 -12 -34 -26 4 25 22 9 4 10 19 23 20 19 21 22 19 14 12 15 16 10 0 -7 -6 -5 -10 -18 -23 -19 -9 1 5 2 -3 -4 2 11 16 15 13 13 12 7 -3 -14 -23 -21 -10 5 11 3 -8 -11 -2 6 4 -4 -8 -5 3 8 7 1 -6 -9 -9 -9 -12 -15 -13 -7 -2 -5 -10 -11 -8 -7 -7 -7 -5 -1 1 2 3 4 4 3 4 7 9 8 6 7 8 9 9 7 7 8 8 7 7 6 5 3 1 0 0 -1 0 0 1 1 0 0 -1 -1 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 128 128 256 384 384 512 640 768 896 896 896 896 1024 1152 1280 1536 1664 1664 1536 1664 1920 1920 1792 1408 1280 1408 1664 1664 1536 1408 1152 896 512 -128 -896 -1664 -2048 -2176 -2432 -2816 -3072 -2944 -2560 -2688 -3328 -4096 -4352 -4352 -4608 -5248 -6016 -6272 -5760 -5120 -4736 -4864 -4736 -3968 -3200 -3072 -3840 -4480 -3840 -2304 -1024 -640 0 1152 2304 2560 2304 2432 2816 2560 1536 768 896 1408 1408 1536 2304 3840 4736 4480 3968 3968 3712 2688 1280 1024 1536 896 -1408 -3584 -3712 -2176 -1024 -1152 -2048 -2304 -2176 -2560 -3328 -3840 -4096 -4480 -5376 -6272 -6272 -6144 -6272 -7808 -9728 -11136 -11264 -9728 -6784 -3840 -1920 -1536 -1664 -896 1536 3840 5376 6528 8320 10624 11776 11776 11520 12800 14592 15872 16000 15488 14976 12800 8832 5248 4352 6784 9856 11008 10112 9344 9728 9856 7680 2688 -2560 -4864 -3840 -1792 -1536 -4224 -7808 -8960 -7040 -5120 -5632 -8704 -10496 -8064 -2304 2432 2432 -1024 -3840 -3584 -1152 256 0 -1280 -1664 -896 384 1152 0 -2944 -5760 -6784 -5504 -3456 -640 2432 5120 5376 3712 3200 5888 10624 13440 13056 12032 12288 13056 13056 12160 11648 11264 9728 7296 5504 4736 2944 -1152 -5248 -6144 -3584 -1024 -1024 -2432 -2304 0 1664 1280 -384 -1536 -1408 -256 768 1408 1024 0 -896 -1408 -2304 -4224 -6400 -7552 -7168 -6400 -6016 -5760 -5120 -4352 -4480 -5632 -8448 -11648 -14848 -16512 -15744 -13184 -10880 -10112 -9984 -8576 -7296 -7808 -9216 -8448 -3968 768 1536 -1536 -4352 -3328 512 3200 2816 1152 512 1280 2432 2944 2560 2432 2688 2816 2432 1792 1536 1920 2048 1280 0 -896 -768 -640 -1280 -2304 
-2944 -2432 -1152 128 640 256 -384 -512 256 1408 2048 1920 1664 1664 1536 896 -384 -1792 -2944 -2688 -1280 640 1408 384 -1024 -1408 -256 768 512 -512 -1024 -640 384 1024 896 128 -768 -1152 -1152 -1152 -1536 -1920 -1664 -896 -256 -640 -1280 -1408 -1024 -896 -896 -896 -640 -128 128 256 384 512 512 384 512 896 1152 1024 768 896 1024 1152 1152 896 896 1024 1024 896 896 768 640 384 128 0 0 -128 0 0 128 128 0 0 -128 -128 0 0 -128 -128 -128 0 0 0 0 -128 -128 -128 -128 0 +7 +0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 0 0 -1 -1 -1 -1 0 0 -1 -1 -1 0 0 0 -2 -4 -4 -4 -3 -1 1 3 4 3 2 5 9 12 13 12 13 14 15 14 14 14 13 10 8 7 4 -2 -9 -11 -7 -2 -2 -5 -5 0 3 2 -1 -4 -4 -1 2 3 2 0 -3 -5 -7 -13 -21 -25 -24 -22 -21 -21 -19 -17 -18 -23 -35 -49 -64 -73 -71 -62 -52 -50 -50 -44 -38 -42 -51 -47 -23 5 9 -10 -27 -21 4 22 20 8 3 9 18 22 21 20 22 24 21 15 14 18 20 13 0 -9 -8 -7 -13 -26 -33 -28 -14 1 8 4 -5 -7 4 21 30 29 26 26 25 16 -5 -31 -51 -49 -23 13 27 9 -20 -28 -5 18 14 -10 -25 -14 12 29 25 4 -22 -35 -34 -34 -48 -64 -58 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 12 18 27 37 37 31 39 66 88 87 74 78 104 123 118 107 114 131 141 139 135 133 119 86 44 17 4 -3 0 22 54 68 49 12 -9 -3 5 1 -12 -18 -2 24 39 30 7 -11 -13 -13 -21 -34 -44 -44 -42 -40 -26 1 22 16 -9 -28 -17 16 36 22 -19 -58 -65 -39 -9 -11 -38 -61 -60 -52 -56 -71 -81 -82 -90 -103 -108 -93 -64 -42 -39 -49 -59 -53 -28 -1 8 -3 -11 4 32 47 44 33 30 36 40 41 40 36 30 26 18 6 -6 -5 11 26 29 25 32 43 47 38 32 41 56 63 57 52 52 53 50 43 39 36 33 26 16 7 0 -5 -9 -12 -11 -7 -4 -7 -14 -21 -23 -23 -26 -28 -26 -18 -13 -12 -14 -15 -15 -15 -16 -16 -17 -18 -17 -11 -5 -1 -2 -3 -3 -1 2 5 7 8 8 7 8 9 9 8 6 4 3 2 1 0 -1 -1 -2 -3 -4 -4 -4 -4 -3 -3 -2 -2 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -128 0 0 -128 -128 -128 -128 0 0 -128 -128 -128 0 0 0 -256 -512 -512 -512 -384 -128 128 384 512 384 256 640 1152 1536 1664 1536 1664 1792 1920 1792 1792 1792 1664 1280 1024 896 512 -256 -1152 -1408 -896 -256 -256 -640 -640 0 384 256 -128 -512 -512 -128 256 384 256 0 -384 -640 -896 -1664 -2688 -3200 -3072 -2816 -2688 -2688 -2432 -2176 -2304 -2944 -4480 -6272 -8192 -9344 -9088 -7936 -6656 -6400 -6400 -5632 -4864 -5376 -6528 -6016 -2944 640 1152 -1280 -3456 -2688 512 2816 2560 1024 384 1152 2304 2816 2688 2560 2816 3072 2688 1920 1792 2304 2560 1664 0 -1152 -1024 -896 -1664 -3328 -4224 -3584 -1792 128 1024 512 -640 -896 512 2688 3840 3712 3328 3328 3200 2048 -640 -3968 -6528 -6272 -2944 1664 3456 1152 -2560 -3584 -640 2304 1792 -1280 -3200 -1792 1536 3712 3200 512 -2816 -4480 -4352 -4352 -6144 -8192 -7424 -3712 -1024 -2688 -6272 -7424 -5632 -4480 -5120 -5504 -3328 -128 1536 2304 3456 4736 4736 3968 4992 8448 11264 11136 9472 9984 13312 15744 15104 13696 14592 16768 18048 17792 17280 17024 15232 11008 5632 2176 512 -384 0 2816 6912 8704 6272 1536 -1152 -384 640 128 -1536 -2304 -256 3072 4992 3840 896 -1408 -1664 -1664 -2688 -4352 -5632 -5632 -5376 -5120 -3328 128 2816 2048 -1152 -3584 -2176 2048 4608 2816 -2432 -7424 -8320 -4992 -1152 -1408 -4864 -7808 -7680 -6656 -7168 -9088 -10368 -10496 -11520 -13184 -13824 -11904 -8192 -5376 -4992 -6272 -7552 -6784 -3584 -128 1024 -384 -1408 512 4096 6016 5632 4224 3840 4608 5120 5248 5120 4608 3840 3328 2304 768 -768 -640 1408 3328 3712 3200 4096 5504 6016 4864 4096 5248 7168 8064 7296 6656 6656 6784 6400 5504 4992 4608 4224 3328 2048 896 0 -640 -1152 -1536 -1408 -896 -512 -896 -1792 -2688 -2944 -2944 -3328 -3584 -3328 -2304 -1664 -1536 -1792 -1920 -1920 -1920 -2048 -2048 
-2176 -2304 -2176 -1408 -640 -128 -256 -384 -384 -128 256 640 896 1024 1024 896 1024 1152 1152 1024 768 512 384 256 128 0 -128 -128 -256 -384 -512 -512 -512 -512 -384 -384 -256 -256 -384 -384 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 -1 -1 0 0 -1 -1 -1 0 0 0 0 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -1 0 1 1 2 2 2 3 5 7 8 7 8 11 13 13 13 14 17 19 20 20 20 19 14 7 3 0 -1 0 4 11 15 11 2 -3 -1 1 0 -4 -5 -1 7 12 9 2 -4 -5 -5 -8 -13 -17 -18 -18 -17 -12 0 10 7 -5 -14 -9 8 18 12 -11 -33 -38 -23 -6 -7 -24 -40 -41 -36 -40 -51 -59 -62 -69 -81 -87 -77 -54 -36 -34 -45 -55 -50 -28 -1 8 -3 -11 4 35 54 51 39 37 44 51 53 53 49 43 37 26 10 -8 -7 19 44 50 44 56 79 88 73 63 82 116 133 124 116 118 125 120 107 99 94 87 71 46 22 0 -15 -28 -36 -36 -24 -14 -22 -51 -78 -88 -93 -107 -120 -113 -83 -59 -56 -68 -76 -80 -84 -91 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 26 56 82 96 97 96 106 129 142 126 100 81 72 55 26 1 -4 -10 -36 -81 -116 -130 -134 -145 -155 -147 -131 -136 -176 -221 -229 -196 -159 -149 -156 -155 -143 -131 -131 -133 -118 -78 -31 2 16 30 47 64 71 67 68 81 93 90 74 65 72 86 92 87 77 62 49 42 49 54 35 -4 -37 -45 -39 -42 -58 -71 -72 -74 -89 -104 -104 -87 -73 -71 -77 -80 -78 -75 -71 -62 -46 -24 5 38 66 75 73 73 84 99 104 105 114 126 129 120 112 118 127 124 110 101 100 99 85 66 56 53 46 26 7 -1 -2 -4 -12 -24 -36 -46 -55 -65 -75 -83 -84 -75 -60 -47 -42 -44 -47 -46 -42 -35 -26 -20 -16 -15 -13 -10 -4 3 10 14 13 11 11 15 18 19 17 14 13 11 10 11 11 11 9 7 5 3 2 2 2 2 1 0 -1 -2 -3 -3 -3 -4 -4 -4 -3 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 0 +0 -128 -128 0 0 -128 -128 -128 0 0 0 0 -128 -128 -128 -128 -128 -256 -256 -128 -128 -128 -256 -256 -256 -256 -256 -256 -256 -128 0 128 128 256 256 256 384 640 896 1024 896 1024 1408 1664 1664 1664 1792 2176 2432 2560 2560 2560 2432 1792 896 384 0 -128 0 512 1408 1920 1408 256 -384 -128 128 0 -512 -640 -128 896 1536 1152 256 -512 -640 -640 -1024 -1664 -2176 -2304 -2304 -2176 -1536 0 1280 896 -640 -1792 -1152 1024 2304 1536 -1408 -4224 -4864 -2944 -768 -896 -3072 -5120 -5248 -4608 -5120 -6528 -7552 -7936 -8832 -10368 -11136 -9856 -6912 -4608 -4352 -5760 -7040 -6400 -3584 -128 1024 -384 -1408 512 4480 6912 6528 4992 4736 5632 6528 6784 6784 6272 5504 4736 3328 1280 -1024 -896 2432 5632 6400 5632 7168 10112 11264 9344 8064 10496 14848 17024 15872 14848 15104 16000 15360 13696 12672 12032 11136 9088 5888 2816 0 -1920 -3584 -4608 -4608 -3072 -1792 -2816 -6528 -9984 -11264 -11904 -13696 -15360 -14464 -10624 -7552 -7168 -8704 -9728 -10240 -10752 -11648 -12672 -13952 -15104 -14336 -10112 -4224 -896 -1408 -3200 -3072 -384 3328 7168 10496 12288 12416 12288 13568 16512 18176 16128 12800 10368 9216 7040 3328 128 -512 -1280 -4608 -10368 -14848 -16640 -17152 -18560 -19840 -18816 -16768 -17408 -22528 -28288 -29312 -25088 -20352 -19072 -19968 -19840 -18304 -16768 -16768 -17024 -15104 -9984 -3968 256 2048 3840 6016 8192 9088 8576 8704 10368 11904 11520 9472 8320 9216 11008 11776 11136 9856 7936 6272 5376 6272 6912 4480 -512 -4736 -5760 -4992 -5376 -7424 -9088 -9216 -9472 -11392 -13312 -13312 -11136 -9344 -9088 -9856 -10240 -9984 -9600 -9088 -7936 -5888 -3072 640 4864 8448 9600 9344 9344 10752 12672 13312 13440 14592 16128 16512 15360 14336 15104 16256 15872 14080 12928 12800 12672 10880 8448 7168 6784 5888 3328 896 -128 -256 -512 -1536 -3072 -4608 -5888 -7040 -8320 -9600 -10624 -10752 -9600 -7680 -6016 -5376 -5632 -6016 -5888 -5376 -4480 -3328 -2560 -2048 -1920 -1664 -1280 -512 384 1280 1792 1664 1408 1408 1920 2304 2432 2176 1792 1664 1408 1280 1408 1408 
1408 1152 896 640 384 256 256 256 256 128 0 -128 -256 -384 -384 -384 -512 -512 -512 -384 -384 -384 -384 -256 -256 -128 -128 -128 -128 -128 0 0 0 0 0 0 0 0 0 +7 +0 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -2 -2 -2 -3 -3 -3 -4 -4 -5 -6 -7 -7 -5 -3 -1 -1 -2 -2 -1 2 5 8 9 10 10 12 15 18 16 13 11 10 8 4 0 -1 -2 -7 -16 -24 -27 -29 -33 -36 -35 -32 -35 -46 -60 -63 -56 -47 -45 -49 -50 -47 -45 -46 -48 -44 -30 -12 1 6 12 20 28 32 31 32 39 46 46 39 35 39 48 53 51 46 38 31 27 33 37 25 -3 -28 -34 -30 -33 -47 -59 -61 -64 -78 -94 -96 -83 -71 -70 -78 -82 -83 -81 -79 -71 -54 -29 7 48 84 98 97 99 118 142 153 158 174 199 208 198 190 203 225 225 205 192 196 199 175 138 121 118 104 62 18 -1 -3 -9 -32 -65 -99 -130 -163 -195 -234 -266 -278 -255 -209 -168 -156 -168 -183 -186 -175 -151 -117 -89 -76 -73 -67 -50 -19 22 64 89 87 75 80 109 141 150 138 124 115 110 106 113 125 127 117 96 70 47 33 32 41 46 36 11 -16 -39 -54 -63 -75 -98 -120 -127 -117 -111 -123 -137 -123 -86 -57 -55 -60 -46 -13 18 36 52 67 73 74 84 111 132 123 91 68 71 88 96 88 77 70 74 77 62 28 -2 -6 11 22 12 -5 -10 -2 5 4 -8 -25 -46 -59 -56 -44 -40 -50 -56 -43 -17 4 2 -18 -43 -56 -48 -26 -8 -6 -10 -9 -1 7 16 31 46 47 28 11 14 33 44 33 7 -12 -16 -9 -4 -8 -18 -26 -26 -17 -2 14 22 13 -12 -33 -35 -21 -8 -11 -21 -25 -20 -15 -16 -23 -32 -39 -40 -37 -32 -32 -37 -44 -49 -49 -48 -45 -44 -46 -51 -49 -36 -20 -11 -12 -16 -14 -6 3 8 12 14 17 20 20 17 12 8 8 9 11 10 9 7 5 4 4 5 6 7 7 7 7 6 5 4 3 1 -1 -1 -1 0 0 0 1 1 0 0 -1 -1 -1 -1 -1 0 0 -1 -1 -1 -1 -1 -1 0 +0 0 0 0 0 -64 -64 -64 -64 -64 -64 -64 -64 -64 -128 -128 -128 -192 -192 -128 -128 -128 -192 -192 -192 -256 -256 -320 -384 -448 -448 -320 -192 -64 -64 -128 -128 -64 128 320 512 576 640 640 768 960 1152 1024 832 704 640 512 256 0 -64 -128 -448 -1024 -1536 -1728 -1856 -2112 -2304 -2240 -2048 -2240 -2944 -3840 -4032 -3584 -3008 -2880 -3136 -3200 -3008 -2880 -2944 -3072 -2816 -1920 -768 64 384 768 1280 1792 2048 1984 2048 2496 2944 2944 2496 2240 2496 3072 3392 3264 2944 2432 1984 1728 2112 2368 1600 -192 -1792 -2176 -1920 -2112 -3008 -3776 -3904 -4096 -4992 -6016 -6144 -5312 -4544 -4480 -4992 -5248 -5312 -5184 -5056 -4544 -3456 -1856 448 3072 5376 6272 6208 6336 7552 9088 9792 10112 11136 12736 13312 12672 12160 12992 14400 14400 13120 12288 12544 12736 11200 8832 7744 7552 6656 3968 1152 -64 -192 -576 -2048 -4160 -6336 -8320 -10432 -12480 -14976 -17024 -17792 -16320 -13376 -10752 -9984 -10752 -11712 -11904 -11200 -9664 -7488 -5696 -4864 -4672 -4288 -3200 -1216 1408 4096 5696 5568 4800 5120 6976 9024 9600 8832 7936 7360 7040 6784 7232 8000 8128 7488 6144 4480 3008 2112 2048 2624 2944 2304 704 -1024 -2496 -3456 -4032 -4800 -6272 -7680 -8128 -7488 -7104 -7872 -8768 -7872 -5504 -3648 -3520 -3840 -2944 -832 1152 2304 3328 4288 4672 4736 5376 7104 8448 7872 5824 4352 4544 5632 6144 5632 4928 4480 4736 4928 3968 1792 -128 -384 704 1408 768 -320 -640 -128 320 256 -512 -1600 -2944 -3776 -3584 -2816 -2560 -3200 -3584 -2752 -1088 256 128 -1152 -2752 -3584 -3072 -1664 -512 -384 -640 -576 -64 448 1024 1984 2944 3008 1792 704 896 2112 2816 2112 448 -768 -1024 -576 -256 -512 -1152 -1664 -1664 -1088 -128 896 1408 832 -768 -2112 -2240 -1344 -512 -704 -1344 -1600 -1280 -960 -1024 -1472 -2048 -2496 -2560 -2368 -2048 -2048 -2368 -2816 -3136 -3136 -3072 -2880 -2816 -2944 -3264 -3136 -2304 -1280 -704 -768 -1024 -896 -384 192 512 768 896 1088 1280 1280 1088 768 512 512 576 704 640 576 448 320 256 256 320 384 448 448 448 448 384 320 256 192 64 -64 -64 -64 0 0 0 64 64 0 0 -64 -64 -64 -64 -64 0 0 -64 -64 -64 -64 -64 -64 0 +6 +0 -1 -1 
-1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -3 -3 -4 -3 -3 -3 -2 -3 -3 -2 -1 0 2 4 4 4 4 6 9 10 9 9 9 9 9 10 12 13 12 10 8 5 4 4 5 6 5 1 -3 -7 -10 -12 -15 -19 -25 -27 -26 -25 -28 -33 -30 -22 -15 -15 -17 -13 -4 5 11 16 22 24 25 30 40 49 47 36 28 30 37 42 39 35 33 36 38 32 14 -1 -4 6 13 7 -3 -6 -2 3 3 -6 -18 -33 -43 -42 -34 -32 -40 -47 -36 -14 3 2 -16 -41 -54 -47 -26 -8 -6 -11 -10 -1 9 19 37 57 60 37 14 19 46 64 48 11 -19 -24 -14 -6 -13 -30 -46 -46 -31 -4 27 45 28 -24 -70 -77 -46 -18 -24 -50 -62 -51 -38 -42 -65 -93 -114 -121 -113 -101 -104 -126 -153 -174 -183 -182 -177 -177 -194 -221 -219 -168 -94 -53 -62 -85 -77 -33 18 55 77 98 122 146 154 135 99 71 72 92 107 109 101 88 70 53 52 69 95 114 126 133 132 123 108 97 76 37 -8 -32 -24 3 27 44 58 67 61 37 -1 -42 -71 -68 -33 11 19 -21 -72 -86 -56 -21 -21 -51 -76 -75 -57 -42 -38 -29 -1 32 51 49 38 35 42 54 53 46 40 42 44 43 39 32 31 36 49 59 50 22 -3 -2 19 32 23 9 10 19 22 24 34 46 39 18 10 27 40 28 1 -9 4 16 8 -2 5 18 14 -7 -18 -6 14 19 2 -21 -34 -26 -10 -1 -6 -14 -8 11 25 23 9 -5 -9 -8 -7 -9 -12 -15 -16 -14 -11 -12 -21 -34 -45 -51 -49 -43 -38 -38 -42 -43 -38 -29 -20 -11 -1 8 11 10 8 12 19 22 18 13 10 14 19 21 19 16 14 15 15 15 13 10 7 5 5 5 5 3 0 -1 -2 -2 -2 -3 -4 -5 -6 -6 -5 -4 -4 -4 -4 -4 -3 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -256 -256 -256 -384 -384 -512 -384 -384 -384 -256 -384 -384 -256 -128 0 256 512 512 512 512 768 1152 1280 1152 1152 1152 1152 1152 1280 1536 1664 1536 1280 1024 640 512 512 640 768 640 128 -384 -896 -1280 -1536 -1920 -2432 -3200 -3456 -3328 -3200 -3584 -4224 -3840 -2816 -1920 -1920 -2176 -1664 -512 640 1408 2048 2816 3072 3200 3840 5120 6272 6016 4608 3584 3840 4736 5376 4992 4480 4224 4608 4864 4096 1792 -128 -512 768 1664 896 -384 -768 -256 384 384 -768 -2304 -4224 -5504 -5376 -4352 -4096 -5120 -6016 -4608 -1792 384 256 -2048 -5248 -6912 -6016 -3328 -1024 -768 -1408 -1280 -128 1152 2432 4736 7296 7680 4736 1792 2432 5888 8192 6144 1408 -2432 -3072 -1792 -768 -1664 -3840 -5888 -5888 -3968 -512 3456 5760 3584 -3072 -8960 -9856 -5888 -2304 -3072 -6400 -7936 -6528 -4864 -5376 -8320 -11904 -14592 -15488 -14464 -12928 -13312 -16128 -19584 -22272 -23424 -23296 -22656 -22656 -24832 -28288 -28032 -21504 -12032 -6784 -7936 -10880 -9856 -4224 2304 7040 9856 12544 15616 18688 19712 17280 12672 9088 9216 11776 13696 13952 12928 11264 8960 6784 6656 8832 12160 14592 16128 17024 16896 15744 13824 12416 9728 4736 -1024 -4096 -3072 384 3456 5632 7424 8576 7808 4736 -128 -5376 -9088 -8704 -4224 1408 2432 -2688 -9216 -11008 -7168 -2688 -2688 -6528 -9728 -9600 -7296 -5376 -4864 -3712 -128 4096 6528 6272 4864 4480 5376 6912 6784 5888 5120 5376 5632 5504 4992 4096 3968 4608 6272 7552 6400 2816 -384 -256 2432 4096 2944 1152 1280 2432 2816 3072 4352 5888 4992 2304 1280 3456 5120 3584 128 -1152 512 2048 1024 -256 640 2304 1792 -896 -2304 -768 1792 2432 256 -2688 -4352 -3328 -1280 -128 -768 -1792 -1024 1408 3200 2944 1152 -640 -1152 -1024 -896 -1152 -1536 -1920 -2048 -1792 -1408 -1536 -2688 -4352 -5760 -6528 -6272 -5504 -4864 -4864 -5376 -5504 -4864 -3712 -2560 -1408 -128 1024 1408 1280 1024 1536 2432 2816 2304 1664 1280 1792 2432 2688 2432 2048 1792 1920 1920 1920 1664 1280 896 640 640 640 640 384 0 -128 -256 -256 -256 -384 -512 -640 -768 -768 -640 -512 -512 -512 -512 -512 -384 -256 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -3 -3 -3 -4 -5 -5 -4 -3 -2 -2 -3 -3 -2 0 2 3 5 6 8 9 9 7 5 5 7 9 10 9 9 7 6 6 8 12 15 17 19 19 19 17 16 
13 6 -2 -7 -5 0 5 9 13 16 15 9 -1 -11 -20 -20 -10 3 6 -7 -24 -29 -20 -8 -8 -20 -30 -30 -23 -18 -17 -13 -1 14 24 24 19 18 22 29 29 26 23 24 26 27 25 21 20 24 34 42 36 16 -2 -2 15 26 19 8 9 17 20 22 33 45 40 19 11 29 45 32 1 -11 5 20 10 -2 6 24 19 -10 -26 -9 22 30 3 -35 -56 -45 -17 -2 -11 -27 -16 22 53 49 20 -10 -21 -19 -16 -20 -29 -38 -41 -38 -31 -35 -62 -103 -141 -162 -160 -144 -131 -136 -156 -165 -148 -115 -82 -47 -3 38 56 50 46 68 107 128 111 81 69 94 133 154 145 127 120 128 141 144 133 109 83 64 63 71 66 41 10 -11 -18 -20 -29 -48 -75 -110 -142 -153 -143 -124 -119 -128 -143 -147 -131 -105 -83 -79 -90 -95 -74 -39 -19 -25 -45 -52 -36 -6 23 33 33 34 40 42 33 26 37 60 70 59 55 77 108 111 83 60 65 79 75 52 31 18 6 -1 3 6 -11 -51 -78 -72 -50 -46 -59 -66 -57 -49 -62 -89 -100 -81 -40 -9 -8 -30 -42 -24 9 23 9 -6 7 41 62 56 39 30 28 24 16 14 16 14 1 -16 -29 -31 -23 -8 3 4 -7 -20 -27 -28 -27 -27 -27 -25 -22 -22 -23 -25 -26 -22 -14 -4 1 -2 -8 -13 -12 -4 5 11 12 10 9 12 14 14 13 12 13 13 11 11 14 18 20 19 16 14 11 9 7 5 4 5 6 6 5 3 1 -1 -2 -3 -3 -5 -6 -8 -10 -11 -11 -10 -9 -7 -6 -7 -8 -8 -7 -5 -3 -1 -1 -1 -1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -384 -384 -384 -512 -640 -640 -512 -384 -256 -256 -384 -384 -256 0 256 384 640 768 1024 1152 1152 896 640 640 896 1152 1280 1152 1152 896 768 768 1024 1536 1920 2176 2432 2432 2432 2176 2048 1664 768 -256 -896 -640 0 640 1152 1664 2048 1920 1152 -128 -1408 -2560 -2560 -1280 384 768 -896 -3072 -3712 -2560 -1024 -1024 -2560 -3840 -3840 -2944 -2304 -2176 -1664 -128 1792 3072 3072 2432 2304 2816 3712 3712 3328 2944 3072 3328 3456 3200 2688 2560 3072 4352 5376 4608 2048 -256 -256 1920 3328 2432 1024 1152 2176 2560 2816 4224 5760 5120 2432 1408 3712 5760 4096 128 -1408 640 2560 1280 -256 768 3072 2432 -1280 -3328 -1152 2816 3840 384 -4480 -7168 -5760 -2176 -256 -1408 -3456 -2048 2816 6784 6272 2560 -1280 -2688 -2432 -2048 -2560 -3712 -4864 -5248 -4864 -3968 -4480 -7936 -13184 -18048 -20736 -20480 -18432 -16768 -17408 -19968 -21120 -18944 -14720 -10496 -6016 -384 4864 7168 6400 5888 8704 13696 16384 14208 10368 8832 12032 17024 19712 18560 16256 15360 16384 18048 18432 17024 13952 10624 8192 8064 9088 8448 5248 1280 -1408 -2304 -2560 -3712 -6144 -9600 -14080 -18176 -19584 -18304 -15872 -15232 -16384 -18304 -18816 -16768 -13440 -10624 -10112 -11520 -12160 -9472 -4992 -2432 -3200 -5760 -6656 -4608 -768 2944 4224 4224 4352 5120 5376 4224 3328 4736 7680 8960 7552 7040 9856 13824 14208 10624 7680 8320 10112 9600 6656 3968 2304 768 -128 384 768 -1408 -6528 -9984 -9216 -6400 -5888 -7552 -8448 -7296 -6272 -7936 -11392 -12800 -10368 -5120 -1152 -1024 -3840 -5376 -3072 1152 2944 1152 -768 896 5248 7936 7168 4992 3840 3584 3072 2048 1792 2048 1792 128 -2048 -3712 -3968 -2944 -1024 384 512 -896 -2560 -3456 -3584 -3456 -3456 -3456 -3200 -2816 -2816 -2944 -3200 -3328 -2816 -1792 -512 128 -256 -1024 -1664 -1536 -512 640 1408 1536 1280 1152 1536 1792 1792 1664 1536 1664 1664 1408 1408 1792 2304 2560 2432 2048 1792 1408 1152 896 640 512 640 768 768 640 384 128 -128 -256 -384 -384 -640 -768 -1024 -1280 -1408 -1408 -1280 -1152 -896 -768 -896 -1024 -1024 -896 -640 -384 -128 -128 -128 -128 0 128 128 0 0 0 0 0 0 0 0 0 0 0 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -1 -1 0 1 1 1 2 3 5 4 3 3 5 7 9 9 8 8 9 11 12 11 10 8 6 6 8 7 5 1 -2 -3 -3 -5 -8 -13 -19 -25 -28 -27 -24 -24 -27 -31 -33 -30 -25 -21 -20 -24 -26 -21 -12 -6 -8 -14 -17 -12 -2 7 12 12 12 15 16 13 11 15 26 31 27 26 37 54 57 44 32 36 45 43 31 19 11 4 -1 2 4 -8 -36 
-57 -54 -38 -36 -48 -54 -48 -42 -55 -81 -92 -76 -39 -9 -8 -31 -45 -26 10 27 11 -8 9 51 79 73 52 42 40 34 24 21 25 22 2 -26 -48 -53 -40 -15 6 8 -13 -40 -54 -57 -58 -60 -60 -57 -53 -54 -58 -64 -68 -60 -37 -11 4 -4 -25 -40 -37 -14 17 41 46 39 38 49 61 64 60 58 63 65 59 61 79 105 120 118 106 94 82 68 52 40 38 48 58 61 53 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -181 -200 -197 -174 -150 -148 -175 -214 -236 -219 -160 -87 -42 -39 -46 -25 34 88 101 79 60 69 109 154 184 192 180 174 191 225 246 240 213 184 169 170 184 198 189 152 107 79 70 58 35 9 -9 -27 -52 -73 -74 -61 -47 -50 -61 -66 -61 -53 -55 -69 -81 -76 -51 -35 -43 -68 -80 -67 -40 -17 -2 6 2 -19 -43 -46 -17 22 41 28 6 3 20 37 32 16 5 6 6 -7 -31 -51 -61 -63 -65 -70 -78 -84 -85 -79 -68 -61 -55 -46 -32 -22 -19 -21 -20 -13 -3 6 14 16 15 11 10 13 16 18 18 18 17 15 15 18 19 13 5 3 8 16 18 16 14 13 14 14 14 15 17 17 15 14 14 14 14 12 8 4 2 1 -1 -3 -5 -6 -6 -7 -10 -11 -10 -8 -6 -6 -7 -6 -5 -5 -5 -5 -4 -3 -3 -3 -2 -2 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -256 -256 -256 -256 -128 -128 0 128 128 128 256 384 640 512 384 384 640 896 1152 1152 1024 1024 1152 1408 1536 1408 1280 1024 768 768 1024 896 640 128 -256 -384 -384 -640 -1024 -1664 -2432 -3200 -3584 -3456 -3072 -3072 -3456 -3968 -4224 -3840 -3200 -2688 -2560 -3072 -3328 -2688 -1536 -768 -1024 -1792 -2176 -1536 -256 896 1536 1536 1536 1920 2048 1664 1408 1920 3328 3968 3456 3328 4736 6912 7296 5632 4096 4608 5760 5504 3968 2432 1408 512 -128 256 512 -1024 -4608 -7296 -6912 -4864 -4608 -6144 -6912 -6144 -5376 -7040 -10368 -11776 -9728 -4992 -1152 -1024 -3968 -5760 -3328 1280 3456 1408 -1024 1152 6528 10112 9344 6656 5376 5120 4352 3072 2688 3200 2816 256 -3328 -6144 -6784 -5120 -1920 768 1024 -1664 -5120 -6912 -7296 -7424 -7680 -7680 -7296 -6784 -6912 -7424 -8192 -8704 -7680 -4736 -1408 512 -512 -3200 -5120 -4736 -1792 2176 5248 5888 4992 4864 6272 7808 8192 7680 7424 8064 8320 7552 7808 10112 13440 15360 15104 13568 12032 10496 8704 6656 5120 4864 6144 7424 7808 6784 4608 1920 -640 -2432 -3584 -4992 -7424 -11008 -15232 -19584 -23168 -25600 -25216 -22272 -19200 -18944 -22400 -27392 -30208 -28032 -20480 -11136 -5376 -4992 -5888 -3200 4352 11264 12928 10112 7680 8832 13952 19712 23552 24576 23040 22272 24448 28800 31488 30720 27264 23552 21632 21760 23552 25344 24192 19456 13696 10112 8960 7424 4480 1152 -1152 -3456 -6656 -9344 -9472 -7808 -6016 -6400 -7808 -8448 -7808 -6784 -7040 -8832 -10368 -9728 -6528 -4480 -5504 -8704 -10240 -8576 -5120 -2176 -256 768 256 -2432 -5504 -5888 -2176 2816 5248 3584 768 384 2560 4736 4096 2048 640 768 768 -896 -3968 -6528 -7808 -8064 -8320 -8960 -9984 -10752 -10880 -10112 -8704 -7808 -7040 -5888 -4096 -2816 -2432 -2688 -2560 -1664 -384 768 1792 2048 1920 1408 1280 1664 2048 2304 2304 2304 2176 1920 1920 2304 2432 1664 640 384 1024 2048 2304 2048 1792 1664 1792 1792 1792 1920 2176 2176 1920 1792 1792 1792 1792 1536 1024 512 256 128 -128 -384 -640 -768 -768 -896 -1280 -1408 -1280 -1024 -768 -768 -896 -768 -640 -640 -640 -640 -512 -384 -384 -384 -256 -256 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 0 0 0 0 +7 +0 -1 -1 -1 0 -1 -1 -1 -1 -1 0 0 0 0 0 0 1 1 1 1 1 1 1 2 2 4 5 5 5 5 4 4 3 2 2 3 4 5 4 3 1 -1 -3 -4 -5 -8 -11 -16 -22 -27 -30 -31 -29 -26 -26 -32 -40 -46 -44 -34 -19 -10 -9 -11 -6 8 22 27 22 17 20 32 47 58 63 60 60 68 82 92 93 84 75 71 73 81 90 88 72 52 39 35 30 19 5 -5 -16 -31 -44 -46 -39 -31 -34 -42 -47 -44 -39 -41 -53 -64 -61 -42 -29 -37 -60 -72 -61 -37 -17 -2 6 2 -20 -47 -51 -19 25 48 34 8 4 27 49 44 23 7 9 
10 -11 -48 -82 -99 -106 -111 -123 -140 -156 -161 -153 -136 -125 -114 -98 -71 -50 -44 -50 -49 -33 -7 18 38 46 43 35 33 43 54 60 64 66 63 58 61 74 79 57 24 13 39 77 93 85 74 72 80 86 92 102 114 118 114 107 109 117 121 107 76 45 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -128 -142 -143 -135 -135 -146 -147 -134 -118 -112 -114 -109 -87 -55 -31 -26 -36 -41 -30 -7 11 18 29 56 87 92 61 25 19 47 81 97 100 107 118 124 114 104 105 117 127 118 91 55 26 10 8 19 35 39 22 -14 -46 -54 -37 -14 -3 -7 -12 -11 -10 -19 -39 -58 -59 -45 -31 -28 -34 -38 -35 -33 -31 -28 -22 -19 -22 -26 -27 -23 -19 -19 -26 -40 -59 -72 -70 -57 -49 -55 -64 -63 -52 -45 -47 -48 -42 -29 -13 4 19 25 22 22 33 50 58 56 56 64 68 57 38 27 27 31 31 29 29 26 19 15 18 24 27 25 23 19 12 7 8 14 13 3 -3 1 9 7 -4 -11 -5 2 0 -8 -11 -7 -4 -5 -8 -6 -3 -4 -7 -7 -4 -2 -3 -5 -5 -4 -3 -3 -3 -2 -1 0 0 -1 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 -1 -1 -1 -1 0 +0 -128 -128 -128 0 -128 -128 -128 -128 -128 0 0 0 0 0 0 128 128 128 128 128 128 128 256 256 512 640 640 640 640 512 512 384 256 256 384 512 640 512 384 128 -128 -384 -512 -640 -1024 -1408 -2048 -2816 -3456 -3840 -3968 -3712 -3328 -3328 -4096 -5120 -5888 -5632 -4352 -2432 -1280 -1152 -1408 -768 1024 2816 3456 2816 2176 2560 4096 6016 7424 8064 7680 7680 8704 10496 11776 11904 10752 9600 9088 9344 10368 11520 11264 9216 6656 4992 4480 3840 2432 640 -640 -2048 -3968 -5632 -5888 -4992 -3968 -4352 -5376 -6016 -5632 -4992 -5248 -6784 -8192 -7808 -5376 -3712 -4736 -7680 -9216 -7808 -4736 -2176 -256 768 256 -2560 -6016 -6528 -2432 3200 6144 4352 1024 512 3456 6272 5632 2944 896 1152 1280 -1408 -6144 -10496 -12672 -13568 -14208 -15744 -17920 -19968 -20608 -19584 -17408 -16000 -14592 -12544 -9088 -6400 -5632 -6400 -6272 -4224 -896 2304 4864 5888 5504 4480 4224 5504 6912 7680 8192 8448 8064 7424 7808 9472 10112 7296 3072 1664 4992 9856 11904 10880 9472 9216 10240 11008 11776 13056 14592 15104 14592 13696 13952 14976 15488 13696 9728 5760 3712 2304 -256 -4480 -7808 -8576 -8960 -12672 -18432 -22272 -20864 -17024 -15232 -16384 -18176 -18304 -17280 -17280 -18688 -18816 -17152 -15104 -14336 -14592 -13952 -11136 -7040 -3968 -3328 -4608 -5248 -3840 -896 1408 2304 3712 7168 11136 11776 7808 3200 2432 6016 10368 12416 12800 13696 15104 15872 14592 13312 13440 14976 16256 15104 11648 7040 3328 1280 1024 2432 4480 4992 2816 -1792 -5888 -6912 -4736 -1792 -384 -896 -1536 -1408 -1280 -2432 -4992 -7424 -7552 -5760 -3968 -3584 -4352 -4864 -4480 -4224 -3968 -3584 -2816 -2432 -2816 -3328 -3456 -2944 -2432 -2432 -3328 -5120 -7552 -9216 -8960 -7296 -6272 -7040 -8192 -8064 -6656 -5760 -6016 -6144 -5376 -3712 -1664 512 2432 3200 2816 2816 4224 6400 7424 7168 7168 8192 8704 7296 4864 3456 3456 3968 3968 3712 3712 3328 2432 1920 2304 3072 3456 3200 2944 2432 1536 896 1024 1792 1664 384 -384 128 1152 896 -512 -1408 -640 256 0 -1024 -1408 -896 -512 -640 -1024 -768 -384 -512 -896 -896 -512 -256 -384 -640 -640 -512 -384 -384 -384 -256 -128 0 0 -128 0 0 128 0 0 0 0 0 0 0 0 0 0 0 0 -128 -128 -128 -128 0 +7 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 2 2 2 2 2 3 3 4 5 6 6 6 6 7 8 9 8 6 4 2 1 -1 -4 -7 -8 -9 -13 -20 -25 -24 -20 -19 -21 -24 -25 -25 -26 -29 -30 -28 -26 -25 -26 -26 -22 -14 -8 -7 -10 -12 -9 -2 3 5 9 19 30 33 22 9 7 18 33 40 43 47 53 57 54 50 52 60 67 63 50 31 15 6 5 12 22 26 15 -9 -32 -39 -28 -11 -2 -6 -10 -9 -8 -17 -35 -52 -55 -42 -30 -28 -34 -39 -38 -36 -34 -32 -25 -22 -27 -32 -34 -29 -25 -25 -36 -57 -86 -108 -106 -89 -78 -90 -108 -109 -92 -81 -86 -90 -81 -57 -25 8 41 56 50 51 79 123 147 146 148 174 191 165 114 82 86 101 103 101 103 
96 74 59 72 101 116 113 103 88 60 36 45 75 74 22 -17 9 60 48 -27 -74 -35 20 6 -61 -92 -61 -32 -50 -76 -62 -33 -42 -83 -95 -59 -25 -36 -71 -86 -73 -62 -68 -67 -43 -8 6 0 -5 7 34 53 41 14 8 35 64 58 30 19 46 70 52 4 -29 -33 -36 -62 -80 -57 0 37 20 -36 -87 -108 -105 -97 -86 -69 -47 -36 -45 -64 -68 -48 -21 -10 -16 -10 15 45 49 26 2 -6 1 8 8 7 8 14 19 16 4 -7 -4 17 39 42 29 20 24 32 33 25 19 18 15 12 14 22 27 16 3 -2 4 7 3 0 8 20 20 8 -1 4 14 18 14 7 4 4 2 1 -2 -8 -15 -20 -19 -15 -14 -17 -19 -18 -17 -18 -21 -24 -24 -22 -21 -20 -19 -15 -11 -11 -12 -12 -10 -8 -8 -9 -9 -9 -11 -13 -9 -1 4 3 1 3 7 7 4 2 5 10 12 9 7 8 9 9 7 5 4 3 3 2 1 0 -1 -2 -1 0 0 0 -1 -2 -2 -2 -2 -2 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 128 128 0 0 0 0 256 256 256 256 256 384 384 512 640 768 768 768 768 896 1024 1152 1024 768 512 256 128 -128 -512 -896 -1024 -1152 -1664 -2560 -3200 -3072 -2560 -2432 -2688 -3072 -3200 -3200 -3328 -3712 -3840 -3584 -3328 -3200 -3328 -3328 -2816 -1792 -1024 -896 -1280 -1536 -1152 -256 384 640 1152 2432 3840 4224 2816 1152 896 2304 4224 5120 5504 6016 6784 7296 6912 6400 6656 7680 8576 8064 6400 3968 1920 768 640 1536 2816 3328 1920 -1152 -4096 -4992 -3584 -1408 -256 -768 -1280 -1152 -1024 -2176 -4480 -6656 -7040 -5376 -3840 -3584 -4352 -4992 -4864 -4608 -4352 -4096 -3200 -2816 -3456 -4096 -4352 -3712 -3200 -3200 -4608 -7296 -11008 -13824 -13568 -11392 -9984 -11520 -13824 -13952 -11776 -10368 -11008 -11520 -10368 -7296 -3200 1024 5248 7168 6400 6528 10112 15744 18816 18688 18944 22272 24448 21120 14592 10496 11008 12928 13184 12928 13184 12288 9472 7552 9216 12928 14848 14464 13184 11264 7680 4608 5760 9600 9472 2816 -2176 1152 7680 6144 -3456 -9472 -4480 2560 768 -7808 -11776 -7808 -4096 -6400 -9728 -7936 -4224 -5376 -10624 -12160 -7552 -3200 -4608 -9088 -11008 -9344 -7936 -8704 -8576 -5504 -1024 768 0 -640 896 4352 6784 5248 1792 1024 4480 8192 7424 3840 2432 5888 8960 6656 512 -3712 -4224 -4608 -7936 -10240 -7296 0 4736 2560 -4608 -11136 -13824 -13440 -12416 -11008 -8832 -6016 -4608 -5760 -8192 -8704 -6144 -2688 -1280 -2048 -1280 1920 5760 6272 3328 256 -768 128 1024 1024 896 1024 1792 2432 2048 512 -896 -512 2176 4992 5376 3712 2560 3072 4096 4224 3200 2432 2304 1920 1536 1792 2816 3456 2048 384 -256 512 896 384 0 1024 2560 2560 1024 -128 512 1792 2304 1792 896 512 512 256 128 -256 -1024 -1920 -2560 -2432 -1920 -1792 -2176 -2432 -2304 -2176 -2304 -2688 -3072 -3072 -2816 -2688 -2560 -2432 -1920 -1408 -1408 -1536 -1536 -1280 -1024 -1024 -1152 -1152 -1152 -1408 -1664 -1152 -128 512 384 128 384 896 896 512 256 640 1280 1536 1152 896 1024 1152 1152 896 640 512 384 384 256 128 0 -128 -256 -128 0 0 0 -128 -256 -256 -256 -256 -256 -256 -256 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 +7 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 2 2 2 2 1 1 1 2 2 0 -1 0 3 2 -2 -5 -3 1 0 -5 -8 -6 -3 -5 -8 -7 -4 -5 -10 -12 -8 -4 -5 -11 -13 -12 -10 -12 -12 -8 -2 1 0 -2 1 7 12 9 3 2 9 17 16 8 5 13 21 16 1 -10 -12 -13 -23 -31 -22 0 15 8 -16 -38 -49 -49 -46 -42 -35 -24 -19 -24 -36 -39 -28 -13 -6 -10 -7 10 30 33 18 1 -4 1 6 6 5 7 12 16 14 4 -7 -4 17 39 42 30 21 26 36 37 29 22 22 19 16 19 29 36 23 4 -3 6 11 4 0 13 34 35 15 -1 7 27 36 28 16 9 8 5 2 -4 -18 -37 -49 -47 -39 -38 -46 -53 -52 -51 -55 -68 -78 -82 -77 -74 -74 -70 -57 -45 -43 -52 -54 -45 -35 -38 -43 -44 -48 -63 -75 -54 -5 30 26 9 22 56 64 39 21 52 103 124 105 89 103 123 120 97 76 67 64 60 55 38 9 -22 -35 -24 0 11 3 -20 -48 -72 -86 -91 -92 -91 -91 -96 -102 -103 -92 -74 -59 -52 -42 -24 0 19 31 45 61 78 93 110 125 122 97 64 42 43 55 59 50 36 31 
33 32 23 16 19 23 8 -22 -37 -17 15 15 -19 -47 -36 -6 -3 -38 -70 -67 -42 -32 -40 -47 -35 -19 -13 -16 -10 6 17 13 4 12 37 64 81 80 71 62 58 56 50 42 38 38 39 34 25 20 21 20 12 2 -3 -1 -2 -9 -16 -16 -11 -7 -7 -8 -9 -8 -5 1 6 5 0 -5 -4 -1 -1 -3 -4 -1 -1 -7 -17 -26 -32 -34 -33 -29 -28 -31 -35 -38 -38 -34 -29 -23 -20 -21 -22 -21 -19 -17 -17 -16 -13 -9 -6 -5 -4 -3 -2 -2 -2 -2 -3 -3 0 3 4 4 4 4 5 5 4 4 4 4 4 3 2 2 2 1 1 1 1 1 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 256 512 512 512 512 256 256 256 512 512 0 -256 0 768 512 -512 -1280 -768 256 0 -1280 -2048 -1536 -768 -1280 -2048 -1792 -1024 -1280 -2560 -3072 -2048 -1024 -1280 -2816 -3328 -3072 -2560 -3072 -3072 -2048 -512 256 0 -512 256 1792 3072 2304 768 512 2304 4352 4096 2048 1280 3328 5376 4096 256 -2560 -3072 -3328 -5888 -7936 -5632 0 3840 2048 -4096 -9728 -12544 -12544 -11776 -10752 -8960 -6144 -4864 -6144 -9216 -9984 -7168 -3328 -1536 -2560 -1792 2560 7680 8448 4608 256 -1024 256 1536 1536 1280 1792 3072 4096 3584 1024 -1792 -1024 4352 9984 10752 7680 5376 6656 9216 9472 7424 5632 5632 4864 4096 4864 7424 9216 5888 1024 -768 1536 2816 1024 0 3328 8704 8960 3840 -256 1792 6912 9216 7168 4096 2304 2048 1280 512 -1024 -4608 -9472 -12544 -12032 -9984 -9728 -11776 -13568 -13312 -13056 -14080 -17408 -19968 -20992 -19712 -18944 -18944 -17920 -14592 -11520 -11008 -13312 -13824 -11520 -8960 -9728 -11008 -11264 -12288 -16128 -19200 -13824 -1280 7680 6656 2304 5632 14336 16384 9984 5376 13312 26368 31744 26880 22784 26368 31488 30720 24832 19456 17152 16384 15360 14080 9728 2304 -5632 -8960 -6144 0 2816 768 -5120 -12288 -18432 -22016 -23296 -23552 -23296 -23296 -24576 -26112 -26368 -23552 -18944 -15104 -13312 -10752 -6144 0 4864 7936 11520 15616 19968 23808 28160 32000 31232 24832 16384 10752 11008 14080 15104 12800 9216 7936 8448 8192 5888 4096 4864 5888 2048 -5632 -9472 -4352 3840 3840 -4864 -12032 -9216 -1536 -768 -9728 -17920 -17152 -10752 -8192 -10240 -12032 -8960 -4864 -3328 -4096 -2560 1536 4352 3328 1024 3072 9472 16384 20736 20480 18176 15872 14848 14336 12800 10752 9728 9728 9984 8704 6400 5120 5376 5120 3072 512 -768 -256 -512 -2304 -4096 -4096 -2816 -1792 -1792 -2048 -2304 -2048 -1280 256 1536 1280 0 -1280 -1024 -256 -256 -768 -1024 -256 -256 -1792 -4352 -6656 -8192 -8704 -8448 -7424 -7168 -7936 -8960 -9728 -9728 -8704 -7424 -5888 -5120 -5376 -5632 -5376 -4864 -4352 -4352 -4096 -3328 -2304 -1536 -1280 -1024 -768 -512 -512 -512 -512 -768 -768 0 768 1024 1024 1024 1024 1280 1280 1024 1024 1024 1024 1024 768 512 512 512 256 256 256 256 256 0 0 0 0 0 0 0 0 0 +8 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -2 -2 -2 -2 -3 -4 -3 -1 1 1 0 1 3 4 3 1 4 9 11 10 9 11 13 14 11 9 9 8 8 8 6 1 -4 -7 -5 0 2 0 -5 -11 -16 -20 -22 -23 -23 -24 -26 -29 -30 -27 -23 -19 -17 -14 -8 0 6 11 17 23 31 38 46 53 54 44 29 20 21 27 30 26 19 17 19 18 13 9 12 15 5 -15 -25 -12 11 11 -14 -36 -28 -5 -3 -32 -60 -59 -38 -29 -38 -46 -35 -19 -14 -17 -11 7 19 15 5 15 46 82 105 107 96 87 84 82 76 65 60 62 65 58 43 36 38 38 24 4 -5 -1 -4 -18 -35 -36 -24 -16 -17 -20 -21 -20 -11 4 18 16 0 -14 -12 -2 -3 -11 -12 -2 -3 -25 -66 -108 -139 -153 -150 -138 -139 -157 -185 -208 -214 -202 -175 -143 -130 -140 -156 -155 -143 -138 -143 -139 -112 -81 -60 -53 -43 -30 -21 -17 -16 -24 -37 -34 4 61 95 97 100 123 148 148 137 150 186 202 188 168 171 185 180 158 154 176 195 192 176 165 150 110 65 51 78 108 99 54 14 6 15 15 0 -17 -29 -37 -42 -44 -40 -28 -12 1 2 -10 -27 -39 -48 -67 -87 -94 -88 -89 -108 -129 -127 -109 -103 -113 -116 -93 -62 -57 -78 -97 -93 -72 -51 -39 -37 -41 -43 -32 
-4 25 36 28 19 22 37 53 63 70 79 84 82 72 60 54 50 40 29 26 31 31 17 -4 -15 -11 0 5 4 0 -6 -17 -32 -44 -49 -48 -42 -35 -35 -43 -51 -52 -48 -44 -43 -40 -31 -21 -14 -10 -5 0 2 2 4 9 13 16 18 22 25 26 25 22 16 9 6 9 13 12 8 6 8 11 12 10 7 5 4 3 2 1 0 0 -1 -1 -1 -1 -2 -1 -1 -1 -1 -1 0 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -256 -128 -256 -256 -256 -256 -384 -512 -384 -128 128 128 0 128 384 512 384 128 512 1152 1408 1280 1152 1408 1664 1792 1408 1152 1152 1024 1024 1024 768 128 -512 -896 -640 0 256 0 -640 -1408 -2048 -2560 -2816 -2944 -2944 -3072 -3328 -3712 -3840 -3456 -2944 -2432 -2176 -1792 -1024 0 768 1408 2176 2944 3968 4864 5888 6784 6912 5632 3712 2560 2688 3456 3840 3328 2432 2176 2432 2304 1664 1152 1536 1920 640 -1920 -3200 -1536 1408 1408 -1792 -4608 -3584 -640 -384 -4096 -7680 -7552 -4864 -3712 -4864 -5888 -4480 -2432 -1792 -2176 -1408 896 2432 1920 640 1920 5888 10496 13440 13696 12288 11136 10752 10496 9728 8320 7680 7936 8320 7424 5504 4608 4864 4864 3072 512 -640 -128 -512 -2304 -4480 -4608 -3072 -2048 -2176 -2560 -2688 -2560 -1408 512 2304 2048 0 -1792 -1536 -256 -384 -1408 -1536 -256 -384 -3200 -8448 -13824 -17792 -19584 -19200 -17664 -17792 -20096 -23680 -26624 -27392 -25856 -22400 -18304 -16640 -17920 -19968 -19840 -18304 -17664 -18304 -17792 -14336 -10368 -7680 -6784 -5504 -3840 -2688 -2176 -2048 -3072 -4736 -4352 512 7808 12160 12416 12800 15744 18944 18944 17536 19200 23808 25856 24064 21504 21888 23680 23040 20224 19712 22528 24960 24576 22528 21120 19200 14080 8320 6528 9984 13824 12672 6912 1792 768 1920 1920 0 -2176 -3712 -4736 -5376 -5632 -5120 -3584 -1536 128 256 -1280 -3456 -4992 -6144 -8576 -11136 -12032 -11264 -11392 -13824 -16512 -16256 -13952 -13184 -14464 -14848 -11904 -7936 -7296 -9984 -12416 -11904 -9216 -6528 -4992 -4736 -5248 -5504 -4096 -512 3200 4608 3584 2432 2816 4736 6784 8064 8960 10112 10752 10496 9216 7680 6912 6400 5120 3712 3328 3968 3968 2176 -512 -1920 -1408 0 640 512 0 -768 -2176 -4096 -5632 -6272 -6144 -5376 -4480 -4480 -5504 -6528 -6656 -6144 -5632 -5504 -5120 -3968 -2688 -1792 -1280 -640 0 256 256 512 1152 1664 2048 2304 2816 3200 3328 3200 2816 2048 1152 768 1152 1664 1536 1024 768 1024 1408 1536 1280 896 640 512 384 256 128 0 0 -128 -128 -128 -128 -256 -128 -128 -128 -128 -128 0 0 128 128 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +7 +0 -1 0 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -3 -4 -4 -4 -4 -5 -7 -8 -9 -9 -9 -8 -7 -8 -10 -10 -10 -10 -11 -12 -10 -8 -6 -6 -5 -4 -3 -2 -2 -4 -5 -5 0 9 14 15 16 21 26 27 26 30 38 43 41 38 40 45 45 41 41 48 55 56 53 51 47 36 22 17 28 39 37 20 5 2 6 6 0 -8 -14 -18 -21 -22 -21 -15 -7 0 1 -6 -16 -24 -30 -43 -57 -63 -60 -63 -78 -95 -96 -84 -81 -91 -95 -78 -53 -50 -71 -90 -88 -69 -50 -39 -38 -43 -47 -35 -4 29 43 35 23 29 49 71 86 98 113 124 123 110 94 86 82 68 50 46 57 58 32 -8 -30 -23 0 12 10 1 -13 -40 -77 -109 -127 -127 -113 -98 -101 -126 -156 -163 -153 -143 -145 -139 -112 -78 -53 -38 -19 2 11 12 20 43 66 83 99 123 145 157 159 144 110 66 47 70 103 105 71 54 74 115 128 108 79 64 55 45 34 23 12 1 -5 -7 -11 -21 -26 -22 -15 -17 -25 -20 3 36 60 62 53 46 49 57 52 27 4 1 15 25 20 16 24 36 30 3 -24 -32 -23 -15 -13 -19 -29 -48 -70 -92 -108 -117 -114 -100 -79 -64 -59 -51 -40 -32 -35 -44 -43 -26 -8 0 3 12 29 46 56 62 65 63 56 56 67 73 61 39 27 35 45 40 24 16 24 33 29 15 3 -4 -10 -22 -31 -33 -35 -45 -60 -64 -55 -43 -42 -48 -50 -42 -30 -24 -23 -26 -30 -39 -51 -56 -50 -37 -26 -20 -19 -19 -15 -6 5 12 13 14 20 26 28 24 21 23 25 21 11 2 -1 -2 
-2 -5 -8 -11 -10 -7 -2 0 -3 -7 -7 -3 -1 -2 -4 -4 1 5 7 6 5 5 4 3 3 4 5 5 4 4 6 8 9 7 5 4 5 6 6 4 4 3 3 2 1 0 0 0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 +0 -128 0 0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -256 -384 -512 -512 -512 -512 -640 -896 -1024 -1152 -1152 -1152 -1024 -896 -1024 -1280 -1280 -1280 -1280 -1408 -1536 -1280 -1024 -768 -768 -640 -512 -384 -256 -256 -512 -640 -640 0 1152 1792 1920 2048 2688 3328 3456 3328 3840 4864 5504 5248 4864 5120 5760 5760 5248 5248 6144 7040 7168 6784 6528 6016 4608 2816 2176 3584 4992 4736 2560 640 256 768 768 0 -1024 -1792 -2304 -2688 -2816 -2688 -1920 -896 0 128 -768 -2048 -3072 -3840 -5504 -7296 -8064 -7680 -8064 -9984 -12160 -12288 -10752 -10368 -11648 -12160 -9984 -6784 -6400 -9088 -11520 -11264 -8832 -6400 -4992 -4864 -5504 -6016 -4480 -512 3712 5504 4480 2944 3712 6272 9088 11008 12544 14464 15872 15744 14080 12032 11008 10496 8704 6400 5888 7296 7424 4096 -1024 -3840 -2944 0 1536 1280 128 -1664 -5120 -9856 -13952 -16256 -16256 -14464 -12544 -12928 -16128 -19968 -20864 -19584 -18304 -18560 -17792 -14336 -9984 -6784 -4864 -2432 256 1408 1536 2560 5504 8448 10624 12672 15744 18560 20096 20352 18432 14080 8448 6016 8960 13184 13440 9088 6912 9472 14720 16384 13824 10112 8192 7040 5760 4352 2944 1536 128 -640 -896 -1408 -2688 -3328 -2816 -1920 -2176 -3200 -2560 384 4608 7680 7936 6784 5888 6272 7296 6656 3456 512 128 1920 3200 2560 2048 3072 4608 3840 384 -3072 -4096 -2944 -1920 -1664 -2432 -3712 -6144 -8960 -11776 -13824 -14976 -14592 -12800 -10112 -8192 -7552 -6528 -5120 -4096 -4480 -5632 -5504 -3328 -1024 0 384 1536 3712 5888 7168 7936 8320 8064 7168 7168 8576 9344 7808 4992 3456 4480 5760 5120 3072 2048 3072 4224 3712 1920 384 -512 -1280 -2816 -3968 -4224 -4480 -5760 -7680 -8192 -7040 -5504 -5376 -6144 -6400 -5376 -3840 -3072 -2944 -3328 -3840 -4992 -6528 -7168 -6400 -4736 -3328 -2560 -2432 -2432 -1920 -768 640 1536 1664 1792 2560 3328 3584 3072 2688 2944 3200 2688 1408 256 -128 -256 -256 -640 -1024 -1408 -1280 -896 -256 0 -384 -896 -896 -384 -128 -256 -512 -512 128 640 896 768 640 640 512 384 384 512 640 640 512 512 768 1024 1152 896 640 512 640 768 768 512 512 384 384 256 128 0 0 0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 +7 +0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 0 0 0 0 1 1 2 3 4 5 6 7 7 5 3 2 4 7 7 5 4 6 10 11 10 8 6 6 5 4 3 1 0 -1 -2 -2 -4 -5 -4 -3 -4 -5 -4 0 7 13 14 12 11 12 14 14 7 1 0 4 7 6 5 8 12 10 1 -9 -13 -9 -6 -6 -8 -13 -22 -33 -44 -53 -59 -59 -53 -43 -36 -33 -30 -24 -20 -22 -28 -28 -18 -6 0 2 8 21 35 44 50 54 53 48 50 60 67 58 38 27 35 47 42 26 18 27 39 34 18 4 -5 -13 -30 -43 -46 -50 -65 -89 -99 -87 -69 -68 -80 -85 -74 -54 -44 -44 -50 -60 -80 -107 -119 -110 -83 -60 -48 -46 -47 -38 -15 14 33 38 43 61 83 91 80 73 82 93 79 44 10 -4 -6 -9 -21 -38 -49 -49 -32 -6 2 -13 -37 -38 -16 -2 -10 -28 -23 8 46 62 59 52 49 48 42 41 53 68 69 63 70 106 148 160 134 102 104 138 165 162 141 130 129 123 100 69 40 18 0 -14 -32 -63 -102 -123 -116 -101 -112 -141 -150 -122 -90 -93 -132 -164 -165 -155 -159 -173 -175 -163 -157 -161 -152 -120 -80 -56 -46 -33 -10 11 26 46 72 94 103 104 115 128 129 106 80 70 73 70 58 48 50 58 59 51 44 42 36 9 -32 -69 -85 -83 -85 -96 -108 -112 -109 -108 -109 -102 -84 -63 -51 -53 -57 -48 -22 6 23 28 38 64 96 114 111 96 86 84 87 93 99 103 101 94 84 75 66 59 52 47 41 34 24 10 -4 -20 -31 -37 -38 -38 -42 -51 -62 -71 -73 -70 -65 -60 -54 -49 -45 -42 -37 -29 -23 -20 -17 -11 -3 2 2 1 3 6 9 10 12 12 11 9 9 9 9 8 7 7 7 5 3 3 4 4 3 3 1 -1 -3 -3 -2 -2 -2 -1 0 0 0 -1 -1 -1 0 0 0 0 0 0 
0 0 0 0 +0 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 -128 0 0 0 0 128 128 256 384 512 640 768 896 896 640 384 256 512 896 896 640 512 768 1280 1408 1280 1024 768 768 640 512 384 128 0 -128 -256 -256 -512 -640 -512 -384 -512 -640 -512 0 896 1664 1792 1536 1408 1536 1792 1792 896 128 0 512 896 768 640 1024 1536 1280 128 -1152 -1664 -1152 -768 -768 -1024 -1664 -2816 -4224 -5632 -6784 -7552 -7552 -6784 -5504 -4608 -4224 -3840 -3072 -2560 -2816 -3584 -3584 -2304 -768 0 256 1024 2688 4480 5632 6400 6912 6784 6144 6400 7680 8576 7424 4864 3456 4480 6016 5376 3328 2304 3456 4992 4352 2304 512 -640 -1664 -3840 -5504 -5888 -6400 -8320 -11392 -12672 -11136 -8832 -8704 -10240 -10880 -9472 -6912 -5632 -5632 -6400 -7680 -10240 -13696 -15232 -14080 -10624 -7680 -6144 -5888 -6016 -4864 -1920 1792 4224 4864 5504 7808 10624 11648 10240 9344 10496 11904 10112 5632 1280 -512 -768 -1152 -2688 -4864 -6272 -6272 -4096 -768 256 -1664 -4736 -4864 -2048 -256 -1280 -3584 -2944 1024 5888 7936 7552 6656 6272 6144 5376 5248 6784 8704 8832 8064 8960 13568 18944 20480 17152 13056 13312 17664 21120 20736 18048 16640 16512 15744 12800 8832 5120 2304 0 -1792 -4096 -8064 -13056 -15744 -14848 -12928 -14336 -18048 -19200 -15616 -11520 -11904 -16896 -20992 -21120 -19840 -20352 -22144 -22400 -20864 -20096 -20608 -19456 -15360 -10240 -7168 -5888 -4224 -1280 1408 3328 5888 9216 12032 13184 13312 14720 16384 16512 13568 10240 8960 9344 8960 7424 6144 6400 7424 7552 6528 5632 5376 4608 1152 -4096 -8832 -10880 -10624 -10880 -12288 -13824 -14336 -13952 -13824 -13952 -13056 -10752 -8064 -6528 -6784 -7296 -6144 -2816 768 2944 3584 4864 8192 12288 14592 14208 12288 11008 10752 11136 11904 12672 13184 12928 12032 10752 9600 8448 7552 6656 6016 5248 4352 3072 1280 -512 -2560 -3968 -4736 -4864 -4864 -5376 -6528 -7936 -9088 -9344 -8960 -8320 -7680 -6912 -6272 -5760 -5376 -4736 -3712 -2944 -2560 -2176 -1408 -384 256 256 128 384 768 1152 1280 1536 1536 1408 1152 1152 1152 1152 1024 896 896 896 640 384 384 512 512 384 384 128 -128 -384 -384 -256 -256 -256 -128 0 0 0 -128 -128 -128 0 0 0 0 0 0 0 0 0 0 +7 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_16k.txt b/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_16k.txt new file mode 100644 index 00000000000..5d98e9bbd3a --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_16k.txt @@ -0,0 +1,26 @@ +257 16000 125.0 3800.0 25 5 122 +43352 +61587 +82655 +110528 +139607 +173009 +213758 +265032 +321372 +382003 +460187 +545621 +649849 +761362 +904955 +1052886 +1230922 +1448884 +1674101 +1960672 +2262187 +2621637 +3048830 +3522818 +4055575 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_44k.txt b/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_44k.txt new file mode 100644 index 00000000000..372ba6bfd34 --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_44k.txt @@ -0,0 +1,26 @@ +1025 44100 125.0 3800.0 25 7 177 +88928 +130900 +177498 +229636 +290802 +366442 +455869 +557023 +666091 +811407 +972244 +1144538 +1367988 +1616725 +1889658 +2224246 +2601615 +3033067 +3541380 +4113125 +4774754 +5532980 +6404370 +7413694 +8552917 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_8k.txt b/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_8k.txt new file mode 100644 index 00000000000..fa6b061be06 --- /dev/null 
+++ b/python/tflite_micro/signal/ops/testdata/filter_bank_accumulation_8k.txt @@ -0,0 +1,26 @@ +129 8000 125.000000 3800.000000 25 5 122 +43352 +61587 +82655 +110528 +139607 +173009 +213758 +265032 +321372 +382003 +460187 +545621 +649849 +761362 +904955 +1052886 +1230922 +1448884 +1674101 +1960672 +2262187 +2621637 +3048830 +3522818 +4055575 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/testdata/filter_bank_spectral_subtraction_test1.txt b/python/tflite_micro/signal/ops/testdata/filter_bank_spectral_subtraction_test1.txt new file mode 100644 index 00000000000..93ce31f05b8 --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/filter_bank_spectral_subtraction_test1.txt @@ -0,0 +1,550 @@ +32 0.04 0.0 0 0.05 0 +322 308 210 212 181 251 403 259 65 48 76 48 50 46 53 52 112 191 136 59 70 51 39 64 33 44 41 49 74 107 262 479 +310 296 202 204 174 241 387 249 63 47 73 47 49 45 51 50 108 184 131 57 68 49 38 62 32 43 40 48 72 103 252 460 +12 12 8 8 7 10 16 10 2 1 3 1 1 1 2 2 4 7 5 2 2 2 1 2 1 1 1 1 2 4 10 19 +623 526 846 734 378 235 191 179 84 84 81 71 95 35 33 66 291 323 109 56 91 79 53 50 42 39 40 62 54 60 252 338 +587 494 805 697 357 217 169 163 79 80 75 68 91 33 30 62 276 304 100 52 86 74 50 47 40 37 38 59 50 54 233 307 +36 32 41 37 21 18 22 16 5 4 6 3 4 2 3 4 15 19 9 4 5 5 3 3 2 2 2 3 4 6 19 31 +3398 3141 2038 2192 1621 708 416 267 85 149 131 52 46 39 56 52 81 93 116 49 52 56 55 38 36 48 46 63 40 33 34 51 +3228 2985 1918 2069 1537 663 379 241 77 140 121 48 41 36 51 47 64 72 103 44 46 49 50 34 33 45 43 58 35 26 15 20 +170 156 120 123 84 45 37 26 8 9 10 4 5 3 5 5 17 21 13 5 6 7 5 4 3 3 3 5 5 7 19 31 +1684 1443 2981 3969 1468 542 202 222 114 83 66 84 53 19 29 81 162 134 68 73 76 63 44 61 52 59 43 70 81 84 184 228 +1454 1236 2747 3693 1329 478 159 189 102 72 54 77 47 16 24 73 140 109 53 66 68 54 38 55 48 54 39 63 73 74 159 190 +230 207 234 276 139 64 43 33 12 11 12 7 6 3 5 8 22 25 15 7 8 9 6 6 4 5 4 7 8 10 25 38 +243 2069 3060 2731 1212 353 175 145 146 118 165 95 108 26 21 78 97 56 87 58 52 49 60 82 49 51 63 87 87 109 382 572 +13 1788 2714 2357 1031 278 127 108 129 103 147 85 98 23 16 68 73 30 70 49 43 39 52 73 44 45 57 77 76 96 343 513 +230 281 346 374 181 75 48 37 17 15 18 10 10 3 5 10 24 26 17 9 9 10 8 9 5 6 6 10 11 13 39 59 +888 1076 1306 1079 550 361 309 228 162 72 66 96 87 33 116 550 1095 778 335 161 51 62 45 38 58 38 66 58 80 77 70 55 +632 764 922 677 355 275 251 184 140 55 47 83 74 29 107 519 1029 722 306 146 41 50 36 28 51 31 58 47 67 62 30 2 +256 312 384 402 195 86 58 44 22 17 19 13 13 4 9 31 66 56 29 15 10 12 9 10 7 7 8 11 13 15 40 58 +1058 702 636 809 1001 577 220 221 109 112 66 106 53 41 73 742 1432 564 216 49 62 42 32 68 72 70 85 46 65 85 54 43 +770 375 242 391 774 472 156 170 84 92 46 90 39 36 62 683 1312 488 180 33 50 29 23 56 63 61 74 34 50 68 14 2 +288 327 394 418 227 105 64 51 25 20 20 16 14 5 11 59 120 76 36 16 12 13 9 12 9 9 11 12 15 17 40 57 +842 739 1397 1451 762 416 334 291 169 126 59 54 62 31 84 101 250 234 68 57 80 62 69 48 40 48 48 42 70 72 50 37 +532 396 963 992 514 299 260 231 139 102 38 37 47 25 71 41 125 152 31 40 66 48 58 35 30 38 36 29 53 53 10 1 +310 343 434 459 248 117 74 60 30 24 21 17 15 6 13 60 125 82 37 17 14 14 11 13 10 10 12 13 17 19 40 56 +744 1226 2470 2022 340 256 354 378 182 95 160 82 74 69 78 154 247 137 106 116 48 21 26 45 48 51 33 38 35 35 30 29 +417 848 1955 1501 89 134 269 306 146 69 134 63 57 61 63 91 118 53 67 96 33 7 15 31 37 40 21 25 18 16 1 1 +327 378 515 521 251 122 85 72 36 26 26 19 17 8 15 63 129 84 39 20 15 14 11 
14 11 11 12 13 17 19 39 54 +801 1315 1704 718 597 378 427 401 177 129 169 58 41 26 49 64 105 140 84 50 39 27 33 37 69 57 39 51 41 30 35 33 +456 900 1142 190 333 246 329 316 136 99 138 38 24 18 33 3 5 54 44 29 24 13 22 23 56 45 26 37 24 11 1 1 +345 415 562 528 264 132 98 85 41 30 31 20 17 8 16 63 128 86 40 21 15 14 11 14 13 12 13 14 17 19 38 53 +949 1178 1882 1655 900 491 351 462 174 56 108 58 36 37 50 52 129 132 86 67 35 35 72 55 65 41 48 43 46 42 46 35 +580 733 1268 1082 611 345 243 362 128 25 74 37 19 28 33 2 6 45 45 45 20 21 59 40 50 28 34 28 28 23 8 1 +369 445 614 573 289 146 108 100 46 31 34 21 17 9 17 62 128 87 41 22 15 14 13 15 15 13 14 15 18 19 38 52 +573 536 1285 2035 829 652 472 405 173 173 90 56 42 33 60 62 51 61 90 82 35 46 41 74 38 26 31 23 63 39 34 23 +196 88 645 1404 519 486 350 293 122 137 54 34 25 24 42 3 2 3 48 58 20 31 27 57 23 13 17 8 44 20 1 1 +377 448 640 631 310 166 122 112 51 36 36 22 17 9 18 62 124 85 42 24 15 15 14 17 15 13 14 15 19 19 37 50 +734 307 766 1520 1020 552 277 351 188 250 154 78 88 34 64 57 78 130 82 58 39 43 43 39 57 76 43 48 63 49 40 26 +343 15 121 854 682 371 149 230 132 206 114 54 69 25 45 2 3 44 39 33 24 27 28 22 41 61 28 32 43 29 3 1 +391 442 645 666 338 181 128 121 56 44 40 24 19 9 19 61 122 86 43 25 15 16 15 17 16 15 15 16 20 20 37 49 +822 593 756 437 422 311 432 310 260 500 273 68 76 74 57 81 42 81 96 66 86 68 80 70 72 60 50 36 38 54 54 34 +414 145 107 21 81 125 292 182 196 438 224 43 55 63 37 20 2 4 51 40 69 50 63 51 54 44 34 20 18 33 17 1 +408 448 649 656 341 186 140 128 64 62 49 25 21 11 20 61 118 85 45 26 17 18 17 19 18 16 16 16 20 21 37 48 +121 412 463 1013 793 417 469 582 452 502 227 145 154 85 63 77 132 101 121 75 78 91 56 90 84 92 63 77 62 66 79 48 +6 20 23 343 434 222 316 436 373 423 171 116 128 72 42 16 14 16 73 48 59 71 38 69 64 73 46 59 41 44 41 2 +396 446 641 670 359 195 153 146 79 79 56 29 26 13 21 61 118 85 48 27 19 20 18 21 20 19 17 18 21 22 38 48 +1414 691 1001 1419 833 711 351 405 191 213 183 179 102 69 49 88 89 54 69 70 36 67 80 63 97 83 50 87 82 70 53 65 +978 236 346 720 456 496 191 249 108 129 122 145 73 54 27 26 4 2 21 42 17 46 60 41 74 62 32 67 59 47 15 17 +436 455 655 699 377 215 160 156 83 84 61 34 29 15 22 62 116 83 48 28 19 21 20 22 23 21 18 20 23 23 38 48 +1243 1015 622 986 925 366 485 619 365 348 426 129 63 20 45 78 107 148 135 116 74 102 121 89 119 71 75 80 118 88 41 48 +775 538 31 276 527 145 313 445 271 254 351 92 33 5 23 16 5 63 84 85 53 78 97 65 93 49 55 58 92 63 3 2 +468 477 653 710 398 221 172 174 94 94 75 37 30 15 22 62 115 85 51 31 21 24 24 24 26 22 20 22 26 25 38 48 +7596 7556 4147 5224 4491 1996 1562 1725 1321 1107 569 235 192 108 93 278 1256 2074 1348 713 200 92 144 181 183 109 87 125 138 82 54 61 +6844 6796 3355 4334 3930 1705 1335 1489 1178 973 475 191 156 90 69 208 1096 1910 1246 655 172 66 116 151 151 84 65 99 108 55 16 13 +752 760 792 890 561 291 227 236 143 134 94 44 36 18 24 70 160 164 102 58 28 26 28 30 32 25 22 26 30 27 38 48 +19368 14371 9997 18196 11384 8150 9028 7222 3398 2926 2634 813 493 203 436 808 4641 5206 2305 1451 501 258 238 812 680 222 169 180 122 120 246 352 +17872 13067 8838 16615 10391 7545 8450 6707 3125 2681 2439 739 439 178 396 709 4302 4841 2115 1338 455 223 202 751 623 190 142 148 89 90 200 292 +1496 1304 1159 1581 993 605 578 515 273 245 195 74 54 25 40 99 339 365 190 113 46 35 36 61 57 32 27 32 33 30 46 60 +16386 11154 31366 36892 19623 39608 29073 9880 7609 5195 5929 1670 1120 141 1052 2860 4247 1615 1475 750 743 360 327 754 663 259 232 422 406 272 440 589 +14295 9457 
29000 33900 17886 37444 27356 8991 7043 4753 5505 1533 1024 112 972 2651 3752 1201 1234 612 670 313 280 666 582 218 197 375 359 233 379 508 +2091 1697 2366 2992 1737 2164 1717 889 566 442 424 137 96 29 80 209 495 414 241 138 73 47 47 88 81 41 35 47 47 39 61 81 +12308 16170 46360 47914 33313 59356 30850 11434 9573 6031 5623 2352 1869 228 1762 2413 3675 1919 627 385 513 230 348 645 421 312 533 738 771 555 693 689 +9809 13895 42236 43127 30314 54906 27969 10124 8647 5366 4992 2127 1703 192 1615 2116 3053 1445 371 238 423 176 289 535 327 261 479 664 696 496 607 584 +2499 2275 4124 4787 2999 4450 2881 1310 926 665 631 225 166 36 147 297 622 474 256 147 90 54 59 110 94 51 54 74 75 59 86 105 +17388 19064 58582 64313 40955 75268 41548 6891 7075 3746 4683 1447 1043 487 1518 3587 3768 2300 939 457 350 284 330 432 329 265 477 1025 1330 507 511 834 +14294 16118 52281 57147 36439 67987 37122 5358 5904 2958 3891 1174 842 433 1317 3159 3021 1754 656 298 250 221 261 310 226 206 407 913 1205 431 409 700 +3094 2946 6301 7166 4516 7281 4426 1533 1171 788 792 273 201 54 201 428 747 546 283 159 100 63 69 122 103 59 70 112 125 76 102 134 +16537 26767 71089 74236 37432 53868 28774 2183 3692 3031 3617 1413 799 790 1908 2965 3409 2971 521 702 282 265 140 235 266 209 454 843 1059 398 658 989 +12906 22869 62198 64389 31601 44725 23375 625 2421 2154 2713 1095 575 707 1639 2436 2556 2329 229 522 175 194 69 109 157 145 369 702 897 310 534 821 +3631 3898 8891 9847 5831 9143 5399 1558 1271 877 904 318 224 83 269 529 853 642 292 180 107 71 71 126 109 64 85 141 162 88 124 168 +7697 22317 36009 28903 19334 14170 9881 7144 4700 2809 1720 1067 1068 750 702 743 1623 1468 482 452 230 205 118 259 268 284 308 358 467 308 145 201 +3904 17683 26034 18295 12964 4827 4303 5363 3292 1855 784 720 811 641 416 206 740 793 183 262 119 129 46 128 153 212 215 209 293 212 21 32 +3793 4634 9975 10608 6370 9343 5578 1781 1408 954 936 347 257 109 286 537 883 675 299 190 111 76 72 131 115 72 93 149 174 96 124 169 +5797 4982 8684 8041 8540 14235 7774 1734 2574 1999 1785 797 828 674 627 594 517 1170 574 195 122 114 103 138 87 191 209 220 215 146 167 324 +1924 335 434 401 2084 4697 2109 86 1120 1004 816 433 549 543 328 55 25 476 265 9 11 37 30 7 4 115 112 69 40 49 42 149 +3873 4647 9923 10505 6456 9538 5665 1779 1454 995 969 364 279 131 299 539 868 694 309 190 111 77 73 131 113 76 97 151 175 97 125 175 +3515 1586 1658 2959 8500 12333 5629 2134 2415 869 1022 907 543 298 379 520 648 822 381 174 77 92 174 191 74 140 150 246 334 261 237 230 +175 79 82 147 1963 2684 281 341 923 43 51 522 254 161 77 25 32 123 70 8 3 15 97 58 3 62 51 92 153 158 108 53 +3858 4524 9592 10203 6537 9649 5663 1793 1492 989 971 385 289 137 302 538 859 699 311 189 109 77 77 133 111 78 99 154 181 103 129 177 +2224 3543 4543 3049 4432 6523 2778 1520 1493 609 749 473 300 160 307 367 172 814 799 242 111 109 168 162 126 104 173 199 353 269 203 173 +111 177 227 152 221 326 138 75 74 30 37 85 14 23 15 18 8 111 469 51 5 31 88 28 15 25 72 44 166 160 72 8 +3792 4484 9390 9916 6452 9524 5547 1782 1492 973 962 388 289 137 302 531 831 703 330 191 109 78 80 134 111 79 101 155 187 109 131 176 +1235 4576 5823 2907 2588 3966 1494 413 560 305 389 537 434 203 250 344 251 613 918 281 145 204 199 106 97 149 161 196 164 104 94 77 +61 228 291 145 129 198 74 20 27 15 19 144 140 64 12 17 12 30 565 87 35 121 115 5 4 68 58 40 8 5 4 3 +3689 4487 9247 9635 6297 9301 5384 1727 1454 946 939 393 294 139 299 523 807 699 353 194 110 83 84 132 110 81 103 156 186 108 129 172 +1073 2081 1805 3243 3076 4169 2073 542 
498 946 765 434 382 98 116 209 362 1329 1554 476 376 244 192 205 171 84 120 151 221 183 121 75 +53 104 90 162 153 208 103 27 24 47 38 40 85 4 5 10 18 605 1153 271 256 155 104 71 59 4 17 7 34 73 6 3 +3584 4390 8949 9379 6168 9095 5251 1679 1415 946 932 394 297 137 291 510 789 724 401 205 120 89 88 134 112 81 103 155 187 110 128 168 +2436 2480 4256 3867 2805 3807 2884 1712 1276 1361 1028 734 390 254 872 1373 2454 3335 4896 8146 10159 7203 1494 856 804 291 205 395 475 606 1013 578 +121 123 212 193 140 190 144 85 63 399 93 327 90 113 558 829 1599 2507 4316 7624 9638 6830 1350 694 665 202 98 231 277 477 850 394 +3538 4313 8761 9158 6033 8883 5156 1680 1409 962 935 407 300 141 314 544 855 828 580 522 521 373 144 162 139 89 107 164 198 129 163 184 +888 919 1524 1657 3008 6417 9674 9419 4806 2316 1283 879 1422 620 1495 1272 1934 3748 5666 7715 13043 7161 4938 1514 1730 1199 359 355 2311 3368 3008 1759 +44 45 76 82 150 320 4338 7430 3262 1300 335 454 1078 460 1134 699 1036 2804 4883 6906 12022 6517 4603 1298 1528 1066 242 184 2029 3110 2732 1513 +3432 4177 8471 8858 5912 8784 5336 1989 1544 1016 948 425 344 160 361 573 898 944 783 809 1021 644 335 216 202 133 117 171 282 258 276 246 +1978 1707 3491 7036 8527 8027 7337 5010 1004 916 1424 1691 1024 568 1175 1065 1748 2932 3303 2677 3235 3383 2563 1918 2265 1165 423 576 1940 2490 1455 1716 +98 85 174 351 2511 401 1922 2901 50 45 457 1216 653 392 782 473 817 1909 2420 1794 2126 2630 2139 1634 1981 991 294 389 1592 2143 1132 1412 +3373 4078 8271 8785 6016 8753 5415 2109 1522 1012 967 475 371 176 393 592 931 1023 883 883 1109 753 424 284 284 174 129 187 348 347 323 304 +20917 28069 21895 12010 14010 15767 12106 4142 2772 3069 2396 1007 611 416 1041 698 1055 2082 2192 3116 2613 2802 2723 3497 4473 1352 596 540 2244 2627 1644 1456 +16843 23032 13080 3097 7675 6734 6424 1952 1201 1975 1372 511 231 231 623 102 120 1017 1257 2144 1444 1968 2208 3085 4022 1131 449 339 1821 2189 1269 1106 +4074 5037 8815 8913 6335 9033 5682 2190 1571 1094 1024 496 380 185 418 596 935 1065 935 972 1169 834 515 412 451 221 147 201 423 438 375 350 +35861 32225 28979 68360 68654 38455 33954 27091 7850 7293 4286 3294 1764 989 614 1729 3611 7096 5992 8290 11745 11808 11950 11491 13422 6014 1045 1138 2705 1061 576 993 +30517 26102 19358 57071 59828 28246 27142 23906 6028 5952 3132 2687 1329 772 189 1088 2570 5790 4855 7026 10154 10536 10978 10637 12453 5562 863 900 2191 599 193 618 +5344 6123 9621 11289 8826 10209 6812 3185 1822 1341 1154 607 435 217 425 641 1041 1306 1137 1264 1591 1272 972 854 969 452 182 238 514 462 383 375 +27149 14111 40871 103642 57934 42056 73468 36226 10127 6884 2733 2738 2476 1416 812 2591 6200 4503 7927 13940 12303 31441 22214 16455 24114 8687 1717 1867 4008 1625 470 1109 +20934 7669 30001 88661 47145 30574 63992 31721 7973 5322 1516 2046 1960 1152 372 1873 4953 3070 6519 12170 10284 28963 20393 14978 22220 7906 1474 1564 3355 1117 84 705 +6215 6442 10870 14981 10789 11482 9476 4505 2154 1562 1217 692 516 264 440 718 1247 1433 1408 1770 2019 2478 1821 1477 1894 781 243 303 653 508 386 404 +24252 15511 53706 99669 46122 76770 100934 31158 14246 7214 3191 1583 3422 1828 862 2979 4090 4461 8899 10528 17571 41240 14701 16711 14591 6799 1863 1576 3493 1885 526 1046 +17316 8707 41124 81303 33921 62678 87802 25588 11609 5427 1896 856 2790 1502 406 2171 2730 2907 7192 8408 14931 37213 12366 14625 12190 5778 1556 1223 2727 1322 135 617 +6936 6804 12582 18366 12201 14092 13132 5570 2637 1787 1295 727 632 326 456 808 1360 1554 1707 2120 2640 4027 2335 2086 2401 1021 
307 353 766 563 391 429 +15140 18054 63457 82138 32589 114830 111204 27831 13905 5843 5504 2754 2940 1096 1046 2324 3871 4943 6839 7482 22113 28244 14756 12446 18007 12857 1393 2325 4451 1533 677 1251 +7877 10801 48842 61223 19573 96711 94152 21372 10818 3894 4041 1946 2216 740 567 1456 2411 3254 4927 5148 18695 23249 11925 9946 14983 11363 1043 1894 3538 932 275 790 +7263 7253 14615 20915 13016 18119 17052 6459 3087 1949 1463 808 724 356 479 868 1460 1689 1912 2334 3418 4995 2831 2500 3024 1494 350 431 913 601 402 461 +15272 19289 60997 69570 49748 128392 92234 24464 21627 6264 6889 2903 1842 615 936 1576 3981 4263 5662 7533 13443 22028 16316 8626 24416 12788 1684 2644 3869 1075 395 878 +7689 11555 44528 46710 35264 105865 72177 17286 17799 4143 5210 2012 1074 249 439 680 2421 2472 3601 4992 9625 16353 12946 5882 20537 10843 1281 2125 2838 456 19 401 +7583 7734 16469 22860 14484 22527 20057 7178 3828 2121 1679 891 768 366 497 896 1560 1791 2061 2541 3818 5675 3370 2744 3879 1945 403 519 1031 619 401 477 +9279 26152 71124 66975 58297 103291 60335 21451 13716 6186 3244 4608 3795 618 827 1993 2896 2724 5365 5711 12045 20914 14421 9725 18907 4027 1294 2105 3347 979 896 958 +1629 17682 52471 42352 42062 77536 38668 13703 9493 3903 1503 3569 2906 242 317 1054 1283 896 3172 3044 7899 14630 10610 6702 14428 1999 856 1523 2224 346 476 462 +7650 8470 18653 24623 16235 25755 21667 7748 4223 2283 1741 1039 889 376 510 939 1613 1828 2193 2667 4146 6284 3811 3023 4479 2028 438 582 1123 633 420 496 +8165 30378 62039 43952 67894 67766 19339 21922 11165 4359 3103 3721 1663 871 757 1326 1771 2179 5907 3767 12113 13729 10502 7232 10397 3575 977 1336 1689 900 794 1327 +495 21033 41652 18557 49594 40332 966 13608 6665 1994 1308 2575 744 476 238 372 152 337 3566 1057 7649 7148 6424 4041 5682 1486 518 724 544 257 360 798 +7670 9345 20387 25395 18300 27434 21573 8314 4500 2365 1795 1146 919 395 519 954 1619 1842 2341 2710 4464 6581 4078 3191 4715 2089 459 612 1145 643 434 529 +11299 34791 60771 42562 77890 75125 23008 9915 7608 3395 2329 1594 1264 1008 608 1088 1652 2013 2148 3541 5412 15045 10239 7366 10037 2229 440 805 1444 1013 743 419 +3484 24429 38770 16481 57208 45785 1378 1537 2984 989 513 431 332 589 86 129 82 165 107 798 911 8126 5915 4009 5110 135 21 186 288 356 297 20 +7815 10362 22001 26081 20682 29340 21630 8378 4624 2406 1816 1163 932 419 522 959 1620 1848 2333 2743 4501 6919 4324 3357 4927 2094 458 619 1156 657 446 524 +12771 46445 60739 47014 77842 40653 11109 11952 5136 2314 1972 943 409 458 619 1326 2632 1521 2442 2036 3231 7034 14984 8088 7018 2517 856 1210 1459 533 453 345 +4758 34641 37190 20097 54875 10861 555 3432 492 115 150 47 20 38 94 353 972 76 122 101 161 351 10234 4542 2008 407 383 568 291 26 22 17 +8013 11804 23549 26917 22967 29792 21209 8520 4644 2402 1822 1154 911 420 525 973 1660 1834 2337 2714 4450 6923 4750 3546 5010 2110 473 642 1168 652 446 516 +10420 49801 57616 49540 77904 45571 8842 7629 2356 2393 1790 1077 451 274 308 345 868 1231 2012 2114 4809 7463 13168 12272 6894 2685 773 1428 2085 687 512 532 +2311 36478 32706 21719 52741 15149 441 381 117 119 89 53 22 13 15 17 43 61 100 105 345 519 8082 8378 1809 553 289 755 881 34 64 26 +8109 13323 24910 27821 25163 30422 20714 8484 4552 2401 1820 1150 892 414 516 947 1628 1809 2324 2690 4464 6944 5086 3894 5085 2132 484 673 1204 653 448 516 +12147 38439 38883 23703 23068 21535 10519 3112 2200 1889 1071 815 720 178 270 339 462 836 801 671 867 2232 4759 7507 3275 1278 589 454 472 282 165 355 +3877 24112 13415 1184 1153 1076 525 155 
109 94 53 40 35 8 13 16 23 41 40 33 43 111 237 3469 163 63 101 22 23 14 8 17 +8270 14327 25468 27656 25079 30066 20306 8269 4457 2380 1790 1136 885 404 506 922 1581 1770 2263 2609 4320 6755 5072 4038 5012 2097 488 664 1174 638 436 509 +8803 18678 16222 9579 9850 5791 4505 2293 1577 1224 902 384 471 229 323 328 516 766 1670 1055 1402 3403 2131 2715 1345 660 385 391 318 233 201 293 +512 4178 810 478 492 289 225 114 78 61 45 19 23 11 16 16 25 38 83 52 70 170 106 135 67 32 19 19 15 11 10 14 +8291 14500 25098 26933 24470 29095 19674 8030 4341 2333 1754 1105 868 397 498 898 1538 1729 2239 2546 4203 6620 4954 3985 4865 2039 483 653 1139 621 426 500 +8397 7480 5635 4412 11096 9623 5687 2448 693 973 844 508 466 223 390 598 456 486 390 579 929 1749 1731 2816 1161 777 413 647 504 353 137 130 +419 373 281 220 554 481 284 122 34 48 42 25 23 11 19 29 22 24 19 28 46 87 86 140 58 38 20 32 25 17 6 6 +8295 14219 24319 26032 23935 28316 19114 7806 4195 2278 1717 1081 851 390 493 886 1494 1679 2165 2467 4072 6425 4825 3938 4716 1988 480 652 1113 610 414 485 +9211 16898 15369 7302 4178 7014 5725 1879 1000 616 447 822 397 193 197 283 362 542 209 366 563 890 1400 1626 1059 430 247 338 215 230 95 135 +880 2572 768 365 208 350 286 93 49 30 22 41 19 9 9 14 18 27 10 18 28 44 69 81 52 21 12 16 10 11 4 6 +8331 14326 23961 25283 23145 27464 18578 7569 4067 2211 1666 1070 832 382 481 861 1448 1633 2086 2383 3931 6203 4688 3845 4569 1925 470 639 1077 594 401 471 +7784 20827 21073 7215 5467 8578 6695 2060 1439 1502 1552 2022 1857 851 1648 2556 3842 3797 2314 853 1892 4583 4980 3868 2423 1257 2463 3215 1309 789 848 581 +389 6242 1053 360 273 428 334 102 71 75 77 914 985 451 1121 1628 2299 2078 219 42 94 229 281 193 121 62 1914 2474 223 188 430 106 +8309 14585 23845 24560 22438 26708 18102 7348 3961 2182 1661 1108 872 400 527 928 1543 1719 2095 2321 3849 6138 4699 3845 4483 1898 549 741 1086 601 418 475 +9100 18042 14711 5410 6234 8707 7660 5476 2716 5137 5825 2852 2854 3154 4490 6012 6126 4014 3001 2264 2337 3706 3957 2080 1775 808 1851 2319 1344 678 1123 1116 +760 3319 735 270 311 435 382 273 135 2837 3998 1675 1903 2644 3805 4881 4400 2204 870 113 116 185 197 103 88 40 1250 1515 248 74 677 616 +8340 14723 23479 23794 21790 25988 17684 7273 3911 2300 1827 1177 951 510 685 1131 1726 1810 2131 2318 3788 6040 4669 3774 4374 1854 601 804 1096 604 446 500 +5289 17130 18776 10048 16830 9455 2296 1820 944 3773 5453 1858 982 1283 3380 3050 3963 2820 1547 1768 1930 2211 1768 1852 2056 1987 1375 986 1896 1381 891 591 +264 2311 938 502 841 472 114 90 47 1415 3482 654 49 743 2588 1843 2148 970 77 88 96 110 88 92 102 128 744 175 769 746 428 88 +8218 14819 23290 23244 21591 25327 17068 7054 3792 2358 1971 1204 952 540 792 1207 1815 1850 2107 2296 3713 5886 4553 3697 4281 1859 631 811 1127 635 463 503 +8138 29771 30222 18484 20533 7010 3056 1814 1869 961 1215 728 1417 877 1710 2369 1495 1427 3266 2182 1554 1026 1305 1370 1084 748 1188 1210 1276 660 578 424 +406 14355 6655 923 1026 350 152 90 93 48 60 36 447 324 882 1116 74 71 1113 109 77 51 65 68 54 37 535 384 144 32 111 21 +8214 15416 23567 23053 21548 24594 16507 6844 3715 2302 1940 1184 970 553 828 1253 1802 1833 2153 2291 3626 5691 4423 3603 4153 1814 653 826 1132 635 467 499 +14914 47749 53488 27267 31063 13479 4464 2230 1798 979 485 537 897 846 1298 1829 1380 2056 3512 1499 1539 765 1216 1126 1096 849 989 1011 1151 744 555 554 +6433 31041 28725 4046 9135 673 223 111 89 48 24 26 44 282 452 553 68 215 1305 74 76 38 60 56 54 42 323 178 57 105 85 53 +8481 16708 24763 23221 21928 24149 
16025 6659 3638 2249 1881 1158 967 564 846 1276 1785 1841 2207 2259 3542 5494 4294 3503 4030 1775 666 833 1132 639 470 501 +17160 56980 57221 39549 48300 18813 4356 2024 995 1228 1864 785 979 687 676 1642 2760 3568 1842 1036 708 1491 2273 1961 1532 803 1651 972 1303 1005 491 731 +8333 38663 31161 15676 25318 940 217 101 49 61 93 39 48 119 33 352 937 1658 92 51 35 74 113 98 76 40 946 134 165 352 24 221 +8827 18317 26060 23873 22982 23935 15558 6473 3532 2208 1880 1143 967 568 839 1290 1823 1910 2192 2210 3428 5333 4213 3441 3930 1736 705 838 1138 653 470 510 +17616 55090 50203 46994 59809 22046 5036 3969 1895 923 1280 956 257 388 762 1064 2665 2809 1918 981 1043 1870 2249 1414 1332 945 1285 1228 813 737 559 1098 +8438 35303 23178 22197 35355 1102 251 198 94 46 63 47 12 19 38 53 809 864 95 49 52 93 112 70 66 47 557 375 40 81 86 565 +9178 19787 27025 24797 24454 23859 15137 6372 3466 2156 1856 1135 938 560 835 1280 1856 1945 2181 2160 3332 5194 4134 3359 3826 1704 728 853 1125 656 473 533 +17236 48129 38425 52926 62127 20314 2190 782 1397 1297 1586 828 399 367 514 1664 2902 1406 470 511 655 897 1262 815 987 562 524 544 616 365 428 1099 +7736 27209 10945 27005 36167 1015 109 39 69 64 79 41 19 18 25 369 1005 70 23 25 32 44 63 40 49 28 26 27 30 18 21 544 +9500 20920 27480 25921 25960 23717 14619 6148 3383 2121 1845 1122 916 552 822 1295 1897 1923 2112 2094 3224 5022 4019 3257 3712 1658 719 840 1104 644 471 555 +18273 49195 36818 56227 61522 18405 3501 2218 1489 1324 1509 528 207 219 233 800 1954 2259 985 370 521 1003 1301 2091 1518 892 926 493 345 270 473 607 +8423 27145 8965 29095 34141 920 175 110 74 66 75 26 10 10 11 39 97 323 49 18 26 50 65 104 75 44 199 24 17 13 23 50 +9850 22050 27853 27132 27381 23504 14174 5990 3307 2089 1831 1098 887 538 798 1275 1899 1936 2066 2025 3115 4861 3910 3210 3624 1627 727 826 1073 629 471 557 +19326 53554 40046 60840 63515 18458 2765 1270 1045 713 937 782 465 233 419 712 1144 1202 1166 463 443 590 1110 1428 925 548 720 433 477 243 310 332 +9098 30245 11706 32361 34690 922 138 63 52 35 46 39 23 11 20 35 57 60 58 23 22 29 55 71 46 27 35 21 23 12 15 16 +10228 23309 28340 28479 28825 23302 13717 5801 3216 2033 1795 1085 870 525 782 1252 1868 1906 2030 1962 3008 4690 3798 3138 3516 1583 726 810 1049 613 464 548 +22407 59763 45869 60784 60523 16737 2157 771 831 1077 804 422 557 380 673 694 1004 441 421 386 477 563 702 664 558 508 586 460 469 313 259 558 +11693 34997 16829 31014 30431 836 107 38 41 53 40 21 27 18 33 34 50 22 21 19 23 28 35 33 27 25 29 22 23 15 12 27 +10714 24766 29040 29770 30092 23039 13254 5599 3120 1994 1755 1058 857 519 777 1229 1833 1847 1965 1898 2906 4525 3674 3039 3397 1540 720 796 1025 601 455 548 +26454 57958 39931 57281 51865 13310 2228 1725 1042 803 759 360 384 267 215 347 894 864 542 283 230 313 655 501 468 334 380 501 495 294 172 240 +15111 31866 10456 26412 20903 665 111 86 52 40 37 17 19 13 10 17 44 43 27 14 11 15 32 25 23 16 18 25 24 14 8 11 +11343 26092 29475 30869 30962 22650 12813 5444 3036 1946 1715 1030 838 508 754 1193 1795 1807 1908 1833 2799 4356 3553 2937 3279 1491 706 784 1003 588 443 535 +24590 50530 33633 44280 36796 9424 611 919 871 962 530 430 248 262 381 794 1025 392 237 300 324 252 365 556 352 338 512 614 548 344 199 166 +12718 23462 3992 12875 5601 471 30 45 43 48 26 21 12 13 19 39 51 19 11 14 16 12 18 27 17 16 25 30 27 17 9 8 +11872 27068 29641 31405 31195 22121 12325 5263 2949 1906 1667 1006 814 498 739 1177 1764 1750 1841 1771 2700 4191 3425 2841 3161 1444 698 777 984 578 433 520 +20856 39429 24614 22279 15851 
3851 1229 1114 601 760 602 302 181 250 352 230 581 503 215 271 215 170 218 242 202 278 282 381 441 197 85 165 +8625 11867 1230 1113 792 192 61 55 30 37 30 15 9 12 17 11 29 25 10 13 10 8 10 12 10 13 14 19 22 9 4 8 +12231 27562 29440 31040 30581 21390 11881 5097 2855 1860 1624 977 788 488 723 1139 1716 1700 1775 1711 2600 4030 3296 2737 3042 1397 681 761 962 562 419 505 +13236 23501 15502 10918 3869 1305 1051 487 382 286 644 347 283 78 111 223 277 423 386 330 137 161 190 203 165 248 306 331 292 131 77 102 +965 1174 774 545 193 65 52 24 19 14 32 17 14 3 5 11 13 21 19 16 6 8 9 10 8 12 15 16 14 6 3 5 +12271 27399 28882 30235 29513 20587 11448 4912 2756 1797 1584 951 767 471 698 1102 1658 1648 1719 1655 2501 3875 3171 2635 2926 1351 666 743 935 544 405 488 +4027 10035 9049 14424 9730 2034 877 1070 655 331 350 245 360 208 269 346 231 402 285 222 165 143 91 130 156 237 163 278 239 142 131 116 +201 501 452 721 486 101 43 53 32 16 17 12 17 10 13 17 11 20 14 11 8 7 4 6 7 11 8 13 11 7 6 5 +11941 26704 28089 29602 28722 19845 11025 4758 2672 1738 1534 922 750 460 680 1071 1600 1598 1661 1597 2407 3725 3047 2534 2815 1306 645 724 907 527 394 473 +2767 6760 8292 16184 15261 3865 1934 1186 585 316 474 605 390 336 192 489 464 318 243 200 216 209 293 271 307 165 73 164 270 189 139 140 +138 337 414 809 762 193 96 59 29 15 23 30 19 16 9 24 23 15 12 9 10 10 14 13 15 8 3 8 13 9 6 6 +11574 25906 27297 29065 28183 19206 10661 4615 2588 1681 1491 909 735 455 660 1047 1554 1546 1604 1541 2319 3584 2936 2443 2714 1260 622 701 881 513 383 459 +6416 12419 15082 19610 25929 10026 5531 2111 918 467 384 626 632 372 225 326 437 579 352 179 116 111 198 292 366 237 103 194 252 204 305 286 +320 620 753 980 1296 501 276 105 45 23 19 31 31 18 11 16 21 28 17 8 5 5 9 14 18 11 5 9 12 10 15 14 +11367 25366 26808 28687 28092 18839 10455 4514 2521 1632 1446 897 730 451 642 1018 1509 1507 1553 1486 2230 3445 2826 2357 2620 1219 601 680 855 500 379 452 +12873 32740 30072 29194 45602 21222 10483 4227 2301 1233 1223 1134 1987 1475 258 409 498 599 351 266 77 101 187 467 576 359 193 298 179 147 569 457 +1446 7080 3134 1459 16810 2288 524 211 115 61 61 228 1207 984 12 20 24 29 17 13 3 5 9 23 28 17 9 14 8 7 183 22 +11427 25660 26938 28707 28792 18934 10456 4502 2512 1616 1437 906 780 491 626 993 1468 1470 1504 1437 2143 3311 2720 2281 2538 1184 584 664 827 485 386 452 +14856 39645 34256 35786 52797 24229 18076 8789 4674 1806 1241 1799 2802 1610 254 305 305 456 437 255 253 183 201 385 489 235 247 384 491 275 293 223 +3292 13426 7026 6796 23046 5084 7316 4116 2076 183 62 858 1942 1075 12 15 15 22 21 12 12 9 10 19 24 11 12 19 24 13 14 11 +11564 26219 27230 28990 29751 19145 10760 4673 2598 1623 1429 941 860 535 611 965 1421 1429 1461 1389 2067 3185 2619 2205 2456 1146 570 652 813 476 382 442 +12964 33405 29124 28106 40165 22134 25776 12009 5401 3644 3062 2702 3579 2565 924 510 276 305 358 254 248 231 188 440 737 236 209 430 608 166 131 132 +1345 6899 1819 1404 9998 2870 14416 7043 2691 1941 1568 1691 2611 1949 301 25 13 15 17 12 12 11 9 21 36 11 10 21 30 8 6 6 +11619 26506 27305 28954 30167 19264 11360 4966 2710 1703 1494 1011 968 616 623 946 1375 1384 1416 1343 1994 3066 2521 2134 2387 1109 555 643 804 463 371 429 +12419 29992 25630 22291 29283 22453 32643 14005 5547 5325 4539 4022 4291 2329 807 656 488 385 350 402 283 222 211 482 740 284 115 524 533 261 116 100 +769 3347 1281 1114 1463 3062 20433 8678 2724 3478 2924 2891 3191 1645 177 32 24 19 17 20 14 11 10 24 36 14 5 26 26 13 5 4 +11650 26645 27238 28687 30131 19391 12210 5327 2823 1847 
1615 1131 1100 684 630 934 1339 1344 1373 1305 1925 2952 2428 2067 2321 1076 537 638 793 454 360 415 +10985 26685 24363 22948 29584 22779 36145 16307 5961 6117 6491 5373 4321 2365 462 651 637 368 229 290 297 187 169 355 761 589 255 413 947 772 224 85 +549 1333 1217 1147 1478 3253 22979 10542 3013 4100 4682 4073 3093 1614 23 32 31 18 11 14 14 9 8 17 38 29 12 20 148 306 11 4 +11623 26646 27123 28457 30109 19526 13166 5765 2948 2017 1809 1300 1228 751 623 922 1310 1304 1327 1264 1859 2841 2337 1998 2258 1056 525 629 799 466 354 401 +9112 22674 23148 17882 29719 23340 38157 19050 6766 5623 7496 7706 4237 1556 428 535 567 366 314 217 151 174 138 206 514 692 176 532 898 520 159 117 +455 1133 1157 893 1485 3662 23992 12754 3666 3462 5460 6150 2889 773 21 26 28 18 15 10 7 8 6 10 25 34 8 26 96 52 7 5 +11522 26487 26964 28034 30093 19678 14165 6296 3100 2161 2036 1556 1348 783 615 906 1280 1266 1286 1222 1790 2734 2249 1926 2188 1041 511 625 802 468 346 389 +8416 16304 24353 17837 31094 24208 37413 23971 7817 7064 10520 9202 4653 1352 580 811 524 476 323 301 127 120 162 160 429 561 306 808 2028 1590 324 91 +420 815 1217 891 1554 4349 22319 16969 4529 4707 8145 7341 3173 547 28 40 26 23 16 15 6 5 8 7 21 28 15 176 1177 1078 16 4 +11397 26079 26859 27626 30133 19859 15094 7002 3288 2357 2375 1861 1480 805 613 902 1249 1234 1247 1185 1723 2629 2165 1855 2117 1021 502 632 851 512 345 377 +9979 16341 17148 15434 19482 20854 33649 15175 4948 5398 7862 6756 4121 1568 572 612 514 362 353 191 139 179 242 110 221 282 217 527 1444 1507 324 105 +498 816 857 771 973 1042 17814 7847 1594 2920 5268 4700 2536 733 28 30 25 18 17 9 6 8 12 5 11 14 10 26 570 956 16 5 +11340 25689 26470 27138 29707 19898 15835 7328 3354 2478 2594 2056 1585 835 611 890 1219 1199 1211 1145 1659 2531 2088 1785 2041 991 490 627 874 551 344 366 +7268 12505 17644 9847 8354 10078 15178 7092 3159 8622 9133 5100 4386 1321 808 955 1133 1396 801 373 340 188 251 154 378 393 156 323 695 1162 302 92 +363 625 881 492 417 503 758 354 157 5899 6278 2923 2690 467 190 63 56 190 40 18 16 9 12 7 18 19 7 16 34 587 15 4 +11177 25161 26117 26446 28853 19505 15808 7318 3346 2723 2855 2177 1696 854 618 892 1215 1206 1194 1114 1606 2437 2014 1719 1974 967 476 614 866 575 342 355 +4476 6609 7654 5117 7174 5030 7718 4347 2194 2318 4062 1750 1957 1064 339 323 282 250 153 167 172 126 119 100 61 273 154 108 506 521 220 90 +223 330 382 255 358 251 385 217 109 115 1159 87 251 202 16 16 14 12 7 8 8 6 5 4 3 13 7 5 25 26 10 4 +10909 24419 25378 25593 27986 18926 15484 7199 3299 2706 2903 2159 1706 862 606 869 1177 1167 1152 1076 1548 2344 1938 1654 1897 939 463 593 851 572 337 344 +2170 3966 6677 4572 5279 5034 7029 3925 1733 1648 2689 1281 1212 711 533 593 1329 2368 2118 1660 988 440 162 119 116 278 245 122 273 299 168 114 +108 198 333 228 263 251 351 196 86 82 134 64 60 35 26 29 146 1153 928 561 49 21 8 5 5 13 12 6 13 14 8 5 +10559 23601 24630 24752 27078 18370 15145 7068 3236 2663 2894 2123 1686 855 603 857 1183 1215 1190 1099 1525 2267 1866 1592 1825 912 454 574 827 561 330 334 +1213 3911 3586 4212 4132 2489 3382 2946 845 1174 2202 1097 769 447 335 361 658 881 485 418 297 153 96 60 127 160 232 168 299 196 116 65 +60 195 179 210 206 124 169 147 42 58 110 54 38 22 16 18 32 44 24 20 14 7 4 2 6 7 11 8 14 9 5 3 +10185 22813 23788 23930 26160 17735 14674 6903 3140 2603 2866 2081 1649 838 592 837 1162 1201 1161 1071 1475 2182 1795 1530 1757 881 445 557 805 546 321 323 +1075 3044 3585 1961 970 681 1296 862 432 456 807 1177 916 173 225 270 428 823 654 458 222 187 179 102 161 213 
311 290 147 158 90 62 +53 152 179 98 48 34 64 43 21 22 40 58 45 8 11 13 21 41 32 22 11 9 8 5 8 10 15 14 7 7 4 3 +9820 22022 22980 23051 25152 17053 14139 6661 3031 2517 2783 2044 1619 811 577 814 1132 1185 1140 1046 1424 2102 1730 1472 1693 854 439 546 778 530 311 312 +1465 2065 2824 2440 1440 1793 993 473 895 1041 529 664 558 178 132 115 284 421 202 184 161 118 124 96 119 162 144 99 101 86 71 52 +73 103 141 121 71 89 49 23 44 52 26 33 27 8 6 5 14 21 10 9 8 5 6 4 5 8 7 4 5 4 3 2 +9485 21224 22174 22227 24204 16442 13613 6413 2945 2457 2692 1988 1576 785 559 786 1098 1154 1102 1011 1373 2022 1665 1416 1630 826 427 528 750 512 301 301 +1089 1069 2188 2786 1099 1483 2001 1505 627 971 815 318 302 115 137 263 524 257 340 189 73 68 83 88 119 84 159 162 159 118 114 61 +54 53 109 139 54 74 100 75 31 48 40 15 15 5 6 13 26 12 16 9 3 3 4 4 5 4 7 8 7 5 5 3 +9149 20418 21374 21449 23280 15843 13148 6216 2852 2397 2616 1921 1525 758 542 765 1075 1118 1071 978 1321 1943 1601 1362 1569 796 416 513 726 496 293 291 +1337 2013 1807 2311 2636 1614 2307 1391 318 398 533 351 262 197 102 363 285 149 159 108 100 65 89 98 102 158 165 131 166 123 107 72 +66 100 90 115 131 80 115 69 15 19 26 17 13 9 5 18 14 7 7 5 4 3 4 4 5 7 8 6 8 6 5 3 +8836 19682 20591 20683 22454 15274 12714 6023 2750 2317 2532 1858 1474 735 524 748 1043 1079 1034 943 1272 1867 1540 1311 1510 770 405 497 703 481 285 282 +781 2881 3560 1620 1289 1432 1919 901 191 402 449 163 237 144 55 160 123 69 190 234 164 87 151 148 143 121 177 169 122 100 116 94 +39 144 177 80 64 71 95 45 9 20 22 8 11 7 2 7 6 3 9 11 8 4 7 7 7 6 8 8 6 4 5 4 +8513 19010 19910 19920 21607 14720 12282 5818 2647 2240 2448 1790 1424 711 505 724 1006 1038 1000 914 1227 1795 1484 1264 1455 744 395 483 679 465 278 274 +1392 1750 3040 2323 900 910 1538 760 569 583 279 157 204 131 106 184 195 145 84 121 150 122 90 119 163 124 93 118 93 140 87 77 +69 87 151 116 44 45 76 37 28 29 13 7 10 6 5 9 9 7 4 6 7 6 4 5 8 6 4 5 4 6 4 3 +8228 18319 19235 19216 20779 14167 11852 5615 2563 2173 2361 1724 1375 687 489 702 973 1002 963 882 1183 1728 1428 1218 1403 719 382 468 655 452 270 266 +661 1290 1704 862 676 689 1023 451 197 189 208 134 119 124 47 116 131 117 134 117 75 68 119 108 147 140 107 100 86 113 56 61 +33 64 85 43 33 34 51 22 9 9 10 6 5 6 2 5 6 5 6 5 3 3 5 5 7 6 5 4 4 5 2 3 +7925 17638 18534 18482 19975 13628 11419 5408 2468 2093 2274 1660 1324 664 471 678 939 966 929 851 1138 1661 1375 1173 1352 695 371 453 632 438 261 257 +755 1335 1900 1323 443 359 492 310 227 252 231 114 92 90 75 119 116 116 176 114 129 132 96 121 95 88 43 101 116 59 108 88 +37 66 94 66 22 17 24 15 11 12 11 5 4 4 3 5 5 5 8 5 6 6 4 6 4 4 2 5 5 2 5 4 +7638 16986 17869 17796 19194 13097 10982 5204 2378 2019 2192 1598 1274 641 455 655 906 932 898 821 1097 1599 1323 1130 1301 670 357 438 611 422 254 250 +607 1241 1151 1147 695 413 521 380 170 286 236 160 172 134 90 109 119 137 68 100 81 76 79 122 123 84 104 97 81 88 95 87 +30 62 57 57 34 20 26 18 8 14 11 7 8 6 4 5 5 6 3 4 4 3 3 6 6 4 5 4 4 4 4 4 +7356 16356 17200 17130 18454 12589 10563 5011 2289 1949 2113 1540 1229 620 440 633 874 900 864 792 1056 1538 1273 1089 1253 646 346 424 589 408 247 243 +778 916 1977 2497 1167 345 371 365 205 332 230 141 51 78 112 204 216 122 137 151 133 133 124 119 156 105 90 227 181 154 105 98 +38 45 98 124 58 17 18 18 10 16 11 7 2 3 5 10 10 6 6 7 6 6 6 5 7 5 4 11 9 7 5 4 +7093 15738 16591 16545 17762 12099 10155 4825 2205 1884 2037 1484 1181 598 426 615 847 868 834 766 1019 1481 1227 1050 1209 624 335 416 572 397 241 237 +605 795 579 1071 1206 614 
263 272 153 154 185 299 107 44 79 144 119 92 95 113 141 193 294 292 419 258 120 180 190 137 99 90 +30 39 28 53 60 30 13 13 7 7 9 14 5 2 3 7 5 4 4 5 7 9 14 14 20 12 5 8 9 6 4 4 +6833 15140 15950 15926 17100 11639 9759 4642 2122 1814 1962 1436 1138 575 412 596 817 836 804 739 983 1429 1189 1019 1177 609 326 406 556 386 235 231 +1102 2043 1472 487 409 175 256 421 283 413 692 490 198 111 104 86 135 124 74 110 172 148 187 176 243 236 272 356 313 247 244 109 +55 102 73 24 20 8 12 21 14 20 34 24 9 5 5 4 6 6 3 5 8 7 9 8 12 11 13 17 15 12 12 5 +6603 14616 15371 15308 16432 11180 9379 4473 2048 1757 1911 1398 1100 556 399 575 789 807 774 713 950 1377 1148 985 1139 594 323 404 546 380 235 226 +609 1109 1012 726 653 239 192 282 212 380 485 282 244 118 107 131 147 110 91 175 203 145 229 176 181 168 232 155 181 235 394 217 +30 55 50 36 32 11 9 14 10 18 24 14 12 5 5 6 7 5 4 8 10 7 11 8 9 8 11 7 9 11 153 10 +6363 14076 14796 14725 15801 10742 9011 4305 1974 1701 1853 1353 1065 538 387 557 763 779 746 691 920 1327 1111 952 1100 576 319 394 531 374 241 225 +443 514 889 1129 554 263 198 175 276 322 499 266 111 80 145 164 244 188 113 161 84 100 193 135 172 191 313 213 212 255 311 88 +22 25 44 56 27 13 9 8 13 16 24 13 5 3 7 8 12 9 5 8 4 4 9 6 8 9 15 10 10 12 68 4 +6126 13533 14240 14181 15191 10323 8658 4139 1906 1645 1798 1309 1026 519 377 541 742 755 720 669 886 1277 1074 919 1062 560 318 386 518 369 243 219 +258 236 227 539 711 432 209 130 219 159 379 219 76 55 93 185 85 112 111 90 126 115 169 129 118 129 153 116 203 202 149 75 +12 11 11 26 35 21 10 6 10 7 18 10 3 2 4 9 4 5 5 4 6 5 8 6 5 6 7 5 10 10 7 3 +5891 13001 13679 13635 14612 9927 8320 3978 1838 1585 1741 1265 988 500 365 526 715 729 695 645 855 1230 1037 887 1024 542 311 375 505 362 239 213 +503 399 537 566 673 594 340 278 189 131 142 199 84 66 53 108 95 47 146 204 133 76 133 141 113 113 141 104 216 226 112 65 +25 19 26 28 33 29 16 13 9 6 7 9 4 3 2 5 4 2 7 10 6 3 6 7 5 5 7 5 10 11 5 3 +5675 12497 13153 13112 14054 9553 8000 3830 1772 1526 1677 1222 951 482 352 509 690 701 673 627 826 1183 1000 857 987 524 304 364 493 356 233 207 +662 416 541 637 205 196 280 218 505 449 241 255 108 82 145 88 67 90 123 177 151 145 137 243 157 164 142 158 131 170 153 110 +33 20 27 31 10 9 13 10 25 22 12 12 5 4 7 4 3 4 6 8 7 7 6 12 7 8 7 7 6 8 7 5 +5474 12014 12648 12613 13500 9178 7691 3685 1721 1482 1619 1183 917 466 343 492 665 676 651 609 799 1141 965 832 953 509 297 355 478 348 229 203 +465 256 357 977 895 367 202 220 408 374 302 249 146 86 144 182 152 155 159 148 139 89 131 170 127 172 162 89 118 114 108 122 +23 12 17 48 44 18 10 10 20 18 15 12 7 4 7 9 7 7 7 7 6 4 6 8 6 8 8 4 5 5 5 6 +5273 11543 12156 12147 12996 8825 7391 3546 1668 1437 1566 1145 886 450 335 479 644 655 631 590 772 1098 931 805 919 495 291 344 463 338 224 199 +518 1204 2244 2763 1162 543 212 230 260 327 339 110 96 89 93 155 161 100 191 195 136 57 69 140 120 97 120 150 121 120 111 97 +25 60 112 138 58 27 10 11 12 16 16 5 4 4 4 7 8 4 9 9 6 2 3 6 5 4 5 7 6 5 5 4 +5082 11129 11759 11771 12522 8493 7103 3413 1611 1392 1516 1103 854 435 325 466 624 632 613 574 746 1056 896 778 887 479 284 336 449 329 219 194 +1037 1766 3195 3303 1057 713 324 219 185 163 138 172 160 52 64 99 161 199 154 147 138 137 120 69 134 179 107 127 169 204 92 56 +51 88 159 165 52 35 16 10 9 8 6 8 7 2 3 4 8 9 7 7 6 6 5 3 6 8 5 6 8 10 4 2 +4920 10754 11416 11432 12063 8181 6831 3285 1553 1342 1460 1065 826 419 314 451 605 614 594 556 721 1019 864 749 856 467 276 327 437 324 213 188 +420 481 822 1654 758 236 361 243 232 188 114 130 
174 106 67 176 146 136 245 112 228 200 119 97 159 89 87 99 87 106 119 117 +20 24 41 82 37 11 18 12 11 9 5 6 8 5 3 8 7 6 12 5 11 9 5 4 7 4 4 4 4 5 5 5 +4740 10343 10992 11041 11611 7863 6572 3163 1500 1295 1406 1027 799 406 304 440 586 594 580 538 701 986 834 722 828 451 268 317 423 315 209 185 +405 628 1055 751 899 578 422 249 181 188 206 165 57 33 162 320 149 97 176 145 84 102 104 82 105 102 142 138 112 180 122 109 +20 31 52 37 44 28 21 12 9 9 10 8 2 1 8 15 7 4 8 7 4 5 5 4 5 5 7 6 5 8 6 5 +4566 9954 10594 10629 11182 7571 6326 3046 1447 1250 1358 992 769 391 298 435 568 574 563 522 676 950 804 696 799 437 262 309 410 309 205 181 +282 851 1217 1509 1173 490 334 286 136 111 253 160 179 197 210 208 101 93 102 195 108 96 117 123 87 86 98 107 93 110 134 103 +14 42 60 75 58 24 16 14 6 5 12 7 8 9 10 10 5 4 5 9 5 4 5 6 4 4 4 5 4 5 6 5 +4394 9590 10219 10264 10781 7287 6086 2935 1394 1204 1313 958 745 383 294 425 549 554 544 508 653 915 776 673 770 422 255 300 397 301 202 177 +819 855 1606 1515 506 337 374 150 227 391 236 213 155 142 177 122 127 133 145 164 175 73 56 110 155 103 65 96 105 153 100 76 +40 42 80 75 25 16 18 7 11 19 11 10 7 7 8 6 6 6 7 8 8 3 2 5 7 5 3 4 5 7 4 3 +4251 9240 9874 9914 10370 7009 5857 2823 1347 1171 1269 928 721 373 289 412 532 537 528 494 633 881 747 650 745 409 247 291 385 295 197 172 +711 1316 1455 1012 905 412 192 138 199 401 361 91 42 58 169 162 99 149 158 115 97 97 103 77 100 106 125 149 147 91 86 79 +35 65 72 50 45 20 9 6 9 20 18 4 2 2 8 8 4 7 7 5 4 4 5 3 4 5 6 7 7 4 4 3 +4109 8923 9537 9558 9991 6745 5630 2715 1301 1140 1232 894 693 360 284 402 514 521 513 478 611 849 721 627 719 396 242 285 375 286 192 168 +1094 1462 1938 1324 429 243 289 152 113 90 166 159 83 70 132 232 184 152 76 64 88 101 74 71 60 92 78 98 138 93 83 67 +54 73 96 66 21 12 14 7 5 4 8 7 4 3 6 11 9 7 3 3 4 5 3 3 2 4 3 4 6 4 4 3 +3988 8624 9233 9228 9608 6485 5416 2612 1253 1098 1189 864 668 348 277 395 500 506 495 461 590 819 695 604 692 383 235 277 365 278 187 163 +290 652 765 505 324 204 363 294 171 236 209 132 111 81 100 91 116 147 98 162 107 100 68 76 74 110 140 132 124 78 64 62 +14 32 38 25 16 10 18 14 8 11 10 6 5 4 4 4 5 7 4 8 5 4 3 3 3 5 6 6 6 3 3 3 +3840 8305 8894 8879 9236 6233 5213 2519 1209 1063 1149 834 645 337 269 382 484 491 479 449 570 790 669 582 667 372 231 271 355 270 182 158 +641 569 195 350 435 334 170 264 219 89 163 143 61 43 98 161 139 91 161 172 50 64 92 73 130 156 123 147 146 105 75 72 +32 28 9 17 21 16 8 13 10 4 8 7 3 2 4 8 6 4 8 8 2 3 4 3 6 7 6 7 7 5 3 3 +3712 7995 8546 8538 8884 5997 5011 2428 1169 1024 1109 806 621 325 262 373 470 475 466 437 549 760 645 561 645 363 226 266 346 263 177 154 +368 485 889 1611 1069 430 168 314 288 171 110 100 87 89 100 123 128 103 121 166 113 89 137 99 106 121 94 73 122 121 100 74 +18 24 44 80 53 21 8 15 14 8 5 4 4 4 4 6 6 5 6 8 5 4 6 4 5 6 4 3 6 6 4 3 +3578 7694 8239 8261 8571 5774 4817 2343 1133 989 1069 777 599 315 255 363 456 460 452 426 531 733 624 542 623 353 220 258 337 257 173 150 +489 670 1208 2033 1145 308 115 206 245 103 89 144 92 103 154 122 145 107 113 140 82 96 148 142 141 143 122 120 81 99 84 67 +24 33 60 101 57 15 5 10 12 5 4 7 4 5 7 6 7 5 5 6 4 4 7 7 7 7 6 5 4 4 4 3 +3454 7413 7957 8012 8274 5555 4629 2257 1097 953 1029 751 578 306 250 353 443 445 438 414 513 707 604 526 603 344 216 252 326 250 169 146 +542 875 893 717 171 302 394 403 198 147 235 250 138 105 191 152 185 149 238 169 60 152 185 90 126 271 170 169 122 126 81 61 +27 43 44 35 8 15 19 20 9 7 11 12 6 5 9 7 9 7 11 8 2 7 9 4 6 13 8 8 6 6 4 3 +3337 7151 7674 7720 
7950 5344 4459 2182 1061 920 997 730 560 297 247 344 432 433 430 404 494 684 587 508 583 341 214 248 317 245 165 142 +324 728 813 1178 737 302 379 222 228 207 162 87 82 115 200 104 65 77 99 111 83 118 90 92 134 243 105 167 156 128 98 72 +16 36 40 58 36 15 18 11 11 10 8 4 4 5 9 5 3 3 4 5 4 5 4 4 6 12 5 8 7 6 4 3 +3216 6894 7399 7458 7661 5142 4295 2103 1027 891 963 704 540 289 245 334 417 418 416 392 477 661 567 491 565 337 209 244 310 240 162 139 +602 891 1011 1569 827 447 338 220 146 227 152 135 110 52 72 145 173 90 75 79 117 198 189 106 182 232 152 147 124 117 61 44 +30 44 50 78 41 22 16 10 7 11 7 6 5 2 3 7 8 4 3 3 5 9 9 5 9 11 7 7 6 5 3 2 +3111 6654 7143 7222 7387 4954 4136 2027 991 864 930 681 522 279 238 326 407 404 402 379 462 642 551 475 549 332 206 240 302 235 157 135 +850 1162 952 878 465 497 400 444 234 173 143 168 115 91 57 59 70 88 128 103 110 178 181 102 122 126 132 85 124 150 62 53 +42 58 47 43 23 24 19 22 11 8 7 8 5 4 2 2 3 4 6 5 5 8 9 5 6 6 6 4 6 7 3 2 +3020 6434 6895 6968 7110 4775 3986 1963 960 836 898 660 505 271 230 315 393 391 391 367 447 623 536 460 531 323 203 233 294 231 153 131 +672 775 726 537 356 333 387 209 214 213 205 83 95 87 85 102 86 119 155 183 166 158 148 107 110 107 134 97 150 217 200 172 +33 38 36 26 17 16 19 10 10 10 10 4 4 4 4 5 4 5 7 9 8 7 7 5 5 5 6 4 7 10 46 40 +2926 6207 6648 6710 6839 4597 3842 1892 930 811 870 636 488 263 224 306 380 380 381 359 435 604 520 445 514 314 200 227 288 230 154 132 +347 400 594 737 688 341 183 219 153 210 144 105 67 47 67 122 137 68 71 116 116 114 83 62 117 105 65 108 108 151 133 85 +17 19 29 36 34 17 9 10 7 10 7 5 3 2 3 6 6 3 3 5 5 5 4 3 5 5 3 5 5 7 6 4 +2822 5974 6405 6471 6593 4426 3695 1825 898 786 840 614 471 254 217 298 370 367 368 349 422 584 502 429 498 305 194 222 280 226 153 130 +1200 1469 931 306 589 388 271 362 156 79 190 123 92 28 120 187 89 68 96 154 117 131 163 68 124 100 109 93 96 133 115 99 +59 73 46 15 29 19 13 18 7 3 9 6 4 1 5 9 4 3 4 7 5 6 8 3 6 4 5 4 4 6 5 4 +2757 5793 6186 6224 6352 4264 3558 1766 868 757 814 594 455 244 213 293 358 355 357 341 409 565 488 414 483 296 190 216 272 222 151 128 +276 937 814 892 898 350 91 143 127 154 272 119 185 116 118 72 139 188 115 158 94 116 111 74 97 107 111 96 133 89 77 73 +13 46 40 44 44 17 4 7 6 7 13 5 9 5 5 3 6 9 5 7 4 5 5 3 4 5 5 4 6 4 3 3 +2657 5598 5971 6010 6133 4107 3419 1701 838 732 792 575 444 238 209 284 349 348 347 333 396 547 472 400 467 288 186 211 266 216 148 125 +704 596 648 1602 1071 278 154 137 184 309 313 160 183 91 89 171 134 90 70 107 74 81 58 78 107 92 129 118 139 95 112 86 +35 29 32 80 53 13 7 6 9 15 15 7 9 4 4 8 6 4 3 5 3 4 2 3 5 4 6 5 6 4 5 4 +2578 5398 5758 5833 5930 3953 3288 1638 811 715 772 558 433 232 204 279 340 337 335 323 383 528 455 387 452 280 183 207 260 211 146 123 +665 933 1123 1802 1475 749 254 89 85 302 254 87 85 130 93 203 98 82 153 226 184 238 89 100 123 154 95 129 169 149 101 104 +33 46 56 90 73 37 12 4 4 15 12 4 4 6 4 10 4 4 7 11 9 11 4 4 6 7 4 6 8 7 5 5 +2501 5219 5572 5671 5751 3824 3166 1576 781 698 751 539 419 227 199 275 330 326 327 319 375 516 440 375 438 274 179 203 256 208 144 122 +203 621 927 971 473 315 430 448 185 206 189 72 97 107 113 93 108 101 105 170 124 159 119 134 172 156 104 118 107 65 70 56 +10 31 46 48 23 15 21 22 9 10 9 3 4 5 5 4 5 5 5 8 6 7 5 6 8 7 5 5 5 3 3 2 +2409 5035 5386 5483 5539 3683 3056 1530 757 678 728 520 406 222 195 267 321 317 318 313 364 501 427 365 427 269 176 199 250 202 141 119 +319 459 521 479 349 254 297 306 155 110 141 206 131 105 181 196 153 179 128 144 105 113 116 99 164 266 
169 108 102 80 46 57 +15 22 26 23 17 12 14 15 7 5 7 10 6 5 9 9 7 8 6 7 5 5 5 4 8 13 8 5 5 3 2 2 +2325 4852 5191 5282 5331 3545 2945 1481 732 655 704 507 395 217 194 264 314 311 310 306 353 485 414 354 416 268 175 195 244 197 137 116 +396 693 1320 1112 300 283 179 95 72 244 223 121 75 74 95 75 58 65 82 123 135 110 157 70 108 140 136 148 148 87 78 64 +19 34 65 55 14 14 8 4 3 12 11 6 3 3 4 3 2 3 4 6 6 5 7 3 5 6 6 7 7 4 3 3 +2247 4685 5036 5115 5129 3414 2834 1425 705 638 684 491 382 211 190 256 303 301 300 298 344 470 403 342 403 262 173 193 240 192 134 113 +435 970 989 556 342 429 334 159 96 194 195 92 113 101 120 191 142 72 110 118 138 85 80 63 119 140 95 122 117 110 110 42 +21 48 49 27 17 21 16 7 4 9 9 4 5 5 5 9 7 3 5 5 6 4 3 3 5 6 4 6 5 5 5 2 +2174 4536 4874 4932 4937 3294 2734 1374 680 620 664 475 371 206 187 253 296 291 292 290 335 454 390 330 391 257 169 190 235 188 133 110 +904 790 782 1026 761 430 399 236 167 149 148 86 117 108 101 299 250 88 128 171 62 58 53 74 137 111 99 77 136 93 92 73 +45 39 39 51 38 21 19 11 8 7 7 4 5 5 5 45 12 4 6 8 3 2 2 3 6 5 4 3 6 4 4 3 +2123 4386 4710 4775 4770 3179 2640 1328 659 601 643 459 360 202 183 254 294 282 285 285 324 438 376 319 380 251 166 185 231 184 131 108 +289 427 280 311 438 264 183 263 221 104 156 173 103 166 199 154 158 105 143 146 125 72 97 85 78 132 85 83 157 126 79 56 +14 21 13 15 21 13 9 13 11 5 7 8 5 8 16 7 7 5 7 7 6 3 4 4 3 6 4 4 7 6 3 2 +2049 4227 4532 4596 4596 3062 2541 1285 641 581 623 447 349 200 183 250 288 274 279 279 316 423 364 309 367 246 162 180 228 181 128 105 +254 210 166 222 172 253 378 328 156 226 190 101 146 78 93 90 78 162 105 110 76 75 70 99 147 185 97 86 99 81 85 81 +12 10 8 11 8 12 18 16 7 11 9 5 7 3 4 4 3 8 5 5 3 3 3 4 7 9 4 4 4 4 4 4 +1977 4066 4357 4421 4419 2949 2454 1246 621 566 605 433 340 195 179 243 279 269 272 272 306 409 352 300 358 243 159 176 222 177 126 104 +491 573 538 258 334 298 233 273 112 149 141 107 73 92 145 126 142 91 90 135 131 85 28 78 126 112 105 80 195 170 66 52 +24 28 26 12 16 14 11 13 5 7 7 5 3 4 7 6 7 4 4 6 6 4 1 3 6 5 5 3 9 8 3 2 +1917 3926 4204 4254 4255 2843 2365 1207 600 549 586 419 329 190 177 238 273 261 264 266 299 396 339 291 348 237 156 172 220 176 123 101 +653 1272 790 637 540 266 148 183 95 197 161 113 67 102 138 56 103 100 59 108 71 119 174 92 91 103 76 81 123 76 70 64 +32 63 39 31 26 13 7 9 4 9 8 5 3 5 6 2 5 4 2 5 3 5 8 4 4 5 3 4 6 3 3 3 +1866 3819 4067 4109 4106 2739 2276 1166 579 534 569 406 318 186 175 230 266 254 255 259 289 384 332 283 337 231 152 168 216 172 120 99 +492 1206 1335 752 388 292 238 391 310 229 244 99 54 92 130 146 153 110 163 182 84 84 122 180 224 124 110 87 119 83 89 72 +24 60 66 37 19 14 11 19 15 11 12 4 2 4 6 7 7 5 8 9 4 4 6 8 11 6 5 4 5 4 4 3 +1811 3714 3957 3974 3957 2641 2194 1135 568 521 556 393 307 182 173 226 261 248 251 255 280 372 323 278 332 226 150 164 212 168 118 97 +331 770 1338 869 539 219 175 199 174 283 286 168 152 98 123 111 60 87 68 146 140 111 74 78 156 185 126 83 88 103 71 77 +16 38 66 43 26 10 8 9 8 14 14 8 7 4 6 5 2 4 3 7 6 5 3 3 7 9 6 4 4 5 3 3 +1751 3596 3852 3849 3820 2544 2113 1097 552 511 545 384 300 178 171 221 252 241 243 250 274 361 313 270 324 224 149 160 207 165 116 96 +904 1724 1132 1057 632 536 278 243 204 255 322 105 51 112 67 202 165 60 96 102 140 101 82 90 94 88 83 76 139 135 88 85 +45 86 56 52 31 26 13 12 10 12 16 5 2 5 3 10 8 2 4 5 6 5 4 4 4 4 4 3 6 6 4 4 +1717 3521 3743 3737 3692 2463 2039 1062 538 500 536 372 290 175 166 220 248 233 237 244 268 350 303 262 314 218 146 156 204 163 114 95 +720 942 1489 1182 594 
295 240 240 252 375 188 123 63 64 79 212 135 96 39 90 86 88 80 90 102 98 87 65 112 110 76 107 +35 47 74 59 29 14 11 11 12 18 9 6 3 3 3 10 6 4 1 4 4 4 3 4 5 4 4 3 5 5 3 12 +1677 3417 3652 3634 3568 2376 1967 1029 526 495 522 362 280 170 162 219 243 227 229 237 260 339 294 255 305 213 143 152 200 160 112 95 +348 534 312 284 443 323 245 155 137 290 197 60 59 67 71 63 145 140 155 124 133 102 146 116 93 72 86 122 149 123 50 54 +17 26 15 14 22 16 12 7 6 14 9 2 2 3 3 3 7 6 7 6 6 5 7 5 4 3 4 6 7 6 2 2 +1623 3301 3518 3500 3443 2293 1898 994 510 486 509 349 271 165 158 212 239 223 226 232 254 329 288 249 296 207 140 150 197 158 109 93 +512 772 1152 1137 666 281 148 100 65 203 302 115 51 66 99 140 119 79 128 189 120 86 123 109 141 122 91 135 145 122 125 87 +25 38 57 56 33 14 7 4 3 10 15 5 2 3 4 6 5 3 6 9 5 4 6 5 7 6 4 6 7 6 16 4 +1578 3199 3423 3405 3331 2212 1828 958 492 474 500 339 262 161 155 209 234 217 222 230 248 319 281 243 289 203 138 149 194 156 109 92 +321 464 547 255 348 251 195 210 200 358 311 78 59 56 69 233 257 71 83 188 128 112 126 131 159 112 167 105 77 104 119 81 +16 23 27 12 17 12 9 10 9 17 15 3 2 2 3 24 23 3 4 9 6 5 6 6 7 5 28 5 3 5 10 4 +1527 3089 3308 3279 3211 2133 1762 928 480 469 492 328 253 156 151 209 234 211 216 228 243 310 274 238 283 199 139 147 189 153 109 91 +720 920 981 1156 818 282 165 242 246 379 318 151 106 61 108 90 57 77 78 108 122 91 107 88 76 81 151 101 107 89 61 92 +35 45 49 57 40 14 8 12 12 18 15 7 5 3 5 4 2 3 3 5 6 4 5 4 3 4 12 5 5 4 3 4 +1494 3002 3214 3194 3115 2059 1698 900 470 465 485 320 247 152 149 204 226 205 210 223 238 301 267 232 274 194 139 145 185 150 107 91 +537 470 256 543 321 288 97 116 99 150 165 64 92 127 290 227 164 128 94 221 161 138 110 158 118 147 123 154 269 163 146 107 +26 23 12 27 16 14 4 5 4 7 8 3 4 6 136 23 8 6 4 11 8 6 5 7 5 7 6 9 81 13 38 16 +1455 2900 3095 3088 3003 1988 1633 868 455 452 472 309 240 151 154 204 223 201 205 222 234 294 260 229 267 192 138 145 188 150 108 91 +227 505 406 756 727 347 424 299 119 215 264 215 127 145 324 422 292 158 86 89 178 126 162 136 121 166 177 132 204 190 173 71 +11 25 20 37 36 17 21 14 5 10 13 10 6 7 164 210 67 7 4 4 8 6 8 6 6 8 38 6 16 39 63 3 +1405 2804 2987 2994 2912 1922 1584 845 441 442 463 305 235 150 160 212 225 199 200 216 231 287 256 225 261 190 139 144 188 151 110 90 +122 630 1075 653 385 210 190 220 108 111 225 107 65 110 153 298 218 135 103 102 154 115 134 165 205 159 121 130 144 251 113 77 +6 31 53 32 19 10 9 10 5 5 11 5 3 5 7 83 10 6 5 5 7 5 6 8 10 7 6 6 7 97 5 3 +1353 2717 2910 2900 2810 1853 1528 820 427 428 453 297 228 148 159 215 224 196 196 211 227 280 251 222 258 188 138 143 186 154 110 89 +732 591 736 902 225 170 165 228 87 132 133 200 166 107 136 87 73 206 121 91 101 89 110 123 132 134 118 105 169 159 120 97 +36 29 36 45 11 8 8 11 4 6 6 9 8 5 6 4 3 10 6 4 5 4 5 6 6 6 5 5 8 7 10 8 +1328 2632 2823 2820 2706 1785 1473 796 413 416 440 293 225 146 158 209 217 196 193 206 221 272 245 218 252 185 137 141 185 154 110 89 +264 565 1068 1338 898 395 168 226 98 174 154 84 115 76 145 145 102 92 92 105 78 86 129 102 157 131 140 119 131 150 202 125 +13 28 53 66 44 19 8 11 4 8 7 4 5 3 7 7 5 4 4 5 3 4 6 5 7 6 6 5 6 7 89 35 +1285 2549 2752 2760 2633 1729 1420 773 400 406 428 284 220 143 157 206 212 191 188 201 215 264 240 213 248 182 137 140 182 153 113 90 +649 799 1455 1305 861 403 293 410 294 253 220 146 140 91 197 280 169 187 167 96 143 84 91 92 136 186 118 85 120 182 163 86 +32 39 72 65 43 20 14 20 14 12 10 7 6 4 39 72 8 9 8 4 7 4 4 4 6 9 5 4 5 28 49 4 +1259 2479 2700 2701 2562 1675 
1374 758 395 399 419 278 216 140 158 208 210 190 187 196 212 256 234 208 243 182 136 137 179 154 114 89 +613 777 397 354 301 291 422 534 258 111 144 198 189 70 75 137 167 196 123 83 112 76 59 114 94 126 145 66 74 109 113 62 +30 38 19 17 15 14 21 26 12 5 7 9 9 3 3 6 8 9 6 4 5 3 2 5 4 6 9 3 3 5 5 3 +1233 2410 2607 2607 2471 1619 1335 749 389 387 408 274 214 137 154 205 208 190 184 191 208 248 227 204 237 179 136 134 174 152 113 87 +391 758 918 804 378 308 338 374 141 185 217 106 126 93 57 115 111 126 141 163 66 78 137 101 105 117 111 132 166 73 111 62 +19 37 45 40 18 15 16 18 7 9 10 5 6 4 2 5 5 6 7 8 3 3 6 5 5 5 5 6 8 3 5 3 +1199 2343 2539 2534 2387 1566 1295 734 379 378 400 267 210 135 150 201 204 187 182 189 202 241 223 199 231 176 135 133 173 148 112 86 +748 740 1080 1199 431 171 325 396 231 473 280 77 155 113 132 117 107 71 119 147 167 88 97 118 120 109 75 87 106 88 115 52 +37 36 53 59 21 8 16 19 11 92 13 3 7 5 6 5 5 3 5 7 8 4 4 5 5 5 3 4 5 4 5 2 +1180 2278 2480 2480 2308 1510 1256 720 373 381 395 259 207 134 149 197 200 182 179 187 200 234 217 195 226 173 132 131 170 145 112 84 +257 301 674 1239 478 259 443 517 283 269 189 196 163 83 143 225 119 89 99 154 142 113 173 77 93 108 74 76 82 56 62 51 +12 15 33 61 23 12 22 25 14 13 9 9 8 4 7 27 5 4 4 7 7 5 8 3 4 5 3 3 4 2 3 2 +1143 2198 2407 2430 2234 1459 1223 711 369 376 386 256 205 131 148 198 196 178 175 185 197 229 215 190 220 170 129 128 166 141 110 82 +691 627 999 1028 272 329 356 219 137 105 222 101 54 58 119 155 85 90 84 87 105 97 123 101 140 141 69 74 115 90 65 67 +34 31 49 51 13 16 17 10 6 5 11 5 2 2 5 7 4 4 4 4 5 4 6 5 6 7 3 3 5 4 3 3 +1124 2135 2350 2373 2155 1413 1188 691 359 365 379 249 198 128 146 196 191 174 171 181 193 223 211 186 216 168 126 125 163 138 108 81 +353 317 247 779 406 354 302 212 171 146 147 95 87 91 89 92 113 94 112 91 79 97 75 119 227 325 498 761 762 297 85 63 +17 15 12 38 20 17 15 10 8 7 7 4 4 4 4 4 5 4 5 4 3 4 3 5 11 151 358 611 576 153 4 3 +1093 2062 2265 2309 2085 1370 1152 671 351 356 369 242 193 126 143 191 187 170 168 177 188 217 205 183 216 174 140 150 186 144 107 80 +314 378 1132 1498 774 512 230 102 137 217 339 153 125 197 278 232 133 141 129 219 144 270 299 279 318 376 961 1630 1239 458 208 182 +15 18 56 74 38 25 11 5 6 10 16 7 6 69 130 40 6 7 6 41 7 51 91 93 98 194 789 1421 1011 302 97 98 +1061 1994 2219 2276 2032 1335 1115 648 342 350 367 238 190 128 148 192 184 168 166 178 186 219 208 186 220 182 172 209 228 156 111 84 +354 573 1312 1853 1207 582 252 204 110 153 245 133 104 91 258 237 151 144 162 234 233 221 403 306 310 649 942 1080 779 477 447 214 +17 28 65 92 60 29 12 10 5 7 12 6 5 4 106 44 7 7 8 54 46 11 188 116 87 449 740 837 529 309 323 125 +1032 1937 2182 2259 1999 1304 1080 630 332 342 362 233 186 126 152 193 182 167 165 180 187 219 215 190 223 200 202 243 250 168 124 89 +456 260 435 527 870 467 443 405 125 369 340 116 190 200 102 129 72 65 78 156 174 215 599 981 751 1064 1383 1145 706 532 303 150 +22 12 21 26 43 23 22 20 6 26 16 5 9 72 5 6 3 3 3 7 8 10 369 760 507 830 1134 866 438 350 172 59 +1008 1869 2112 2189 1953 1270 1054 621 323 343 361 228 186 128 150 190 177 162 161 179 186 218 230 221 244 234 249 279 268 182 131 91 +562 729 623 942 885 511 340 247 153 265 162 235 241 67 82 133 132 100 59 99 195 317 444 495 425 632 617 585 667 327 198 140 +28 36 31 47 44 25 16 12 7 13 8 11 53 3 4 6 6 4 2 4 9 96 206 264 174 383 354 294 384 140 65 48 +990 1823 2052 2139 1910 1239 1025 606 316 339 353 228 188 125 147 187 175 159 156 175 186 221 238 231 251 249 263 291 283 187 133 92 +674 974 1124 816 
505 288 155 257 223 164 164 142 83 76 153 202 146 90 118 89 109 65 212 329 468 388 189 458 459 265 99 82 +33 48 56 40 25 14 7 12 11 8 8 7 4 3 7 15 7 4 5 4 5 3 10 95 209 134 9 161 169 75 4 4 +977 1789 2014 2086 1853 1200 990 592 312 332 345 224 183 123 147 187 173 156 154 171 182 214 236 234 259 254 260 297 290 190 131 91 +600 1131 1190 839 454 474 196 251 204 218 310 148 101 59 72 108 158 109 169 218 158 94 291 314 184 328 235 401 335 142 106 83 +29 56 59 41 22 23 9 12 10 10 15 7 5 2 3 5 7 5 15 46 7 4 53 77 9 72 11 100 44 7 5 4 +961 1762 1981 2036 1797 1170 958 578 307 327 343 220 179 120 144 183 172 154 154 172 181 209 238 237 256 256 259 301 291 188 130 90 +611 1620 1606 1747 901 329 200 361 267 471 345 58 72 106 121 145 120 130 206 174 116 95 182 234 289 521 288 283 231 108 139 92 +30 80 80 87 45 16 9 18 13 139 17 2 3 5 6 7 5 6 50 8 5 4 9 11 32 255 28 14 11 5 9 4 +947 1756 1966 2024 1761 1136 927 569 305 332 343 213 174 119 143 181 169 153 156 172 178 204 235 236 257 266 260 300 288 184 130 90 +481 1334 1408 831 240 170 209 295 150 286 268 68 102 98 56 128 104 61 105 251 95 59 96 169 222 110 230 326 333 142 113 128 +24 66 70 41 11 8 10 14 7 14 13 3 5 4 2 6 5 3 5 76 4 2 4 8 11 5 11 25 44 7 5 37 +928 1739 1943 1976 1700 1097 898 558 298 330 340 207 171 118 139 178 166 149 153 175 174 198 229 233 255 259 258 301 289 182 129 91 +644 647 653 360 566 434 85 66 155 245 231 98 117 123 63 82 59 82 119 126 75 85 141 128 195 136 143 186 119 97 124 75 +32 32 32 17 28 21 4 3 7 12 11 4 5 6 3 4 2 4 5 6 3 4 7 6 9 6 7 9 5 4 6 3 +916 1695 1891 1911 1654 1070 865 538 292 326 335 202 168 118 135 174 161 146 151 173 170 193 225 228 252 254 253 296 282 178 128 90 +836 835 739 1017 437 564 410 257 135 360 434 137 56 52 101 125 93 93 175 115 95 75 94 82 160 143 84 94 142 80 94 70 +41 41 36 50 21 28 20 12 6 33 96 6 2 2 5 6 4 4 24 5 4 3 4 4 7 7 4 4 7 3 4 3 +912 1660 1844 1875 1605 1049 846 526 285 327 338 199 163 115 133 172 158 143 151 170 167 188 219 222 248 249 246 287 276 174 126 89 +311 444 526 892 445 178 190 205 197 186 147 69 101 54 60 84 59 133 164 152 142 86 111 46 76 97 73 97 124 104 97 66 +15 22 26 44 22 8 9 10 9 9 7 3 5 2 2 4 2 6 13 7 7 4 5 2 3 4 3 4 6 5 4 3 +887 1611 1791 1835 1558 1014 819 513 281 321 330 193 160 112 130 168 154 142 151 169 166 183 214 214 241 242 239 279 269 171 124 88 +962 1348 1216 843 509 351 226 299 196 173 133 76 89 81 68 66 84 100 96 134 173 185 126 123 144 90 76 103 116 97 73 77 +73 67 60 42 25 17 11 14 9 8 6 3 4 4 3 3 4 4 4 6 8 9 6 6 7 4 3 5 5 4 3 3 +889 1600 1768 1795 1516 987 795 504 277 315 322 188 157 110 127 163 151 140 148 167 166 183 210 210 237 235 232 271 262 168 121 87 +535 615 381 620 689 450 214 147 93 218 299 121 50 55 154 156 77 48 101 164 110 120 103 102 139 120 74 81 61 96 79 60 +26 30 19 30 34 22 10 7 4 10 14 6 2 2 26 7 3 2 5 8 5 5 5 5 6 5 3 4 3 4 3 2 +874 1560 1712 1748 1482 965 771 489 269 311 321 185 152 107 128 162 148 136 146 166 163 180 205 205 233 230 225 263 253 165 119 85 +253 906 1251 1046 313 208 212 122 61 66 109 110 59 86 206 223 144 58 114 129 87 140 190 134 137 172 65 67 103 89 49 43 +12 45 62 52 15 10 10 6 3 3 5 5 2 4 75 59 7 2 5 6 4 6 9 6 6 8 3 3 5 4 2 2 +849 1533 1693 1719 1435 934 748 474 260 301 312 182 148 106 131 164 147 132 144 164 159 178 204 202 229 227 218 255 247 161 116 83 +189 375 477 595 242 389 437 382 212 158 124 85 148 162 178 231 137 39 88 116 122 120 86 82 98 157 124 120 94 91 93 52 +9 18 23 29 12 19 21 19 10 7 6 4 7 54 46 65 6 1 4 5 6 5 4 4 4 7 6 5 4 4 4 2 +822 1486 1644 1674 1387 912 735 470 258 295 304 178 148 108 132 
166 146 128 141 162 157 175 199 197 223 224 214 249 240 158 115 81 +138 524 516 672 487 230 209 207 181 282 162 138 121 102 101 160 188 117 95 117 162 118 118 122 127 215 180 137 179 108 56 62 +6 26 25 33 24 11 10 10 9 14 8 6 6 5 5 7 41 5 4 5 8 5 5 6 6 10 8 6 8 5 2 3 +794 1447 1598 1633 1351 884 713 459 254 294 298 176 146 107 130 165 147 127 139 160 157 172 195 194 219 223 212 244 237 156 112 80 +713 1171 1945 1938 629 489 332 165 117 134 195 189 111 51 62 113 141 122 104 77 102 50 50 73 149 103 81 67 74 91 115 62 +35 58 334 293 31 24 16 8 5 6 9 13 5 2 3 5 7 6 5 3 5 2 2 3 7 5 4 3 3 4 5 3 +790 1435 1611 1645 1322 868 697 447 248 287 293 176 144 104 127 162 146 126 137 156 154 167 189 189 216 218 206 236 230 153 112 79 +325 426 840 1858 985 271 188 238 216 244 154 38 57 58 104 88 59 85 75 52 47 89 95 68 96 96 102 87 123 78 77 51 +16 21 41 205 49 13 9 11 10 12 7 1 2 2 5 4 2 4 3 2 2 4 4 3 4 4 5 4 6 3 3 2 +771 1394 1580 1653 1308 844 676 438 246 285 287 170 140 102 126 159 142 124 134 151 149 163 185 184 211 213 201 230 225 150 110 77 +264 309 570 1022 534 199 258 225 126 127 115 99 75 53 79 168 126 122 64 73 83 83 112 71 88 99 101 121 158 95 68 37 +13 15 28 51 26 9 12 11 6 6 5 4 3 2 3 9 6 6 3 3 4 4 5 3 4 4 5 6 7 4 3 1 +750 1350 1539 1627 1277 818 659 429 241 278 280 167 137 100 124 159 141 123 131 147 146 159 182 179 206 208 197 225 222 147 108 75 +579 372 428 1313 1056 296 319 331 185 309 264 55 67 49 61 61 81 96 68 76 57 82 80 66 104 35 42 58 111 95 63 71 +28 18 21 65 52 14 15 16 9 30 13 2 3 2 3 3 4 4 3 3 2 4 3 3 5 1 2 2 5 4 3 3 +743 1310 1494 1614 1268 797 645 425 238 279 279 162 134 97 121 155 138 121 128 144 142 155 177 174 201 201 190 218 217 144 106 74 +393 1062 1888 2099 707 216 71 188 296 298 164 90 56 65 131 131 133 163 86 42 90 89 55 89 139 149 83 77 133 102 92 82 +19 53 379 466 35 10 3 9 56 19 8 4 2 3 10 6 6 41 4 2 4 4 2 4 6 7 4 3 6 5 4 8 +729 1300 1509 1633 1245 773 622 415 240 279 274 159 130 95 121 154 137 122 126 139 139 152 172 170 198 198 185 212 213 142 105 74 +1016 1057 642 1347 751 189 109 150 251 344 163 176 106 47 96 96 135 155 89 180 94 72 56 71 133 175 91 76 158 131 75 52 +276 52 32 67 37 9 5 7 12 63 8 17 5 2 4 4 6 32 4 40 4 3 2 3 6 8 4 3 7 6 3 2 +740 1290 1474 1621 1225 749 601 404 240 281 269 159 129 93 120 151 136 123 124 140 137 148 167 166 195 197 181 206 210 141 103 73 +584 592 454 341 448 248 307 200 105 101 98 157 99 64 99 59 64 93 90 131 62 75 92 122 193 128 79 88 127 142 90 69 +29 29 22 17 22 12 15 9 5 5 4 7 4 3 4 2 3 4 4 6 3 3 4 6 9 6 3 4 6 7 4 3 +733 1262 1433 1569 1193 728 589 395 234 273 262 158 127 91 119 147 133 121 122 139 134 145 164 164 194 194 176 201 206 141 102 72 +510 567 977 680 215 241 351 333 135 211 185 94 211 197 92 75 91 131 99 38 43 62 112 155 238 223 134 226 224 77 52 62 +25 28 48 33 10 12 17 16 6 10 9 4 81 102 4 3 4 10 4 1 2 3 5 7 43 28 6 25 18 3 2 3 +724 1234 1414 1533 1153 708 579 392 230 270 258 155 130 95 117 144 131 121 121 134 130 141 161 163 195 195 174 201 206 138 100 71 +453 553 601 853 348 174 233 172 157 157 238 153 135 93 55 126 182 105 132 118 100 58 105 119 168 309 168 215 150 114 123 82 +22 27 30 42 17 8 11 8 7 7 11 7 6 4 2 6 49 5 11 5 4 2 5 5 8 110 8 14 7 5 23 11 +713 1206 1381 1505 1120 686 565 383 227 265 257 154 130 94 114 143 133 120 121 133 128 137 158 161 193 199 173 201 203 137 100 71 +728 726 486 902 546 220 250 393 197 139 153 69 79 100 135 146 154 157 162 187 138 98 151 142 138 281 318 151 164 99 80 77 +36 36 24 45 27 10 12 19 9 6 7 3 3 6 21 7 21 36 40 52 10 4 7 7 6 79 140 7 8 4 3 6 +713 1186 1345 1480 1097 667 
552 383 225 259 252 150 127 94 114 143 133 121 122 135 128 135 157 160 190 202 178 199 201 135 99 71 +1795 998 739 1209 690 219 448 658 425 238 207 133 188 114 96 76 197 170 109 70 81 63 110 159 290 241 233 132 106 86 70 45 +1039 49 36 60 34 10 22 265 193 11 10 6 59 20 4 3 62 48 5 3 4 3 5 7 97 38 53 6 5 4 3 2 +756 1178 1320 1469 1080 649 547 393 232 258 250 149 129 94 113 140 135 122 121 132 126 132 155 159 193 203 180 196 197 133 97 69 +852 443 644 425 235 316 408 363 215 164 73 86 70 43 87 121 79 85 103 108 68 84 130 107 129 116 83 61 136 91 88 36 +93 22 32 21 11 15 20 18 10 8 3 4 3 2 4 6 3 4 5 5 3 4 6 5 6 5 4 3 6 4 4 1 +759 1148 1292 1427 1046 635 541 391 231 254 242 146 126 91 111 139 132 120 120 131 123 130 154 156 190 199 176 190 194 131 96 67 +3852 5589 5618 5255 3047 2897 3928 4843 4855 4377 4879 4303 3013 1819 1146 1223 4155 6248 4420 3126 5333 6712 6764 9664 12147 12942 11993 11181 10118 6528 3383 2268 +2970 4264 4154 3675 1922 2172 3252 4275 4440 3959 4452 3991 2772 1659 994 1041 3863 5884 4129 2876 5002 6319 6346 9128 11479 12234 11345 10552 9528 6142 3156 2114 +882 1325 1464 1580 1125 725 676 568 415 418 427 312 241 160 152 182 292 364 291 250 331 393 418 536 668 708 648 629 590 386 227 154 +9303 14260 21318 16292 3220 2585 4574 8612 6537 4533 10541 7329 4902 2721 1352 4501 10971 12726 8552 4958 7921 9271 8917 17858 18586 14925 16147 16303 13431 8991 7364 2933 +8085 12418 19061 14124 2012 1786 3743 7723 5878 3951 9710 6737 4475 2459 1153 4147 10253 11868 7931 4520 7287 8524 8160 16630 17202 13649 14880 15048 12328 8261 6852 2668 +1218 1842 2257 2168 1208 799 831 889 659 582 831 592 427 262 199 354 718 858 621 438 634 747 757 1228 1384 1276 1267 1255 1103 730 512 265 +5599 7777 8948 8459 3630 1370 1663 2038 1322 1958 2180 1294 1668 1159 1346 2583 1540 699 783 992 885 1079 1252 1144 998 440 575 968 1533 1327 834 481 +4206 5698 6424 6040 2326 549 799 1104 637 1321 1296 674 1192 862 1102 2140 790 34 156 532 241 319 476 57 49 21 28 48 413 574 310 208 +1393 2079 2524 2419 1304 821 864 934 685 637 884 620 476 297 244 443 750 851 627 460 644 760 776 1224 1368 1242 1239 1243 1120 753 524 273 +1705 797 1494 2710 1811 924 1375 1550 489 398 587 751 413 634 667 911 751 417 355 312 385 450 316 306 583 379 488 380 478 713 446 262 +300 39 74 280 487 99 491 592 24 19 29 126 20 324 407 450 37 20 17 15 19 22 15 15 29 18 24 18 23 35 22 13 +1405 2027 2482 2430 1324 825 884 958 677 627 872 625 473 310 260 461 750 833 616 454 633 747 757 1187 1336 1207 1208 1208 1094 751 520 272 +961 1210 1035 1258 1052 437 1066 1508 687 608 724 1169 1043 365 528 799 334 284 307 232 441 500 431 536 374 421 447 510 316 573 388 189 +48 60 51 62 52 21 175 529 34 30 36 523 548 53 258 325 16 14 15 11 22 24 21 26 18 21 22 25 15 28 19 9 +1387 1994 2424 2383 1313 809 891 979 677 626 866 646 495 312 270 474 733 811 603 445 625 737 743 1160 1297 1175 1177 1180 1062 743 514 268 +1028 829 899 829 851 352 548 930 615 518 555 522 819 343 305 621 361 289 327 352 534 615 355 283 295 459 835 1034 450 362 317 255 +51 41 44 41 42 17 27 46 30 25 27 26 312 30 34 142 18 14 16 17 26 30 17 14 14 22 41 51 22 18 15 12 +1372 1947 2363 2320 1294 790 877 977 674 621 853 641 507 313 271 479 718 790 591 441 621 732 727 1124 1256 1146 1163 1174 1037 727 506 267 +528 466 610 805 813 438 830 859 358 535 635 338 317 245 581 581 207 252 246 224 468 363 343 183 400 507 682 828 356 337 167 130 +26 23 30 40 40 21 41 42 17 26 31 16 15 12 298 98 10 12 12 11 23 18 17 9 19 25 34 41 17 16 8 6 +1338 1887 2292 2259 1274 775 875 972 661 617 844 628 499 310 283 
483 697 768 577 432 614 717 711 1086 1221 1120 1143 1160 1009 711 492 261 +160 407 500 780 397 168 198 322 269 516 974 527 210 171 450 295 514 271 354 416 321 342 265 443 396 375 620 642 478 388 200 118 +7 20 24 38 19 8 9 16 13 25 125 26 10 8 161 14 25 13 17 20 16 17 13 22 19 18 30 32 23 19 9 5 +1290 1827 2220 2199 1238 750 847 946 645 612 849 623 487 304 289 475 689 748 568 431 602 702 693 1060 1188 1090 1122 1139 987 698 480 255 +586 434 599 812 387 228 271 461 263 225 520 359 104 96 326 282 345 284 409 286 219 202 318 239 345 251 306 323 385 370 129 59 +29 21 29 40 19 11 13 23 13 11 25 17 5 4 36 14 17 14 20 14 10 10 15 11 17 12 15 16 19 18 6 2 +1261 1771 2155 2143 1203 729 823 926 629 596 835 612 471 295 290 467 675 729 561 425 586 682 678 1027 1154 1056 1089 1106 962 684 465 247 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/testdata/filter_bank_square_root_test1.txt b/python/tflite_micro/signal/ops/testdata/filter_bank_square_root_test1.txt new file mode 100644 index 00000000000..52359355355 --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/filter_bank_square_root_test1.txt @@ -0,0 +1,549 @@ +6808147337 6255084086 2891865000 2964076648 2152163566 4144033795 10661119507 4427861633 282035520 151567251 385304142 152174611 166730384 142057431 185744758 182072626 828522087 2395432148 1217206879 232537562 325734697 171238639 102462894 271430791 75533907 128654737 112787255 157602543 360207890 755391391 4499305573 15049445959 +8 +322 308 210 212 181 251 403 259 65 48 76 48 50 46 53 52 112 191 136 59 70 51 39 64 33 44 41 49 74 107 262 479 +6378386839 4548260906 11745372396 8847683212 2353335503 908091718 599237014 530730360 116378469 117647851 110088584 82866604 148664412 20503308 17929649 73408437 1395183501 1713776676 195145610 52568541 136448881 102335389 46295364 41565888 30128609 24970519 26501226 64557492 47855937 59369896 1044206336 1873359366 +7 +623 526 846 734 378 235 191 179 84 84 81 71 95 35 33 66 291 323 109 56 91 79 53 50 42 39 40 62 54 60 252 338 +11830312965 10104483464 4254083699 4920784847 2691689024 514534283 177627123 73108160 7520576 22913015 17738822 2856491 2207854 1622477 3236693 2813354 6773105 8917610 13778260 2543654 2800499 3222588 3140880 1513508 1400454 2452535 2226265 4165951 1718554 1174583 1182831 2684807 +5 +3398 3141 2038 2192 1621 708 416 267 85 149 131 52 46 39 56 52 81 93 116 49 52 56 55 38 36 48 46 63 40 33 34 51 +11623254889 8535490010 36414517404 64555171414 8836025063 1207586430 168216022 203614811 53674241 28510524 18362880 29386172 11839398 1544701 3512255 27304657 108594872 74550693 19234123 22405562 24082157 16258164 8165249 15319916 11471675 14674879 7845527 20555044 27266014 29452154 139629499 213738262 +6 +1684 1443 2981 3969 1468 542 202 222 114 83 66 84 53 19 29 81 162 134 68 73 76 63 44 61 52 59 43 70 81 84 184 228 +242927331 17543652245 38377448409 30553590940 6023909610 512823860 125468738 86794564 88280568 57145279 111929488 37566392 47881879 2862718 1824794 25138320 39015764 13264948 31347231 13983838 11246708 10177784 15095122 27645495 10121102 10956097 16588418 31346023 31687207 49247267 599129402 1342925070 +6 +243 2069 3060 2731 1212 353 175 145 146 118 165 95 108 26 21 78 97 56 87 58 52 49 60 82 49 51 63 87 87 109 382 572 +3231767450 4747403802 6992857805 4776356668 1241977653 534133360 391355049 213046864 107540337 21753905 17939752 38069356 31690049 4514783 55902149 1239880908 4918683647 2484327852 460206148 106425571 11044392 16160905 8648933 6034216 14180674 6064888 18316917 14048399 26288688 
24529777 20173858 12665313 +6 +888 1076 1306 1079 550 361 309 228 162 72 66 96 87 33 116 550 1095 778 335 161 51 62 45 38 58 38 66 58 80 77 70 55 +4585847527 2021955256 1659284130 2685462119 4107222329 1364536777 199134611 200880165 49038648 52179774 18199976 46830007 11684223 7003474 22309520 2259253004 8407756079 1304425202 191272625 10088876 16150624 7565978 4425560 19235510 21743127 20528309 29886565 8985334 17701920 29591094 12319506 7616931 +6 +1058 702 636 809 1001 577 220 221 109 112 66 106 53 41 73 742 1432 564 216 49 62 42 32 68 72 70 85 46 65 85 54 43 +11634553468 8958534760 32003651588 34529211612 9519445522 2842541748 1829328478 1395048731 471706024 263224460 58157172 48819653 64638154 15816622 118009888 169953303 1027412306 902228022 77795336 54973830 105900863 63035189 78178380 39164873 27167227 38390605 37860244 29174662 80574766 86316097 41953147 22529613 +7 +842 739 1397 1451 762 416 334 291 169 126 59 54 62 31 84 101 250 234 68 57 80 62 69 48 40 48 48 42 70 72 50 37 +2271965329 6161315138 24998827233 16746513325 474241037 268787780 515959467 585633622 137077066 37289369 104964050 27817360 22992018 19750183 25461499 97202988 249884704 77757616 46525943 55814543 9547193 1893661 2911526 8614811 9535752 10862368 4529579 6153510 5157694 5112980 3717683 3512177 +6 +744 1226 2470 2022 340 256 354 378 182 95 160 82 74 69 78 154 247 137 106 116 48 21 26 45 48 51 33 38 35 35 30 29 +10528000193 28362909357 47577133750 8466055850 5842710800 2350911449 2989811430 2646718839 515262774 276394561 469831522 55815334 28232446 11591835 40329249 67658028 183446654 323189165 117473797 41339272 25846050 12428673 18670978 22521722 78477733 54207503 25150296 43098592 28211625 15736687 20990296 17907031 +7 +801 1315 1704 718 597 378 427 401 177 129 169 58 41 26 49 64 105 140 84 50 39 27 33 37 69 57 39 51 41 30 35 33 +3689126303 5693478408 14510220226 11218983977 3321541259 990060296 505154293 874521146 125327571 13113319 48126300 14248109 5581766 5645895 10369126 11242595 68352305 71801379 30425732 18539146 5256846 5242451 21773148 12653302 17721172 7015540 9536686 7889633 9035362 7238258 9009320 5155487 +6 +949 1178 1882 1655 900 491 351 462 174 56 108 58 36 37 50 52 129 132 86 67 35 35 72 55 65 41 48 43 46 42 46 35 +1348592956 1179063878 6771717768 16973128069 2815542073 1746395005 913338332 674226143 122658832 123730551 33462594 13125255 7390858 4482455 14859657 15858428 10849103 15563708 33897074 28026839 5251424 8823330 7148914 23003498 6074383 2804360 4174611 2222401 16262269 6387177 4798964 2180319 +6 +573 536 1285 2035 829 652 472 405 173 173 90 56 42 33 60 62 51 61 90 82 35 46 41 74 38 26 31 23 63 39 34 23 +2210794846 386482292 2404509378 9466226320 4263081872 1249634934 315809414 505244943 145503189 257422228 98304260 25161953 31785652 4789663 16892084 13772237 24930132 69721305 27585705 14155631 6507418 7605749 7675786 6396648 13642936 24142622 7618359 9672939 16433302 9900030 6689766 2896092 +6 +734 307 766 1520 1020 552 277 351 188 250 154 78 88 34 64 57 78 130 82 58 39 43 43 39 57 76 43 48 63 49 40 26 +11070760676 5775306959 9366331081 3135789885 2922599572 1585815270 3060009695 1583948185 1107647025 4112297679 1229075584 76065393 96941802 90410526 54803188 108849892 30251308 109369301 151762458 72371138 121687651 75986837 105014058 81852362 86673720 59088946 41419995 21865918 24559283 47824461 49504106 19270102 +7 +822 593 756 437 422 311 432 310 260 500 273 68 76 74 57 81 42 81 96 66 86 68 80 70 72 60 50 36 38 54 54 34 +241288346 2791500496 3518365639 16842694315 10321741892 
2856500415 3612347426 5556148044 3354278605 4137345289 847544054 347669177 393556856 118965162 66723550 97275871 288151003 169933912 243391109 93516383 100978467 138474361 52860527 135271105 117012607 138748503 66067470 99403751 64978722 71727089 104829244 37967637 +7 +121 412 463 1013 793 417 469 582 452 502 227 145 154 85 63 77 132 101 121 75 78 91 56 90 84 92 63 77 62 66 79 48 +8196217789 1957025137 4105133346 8254685747 2845377226 2070848339 507309560 673252806 149784914 187081437 137602536 132462889 43177557 19527115 10046263 32317284 32735405 11963013 19920883 20458214 5512408 18694013 26232132 16410094 38688435 28834970 10315897 31713278 27728808 20097256 11833634 17320225 +6 +1414 691 1001 1419 833 711 351 405 191 213 183 179 102 69 49 88 89 54 69 70 36 67 80 63 97 83 50 87 82 70 53 65 +6329789845 4222935529 1589637646 3988540345 3508973056 550883596 965366228 1570610251 548279819 497984197 744534210 69130211 16423215 1749836 8651550 25229284 47071422 89832514 75645986 55604185 22717076 42775713 60916084 33057526 58017351 21099429 23609223 26573470 57712926 32231803 7116200 9828388 +6 +1243 1015 622 986 925 366 485 619 365 348 426 129 63 20 45 78 107 148 135 116 74 102 121 89 119 71 75 80 118 88 41 48 +14773238038 14617819759 4404399640 6987944791 5163371075 1020149550 624962766 762177842 446908030 313892902 83094210 14168330 9434170 2986072 2245143 19789669 404318747 1101402493 465285483 130434468 10237462 2199612 5350454 8421955 8590531 3057048 1937279 4054659 4873241 1751857 752879 960938 +4 +7596 7556 4147 5224 4491 1996 1562 1725 1321 1107 569 235 192 108 93 278 1256 2074 1348 713 200 92 144 181 183 109 87 125 138 82 54 61 +24009075119 13217887400 6396689913 21190770700 8294492884 4251870864 5216966340 3338427807 739078305 548040252 444139732 42335812 15593099 2661234 12177824 41844984 1378487966 1734679305 340220206 134774160 16109355 4278460 3642201 42200780 29648878 3170056 1839418 2082559 955372 928552 3900984 7950512 +3 +19368 14371 9997 18196 11384 8150 9028 7222 3398 2926 2634 813 493 203 436 808 4641 5206 2305 1451 501 258 238 812 680 222 169 180 122 120 246 352 +4296266726 1990800203 15741493481 21776392145 6161477790 25100571160 13523866196 1562085004 926458893 431900830 562589786 44640973 20067177 318044 17722393 130937341 288588039 41745443 34804807 9009351 8845760 2076364 1712301 9103885 7045572 1075962 865130 2855220 2643046 1189154 3098130 5556710 +2 +16386 11154 31366 36892 19623 39608 29073 9880 7609 5195 5929 1670 1120 141 1052 2860 4247 1615 1475 750 743 360 327 754 663 259 232 422 406 272 440 589 +605954803 1045895109 8596982149 9183189032 4439201333 14092612619 3806964669 523005046 366555176 145508503 126483818 22129461 13971339 207660 12416315 23292080 54024445 14728020 1572314 595195 1052889 212395 485132 1666417 711176 390875 1135514 2178611 2380866 1231538 1921926 1900340 +1 +12308 16170 46360 47914 33313 59356 30850 11434 9573 6031 5623 2352 1869 228 1762 2413 3675 1919 627 385 513 230 348 645 421 312 533 738 771 555 693 689 +1209456858 1453754383 13727565604 16544725636 6709475864 22660985989 6904878652 189964515 200209624 56130992 87725944 8378007 4353578 948273 9223142 51480079 56796182 21173742 3528563 837291 489459 322404 435002 748647 434285 281370 910977 4202033 7081305 1029846 1047050 2781397 +1 +17388 19064 58582 64313 40955 75268 41548 6891 7075 3746 4683 1447 1043 487 1518 3587 3768 2300 939 457 350 284 330 432 329 265 477 1025 1330 507 511 834 +1093863859 2865888829 20214480262 22044372580 5604611882 11607364828 3311838971 19068201 
54530851 36742630 52345102 7983694 2556635 2499466 14565655 35160346 46501016 35320368 1086201 1970274 319757 282071 78717 220565 283731 175340 823574 2847149 4486969 634424 1734847 3915481 +1 +16537 26767 71089 74236 37432 53868 28774 2183 3692 3031 3617 1413 799 790 1908 2965 3409 2971 521 702 282 265 140 235 266 209 454 843 1059 398 658 989 +947952750 7969309015 20746730841 13366811972 5981303880 3213009662 1562281490 816638907 353432806 126281909 47348624 18231457 18266153 9009828 7898301 8834641 42185198 34482319 3724320 3275706 846070 676290 223381 1076580 1155901 1289872 1523173 2054241 3496973 1520634 337294 647382 +2 +7697 22317 36009 28903 19334 14170 9881 7144 4700 2809 1720 1067 1068 750 702 743 1623 1468 482 452 230 205 118 259 268 284 308 358 467 308 145 201 +2150785063 1588949617 4827036733 4138871917 4668325703 12969199872 3868202669 192520894 424089128 255936015 203969230 40711251 43907143 29075112 25164731 22623862 17119766 87612575 21113733 2437026 954286 843476 687519 1225596 491323 2335704 2809254 3100201 2966911 1373946 1803698 6735683 +3 +5797 4982 8684 8041 8540 14235 7774 1734 2574 1999 1785 797 828 674 627 594 517 1170 574 195 122 114 103 138 87 191 209 220 215 146 167 324 +3163230914 644216959 703830900 2242195256 18498963362 38940581179 8114251229 1166614441 1493394923 193398575 267758980 210671975 75687386 22816163 36888732 69359382 107543927 173260053 37244740 7834191 1550610 2189149 7825768 9354076 1405989 5027645 5830452 15548178 28564770 17439516 14443692 13628940 +4 +3515 1586 1658 2959 8500 12333 5629 2134 2415 869 1022 907 543 298 379 520 648 822 381 174 77 92 174 191 74 140 150 246 334 261 237 230 +5067109681 12859186909 21136876749 9521989451 20115499549 43570635842 7904527964 2368341719 2283411614 380756767 575051532 229801984 92521323 26417836 96623487 138019913 30346884 679651136 654787263 60210513 12767069 12181393 28904429 26887267 16411142 11205753 30774139 40684513 127664601 74532854 42521063 30672949 +5 +2224 3543 4543 3049 4432 6523 2778 1520 1493 609 749 473 300 160 307 367 172 814 799 242 111 109 168 162 126 104 173 199 353 269 203 173 +1562655190 21446677639 34731971521 8654492932 6858486424 16109817961 2287003827 175389713 321662840 95315609 154988208 295608443 192992610 42215645 64078006 121609512 64579822 384850176 864670893 80885618 21645156 42949254 40877181 11540781 9656180 22741349 26694933 39576780 27645449 11218720 9050111 6099228 +5 +1235 4576 5823 2907 2588 3966 1494 413 560 305 389 537 434 203 250 344 251 613 918 281 145 204 199 106 97 149 161 196 164 104 94 77 +1179864909 4438484312 3337492275 10773972694 9690901207 17806062416 4403479604 300948683 254174295 918139968 599934592 193487700 149701042 9859033 13825897 44957753 134314275 1809100517 2473691731 232577496 145384776 61370824 38002572 43365701 30284484 7384129 14936535 23393597 50137944 34535972 15072053 5795591 +5 +1073 2081 1805 3243 3076 4169 2073 542 498 946 765 434 382 98 116 209 362 1329 1554 476 376 244 192 205 171 84 120 151 221 183 121 75 +379965892 393641315 1159491576 957166597 503832049 927595211 532474510 187713079 104200224 118561469 67707142 34565047 9777026 4133484 48683812 120798722 385450809 711914256 1534630396 4247719002 6605796171 3321302572 142909374 46979965 41371797 5428256 2691494 10025124 14492291 23564820 65698922 21418945 +3 +2436 2480 4256 3867 2805 3807 2884 1712 1276 1361 1028 734 390 254 872 1373 2454 3335 4896 8146 10159 7203 1494 856 804 291 205 395 475 606 1013 578 +50489734 54067535 148798789 175813464 579095728 2635338393 5990297540 
5678906428 1478307664 343315467 105382572 49482409 129459599 24600130 143176782 103624298 239459514 899481722 2054853390 3809893642 10888309912 3282179924 1561099261 146734330 191627892 92045461 8267458 8072209 341897971 726277657 579229412 198148117 +3 +888 919 1524 1657 3008 6417 9674 9419 4806 2316 1283 879 1422 620 1495 1272 1934 3748 5666 7715 13043 7161 4938 1514 1730 1199 359 355 2311 3368 3008 1759 +1001685788 746379295 3120507595 12674081026 18617495547 16495225865 13783334513 6426636828 258352888 214884363 519203490 732337683 268748585 82861456 353754294 290416683 782930021 2201466361 2794374570 1834726522 2679756280 2930227311 1682665060 942156007 1313576578 347830410 45808259 85053499 964120244 1587740690 542068834 754676225 +4 +1978 1707 3491 7036 8527 8027 7337 5010 1004 916 1424 1691 1024 568 1175 1065 1748 2932 3303 2677 3235 3383 2563 1918 2265 1165 423 576 1940 2490 1455 1716 +7000714511 12606617750 7670385710 2308146020 3140639338 3977832063 2344885002 274558122 123010642 150749898 91901636 16249429 5981427 2777514 17349644 7798497 17836581 69400993 76902511 155430903 109242398 125647742 118696369 195676485 320175501 29279837 5693732 4665549 80570126 110458961 43238454 33928549 +2 +20917 28069 21895 12010 14010 15767 12106 4142 2772 3069 2396 1007 611 416 1041 698 1055 2082 2192 3116 2613 2802 2723 3497 4473 1352 596 540 2244 2627 1644 1456 +5143990038 4153995263 3359220824 18692424053 18853862091 5915158204 4611446955 2935754086 246522545 212786044 73487302 43412398 12454365 3913636 1507440 11968048 52157752 201422122 143619492 274890780 551805067 557731578 571234780 528233559 720591179 144674549 4367463 5183587 29278078 4504694 1329919 3945004 +1 +35861 32225 28979 68360 68654 38455 33954 27091 7850 7293 4286 3294 1764 989 614 1729 3611 7096 5992 8290 11745 11808 11950 11491 13422 6014 1045 1138 2705 1061 576 993 +737086475 199117112 1670438004 10741692241 3356388559 1768674154 5397602132 1312352655 102546050 47396047 7471078 7495982 6128779 2005378 658898 6712874 38438122 20278300 62843068 194318965 151369486 988510650 493465296 270768194 581474146 75460711 2947419 3483859 16066288 2640964 220569 1229451 +0 +27149 14111 40871 103642 57934 42056 73468 36226 10127 6884 2733 2738 2476 1416 812 2591 6200 4503 7927 13940 12303 31441 22214 16455 24114 8687 1717 1867 4008 1625 470 1109 +588176485 240583144 2884339303 9933891889 2127262302 5893697049 10187731939 970843117 202938032 52048018 10182704 2505728 11710226 3340048 743403 8872557 16728611 19896564 79197341 110845859 308742005 1700737148 216131009 279268534 212892002 46221585 3472275 2484835 12199449 3552489 276650 1095028 +0 +24252 15511 53706 99669 46122 76770 100934 31158 14246 7214 3191 1583 3422 1828 862 2979 4090 4461 8899 10528 17571 41240 14701 16711 14591 6799 1863 1576 3493 1885 526 1046 +229209565 325939649 4026845668 6746593896 1062037748 13185922822 12366346307 774579081 193349694 34146309 30290866 7585986 8640955 1200927 1094759 5399523 14986773 24429908 46767311 55975549 488988572 797725628 217726324 154908948 324268385 165305048 1940927 5406041 19815318 2349459 458869 1565114 +0 +15140 18054 63457 82138 32589 114830 111204 27831 13905 5843 5504 2754 2940 1096 1046 2324 3871 4943 6839 7482 22113 28244 14756 12446 18007 12857 1393 2325 4451 1533 677 1251 +233236004 372059747 3720635405 4840014083 2474879002 16484548283 8507153627 598495833 467734248 39243814 47455426 8430192 3392392 377981 875318 2484565 15845558 18176914 32061401 56747499 180709466 485218951 266196081 74416012 596128810 163535924 
2834952 6989193 14972360 1155977 156099 770625 +0 +15272 19289 60997 69570 49748 128392 92234 24464 21627 6264 6889 2903 1842 615 936 1576 3981 4263 5662 7533 13443 22028 16316 8626 24416 12788 1684 2644 3869 1075 395 878 +86102584 683952431 5058673659 4485649816 3398580433 10669113701 3640325282 460145399 188133655 38263437 10525838 21231016 14400343 382117 684420 3970897 8388937 7420156 28785401 32618490 145087960 437388717 207952119 94581602 357456624 16216644 1674104 4431907 11204280 957639 802073 918059 +0 +9279 26152 71124 66975 58297 103291 60335 21451 13716 6186 3244 4608 3795 618 827 1993 2896 2724 5365 5711 12045 20914 14421 9725 18907 4027 1294 2105 3347 979 896 958 +266668513 3691427534 15395423891 7727109635 18438751400 18369169795 1496098715 1922356437 498670390 76025345 38520614 55382555 11059903 3032946 2294403 7035791 12546931 18996261 139567595 56779285 586886672 753999916 441184194 209198221 432436195 51134093 3818838 7142961 11417811 3240013 2520203 7044858 +1 +8165 30378 62039 43952 67894 67766 19339 21922 11165 4359 3103 3721 1663 871 757 1326 1771 2179 5907 3767 12113 13729 10502 7232 10397 3575 977 1336 1689 900 794 1327 +510699404 4841596267 14772517623 7246185629 24267853064 22574962093 2117596970 393222986 231544386 46108194 21699402 10161343 6388452 4063825 1480860 4735985 10925705 16215577 18467886 50175417 117189975 905474412 419407332 217074536 403021886 19882058 775532 2591199 8345167 4105573 2208603 701925 +1 +11299 34791 60771 42562 77890 75125 23008 9915 7608 3395 2329 1594 1264 1008 608 1088 1652 2013 2148 3541 5412 15045 10239 7366 10037 2229 440 805 1444 1013 743 419 +652457100 8628642947 14757055619 8841174305 24237892960 6610685245 493631450 571414491 105514170 21432075 15554668 3558877 668914 839542 1533633 7037972 27708223 9256703 23851076 16580191 41757120 197930080 898068540 261650776 197017328 25353992 2933924 5856325 8516081 1137938 820737 476529 +1 +12771 46445 60739 47014 77842 40653 11109 11952 5136 2314 1972 943 409 458 619 1326 2632 1521 2442 2036 3231 7034 14984 8088 7018 2517 856 1210 1459 533 453 345 +434304470 9920745040 13278495955 9817093234 24275996903 8307106192 312741830 232835386 22204618 22902495 12823996 4640824 813457 300122 381160 477293 3017640 6064145 16195485 17885065 92506863 222810473 693624334 602409633 190105119 28851156 2393151 8159542 17396328 1891183 1048006 1131508 +1 +10420 49801 57616 49540 77904 45571 8842 7629 2356 2393 1790 1077 451 274 308 345 868 1231 2012 2114 4809 7463 13168 12272 6894 2685 773 1428 2085 687 512 532 +2360907925 23641099792 24190479903 8989470020 8514477146 7420507469 1770403678 154991439 77470283 57133060 18375458 10645259 8301376 511665 1171654 1838584 3423538 11181629 10284332 7209387 12047105 79752205 362456965 901653550 171681651 26149641 5558929 3296404 3567963 1280080 439503 2015916 +2 +12147 38439 38883 23703 23068 21535 10519 3112 2200 1889 1071 815 720 178 270 339 462 836 801 671 867 2232 4759 7507 3275 1278 589 454 472 282 165 355 +4960298104 22328432083 16843586488 5872508968 6210088640 2146584904 1299267949 336760434 159261111 95975655 52071342 9469394 14208987 3370166 6696095 6916563 17041752 37625449 178646601 71244711 125940835 741172861 290740088 471878356 115904649 27906931 9524357 9782136 6484974 3475139 2605818 5507266 +3 +8803 18678 16222 9579 9850 5791 4505 2293 1577 1224 902 384 471 229 323 328 516 766 1670 1055 1402 3403 2131 2715 1345 660 385 391 318 233 201 293 +4513219420 3581289186 2032493116 1245889388 7880899241 5927486252 2069957980 383788919 30756951 60584197 
45678440 16577108 13948981 3196244 9749280 22907599 13314734 15141562 9734323 21475900 55257346 195892955 191813710 507652966 86369699 38652965 10923132 26823590 16256703 7986545 1208121 1088002 +3 +8397 7480 5635 4412 11096 9623 5687 2448 693 973 844 508 466 223 390 598 456 486 390 579 929 1749 1731 2816 1161 777 413 647 504 353 137 130 +5430960032 18275083952 15118657235 3412749563 1117285414 3148804700 2098188826 226058619 64045356 24328113 12833562 43246712 10106915 2396236 2500634 5144562 8400664 18862668 2806171 8606983 20298825 50751969 125447823 169402516 71788537 11866110 3912105 7332435 2977944 3408669 581674 1180421 +3 +9211 16898 15369 7302 4178 7014 5725 1879 1000 616 447 822 397 193 197 283 362 542 209 366 563 890 1400 1626 1059 430 247 338 215 230 95 135 +3877771404 27761261248 28420992863 3331593153 1913040185 4709950193 2868691697 271794680 132554333 144464631 154168886 261686856 220920674 46414061 173845813 418420153 944901890 922766313 342868780 46615762 229212299 1344441985 1587270522 957735796 375938118 101153278 388529117 661581116 109769948 39880357 46054200 21636483 +3 +7784 20827 21073 7215 5467 8578 6695 2060 1439 1502 1552 2022 1857 851 1648 2556 3842 3797 2314 853 1892 4583 4980 3868 2423 1257 2463 3215 1309 789 848 581 +5300833505 20834814914 13850962448 1873767095 2487756376 4852873681 3755876693 1919366017 472353020 1689400436 2171894156 520817680 521459944 636950352 1290266840 2313635491 2401912814 1031627313 576525002 328303210 349755481 879293011 1002416987 276910370 201658232 41836064 219476277 344242602 115666562 29492736 80824852 79723660 +3 +9100 18042 14711 5410 6234 8707 7660 5476 2716 5137 5825 2852 2854 3154 4490 6012 6126 4014 3001 2264 2337 3706 3957 2080 1775 808 1851 2319 1344 678 1123 1116 +1790362822 18781045222 22562441690 6462182676 18128068448 5722057826 337490186 211995011 57085265 911404786 1903408596 221072138 61751041 105471168 731224461 595365388 1005357000 509110744 153303799 200052245 238443672 313109719 200258089 219562319 270700173 252786165 120989451 62254235 230127733 122118288 50809354 22421756 +3 +5289 17130 18776 10048 16830 9455 2296 1820 944 3773 5453 1858 982 1283 3380 3050 3963 2820 1547 1768 1930 2211 1768 1852 2056 1987 1375 986 1896 1381 891 591 +1059610515 14181243981 14613972842 5466826627 6745692396 786391149 149453393 52667898 55887880 14781041 23641746 8487401 32135496 12305835 46806769 89831400 35767578 32578157 170740089 76209230 38636763 16857759 27269618 30040677 18816332 8955638 22591373 23437362 26077341 6967113 5358029 2881430 +2 +8138 29771 30222 18484 20533 7010 3056 1814 1869 961 1215 728 1417 877 1710 2369 1495 1427 3266 2182 1554 1026 1305 1370 1084 748 1188 1210 1276 660 578 424 +3558853838 36479353059 45775358167 11896394798 15439046486 2907001113 318857498 79573626 51766602 15361996 3775154 4620117 12891819 11474850 26971555 53566278 30501324 67669079 197406668 35979243 37906039 9377114 23684015 20312735 19246500 11544515 15672959 16358304 21213969 8866935 4931195 4922791 +2 +14914 47749 53488 27267 31063 13479 4464 2230 1798 979 485 537 897 846 1298 1829 1380 2056 3512 1499 1539 765 1216 1126 1096 849 989 1011 1151 744 555 554 +4711714844 51948308974 52389391761 25027010465 37327136308 5663131971 303650269 65583162 15838335 24131249 55589256 9873513 15351748 7550862 7329051 43135396 121948918 203703621 54327701 17188164 8026672 35595326 82722205 61526159 37550176 10318907 43646151 15113825 27191727 16184164 3858708 8551257 +2 +17160 56980 57221 39549 48300 18813 4356 2024 995 1228 1864 785 979 687 
676 1642 2760 3568 1842 1036 708 1491 2273 1961 1532 803 1651 972 1303 1005 491 731 +1241353777 12139827085 10081549048 8833879570 14308666072 1944063668 101436178 63014875 14373077 3407211 6551958 3654837 265591 604374 2326243 4528277 28422415 31578180 14722669 3851178 4350076 13992486 20242710 7999421 7094511 3573494 6609526 6029691 2645998 2174236 1252377 4823878 +1 +17616 55090 50203 46994 59809 22046 5036 3969 1895 923 1280 956 257 388 762 1064 2665 2809 1918 981 1043 1870 2249 1414 1332 945 1285 1228 813 737 559 1098 +1188420328 9265686235 5906008820 11204837640 15439282728 1650704336 19184927 2445049 7813235 6732564 10064406 2742109 637041 540628 1058714 11081953 33689765 7914707 885291 1047270 1717221 3223317 6373933 2659186 3900087 1265970 1099387 1183922 1519546 534873 732169 4831497 +1 +17236 48129 38425 52926 62127 20314 2190 782 1397 1297 1586 828 399 367 514 1664 2902 1406 470 511 655 897 1262 815 987 562 524 544 616 365 428 1099 +5342412785 38722533089 21689447260 50583681666 60559102399 5420162612 196148329 78721693 35503302 28070904 36470212 4461095 684869 766881 872950 10248336 61127767 81651106 15531901 2191609 4347601 16106647 27094104 69949829 36882222 12746307 13721105 3897734 1912678 1172204 3583264 5906856 +2 +18273 49195 36818 56227 61522 18405 3501 2218 1489 1324 1509 528 207 219 233 800 1954 2259 985 370 521 1003 1301 2091 1518 892 926 493 345 270 473 607 +5976136105 45889155646 25658879364 59224163000 64547799918 5451541197 122361710 25811723 17488931 8144534 14054226 9800110 3466293 869880 2810809 8112800 20962206 23134912 21780822 3439464 3145343 5567380 19715633 32666498 13694714 4817242 8309353 3003686 3640241 949912 1543539 1762807 +2 +19326 53554 40046 60840 63515 18458 2765 1270 1045 713 937 782 465 233 419 712 1144 1202 1166 463 443 590 1110 1428 925 548 720 433 477 243 310 332 +2008415345 14286546721 8415874099 14778680869 14652446311 1120476353 18617467 2379745 2763813 4639476 2584170 713447 1243275 577744 1812721 1926717 4036292 777832 710735 598220 910191 1268443 1972572 1763893 1248038 1033367 1376615 848022 881743 393300 268536 1244621 +1 +22407 59763 45869 60784 60523 16737 2157 771 831 1077 804 422 557 380 673 694 1004 441 421 386 477 563 702 664 558 508 586 460 469 313 259 558 +11197419827 53747313469 25512828106 52498929124 43039889339 2834476380 79435791 47620341 17388999 10313988 9217184 2076347 2364366 1145721 744706 1928110 12788648 11946948 4706399 1281115 850483 1569666 6871462 4022532 3507189 1789056 2310075 4018858 3929235 1385139 473221 925427 +2 +26454 57958 39931 57281 51865 13310 2228 1725 1042 803 759 360 384 267 215 347 894 864 542 283 230 313 655 501 468 334 380 501 495 294 172 240 +9674828116 40853226428 18099637010 31372683749 21663365062 1421139170 5982313 13535293 12142146 14815410 4509014 2959404 984268 1101467 2330853 10107173 16826751 2462645 903122 1444686 1681280 1016679 2136031 4944177 1982343 1835731 4198975 6033203 4819728 1897796 632957 444856 +2 +24590 50530 33633 44280 36796 9424 611 919 871 962 530 430 248 262 381 794 1025 392 237 300 324 252 365 556 352 338 512 614 548 344 199 166 +6959596373 24875102982 9693726808 7941729113 4020383447 237333518 24200982 19862103 5794857 9243781 5800712 1465016 528385 1003545 1982403 849579 5415628 4060057 745476 1182047 740350 462474 765759 938646 657592 1239116 1277482 2327988 3113696 621882 116217 435541 +2 +20856 39429 24614 22279 15851 3851 1229 1114 601 760 602 302 181 250 352 230 581 503 215 271 215 170 218 242 202 278 282 381 441 197 85 165 +11213498042 35346955631 
15380141567 7629367028 958188131 109069287 70701726 15204656 9379514 5251685 26542446 7720732 5143634 390902 797326 3200745 4910921 11500298 9574115 6990819 1209812 1665187 2313311 2639143 1752545 3939236 5994852 7033823 5485706 1106207 385658 665411 +3 +13236 23501 15502 10918 3869 1305 1051 487 382 286 644 347 283 78 111 223 277 423 386 330 137 161 190 203 165 248 306 331 292 131 77 102 +4151908471 25783138501 20963722347 53261047487 24237659227 1059109658 197300754 293161910 110023109 28198999 31384590 15394241 33201497 11114967 18623849 30687969 13726223 41541520 20886489 12677903 7031868 5290971 2124916 4335075 6243582 14430042 6814970 19823481 14622024 5161140 4419940 3457317 +4 +4027 10035 9049 14424 9730 2034 877 1070 655 331 350 245 360 208 269 346 231 402 285 222 165 143 91 130 156 237 163 278 239 142 131 116 +1960807838 11698992199 17602050409 67057545556 59626149119 3824877938 957576562 360136631 87659551 25660229 57599760 93924666 39003744 28963610 9529702 61295249 55207502 26039995 15225624 10238539 12016218 11253646 22026464 18887085 24143240 6981991 1364725 6932771 18750506 9211890 4983732 5015579 +4 +2767 6760 8292 16184 15261 3865 1934 1186 585 316 474 605 390 336 192 489 464 318 243 200 216 209 293 271 307 165 73 164 270 189 139 140 +2635049436 9872003780 14558086781 24612304388 43029241942 6433347833 1958473825 285370348 53967072 14007214 9471104 25132244 25606158 8860906 3260197 6803138 12251186 21471749 7927187 2064678 870657 794234 2522193 5478489 8601980 3598645 680058 2417220 4073574 2682617 5959231 5245109 +3 +6416 12419 15082 19610 25929 10026 5531 2111 918 467 384 626 632 372 225 326 437 579 352 179 116 111 198 292 366 237 103 194 252 204 305 286 +2651651221 17150675800 14469965119 13636585471 33273500830 7206414998 1758370222 285864778 84753792 24333498 23961510 20573463 63212579 34844077 1066985 2685442 3977777 5743617 1972195 1136986 95561 165208 559069 3492449 5317475 2066435 597067 1420664 514618 345402 5189385 3344122 +2 +12873 32740 30072 29194 45602 21222 10483 4227 2301 1233 1223 1134 1987 1475 258 409 498 599 351 266 77 101 187 467 576 359 193 298 179 147 569 457 +3531159376 25147647472 18776052491 20490195192 44600945644 9393331486 5228126307 1236145833 349548411 52192002 24649124 51827336 125620612 41485682 1035161 1495022 1496644 3327860 3062154 1043210 1024827 537686 645819 2381446 3831722 883720 976694 2360246 3866652 1213036 1380602 799619 +2 +14856 39645 34256 35786 52797 24229 18076 8789 4674 1806 1241 1799 2802 1610 254 305 305 456 437 255 253 183 201 385 489 235 247 384 491 275 293 223 +2689356138 17854390463 13572125613 12639287821 25812165685 7838896784 10630986023 2307766001 466730835 212533312 150069330 116881346 204978663 105309915 13685491 4161676 1224780 1492001 2054219 1033583 989467 855353 566092 3107627 8700039 893998 699239 2956926 5916558 440566 277126 282354 +2 +12964 33405 29124 28106 40165 22134 25776 12009 5401 3644 3062 2702 3579 2565 924 510 276 305 358 254 248 231 188 440 737 236 209 430 608 166 131 132 +2468034019 14392249973 10510746232 7950834690 13720016445 8066648095 17049012169 3138399205 492402616 453731910 329673648 258917680 294714954 86804938 10432947 6893265 3818678 2378920 1963173 2587595 1280722 793862 711593 3720123 8782004 1298230 214480 4398132 4553477 1090233 217234 161015 +2 +12419 29992 25630 22291 29283 22453 32643 14005 5547 5325 4539 4022 4291 2329 807 656 488 385 350 402 283 222 211 482 740 284 115 524 533 261 116 100 +1930925128 11394149803 9496996200 8425927697 14003531534 8302245381 20903880642 
4255067778 568586568 598848990 674150846 462004378 298733708 89528547 3427480 6791855 6494664 2173093 840611 1352961 1410280 560724 461019 2024137 9281243 5551951 1041627 2738005 14367963 9542068 804478 117035 +2 +10985 26685 24363 22948 29584 22779 36145 16307 5961 6117 6491 5373 4321 2365 462 651 637 368 229 290 297 187 169 355 761 589 255 413 947 772 224 85 +1328525791 8226140593 8573256374 5116463892 14132161120 8716741414 23295290048 5806712331 732551143 505886447 899225022 950209192 287283788 38740489 2929819 4581562 5147668 2147565 1584818 755456 365294 487850 304956 682218 4240222 7671975 500181 4526657 12917465 4330371 407572 219544 +2 +9112 22674 23148 17882 29719 23340 38157 19050 6766 5623 7496 7706 4237 1556 428 535 567 366 314 217 151 174 138 206 514 692 176 532 898 520 159 117 +1133385167 4253111713 9489616933 5090645284 15469399439 9376440319 22396737262 9193806184 977883467 798546547 1770828072 1354863630 346400303 29244433 5390761 10528631 4395479 3634108 1677060 1456693 261229 230404 423529 413822 2954817 5040869 1500295 10466684 65843411 40471437 1686611 134288 +2 +8416 16304 24353 17837 31094 24208 37413 23971 7817 7064 10520 9202 4653 1352 580 811 524 476 323 301 127 120 162 160 429 561 306 808 2028 1590 324 91 +1593375707 4272592169 4705338930 3811646116 6072988369 6958443426 18116419114 3684648771 391777920 466306181 989195668 730393632 271793383 39364748 5233658 5991419 4235205 2102618 1997891 584958 312464 514036 938636 195610 781691 1271603 753909 4451140 33391608 36348065 1679905 176958 +2 +9979 16341 17148 15434 19482 20854 33649 15175 4948 5398 7862 6756 4121 1568 572 612 514 362 353 191 139 179 242 110 221 282 217 527 1444 1507 324 105 +3381297269 10008656032 19923779456 6206647179 4466848589 6500315031 14744835027 3219672750 638796493 4758309065 5339256144 1665197727 1231201914 111731960 41839582 58386066 82285111 124761447 41117766 8919613 7426713 2276132 4053240 1527105 9156485 9899293 1564027 6679465 30990656 86485180 5837215 546026 +3 +7268 12505 17644 9847 8354 10078 15178 7092 3159 8622 9133 5100 4386 1321 808 955 1133 1396 801 373 340 188 251 154 378 393 156 323 695 1162 302 92 +5130690995 11182565016 14999878302 6703599030 13175402422 6478155605 15251450915 4837860078 1232529311 1375696577 4224259282 784177292 980731963 290238278 29442347 26859042 20425443 16041636 6029955 7143348 7646081 4070675 3652879 2583137 962290 19163541 6146589 3022459 65784087 69675267 12493461 2109656 +4 +4476 6609 7654 5117 7174 5030 7718 4347 2194 2318 4062 1750 1957 1064 339 323 282 250 153 167 172 126 119 100 61 273 154 108 506 521 220 90 +1206280417 4026777121 11415006775 5352602812 7135269319 6487994549 12651556535 3944350238 768922737 695447059 1851156050 420238201 376422141 129610728 72826935 90101262 452451467 1435602187 1149121230 706095212 250041616 49622678 6788180 3636234 3492206 19866375 15385766 3844897 19095518 22943392 7301731 3381152 +4 +2170 3966 6677 4572 5279 5034 7029 3925 1733 1648 2689 1281 1212 711 533 593 1329 2368 2118 1660 988 440 162 119 116 278 245 122 273 299 168 114 +1507933073 15665494525 13174712205 18167877044 17486286535 6345579788 11717249627 8891291597 731261308 1412151862 4967685464 1233210514 605941997 205483119 114992118 133464785 443761197 795697922 241067272 179661236 90409633 23966124 9456428 3758017 16699850 26363574 55250004 28897717 91735802 39373499 13867324 4389478 +5 +1213 3911 3586 4212 4132 2489 3382 2946 845 1174 2202 1097 769 447 335 361 658 881 485 418 297 153 96 60 127 160 232 168 299 196 116 65 +1184897717 9489519590 
13161607443 3940896631 964142326 475393282 1721182582 761078663 191678384 213333567 667740806 1418962511 860135688 30847144 52034993 75139011 187985265 693983281 439025812 215563048 50720960 35836293 32903588 10828782 26542270 46513943 99317166 86242895 22280320 25839724 8423881 3936547 +5 +1075 3044 3585 1961 970 681 1296 862 432 456 807 1177 916 173 225 270 428 823 654 458 222 187 179 102 161 213 311 290 147 158 90 62 +8799836771 17466212413 32665440123 24403786688 8503285165 13175755526 4040900839 916525418 3287256116 4440195764 1148126538 1808176080 1277219974 131040940 71721774 54670601 331957739 727965377 167476152 140134932 106561361 57215247 63617300 37784561 58234141 108326095 85013850 40846623 42192265 30598885 21178158 11369907 +6 +1465 2065 2824 2440 1440 1793 993 473 895 1041 529 664 558 178 132 115 284 421 202 184 161 118 124 96 119 162 144 99 101 86 71 52 +4857807625 4684720568 19617056396 31803867236 4955030952 9010553928 16402536385 9283905906 1613912232 3863957616 2721204994 414517697 373669558 54762803 77511915 285341973 1127283203 272477508 474958707 147473374 22204190 18946427 28675182 32227063 58805087 29323757 104031048 108459580 104433090 57221973 53538137 15311093 +6 +1089 1069 2188 2786 1099 1483 2001 1505 627 971 815 318 302 115 137 263 524 257 340 189 73 68 83 88 119 84 159 162 159 118 114 61 +7330480156 16609548476 13374449909 21888903641 28472566201 10681371317 21811217525 7934125741 414589425 651486731 1165589522 505022475 282410707 159844879 43103177 542021657 334135918 91548305 104735383 48122773 41171550 17496248 32535061 39651417 42812099 102651127 111705587 71170133 113984641 62087517 47392465 21397867 +6 +1337 2013 1807 2311 2636 1614 2307 1391 318 398 533 351 262 197 102 363 285 149 159 108 100 65 89 98 102 158 165 131 166 123 107 72 +625702055 8504774921 12980538424 2689966485 1703751633 2102469141 3774778781 832820790 37537968 165873540 206806770 27484038 57626150 21396616 3149708 26221776 15662539 4994582 37202246 56286514 27550069 7752774 23610114 22600491 21012791 15007870 32234541 29274214 15314955 10375862 13971888 9079596 +5 +781 2881 3560 1620 1289 1432 1919 901 191 402 449 163 237 144 55 160 123 69 190 234 164 87 151 148 143 121 177 169 122 100 116 94 +7942449073 12549071554 37862431503 22111769861 3321796076 3397947899 9696101223 2370351364 1329201747 1392781535 320445636 102043833 171236102 70907865 46505081 140095821 156987770 87062258 29289267 60491731 92476199 61315586 33280233 58752054 109291434 63859244 35969405 57265624 36095817 81099274 31695923 24573369 +6 +1392 1750 3040 2323 900 910 1538 760 569 583 279 157 204 131 106 184 195 145 84 121 150 122 90 119 163 124 93 118 93 140 87 77 +7177065731 27296218233 47594933532 12201429684 7508406139 7787942516 17147770595 3344923997 640996045 588235407 715176244 295290820 232007772 252139481 36244711 221315637 285268621 226591117 295250070 227033690 94050213 75985131 234440956 194101174 357316577 322585390 188272947 166223568 121985693 210343986 53015728 62797518 +7 +661 1290 1704 862 676 689 1023 451 197 189 208 134 119 124 47 116 131 117 134 117 75 68 119 108 147 140 107 100 86 113 56 61 +2337773680 7309566459 14801657761 7176957560 805387261 528593059 995158038 394826627 212307176 262055828 220277966 54044754 35002809 33866875 23296997 58449045 55407599 55507554 126956238 53378795 68657945 71688372 38021275 60553511 37553778 32386758 7812694 41946056 55645055 14394219 47986037 32390008 +6 +755 1335 1900 1323 443 359 492 310 227 252 231 114 92 90 75 119 116 116 176 114 129 132 96 121 95 88 
43 101 116 59 108 88 +6046596233 25253943382 21742296418 21572249812 7935159819 2807538510 4463643538 2371843820 477267601 1347163060 919215284 421756089 487698103 297825307 134980049 196728935 235049753 309552333 77540179 165912218 109102241 94745557 103497585 246149174 248478488 117665019 180592448 154574135 109365391 129086298 148305794 126572548 +7 +607 1241 1151 1147 695 413 521 380 170 286 236 160 172 134 90 109 119 137 68 100 81 76 79 122 123 84 104 97 81 88 95 87 +2482510738 3438291754 16013403574 25549731472 5581583516 490118426 565096383 545958421 173219808 451581527 217609846 82442676 10837682 25359092 51647568 171316459 191286047 61242489 77041846 94361758 73210822 73273377 63869368 58136238 100905104 45909251 33836093 211808645 135636783 97216943 45813997 40036664 +6 +778 916 1977 2497 1167 345 371 365 205 332 230 141 51 78 112 204 216 122 137 151 133 133 124 119 156 105 90 227 181 154 105 98 +6005577148 10361578336 5499850377 18824686244 23839289913 6194273642 1140881685 1220742485 386003454 392065145 564659148 1469245201 189397527 31994341 103827053 340931677 234547962 140085192 149629184 212309949 329765153 614939006 1425759264 1402856201 2886912421 1097431041 239235379 532403289 597181906 308022360 162363562 134388866 +7 +605 795 579 1071 1206 614 263 272 153 154 185 299 107 44 79 144 119 92 95 113 141 193 294 292 419 258 120 180 190 137 99 90 +4976124203 17103236797 8886612270 972614885 688432390 125681039 269886822 728523694 328765789 699247293 1964140608 983888132 161906359 50668386 44643549 30865144 75440878 63495672 22927173 49906428 121756205 89913669 144554167 128146397 242608495 229381232 304290428 520416520 401708572 251849615 245014203 48888072 +6 +1102 2043 1472 487 409 175 256 421 283 413 692 490 198 111 104 86 135 124 74 110 172 148 187 176 243 236 272 356 313 247 244 109 +6090467809 20155652713 16794708544 8636815204 6987319465 939720016 606870074 1304660187 737907949 2365890851 3863588022 1304580576 976860358 228676104 189562733 284175486 355739621 200192212 137950714 504845896 680360534 347997407 859665822 507802247 540486367 463426695 883873959 398332294 538245923 905454393 2553728946 771749081 +7 +609 1109 1012 726 653 239 192 282 212 380 485 282 244 118 107 131 147 110 91 175 203 145 229 176 181 168 232 155 181 235 394 217 +3224016691 4338379168 12976914061 20894584783 5034741646 1133599883 648001229 502753565 1253051496 1702828293 4085184714 1163694552 202926742 105583933 346290304 440669922 979093135 584597296 211004197 425233986 117367561 164643633 613232986 299306589 490263396 602047556 1614927769 743901983 736697030 1072714158 1590236639 129053730 +7 +443 514 889 1129 554 263 198 175 276 322 499 266 111 80 145 164 244 188 113 161 84 100 193 135 172 191 313 213 212 255 311 88 +1097476178 915709831 845753022 4769534864 8298187396 3063024599 718278018 279312287 787638110 418343968 2354346912 792335948 96968259 50105885 144381988 565074570 118376217 205787733 205129540 133248688 260822608 219911270 468065353 276707757 230721104 274481966 385569749 222564925 681777581 671036064 364396821 93738895 +7 +258 236 227 539 711 432 209 130 219 159 379 219 76 55 93 185 85 112 111 90 126 115 169 129 118 129 153 116 203 202 149 75 +4157140845 2613794305 4727677279 5266829776 7424920178 5789172423 1898804582 1268984326 589508951 284955983 330923704 652302163 117550766 71398485 47435034 192953592 149161984 37318450 352312438 682325331 291320265 96148797 293130744 328654162 209549663 211642083 327489326 178558175 766989134 843444145 208471186 70954403 +7 +503 399 537 
[... remaining numeric test vectors of the preceding testdata file elided: repeating added-line records of 32 large unsigned energy values, a single small value (e.g. +6, +7, +8), and 32 smaller values per frame ...]
\ No newline at end of file
diff --git a/python/tflite_micro/signal/ops/testdata/filter_bank_test1.txt b/python/tflite_micro/signal/ops/testdata/filter_bank_test1.txt
new file mode 100644
index 00000000000..281339e5285
--- /dev/null
+++ b/python/tflite_micro/signal/ops/testdata/filter_bank_test1.txt
@@ -0,0 +1,367 @@
+16000 32 125 3800
[... the file's added lines of per-frame integer test vectors elided: rows of whitespace-separated integers in which values just below 2**32 are very likely negative int32 stored as unsigned, interleaved with single-value scale lines ...]
4294836224 65534 1 131072 4294836225 65535 +8196217789 1957025137 4105133346 8254685747 2845377226 2070848339 507309560 673252806 149784914 187081437 137602536 132462889 43177557 19527115 10046263 32317284 32735405 11963013 19920883 20458214 5512408 18694013 26232132 16410094 38688435 28834970 10315897 31713278 27728808 20097256 11833634 17320225 +32 4283432963 12910665 4258005120 24509287 1611730 655645 778450 195137 260413 439618 1045188 281546 54980 53170 61648 177970 227890 182789 81161 24026 10685 55849 125320 93845 25337 2237 4456 4084 761 49 149 125 185 349 800 1297 1444 1565 3725 3016 877 4229 9512 10309 3965 1233 4736 8072 5994 3757 3700 3770 2522 1233 1325 1445 421 712 1993 3497 4736 3989 1352 2930 6273 3328 650 274 389 2425 3141 2105 3712 4432 3258 1249 13 193 36 925 2125 1810 725 34 289 1681 2842 685 530 1405 890 481 625 3573 5114 3418 538 1597 1973 1664 2081 1025 317 193 100 80 1 18 305 845 680 221 292 234 386 205 436 410 180 90 25 4294705154 196606 65536 0 1 65535 0 4294901760 131072 65535 0 0 4294836225 65534 131071 4294901761 65535 4294836224 65534 4294836224 4294901760 4294967295 131071 131071 2 4294967295 0 4294967295 4294901761 131070 131073 4294901761 4294967295 0 65535 65536 0 4294901760 65535 0 4294901760 0 4294901760 0 4294901760 4294901759 65536 65535 4294901760 4294901760 0 4294901760 4294901760 4294967295 131071 4294901760 65535 4294901760 0 4294967294 65537 4294901760 4294967295 131071 4294836225 4294901759 65535 4294901761 1 65535 0 0 4294836224 65535 65535 0 0 4294901760 65535 65536 4294901761 4294967295 4294901760 65535 4294967295 65535 0 65535 65535 1 65534 65537 0 4294836225 4294967295 65535 1 4294901760 65536 65535 4294901760 65534 4294901760 4294967295 65536 4294901760 4294901761 4294901760 65535 0 4294901760 65535 65535 4294967295 65537 4294901760 65534 131073 4294901761 0 196606 4294901761 4294901760 65537 4294836225 4294967295 4294836224 131071 65535 4294967295 65536 1 65535 4294901760 65534 +6329789845 4222935529 1589637646 3988540345 3508973056 550883596 965366228 1570610251 548279819 497984197 744534210 69130211 16423215 1749836 8651550 25229284 47071422 89832514 75645986 55604185 22717076 42775713 60916084 33057526 58017351 21099429 23609223 26573470 57712926 32231803 7116200 9828388 +0 4980755 3473286 4268490994 60686310 2383009 3251060 2230570 706417 518482 1023401 1190153 618821 169177 31450 62050 102877 111281 76052 54898 45985 32689 39065 22194 2833 1394 765 1796 1972 765 90 100 356 461 117 72 292 73 601 3146 8500 25250 47281 83330 112745 82637 45949 29844 27073 23202 15730 7837 4097 1492 925 557 58 8 136 173 98 169 73 361 765 153 58 26 377 626 1145 730 5 244 625 626 49 17 106 197 73 17 49 50 52 37 113 441 208 20 74 145 325 338 181 200 234 34 130 257 89 20 0 2 1 32 52 34 4 0 17 97 73 25 0 26 64 53 5 1 1 9 4294770691 131070 4294901762 0 0 4294901761 0 0 0 0 0 0 0 0 0 0 0 4294901760 0 4294901760 0 0 4294901761 0 0 0 0 4294901760 0 65535 0 0 0 4294901760 0 65536 4294967295 4294967295 0 4294967295 0 0 0 4294901760 131071 4294967295 65535 0 0 0 4294901760 0 0 0 0 4294901760 1 4294967295 65535 4294967295 65535 0 0 0 4294901760 65535 65535 4294901760 0 1 4294901760 0 4294901760 4294901760 65535 4294901760 4294901761 65535 65535 4294967295 0 65537 4294901760 1 4294967295 0 4294967295 0 65535 1 65535 0 65535 0 0 0 4294901760 4294967295 4294901760 4294967295 131071 4294901760 1 0 65535 1 4294901760 0 65535 65535 65535 4294967295 65536 0 0 4294967295 0 4294901760 0 4294967295 4294901760 0 0 4294967295 4294901760 0 65535 0 4294901760 0 0 1 65536 0 0 
+14773238038 14617819759 4404399640 6987944791 5163371075 1020149550 624962766 762177842 446908030 313892902 83094210 14168330 9434170 2986072 2245143 19789669 404318747 1101402493 465285483 130434468 10237462 2199612 5350454 8421955 8590531 3057048 1937279 4054659 4873241 1751857 752879 960938 +65530 4292411379 524295 27525227 4198760658 5249021 3565729 1672541 830210 1159549 3826409 2298218 623225 590372 532097 572420 817786 550450 176245 70405 85357 36170 48068 103898 44777 4194 1394 7432 3764 136 80 153 170 442 425 394 2245 3785 3293 3380 4930 90116 235850 234769 134056 61840 22324 9412 28645 26129 14130 9506 3400 1233 1261 1096 964 369 29 205 400 234 52 52 178 130 164 841 1780 3754 5069 2969 1234 810 845 730 356 85 29 5 9 10 1 18 90 200 226 200 81 9 26 26 145 53 8 1 25 10 49 125 52 25 26 8 5 1 32 20 4 65 265 629 680 578 442 178 5 9 8 5 4 1 2 0 65535 0 0 0 0 131071 0 0 0 0 0 0 0 0 0 0 0 65535 65535 0 65535 4294901760 0 65535 0 0 4294901760 0 4294901760 0 0 65535 4294901760 4294901760 65535 0 0 0 65535 0 4294967295 65536 0 4294901760 65535 0 131071 0 0 4294901760 4294967295 65535 4294967295 0 4294901760 0 65535 4294901760 4294901760 4294901760 0 65535 4294901760 65535 0 0 0 131071 0 4294901760 0 65535 65535 0 0 0 65535 0 0 65535 65535 4294901760 1 0 0 1 65535 0 4294901760 0 0 0 0 65535 65535 65535 0 65535 4294901760 4294967295 0 4294901760 65536 4294901760 4294901761 65535 0 0 1 131071 0 4294901760 4294967295 0 4294967295 0 0 4294901760 0 0 0 0 0 65536 65535 0 0 0 0 65535 65535 65535 0 +24009075119 13217887400 6396689913 21190770700 8294492884 4251870864 5216966340 3338427807 739078305 548040252 444139732 42335812 15593099 2661234 12177824 41844984 1378487966 1734679305 340220206 134774160 16109355 4278460 3642201 42200780 29648878 3170056 1839418 2082559 955372 928552 3900984 7950512 +1 983039 131070 4278255443 58720143 1213985 295172 38777 1278485 4523072 3525197 372730 543545 2819385 4674130 2417220 334112 144036 172625 187456 47185 1769 49121 106705 68850 10946 289 5237 5162 949 17 13 5 53 117 293 2509 11357 13978 7045 20025 42128 16705 538 34 725 1745 4276 3152 1256 605 541 274 256 641 689 725 490 180 13 13 4 45 82 130 80 113 250 288 605 1224 785 314 146 164 226 130 26 5 13 13 17 41 34 49 9 68 73 130 117 5 169 362 281 90 1 52 80 37 1 58 72 72 65 9 1 1 5 25 221 450 109 106 612 433 101 18 10 4 5 0 0 0 0 0 1 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 65536 4294901761 0 0 65535 4294901760 65535 65535 4294901760 0 0 65535 65536 4294901760 1 65535 0 65535 4294901760 65536 0 65536 65534 4294901761 4294901760 65535 65536 65535 65536 4294967295 0 4294967295 0 4294901760 0 4294967295 4294967295 65535 65535 0 65536 4294901760 4294967295 0 0 0 65535 65535 65535 131071 4294901760 4294967295 4294901760 4294901760 4294901761 4294901760 4294901760 65535 65535 0 1 1 65535 0 0 0 4294967295 0 0 4294901761 4294967295 0 0 4294901760 65535 65535 65536 0 0 0 0 65535 0 0 0 0 0 65536 4294901760 4294901760 0 65535 65535 4294901760 4294901761 65535 0 131071 0 0 0 65535 0 4294901760 0 4294901760 131071 0 65535 4294901760 4294901760 0 4294901760 4294901760 65536 1 +4296266726 1990800203 15741493481 21776392145 6161477790 25100571160 13523866196 1562085004 926458893 431900830 562589786 44640973 20067177 318044 17722393 130937341 288588039 41745443 34804807 9009351 8845760 2076364 1712301 9103885 7045572 1075962 865130 2855220 2643046 1189154 3098130 5556710 +65534 4294770691 196606 6881410 4266459046 185569 23725 33057 842725 2311625 1343773 132201 440960 2155309 2334953 434645 2837 53128 92068 74048 9074 3869 18369 31585 
11498 272 1189 3665 3205 865 52 4 10 13 58 733 2837 3332 1396 193 1490 7754 6253 1021 130 221 277 85 65 68 45 1 10 49 122 128 17 26 17 4 13 18 1 2 37 34 40 65 200 145 5 16 29 29 25 82 41 8 1 2 13 25 18 16 58 37 113 221 100 0 29 89 234 125 13 80 90 170 90 34 61 68 20 5 13 10 1 9 41 122 269 221 25 90 100 37 4 1 1 1 5 1 65535 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 4294901760 65535 4294901760 4294967295 0 65535 4294901760 65535 0 65535 0 0 65535 0 65535 4294901761 4294901760 0 65537 65535 4294901760 65536 0 4294967295 65535 0 4294901760 0 0 65535 0 4294901760 4294967295 4294901760 0 0 65535 0 65535 1 0 0 4294901760 65535 65535 0 4294901760 0 0 0 0 0 4294901760 4294901760 0 0 65535 0 4294901760 4294836224 4294901760 65535 65535 4294901760 0 0 0 0 0 0 0 4294901760 0 4294967295 0 0 0 0 0 1 65535 4294967295 0 65535 0 65535 65535 0 4294967295 0 0 4294901759 0 4294967295 65535 65535 4294901760 65535 4294901760 4294901760 4294901760 0 4294901760 4294967295 65535 0 1 4294901760 4294901760 0 4294901760 0 4294967295 0 4294901760 0 4294967295 0 +605954803 1045895109 8596982149 9183189032 4439201333 14092612619 3806964669 523005046 366555176 145508503 126483818 22129461 13971339 207660 12416315 23292080 54024445 14728020 1572314 595195 1052889 212395 485132 1666417 711176 390875 1135514 2178611 2380866 1231538 1921926 1900340 +0 4294836221 524298 4290969445 26214653 340488 84674 32002 1135773 3935161 2480605 302090 563092 3357001 3817234 928178 25010 6730 33300 39373 12610 592 2960 16769 11545 1125 200 1225 1082 125 85 100 74 73 90 221 1448 5085 4594 4552 4426 3285 6068 3161 580 242 194 305 340 197 53 2 26 64 34 29 37 9 5 20 45 13 1 0 4 49 85 45 29 2 50 61 29 8 5 16 18 1 0 5 36 41 18 5 45 16 68 225 17 101 234 245 289 520 538 298 185 144 169 34 2 10 5 25 17 0 13 9 4 4 26 37 340 298 80 85 40 1 2 2 2 0 65536 0 0 0 0 0 0 0 0 0 0 0 0 0 1 4294967295 0 0 0 0 4294967295 4294901760 65535 0 65535 0 0 4294967295 0 0 4294901760 0 0 0 0 4294901760 4294901760 0 0 0 1 4294901760 131071 4294901760 65535 65536 1 4294967295 131071 0 0 0 65536 0 4294967295 0 0 0 4294901760 0 0 4294967295 0 4294901760 4294967295 65535 1 131071 4294967295 4294967295 65535 0 0 4294901760 4294967294 4294967295 4294901760 1 0 0 65535 4294901760 4294901760 65535 4294967295 65535 0 65535 0 4294901760 4294967295 4294967295 131071 65535 65536 0 4294967295 0 0 0 0 4294901760 0 131071 4294901760 4294901760 0 0 0 4294901760 4294967295 65536 4294967295 0 0 4294901760 4294967295 65535 0 65535 4294901760 65535 4294901760 0 0 4294901760 65535 65535 4294967295 4294967295 4294901760 65535 0 0 0 +1209456858 1453754383 13727565604 16544725636 6709475864 22660985989 6904878652 189964515 200209624 56130992 87725944 8378007 4353578 948273 9223142 51480079 56796182 21173742 3528563 837291 489459 322404 435002 748647 434285 281370 910977 4202033 7081305 1029846 1047050 2781397 +1 262149 65545 4293394568 4275568348 288565 100773 109072 2184797 5187745 3213125 664514 536354 1805284 1833941 447589 9137 458 193 6125 10660 909 2729 7589 6445 3133 725 82 221 292 257 100 125 362 541 850 2405 3940 3748 1490 493 3573 6770 5725 2194 405 85 50 2 34 109 205 221 52 2 2 1 0 45 34 5 5 1 2 1 10 10 4 5 9 13 40 26 2 8 18 0 0 10 10 16 10 5 4 53 74 85 82 50 173 289 117 1 18 298 493 221 82 26 10 29 10 1 9 4 8 10 10 10 18 100 225 401 296 89 20 20 101 149 80 64 13 2 4294901760 65535 0 0 0 0 0 0 0 0 0 0 0 4294901760 4294901760 4294967295 0 4294901760 1 4294967295 0 65535 0 0 65535 4294901760 4294901760 65535 4294901760 0 65535 0 4294901760 65535 4294901760 0 4294967295 65535 0 65536 1 0 
4294901760 65535 0 0 4294901760 65536 0 65535 0 65535 4294901761 4294901760 0 131071 65535 0 0 0 65535 0 0 4294967295 65535 4294967295 65535 0 1 65535 65535 0 0 0 65535 0 0 4294901760 65535 0 65535 4294901760 65536 65536 4294967295 4294901760 0 0 65535 4294967295 0 0 4294901760 65535 0 0 4294901760 0 65535 65535 1 65535 131071 4294901760 4294901760 0 0 4294967295 0 4294901760 0 4294967295 4294901760 65536 65535 4294967295 1 0 0 0 0 0 131071 0 1 0 0 65535 131071 0 4294901760 0 65536 1 +1093863859 2865888829 20214480262 22044372580 5604611882 11607364828 3311838971 19068201 54530851 36742630 52345102 7983694 2556635 2499466 14565655 35160346 46501016 35320368 1086201 1970274 319757 282071 78717 220565 283731 175340 823574 2847149 4486969 634424 1734847 3915481 +65530 4294770668 4294639614 5439290 16056767 260109 74804 1085725 3639241 3563489 1540997 978410 802469 731898 134100 211133 235874 79048 71266 70225 14848 6565 20488 10970 2500 1261 288 2925 3085 1665 729 136 625 1741 1124 169 149 290 466 1313 2493 4157 4810 1546 3541 2785 221 0 45 50 100 338 377 149 25 8 13 50 58 34 61 29 4 4 10 1 20 26 40 106 109 73 29 16 72 85 50 65 58 65 52 17 106 121 74 17 29 89 100 37 61 232 100 45 250 293 153 50 25 1 52 125 113 40 13 10 1 1 0 4 20 18 10 53 4 1 34 50 25 18 41 18 4294967294 1 0 65535 0 0 0 0 0 0 0 0 65536 0 4294901760 0 65535 4294967295 0 0 0 65535 0 65535 0 0 0 0 0 0 4294967295 0 4294901760 0 4294901761 0 0 0 65535 0 4294901760 65535 4294967295 0 0 65535 1 0 65535 4294901760 4294967295 0 4294967295 4294901760 65535 4294901760 1 0 4294967295 65536 4294836224 65535 0 0 4294901761 65535 4294901760 1 65535 0 0 0 1 0 65535 0 4294967295 4294967295 4294901760 0 0 4294901760 4294901760 4294901760 65535 0 0 0 4294967295 4294967294 0 65535 4294967295 65535 4294901760 0 0 0 65536 0 4294901760 0 65535 0 1 0 0 0 65535 4294901760 4294967295 4294901760 0 0 65537 4294901760 1 4294901760 0 0 65535 65536 4294967295 4294901760 4294967295 4294901760 0 4294901760 4294901760 0 65535 65535 4294967295 0 0 +947952750 7969309015 20746730841 13366811972 5981303880 3213009662 1562281490 816638907 353432806 126281909 47348624 18231457 18266153 9009828 7898301 8834641 42185198 34482319 3724320 3275706 846070 676290 223381 1076580 1155901 1289872 1523173 2054241 3496973 1520634 337294 647382 +65526 1179669 4291625004 4286709792 4291034598 589225 170881 168205 640225 1082308 548041 157357 553169 1971217 2091205 534025 15688 2053 26122 88592 37253 1553 34973 48098 18180 1205 5861 2837 5441 6280 1125 872 3145 4717 1808 634 2425 2980 1332 1145 769 145 1445 3877 9445 10324 3005 585 565 185 153 305 58 58 97 32 50 80 52 2 61 104 25 2 29 5 58 149 61 85 37 8 10 18 1 16 58 125 145 149 137 100 65 256 218 80 4 16 58 113 68 208 544 265 37 9 26 17 121 116 8 25 72 45 65 41 25 36 36 26 10 17 269 610 484 296 109 73 85 121 61 25 131069 65538 0 0 0 0 0 0 0 0 0 0 0 1 0 65535 0 4294901760 65535 4294901760 0 0 4294901760 4294901760 0 4294901760 4294901760 0 65535 65535 65536 65535 0 65535 0 65535 0 0 4294901760 0 4294901760 0 1 4294967295 65537 4294967295 131071 4294901761 0 65535 0 1 65535 65535 4294901760 0 4294901760 65535 0 65535 4294901760 65535 65535 4294901761 131071 65535 4294901760 4294901760 4294901760 4294967295 0 65535 0 0 4294901760 0 65535 4294901761 4294901760 4294967295 4294901760 0 65535 4294901760 1 4294901760 65534 65536 0 0 0 0 4294967295 4294901760 0 4294901760 65536 4294901760 65535 4294901760 65535 0 4294901760 65535 0 4294901760 0 0 0 4294901760 4294967295 0 4294901760 0 0 4294901760 65535 65535 0 0 0 4294901760 0 65536 0 
1 0 0 4294901760 65535 4294901760 65536 4294967295 65535 0 +2150785063 1588949617 4827036733 4138871917 4668325703 12969199872 3868202669 192520894 424089128 255936015 203969230 40711251 43907143 29075112 25164731 22623862 17119766 87612575 21113733 2437026 954286 843476 687519 1225596 491323 2335704 2809254 3100201 2966911 1373946 1803698 6735683 +10 4293197802 3735555 4259675 20185687 810737 321040 16570 28537 231041 274465 449800 2583821 7015378 5305802 799762 52520 35285 230789 301977 92410 4177 2218 47252 34624 226 26849 29765 13753 4381 450 1373 3293 2180 449 1378 8593 6817 1225 6290 10984 9344 4765 6272 20105 21024 1872 1189 1864 340 629 1073 89 269 185 25 10 85 116 101 212 104 85 298 337 290 1226 1424 485 68 0 4 18 50 89 117 82 397 466 244 52 10 208 745 208 80 196 242 481 578 1440 1352 26 977 1130 865 1762 2545 1192 89 386 333 801 720 250 466 565 505 170 485 1021 653 205 1321 800 178 340 116 50 58 32 5 262140 65538 0 0 0 65535 0 0 0 0 0 0 65536 65535 0 0 0 4294901760 0 1 131071 4294836224 65535 65536 0 4294967295 4294901760 65535 4294901760 0 0 65535 4294901760 65536 4294901760 0 0 65535 65536 65535 4294901760 4294967295 1 0 4294967295 0 65535 65535 4294901760 1 65535 0 0 4294901760 4294967295 4294901761 65535 0 4294901761 0 0 4294901760 65535 0 0 65535 65535 4294901761 0 65535 0 131071 0 65535 0 0 0 65536 65535 65535 0 4294901760 4294901760 4294967295 0 1 65535 4294901760 0 4294901760 65535 4294901760 4294901760 65535 0 4294901760 65536 4294901761 4294901760 4294901760 4294836224 65535 4294901760 4294967295 4294967295 131071 4294901760 65535 0 65536 65536 65535 4294967295 65536 0 4294901760 4294901760 0 0 0 0 0 1 65535 4294967295 4294967295 0 4294901761 65535 0 65535 65535 4294901760 4294967295 0 +3163230914 644216959 703830900 2242195256 18498963362 38940581179 8114251229 1166614441 1493394923 193398575 267758980 210671975 75687386 22816163 36888732 69359382 107543927 173260053 37244740 7834191 1550610 2189149 7825768 9354076 1405989 5027645 5830452 15548178 28564770 17439516 14443692 13628940 +65510 3866651 4292542589 4278517810 4281007599 1018420 864117 1897405 4133725 3018482 1078621 318925 2692994 8300045 5663156 524173 99850 225578 370226 418081 167540 28989 15746 45821 86021 47545 4477 32017 19477 2533 1832 5524 1576 784 89 4885 24077 23780 2405 2465 1730 452 1124 6682 22985 79330 134017 71540 13289 1780 1341 6660 6525 1010 185 977 442 34 637 689 68 1570 1460 1394 1666 593 3281 2740 1082 740 1105 625 104 338 1160 2225 970 116 180 32 410 712 82 2404 3218 794 914 1525 1466 457 244 1972 4113 5986 6890 6805 9092 5265 53 1460 2097 2605 3796 5429 3089 100 898 2026 605 100 1181 3125 2669 882 1220 794 529 685 313 122 37 49 196601 65539 196606 2 0 0 0 0 0 4294901760 65535 1 4294901760 0 65535 1 4294967295 4294967295 65535 0 65535 0 65535 4294901760 65535 0 65535 0 4294967294 65535 0 1 4294901760 65535 65537 65535 0 4294901761 4294967295 4294901760 0 65535 65536 1 1 0 1 65535 0 4294967295 65535 65535 4294901760 4294901760 1 4294901760 0 65536 0 0 4294901760 65537 0 0 4294901760 0 0 0 4294967295 0 4294901760 4294967295 0 0 0 65535 4294901760 65535 0 4294901760 0 65535 4294901761 65535 4294836224 0 0 65535 1 65535 65536 4294901760 65534 4294901760 65535 65535 4294901760 4294967295 1 65535 4294901760 4294901760 65535 65535 65535 0 0 4294901760 0 0 0 0 4294901760 0 4294967295 65535 65536 4294967295 65535 0 4294901760 4294967295 65535 0 0 4294967295 65535 65535 131071 4294967295 0 0 4294901760 65535 2 +5067109681 12859186909 21136876749 9521989451 20115499549 43570635842 7904527964 
2368341719 2283411614 380756767 575051532 229801984 92521323 26417836 96623487 138019913 30346884 679651136 654787263 60210513 12767069 12181393 28904429 26887267 16411142 11205753 30774139 40684513 127664601 74532854 42521063 30672949 +65535 4292476938 5963711 4288479289 18612194 158506 460666 3534613 7194773 4455001 392789 144448 697801 3399149 1889280 37025 83729 6938 1440 49009 49396 7785 3988 6850 24580 7569 24677 40882 37845 16069 2020 2521 5597 3922 1450 2386 11540 17728 8905 3217 2581 4388 6649 8005 4625 4381 104897 141538 35801 7124 10420 7565 442 90 1609 2410 424 925 2466 1640 949 4500 6922 3485 650 81 554 313 185 905 1781 421 29 89 61 394 1544 1313 730 1125 1637 1234 545 985 661 1069 2138 2290 2708 2005 1300 1066 765 1082 458 801 3146 962 936 541 100 65 346 485 25 221 26 484 562 328 490 388 145 256 260 136 153 130 65 121 212 100 65540 131069 65539 4294901761 131071 1 0 65536 4294901761 0 1 4294901760 65535 1 0 0 4294901760 65535 0 65535 4294967295 0 0 0 4294901759 0 4294901760 65534 0 0 0 0 1 0 0 4294901760 65535 0 65536 1 65535 1 65535 65536 4294901760 4294836224 0 65536 0 4294967295 4294901761 4294967295 65535 65535 65536 0 0 0 65535 4294901760 0 131071 4294901761 65537 4294901759 65536 4294901761 4294967295 0 4294901761 65536 0 0 1 65535 65535 65535 0 0 4294836224 4294901760 4294901761 4294901760 4294967295 65535 4294901760 0 1 4294901759 65535 4294901760 131071 65535 4294901761 0 0 0 0 0 4294901760 65535 4294901760 0 65536 0 4294901760 0 65535 4294967295 131071 4294967295 0 4294967295 0 0 0 4294967295 65536 4294901760 65536 65534 0 0 0 4294901760 1 0 4294967295 65537 65535 65535 4294901761 65536 0 65535 +1562655190 21446677639 34731971521 8654492932 6858486424 16109817961 2287003827 175389713 321662840 95315609 154988208 295608443 192992610 42215645 64078006 121609512 64579822 384850176 864670893 80885618 21645156 42949254 40877181 11540781 9656180 22741349 26694933 39576780 27645449 11218720 9050111 6099228 +65497 3932208 2162476 4287889963 2030900 173970 280053 959969 375194 652816 2309282 270353 1369450 3560996 1981850 618052 143194 11402 25225 19801 14170 88145 137482 84437 59978 38498 14450 11745 25610 17505 3816 586 145 530 1165 1021 2410 10 450 6682 20610 9749 3893 6065 14081 153540 509800 300373 50522 21008 2465 20180 23413 1625 11225 18010 1684 8212 1640 3601 1252 8146 1145 1818 445 1060 2788 4360 1637 1640 1300 2465 4100 746 416 1377 225 9 425 424 45 146 653 169 148 2026 2225 232 153 820 68 2005 2081 1544 916 4205 3649 986 377 1234 1949 1145 1490 1421 629 1737 872 178 196 306 97 360 90 522 356 185 98 13 1 32 100 170 9 4294836222 65536 65536 0 0 65536 4294901760 0 4294901761 65535 0 4294836225 131070 4294901760 0 0 0 0 131071 4294901760 65535 0 0 65535 0 65536 4294901760 65536 4294901760 4294901760 65535 65536 65536 4294901759 0 4294967295 4294901760 4294967295 131071 65537 4294967295 0 4294901760 4294967295 65535 4294901760 65536 65535 0 1 1 65535 0 4294901760 131071 65536 0 4294901760 4294967295 0 4294967295 65536 1 65535 0 65536 4294901760 0 4294967295 0 4294967295 0 4294901761 4294901760 0 4294967295 65535 65536 1 131071 0 65535 0 1 0 0 65536 4294967295 65535 0 4294901760 65535 65536 0 131071 4294967295 1 0 4294836224 65535 65535 4294901760 4294967295 4294901760 0 65536 4294901760 0 65535 65536 0 0 4294901760 65535 4294901760 0 1 4294967295 0 65536 4294901761 0 4294967295 0 4294967295 4294967295 0 1 0 4294901760 4294901760 4294967294 4294901760 1 +1179864909 4438484312 3337492275 10773972694 9690901207 17806062416 4403479604 300948683 254174295 918139968 
599934592 193487700 149701042 9859033 13825897 44957753 134314275 1809100517 2473691731 232577496 145384776 61370824 38002572 43365701 30284484 7384129 14936535 23393597 50137944 34535972 15072053 5795591 +6 4293394423 1703881 9306231 4274454588 97585 38314 42314 162937 248656 137869 12069 74740 178757 100090 61760 71845 29305 1856 2597 17828 27274 5945 2885 8209 8593 4964 1360 122 1384 881 490 117 37 1021 5185 8564 7538 8161 12778 16922 25961 48362 49877 24961 18946 108986 167389 90385 36058 102826 317993 458225 335810 184477 323305 584245 605162 389133 164005 40900 9805 16400 11482 1145 1768 5364 3033 1237 1010 2081 4329 3681 1940 1098 1125 656 100 25 101 41 5 193 229 80 164 101 17 116 305 634 845 1042 985 484 226 656 629 349 113 729 650 1105 1525 421 458 4360 6148 3200 505 401 2045 3029 1261 25 225 244 218 173 109 61 53 4 65535 2 65536 0 0 0 0 1 0 0 1 0 4294901760 0 65535 0 0 4294967295 4294901761 65535 4294901760 65536 4294967295 4294901760 65535 0 65535 0 65535 131071 0 65536 0 65535 0 0 65535 4294901760 0 0 65535 65535 0 0 65535 0 65536 0 65535 4294901760 0 4294901760 4294967295 65535 0 4294967295 65536 0 1 4294967295 0 0 4294901760 0 4294901761 65535 65535 4294901760 0 65535 4294901760 65535 65534 0 0 4294901760 0 1 131071 1 0 4294901760 131071 4294901761 0 0 1 4294901760 65535 4294901760 0 0 0 1 4294901760 65537 131071 0 4294967295 0 0 0 4294901760 4294901760 4294901760 0 4294967295 65535 65536 0 65535 65535 4294901760 0 4294967295 0 0 0 0 0 0 0 0 0 0 131071 0 0 65536 0 0 4294967294 4294967295 0 +379965892 393641315 1159491576 957166597 503832049 927595211 532474510 187713079 104200224 118561469 67707142 34565047 9777026 4133484 48683812 120798722 385450809 711914256 1534630396 4247719002 6605796171 3321302572 142909374 46979965 41371797 5428256 2691494 10025124 14492291 23564820 65698922 21418945 +65531 1245167 4292083749 4456306 5046442 11026 7514 6473 16333 37089 21825 17872 67138 252050 433298 577961 998962 930042 437986 193012 100825 47944 25250 15898 10466 5200 4082 1961 10768 18440 10280 1573 128 377 4777 20365 20666 6602 4685 6525 2980 1937 32161 79976 33289 6500 131794 286709 164834 40621 36980 94685 280205 536656 555901 797465 1073416 644338 43298 84501 144482 88514 97165 138692 117181 51058 10585 3485 1768 1105 2113 2560 3528 12985 16757 16762 10477 3650 1717 1445 776 98 261 640 485 121 53 153 221 265 457 461 617 565 1210 5722 17384 33973 43720 43300 30925 14346 9133 18625 36445 46553 42760 21458 5013 4985 10253 20228 24660 12589 1997 164 640 410 3602 4498 1517 144 393222 4294836223 4294967295 65536 0 0 0 65535 65535 0 0 0 0 0 0 0 4294967295 65535 65535 0 4294901760 4294901760 4294901760 0 4294967295 1 4294967295 0 0 131071 0 4294967295 4294901760 4294901760 65535 4294967295 65536 1 4294967295 0 4294967295 0 65534 65536 0 65535 0 0 65535 65536 4294901761 65535 4294901760 4294901760 4294967295 4294901760 4294901760 4294901760 0 0 0 4294967295 65535 4294901760 65535 4294901760 0 0 65535 65535 0 4294901760 4294901760 0 0 4294901760 4294967295 4294901760 65536 4294901760 65536 0 4294836224 0 65535 0 0 0 4294901760 0 4294901760 65535 4294967295 65535 4294901760 65535 65535 65535 131071 0 65535 4294836224 1 0 4294967295 65535 4294901760 0 65535 0 0 4294901760 0 4294901760 0 1 4294967295 4294967295 65535 65535 65535 0 1 0 0 4294967295 1 4294901760 0 0 1 4294901760 0 4294901760 1 +50489734 54067535 148798789 175813464 579095728 2635338393 5990297540 5678906428 1478307664 343315467 105382572 49482409 129459599 24600130 143176782 103624298 239459514 899481722 2054853390 3809893642 
10888309912 3282179924 1561099261 146734330 191627892 92045461 8267458 8072209 341897971 726277657 579229412 198148117 +15 262187 4293525506 4283957008 20316483 168257 212098 17905 300557 815812 1603186 2853929 2773890 1852506 2574818 1652625 1651445 1280836 110692 9298 22018 25033 19585 27217 40522 78802 104864 76721 5669 23860 34801 6170 1289 4165 2561 52466 59845 11300 6605 19010 48365 76306 71314 58733 114229 324325 236205 7538 307690 277562 41845 1114 187889 192848 23425 244645 33332 194101 414928 249076 28265 54778 86681 217730 109657 28228 54325 1225 27261 63621 80068 71289 122713 46121 67588 56293 7281 28498 16265 5297 977 3240 1237 1217 2642 3037 2137 901 689 884 1586 5585 9160 9805 30445 63145 21125 51125 13250 143713 194165 32045 4000 32810 61892 4165 13906 6701 14045 6682 4329 47888 75904 8528 25664 37960 14261 9378 28324 19540 8033 5905 4293984291 393205 4294705155 196607 1 65535 0 0 0 0 0 0 0 1 4294967294 0 65535 65535 4294967295 4294901760 4294901760 0 0 0 4294967295 4294967295 65535 0 0 1 65535 0 0 65535 65536 0 0 65535 4294967295 0 4294901761 0 0 65535 0 0 1 4294901760 4294967295 65535 0 0 4294967295 4294967295 4294901760 4294967295 4294967295 65535 65536 0 1 65534 0 4294901760 0 4294901760 0 4294901760 0 0 65535 65535 4294836224 0 4294901760 65535 4294967295 65535 65536 0 4294901760 4294901760 1 1 0 0 0 0 0 0 4294901760 4294901760 4294967295 0 4294901760 4294901760 65535 65535 0 0 0 0 65535 65535 4294967295 131071 65535 65535 4294901761 4294901760 65535 65536 4294901760 65535 0 65536 65535 0 4294901760 0 1 65535 0 0 4294901760 0 65535 4294967295 4294901760 0 65535 65535 65535 4294901760 65535 +1001685788 746379295 3120507595 12674081026 18617495547 16495225865 13783334513 6426636828 258352888 214884363 519203490 732337683 268748585 82861456 353754294 290416683 782930021 2201466361 2794374570 1834726522 2679756280 2930227311 1682665060 942156007 1313576578 347830410 45808259 85053499 964120244 1587740690 542068834 754676225 +65533 4294770690 1900571 4293656496 4278255746 788753 1965625 2071250 1464500 936500 183560 77472 597896 727061 351369 466225 173501 2029 17890 9754 14698 18901 16200 13738 12221 1394 1921 1930 788 362 296 325 26 130 1073 2500 1873 389 205 117 929 288 2512 3573 7274 4517 3989 7642 4001 3517 7880 5725 15205 21809 1105 4868 16 11629 8296 5266 641 14473 14600 1370 2405 1261 13012 14242 89 8865 290 27674 50265 15497 725 5920 3461 1044 820 136 409 980 698 109 32 9 2 4 68 333 481 121 225 104 941 389 2581 8586 13345 11024 3601 100 1945 3221 2417 1125 1025 761 634 1017 2308 3589 2920 1769 205 356 901 673 365 337 229 65 4294770693 4294967291 1 131071 0 0 0 0 65536 65536 65536 0 0 0 0 0 65535 0 0 65537 0 4294967295 4294901760 65536 4294901760 65535 0 1 65535 0 4294901760 4294901760 65535 0 0 4294901760 65535 4294901760 65535 65535 0 1 0 4294967295 1 65535 0 0 65535 65535 0 65535 0 65535 65536 4294901760 4294901760 4294901760 65536 1 4294901760 65536 0 0 0 4294901760 0 0 65535 65535 65535 0 0 65535 4294901760 0 65535 4294901760 1 0 4294967295 0 4294901760 65535 4294901760 0 65535 4294967295 0 4294901760 4294901760 0 4294967295 4294901760 4294901760 131071 4294901760 0 4294901760 0 1 0 65535 4294901760 0 1 65535 65535 4294901760 0 131071 1 65536 4294901760 0 0 131071 65535 65535 0 4294901760 4294901760 0 4294901760 0 65535 1 0 65535 0 65535 0 1 4294901760 65535 +7000714511 12606617750 7670385710 2308146020 3140639338 3977832063 2344885002 274558122 123010642 150749898 91901636 16249429 5981427 2777514 17349644 7798497 17836581 69400993 76902511 155430903 109242398 
125647742 118696369 195676485 320175501 29279837 5693732 4665549 80570126 110458961 43238454 33928549 +65533 65537 4294377459 4294508636 851528 885834 1062010 594824 111965 1015085 2840242 3272985 2789282 1183482 114440 479120 878265 479890 110269 20456 10709 18068 34373 15385 2197 3505 6597 4210 1405 680 925 562 185 121 29 82 277 250 305 1930 4498 3538 1017 9225 22032 19433 12125 6154 3649 7825 20660 29090 17505 4525 16970 28730 49250 62309 39965 20032 13841 37672 46708 19450 18013 37376 44980 41672 30505 18265 20077 5573 19098 76081 62917 26073 10834 5200 1780 530 85 82 74 461 306 130 50 40 53 148 157 356 450 349 1952 3161 1928 180 65 20 305 221 2 8 41 29 50 58 34 41 25 137 65 68 205 292 160 104 13 53 333 349 262155 4294901755 4294901760 65536 0 0 0 0 65535 0 0 0 0 0 65536 0 0 4294901760 4294967295 4294901760 0 0 65535 0 4294901760 65535 0 0 65535 65535 4294967295 65535 0 0 131071 0 4294836225 0 65535 4294967295 1 0 65535 4294901760 0 0 4294901760 4294901760 4294901760 1 4294901760 4294967295 4294901760 4294967295 65536 0 0 0 0 0 65535 4294967295 65536 65535 4294901760 65535 0 65535 65535 4294967295 4294967295 4294901760 4294967295 0 1 65535 65535 4294901760 0 65535 0 4294901761 65536 65535 0 4294901760 65535 131071 65535 65535 4294901760 4294901760 4294967295 0 1 65535 4294901760 0 0 65535 4294901760 65535 65536 4294967295 0 4294967295 0 4294967295 0 65535 0 4294901761 0 4294967295 0 4294967295 4294901760 4294901760 4294901760 0 65535 4294901760 4294901760 4294901761 0 0 65535 0 1 4294901760 0 65536 4294901760 0 1 +5143990038 4153995263 3359220824 18692424053 18853862091 5915158204 4611446955 2935754086 246522545 212786044 73487302 43412398 12454365 3913636 1507440 11968048 52157752 201422122 143619492 274890780 551805067 557731578 571234780 528233559 720591179 144674549 4367463 5183587 29278078 4504694 1329919 3945004 +0 131071 917487 589793 9109705 172241 95621 5972 11738 587401 1913290 1226285 123560 31412 250256 905346 740480 128260 3920 11041 16237 8865 1665 1418 200 692 1160 549 425 565 788 272 16 13 50 53 104 49 10 925 3757 4369 1945 1810 2545 538 200 89 1348 5581 20225 23953 6305 3341 10778 7225 8825 4160 19610 45658 104897 121178 42426 9026 17972 12557 1685 229 2516 14125 29306 45442 38317 28240 37845 21506 2216 522 205 241 522 296 104 41 125 104 29 10 10 37 65 117 281 701 1889 1346 116 125 65 149 293 137 25 1 2 0 1 17 10 4 20 4 10 9 2 25 58 122 130 52 145 185 4294770695 4294901759 65535 65536 0 0 0 0 0 0 0 0 0 0 4294967295 0 0 4294901760 65535 0 0 131071 4294901760 0 4294901760 0 65535 4294901760 0 0 0 65535 4294967295 0 4294901760 65534 65535 4294901760 65535 4294901760 0 65535 1 65536 0 0 4294901760 0 65535 0 4294901760 4294901760 4294901760 0 4294901760 0 4294901760 4294901760 0 4294967295 0 0 0 0 1 0 0 0 65535 0 0 4294836224 4294901760 65535 65535 4294901760 65535 4294901760 0 1 4294901760 4294901760 4294967295 0 0 0 0 0 0 0 65536 0 65535 65536 0 0 1 0 4294901760 65536 65536 0 0 4294901760 65535 65535 4294967295 0 4294901761 131071 0 65535 65535 65535 1 0 65535 0 0 65535 0 4294901760 4294967295 0 0 0 1 4294967295 0 0 65535 4294901760 131071 4294901761 0 +737086475 199117112 1670438004 10741692241 3356388559 1768674154 5397602132 1312352655 102546050 47396047 7471078 7495982 6128779 2005378 658898 6712874 38438122 20278300 62843068 194318965 151369486 988510650 493465296 270768194 581474146 75460711 2947419 3483859 16066288 2640964 220569 1229451 +0 4294901761 4294705158 1638448 4279500705 142948 69433 13345 68274 996397 1763138 749842 73156 88488 1172434 2158025 731825 
12538 12308 28421 28170 8177 818 829 1424 890 61 18 346 1549 1625 296 13 45 104 45 74 4 482 2061 1850 493 485 2500 2482 650 181 657 5825 14004 10225 2917 1181 12013 12461 6868 4672 5924 79313 223760 163453 20205 5185 13250 2258 2642 2257 5584 22045 32045 13933 3204 18665 17060 7225 3034 200 2650 4337 2180 586 85 25 29 37 89 68 53 20 1 20 49 197 612 1165 650 10 625 544 325 113 10 17 2 0 2 2 16 25 10 4 9 10 26 5 17 50 97 97 100 104 50 65536 4294836224 0 1 0 0 0 0 1 0 0 0 0 0 0 4294901760 4294967295 65535 0 0 0 65535 65535 65535 0 4294901760 4294901760 65535 0 0 65535 0 0 4294901760 4294901760 65536 0 4294967295 0 65535 65536 65535 0 4294901760 65536 4294967295 65535 0 0 4294901760 4294967295 0 4294901760 4294901760 0 4294967295 0 4294901759 4294967295 4294967295 0 0 4294901760 0 0 65535 4294901760 4294901760 65535 4294967295 65535 65536 65535 4294901760 4294967295 1 4294901760 0 65535 0 4294967295 0 0 0 4294967295 0 4294901760 65536 0 4294901760 65535 4294967295 4294967295 4294901760 65535 65536 65535 0 65535 0 0 65535 0 4294901760 65535 0 1 0 65535 4294901760 65536 0 0 4294901760 4294901760 4294901760 65535 65535 4294901760 65535 0 4294901760 0 65535 0 4294967295 4294901760 1 4294967295 4294901760 4294967295 4294967295 0 0 0 +588176485 240583144 2884339303 9933891889 2127262302 5893697049 10187731939 970843117 202938032 52048018 10182704 2505728 11710226 3340048 743403 8872557 16728611 19896564 79197341 110845859 308742005 1700737148 216131009 279268534 212892002 46221585 3472275 2484835 12199449 3552489 276650 1095028 +0 4294901760 196605 4292804546 13631555 68884 10609 10394 257009 1233873 1149257 130562 4625 614344 2946890 2537905 438874 38705 28562 45473 5300 2941 4073 3250 4469 1306 117 800 1348 1025 289 9 106 116 100 34 41 365 785 340 409 457 2405 3060 829 613 1588 5634 5002 625 1553 3202 7289 3880 509 8145 35033 77401 81800 76829 13284 13505 29449 7124 793 10685 20905 12260 6761 2957 629 8677 10001 11498 27625 32521 26384 7146 400 305 185 212 162 25 45 20 5 61 72 205 290 205 481 722 1313 1989 809 265 97 29 89 50 26 34 16 32 36 13 1 2 2 5 17 65 106 68 10 37 113 116 104 29 4294967295 196611 4294901760 0 0 0 0 0 0 0 65535 0 0 0 0 4294901760 4294901760 0 0 0 4294967295 131071 4294901760 1 0 0 0 0 4294901760 4294967295 0 65535 65536 0 65536 0 0 0 4294967295 65535 65536 65535 0 4294901760 65535 0 0 4294901760 4294967295 65535 65535 65535 4294901760 0 0 65535 0 0 0 4294901760 0 4294901760 4294967295 4294967295 0 65534 0 0 4294901760 4294967295 4294901760 4294901760 0 0 4294901760 1 0 0 0 65535 0 4294901760 0 4294901760 65535 65535 0 0 0 4294901760 0 0 0 0 65535 4294967295 65535 65535 65535 0 0 0 65535 0 65535 0 0 0 4294967295 65536 65535 0 65536 4294901760 0 4294901761 65536 0 0 0 4294901760 0 0 4294901760 4294901760 4294901760 0 0 65535 0 65536 65535 65535 1 0 +229209565 325939649 4026845668 6746593896 1062037748 13185922822 12366346307 774579081 193349694 34146309 30290866 7585986 8640955 1200927 1094759 5399523 14986773 24429908 46767311 55975549 488988572 797725628 217726324 154908948 324268385 165305048 1940927 5406041 19815318 2349459 458869 1565114 +2 196608 4294574084 1572930 4284022686 46817 39850 7013 264953 1116002 741569 82548 109125 1580756 3388549 1536041 49129 49490 96570 92945 21929 1369 577 7065 6922 1481 193 1285 685 125 101 9 40 20 13 29 193 265 234 81 205 1961 2257 850 145 1322 2980 2210 522 1796 5576 4325 1773 757 7056 20629 15817 1297 7690 34690 62005 34660 6416 8276 22772 12850 3562 461 464 2624 5122 4176 13448 42698 68581 44890 13192 544 360 272 10 122 208 65 29 205 
257 116 72 8 106 400 937 1522 848 580 569 229 73 41 2 4 1 2 1 0 4 9 4 4 4 5 10 34 65 20 4 41 68 9 4 36 4294705150 131070 0 0 0 0 0 0 0 65536 0 0 0 4294901761 0 65536 4294901760 65535 0 65535 65535 65535 0 0 65536 0 0 4294901760 65535 0 0 0 0 65536 0 65535 65536 65535 1 1 4294901760 0 65537 4294901760 0 0 0 1 0 0 65535 0 65535 4294901760 0 65535 0 1 0 65535 4294901760 65536 65536 0 4294901760 65535 0 65535 0 0 4294901760 4294901760 4294901760 0 0 0 4294901760 131071 65535 1 65536 0 65535 4294901761 0 4294967295 65536 4294901761 65535 0 65535 4294901760 4294967295 65535 65535 0 0 0 4294901760 4294901760 4294901760 4294901760 4294967295 65535 65535 4294967295 0 0 0 4294901760 0 65535 4294901760 0 65535 4294967295 65535 0 4294967295 65535 0 0 4294901760 1 65535 0 65536 4294901761 4294901760 0 0 65535 4294967295 4294901760 0 +233236004 372059747 3720635405 4840014083 2474879002 16484548283 8507153627 598495833 467734248 39243814 47455426 8430192 3392392 377981 875318 2484565 15845558 18176914 32061401 56747499 180709466 485218951 266196081 74416012 596128810 163535924 2834952 6989193 14972360 1155977 156099 770625 +65534 4294639618 4294574080 1900468 4456617 25810 4068 25493 556177 1292776 590980 72041 348242 1676333 1656493 551753 60112 34514 85985 22417 5018 4240 4849 2437 369 296 730 3748 3370 802 117 41 20 4 18 65 146 50 136 685 1268 596 265 125 522 821 673 848 1637 4930 2909 229 641 3298 4234 2637 1961 11185 47912 46202 15509 6889 16130 26309 9893 3600 2801 3370 949 185 580 16144 42170 35050 10804 361 2141 130 225 26 64 125 41 53 128 29 61 169 41 136 226 233 173 592 1082 772 377 146 50 17 2 1 5 13 26 25 37 40 58 13 5 25 25 45 25 9 36 52 73 45 17 25 131072 4294967295 0 0 0 0 0 1 0 0 0 0 65536 65536 0 4294967295 4294901760 4294967295 4294901760 0 65535 4294901760 4294901760 4294967295 0 0 0 65535 4294901760 0 4294967295 0 0 0 0 0 65536 65535 0 65536 4294901760 65536 0 4294967295 4294901760 65535 0 65535 0 0 65535 4294967295 4294901760 4294901760 4294967295 0 65535 0 0 4294901760 0 1 0 0 0 65535 0 4294967295 0 65535 65535 0 4294901760 0 0 65534 0 4294836224 0 0 0 4294967295 4294967295 65535 65535 0 0 0 65535 0 0 0 4294901760 4294901760 65535 4294901760 131071 0 4294901760 0 65535 65535 4294901760 0 0 4294901761 4294967295 65535 65536 0 4294901760 65535 0 4294901760 4294901760 0 0 4294967295 0 65535 0 4294901760 0 0 4294901760 4294967295 4294967295 65536 65536 0 4294901760 4294901760 4294967295 4294967294 0 +86102584 683952431 5058673659 4485649816 3398580433 10669113701 3640325282 460145399 188133655 38263437 10525838 21231016 14400343 382117 684420 3970897 8388937 7420156 28785401 32618490 145087960 437388717 207952119 94581602 357456624 16216644 1674104 4431907 11204280 957639 802073 918059 +6 327688 1769458 4282515535 11402947 85924 5125 310577 2416180 3073025 495760 589018 3326836 4537657 1033570 36260 18065 321626 272578 53429 4580 8226 10685 5620 1154 3357 9316 5812 257 1018 577 320 261 74 149 356 144 4 281 1618 1570 122 772 1585 441 241 1933 9113 18773 11821 1465 2304 1381 800 6473 19042 63117 100922 24746 18365 61417 54308 20281 19730 29125 22804 11257 1521 5765 4612 5021 32481 44434 20749 25480 11601 2293 1105 392 221 233 65 1 9 85 272 505 577 185 121 130 73 146 1082 1490 109 193 100 405 265 137 65 58 25 50 25 100 256 73 40 37 58 145 162 274 450 346 9 200 613 701 337 131079 4294901760 0 0 0 0 0 0 1 0 0 0 65535 4294901760 65535 0 4294901760 0 65535 0 4294901760 0 65536 4294967295 65535 4294967295 1 0 65536 4294967295 1 0 65536 0 0 0 0 4294901760 0 4294901760 1 4294901761 0 65535 0 
4294967295 4294901761 4294967295 4294967295 4294901760 65535 65535 65535 0 65535 0 0 4294901760 65535 0 4294901760 65536 0 0 0 0 0 0 0 131071 0 0 65536 65535 4294967295 1 4294901760 0 65535 0 0 0 65535 65535 4294901760 4294967295 4294901760 65535 65535 1 0 4294967295 0 65535 0 0 4294901760 4294901760 4294901760 0 0 4294967295 4294901761 4294901760 0 0 65536 4294901761 4294901760 0 4294901760 1 65535 4294967295 65535 0 0 4294901761 0 4294901760 4294967295 65534 4294967295 1 65535 0 0 4294967295 0 4294901760 65537 4294967295 65535 4294901760 0 +266668513 3691427534 15395423891 7727109635 18438751400 18369169795 1496098715 1922356437 498670390 76025345 38520614 55382555 11059903 3032946 2294403 7035791 12546931 18996261 139567595 56779285 586886672 753999916 441184194 209198221 432436195 51134093 3818838 7142961 11417811 3240013 2520203 7044858 +65533 196593 4294639588 11927709 4265541550 136484 44852 565417 2545877 2637473 509834 663272 4704386 5246554 1440698 104245 98489 41465 29002 56074 5330 6602 5517 1538 3085 1165 610 1570 634 244 650 778 221 1 85 178 104 290 490 481 545 50 1537 2578 936 113 1193 1090 640 305 4181 5840 3042 738 2205 7154 2993 1908 25621 78440 95300 64040 16865 21289 30664 7108 397 7801 12653 14845 14005 5760 32909 45266 19562 1445 433 149 40 9 65 73 4 20 74 25 18 4 45 181 241 90 25 153 533 772 260 45 373 554 221 9 25 17 37 122 164 185 100 41 16 4 9 4 4 13 74 89 5 2 18 29 4294639618 4294967293 65537 0 0 0 0 0 0 0 0 0 0 0 65535 1 65536 0 0 0 0 0 0 0 4294901760 4294967295 0 0 0 0 0 0 0 0 4294901760 0 0 65535 4294901760 65535 1 0 0 0 65535 65536 1 65535 65537 65535 0 0 65535 0 0 4294967295 65535 4294901760 4294967295 4294967295 4294901760 4294901760 65535 4294901760 0 0 1 0 4294901760 0 0 131071 65535 65535 4294901760 65535 0 0 0 4294901760 65535 0 65535 0 4294901760 4294967295 131071 4294901760 4294901760 65535 4294901760 0 4294901760 4294901760 4294901760 0 0 4294967295 0 0 0 4294901760 4294967295 65535 1 4294901760 4294967295 0 0 1 65535 4294901760 4294967295 0 65535 4294901760 4294967295 65535 4294901760 1 4294901760 0 1 65537 65535 4294967295 0 4294901760 4294901760 0 0 0 0 1 65535 +510699404 4841596267 14772517623 7246185629 24267853064 22574962093 2117596970 393222986 231544386 46108194 21699402 10161343 6388452 4063825 1480860 4735985 10925705 16215577 18467886 50175417 117189975 905474412 419407332 217074536 403021886 19882058 775532 2591199 8345167 4105573 2208603 701925 +65530 65537 4291297277 8650556 9240936 79668 175508 1339133 3204925 1735445 392050 2646344 4728992 1264964 5245 59890 73250 97936 53224 10585 2745 2888 1730 2450 1962 360 674 202 74 61 10 58 104 106 8 136 234 293 509 785 1717 3065 2801 1009 98 113 290 954 1832 3770 1928 337 709 250 397 2465 4045 4050 3482 5968 4073 12970 38048 71761 93466 43237 20360 7501 3986 8264 9364 19890 22181 3856 7250 5722 2813 625 137 85 2 146 328 202 116 40 45 53 85 226 520 450 226 346 884 605 32 53 61 82 10 25 53 45 45 26 17 25 25 73 26 4 4 17 34 13 9 18 2 5 20 17 131072 4294901761 0 0 0 1 0 0 65536 0 0 0 0 0 0 0 0 0 0 0 65535 4294901760 0 0 0 4294967295 0 4294967295 65535 65535 65535 0 65535 0 0 65535 0 65535 131071 1 131071 4294967295 4294901760 65537 4294967295 65535 0 0 131071 0 4294967295 0 0 65535 4294901760 65535 0 1 0 0 0 4294901760 0 65536 4294901760 65536 4294901760 65535 65535 65535 4294901760 0 65535 0 0 65535 4294901760 0 4294901760 4294967295 65536 0 0 0 0 0 4294901760 4294967295 65535 0 4294967295 4294901761 0 4294967295 4294901760 4294901760 4294901760 0 65535 65535 65535 4294901760 0 65535 0 65535 
65535 4294901760 4294901760 0 0 0 0 65535 0 65535 0 65536 0 4294901760 4294967295 0 65535 0 0 0 4294901760 4294901760 65536 4294901760 4294967295 0 65535 4294967295 0 +652457100 8628642947 14757055619 8841174305 24237892960 6610685245 493631450 571414491 105514170 21432075 15554668 3558877 668914 839542 1533633 7037972 27708223 9256703 23851076 16580191 41757120 197930080 898068540 261650776 197017328 25353992 2933924 5856325 8516081 1137938 820737 476529 +0 4294049804 1638436 4283498390 17825836 36482 137482 1758721 3013972 1314532 725138 2806301 4445689 1804897 119645 10585 41225 43325 14624 810 197 1877 3400 2756 845 442 725 445 65 34 58 20 10 25 68 20 5 41 49 5 125 377 306 122 585 698 296 256 1145 2017 2473 1361 317 218 650 2561 6885 11700 15545 20096 17289 4625 1949 8177 51401 82025 70916 50473 33640 11449 25 6565 15178 16994 10309 4090 2705 697 461 605 346 212 65 5 13 61 100 109 202 365 477 425 377 949 1346 1114 709 340 221 37 9 1 5 10 5 8 25 45 68 74 50 29 34 26 9 26 82 80 53 1 20 26 2 0 1 0 0 0 0 0 0 0 0 0 0 4294901761 0 0 0 4294901760 65535 0 0 0 65535 4294901760 0 131071 0 4294967295 4294901760 0 65535 0 4294901760 0 131071 0 0 0 4294967295 1 65535 65535 65536 0 4294901760 65535 0 0 4294901760 0 65535 4294901760 65535 0 0 4294967295 1 0 4294901760 0 0 65535 4294901761 0 4294901760 4294901760 0 0 131071 65536 0 0 0 4294901760 0 0 0 65535 0 4294967295 0 65535 65535 4294901760 4294901760 65535 4294901760 4294901760 0 0 0 65535 0 4294901760 0 0 0 4294901760 0 1 0 65535 4294901760 65535 65535 4294967295 0 0 4294967295 65534 4294901760 0 4294901760 65535 4294901760 0 1 0 1 0 4294967295 0 0 65536 0 65535 65535 4294967295 65535 4294901760 65536 65535 4294967295 4294967295 0 +434304470 9920745040 13278495955 9817093234 24275996903 8307106192 312741830 232835386 22204618 22902495 12823996 4640824 813457 300122 381160 477293 3017640 6064145 16195485 17885065 92506863 222810473 693624334 602409633 190105119 28851156 2393151 8159542 17396328 1891183 1048006 1131508 +8 524285 9895896 4270195143 3931190 521050 344525 4615033 5651218 2000960 1023460 1035908 1288125 1318228 839060 260000 24916 22634 15613 6065 9242 8480 4689 4570 1321 148 433 1717 1856 625 4 90 45 10 53 185 122 25 225 170 202 74 306 829 522 850 1861 232 442 585 925 277 200 389 842 256 1037 461 1013 5018 2088 6170 11285 23432 33785 13033 9810 32789 83609 98932 35437 1985 185 1418 7769 5458 1828 1145 244 106 281 288 305 424 404 80 34 25 74 241 173 130 89 325 245 52 45 180 170 61 32 53 29 5 0 2 2 1 20 17 10 65 40 53 65 100 117 29 125 128 45 41 4294836226 4294901760 65535 0 0 0 0 0 0 0 0 0 0 0 4294901760 4294967295 65535 0 65535 1 0 0 65535 0 0 65535 65535 0 65535 0 0 0 1 131071 4294901761 0 65535 4294901760 65535 65535 4294901760 0 4294901760 65536 0 4294967295 131071 131071 4294901760 65535 4294901760 0 0 4294967295 65536 4294901760 0 0 4294901760 4294901760 0 4294901760 0 0 4294901760 0 65535 4294901760 4294901760 4294901760 4294901760 4294901760 1 0 4294901760 4294901760 4294901760 0 1 4294901760 65535 4294901761 0 4294901760 4294901760 0 0 65535 4294967295 0 4294901760 0 4294967295 65535 65535 0 65535 0 0 65535 0 1 65535 4294967295 4294967295 4294901760 65535 0 0 65535 65535 0 4294967295 0 65535 65536 65537 0 65536 4294901760 4294901760 65535 4294967295 0 65536 0 65535 0 4294901760 0 65535 4294901760 0 4294967295 0 +2360907925 23641099792 24190479903 8989470020 8514477146 7420507469 1770403678 154991439 77470283 57133060 18375458 10645259 8301376 511665 1171654 1838584 3423538 11181629 10284332 7209387 12047105 79752205 
362456965 901653550 171681651 26149641 5558929 3296404 3567963 1280080 439503 2015916 +65522 6356955 4285202157 64488262 4172218254 685474 1234692 4451872 3511885 1719952 368744 1007812 982906 341332 116849 218437 167081 100 38153 25480 9026 6404 13573 11072 4292 1088 1465 441 674 2372 1053 61 157 272 522 1025 205 369 425 260 2500 1609 5 1525 1616 1300 5881 14004 18089 13625 8389 4082 1028 394 2250 5536 3474 5832 31709 71474 76330 38210 5161 5429 22093 12416 6724 10996 41242 60353 11680 1700 4562 4442 4861 2097 146 1172 1234 1261 2050 1042 89 32 65 194 410 580 365 340 377 482 841 193 200 362 90 16 53 265 314 53 122 85 100 148 64 68 58 34 106 32 212 425 397 194 13 80 250 148 41 4 4294901759 262143 4294901761 65535 0 0 0 0 0 0 0 0 0 1 0 4294901760 0 0 1 4294901760 0 0 4294901760 65535 4294901760 65535 0 0 65536 4294901760 65535 4294901761 4294901760 65535 4294901760 4294967295 4294901760 0 65535 0 0 4294967295 4294901760 65535 65535 4294901760 4294901760 0 4294901760 0 1 65535 4294967295 0 1 4294967295 0 65535 65535 65535 65535 1 4294901760 4294967295 65536 4294901761 0 0 0 65535 65535 131071 4294901760 0 0 4294901761 65535 0 65536 4294901760 4294901761 0 0 65536 4294967294 0 1 0 0 0 0 4294901760 0 0 0 65535 0 0 4294901760 0 65536 0 131071 1 0 65536 0 65535 0 4294901761 0 0 131071 4294901760 65535 0 0 0 65537 65535 4294901760 65535 4294901760 65535 65535 4294901760 65535 0 0 65535 4294967295 4294901760 1 4294967295 0 +4960298104 22328432083 16843586488 5872508968 6210088640 2146584904 1299267949 336760434 159261111 95975655 52071342 9469394 14208987 3370166 6696095 6916563 17041752 37625449 178646601 71244711 125940835 741172861 290740088 471878356 115904649 27906931 9524357 9782136 6484974 3475139 2605818 5507266 +12 4290510856 4271636369 79756588 4260824948 1452685 88650 802530 429458 160901 54333 448645 1545424 1195501 406145 301652 230096 33524 793 1165 5266 6929 5832 8164 4496 1745 2890 482 1480 1937 832 153 50 293 757 1154 610 1360 3026 2257 401 313 1690 745 765 2106 193 941 841 85 97 1549 3172 1250 733 656 1301 6074 15538 16180 14533 2132 8069 19337 8281 3650 8885 19805 49130 56093 15241 130 865 1090 1154 4148 3125 244 1850 3145 1637 153 53 122 137 410 617 845 1445 1885 1768 1114 277 162 1025 970 265 250 810 1061 338 125 178 52 8 61 85 26 17 4 25 113 52 53 49 9 5 61 26 2 10 89 4294574086 262141 4294901760 65536 4294901760 0 0 0 65536 0 65536 0 0 0 0 0 4294901760 65535 65535 0 0 0 65535 65535 0 0 4294901760 65535 0 4294901760 0 4294967295 0 0 4294901760 65536 0 0 0 65535 4294967295 4294967295 4294967295 65536 0 65535 1 0 0 4294901760 0 65535 0 65535 0 65535 0 4294901761 0 0 4294901760 0 65535 1 0 0 4294901760 4294967295 65535 1 0 0 4294901760 0 4294901761 0 65535 1 0 0 0 0 0 0 4294901760 0 4294967295 0 65535 0 0 65536 4294901760 0 0 4294901760 0 0 0 0 4294901761 0 4294967295 65535 65534 0 0 65535 4294967295 1 4294901760 4294901760 65535 65535 65535 4294967295 4294901760 0 0 65535 0 65535 0 0 131071 4294901760 65535 4294967295 0 4294901760 131071 4294901761 65535 4294901760 0 +4513219420 3581289186 2032493116 1245889388 7880899241 5927486252 2069957980 383788919 30756951 60584197 45678440 16577108 13948981 3196244 9749280 22907599 13314734 15141562 9734323 21475900 55257346 195892955 191813710 507652966 86369699 38652965 10923132 26823590 16256703 7986545 1208121 1088002 +65518 4292739084 4286710136 4268227305 100402084 1213136 774400 3558785 3563856 1119425 345512 176346 79865 352397 558610 392516 95720 5986 28836 7993 680 3425 3505 842 265 1040 6610 5417 1225 521 221 306 205 137 104 
[large hunks of machine-generated numeric test data omitted: several thousand unsigned integer values added to generated test fixtures; not human-readable]
4294901761 327678 4294836225 131071 65536 0 0 4294901760 4294836225 4294901758 131071 4294770688 65533 0 131069 65538 4294770688 131070 131071 0 65537 4294967294 196609 4294770688 262144 2 4294901760 4294901761 131071 4294901759 4294901760 4294901759 65534 65536 4294901760 131072 3 65535 4294770690 65534 4294901761 0 4294901760 4294836224 4294967295 65534 131073 4294901759 4294836225 1 4294901758 4294901761 65537 0 4294836224 4294901760 65535 65537 4294770688 4294967294 131072 4294901761 65535 4294901758 65534 4294967295 4294901761 4294836223 262143 4294901762 4294901758 131070 4294901760 0 131073 4294901758 131074 0 4294967295 65537 4294836223 1 65533 1 131071 4294705155 196605 4294770689 131070 2 4294967294 1 65534 4294901761 65534 131071 65538 65538 4294901759 0 131070 0 +3224016691 4338379168 12976914061 20894584783 5034741646 1133599883 648001229 502753565 1253051496 1702828293 4085184714 1163694552 202926742 105583933 346290304 440669922 979093135 584597296 211004197 425233986 117367561 164643633 613232986 299306589 490263396 602047556 1614927769 743901983 736697030 1072714158 1590236639 129053730 +65497 4294246536 4285202152 11337787 7995451 112400 322186 61000 128437 156917 433013 1600394 1165141 389000 371620 91250 14521 9945 43210 100445 116500 41000 5800 75322 338890 310913 62705 25649 7241 11285 5386 788 4793 8345 2293 3089 25317 57130 69922 32029 2074 1962 4672 3973 12058 18265 32245 25625 10 11618 229 9061 4005 14705 19625 22786 20933 4706 3748 15370 15921 11602 10036 42125 52021 10025 9872 15341 25064 13810 692 11273 21800 11465 3233 14900 14842 5409 5741 10201 14221 27250 39089 33800 9041 1961 1885 2081 1721 1933 2554 9505 47944 51245 2381 30617 43265 2665 26177 53090 23545 27536 34112 18628 16769 3848 10525 29597 22117 8642 4436 3413 1205 436 3121 4673 4049 3881 2749 1025 1690 709 1441812 4293591046 196603 2 4294901760 1 1 0 4294836224 0 4294967295 65535 4294901761 4294901761 4294967294 4294901760 196607 4294770689 4294967292 131071 4294901761 65533 65538 4294901759 262145 4294836224 131073 4294836221 131076 4294836222 65533 131072 4294836226 196605 3 4294901761 131070 1 4294901759 196607 4294901759 131073 4294836224 0 4294901759 131070 65534 65538 65533 131074 4294836224 65537 4294967294 2 4294836224 4294901760 131071 4294836224 65537 4294901760 4294836225 4294967294 131073 4294836224 65533 131075 4294901756 1 4294901761 4294967292 131072 65536 1 4294901758 65536 65535 65536 4294967295 65536 196608 4294705153 327679 3 4294901759 65537 4294836225 65533 65539 4294901761 4294901758 131071 4294901761 0 131072 4294639617 131071 65537 4294770686 131070 65538 4294967293 131071 0 65533 131071 65537 65537 65536 4294901759 131073 4294967294 65537 4294967295 2 4294901760 4294967293 65536 4294901760 4294901761 0 4294967295 65535 65536 4294901760 4294770689 4294967293 4294967295 196609 4294836222 196608 4294836227 4294901758 0 4294967294 65535 +1097476178 915709831 845753022 4769534864 8298187396 3063024599 718278018 279312287 787638110 418343968 2354346912 792335948 96968259 50105885 144381988 565074570 118376217 205787733 205129540 133248688 260822608 219911270 468065353 276707757 230721104 274481966 385569749 222564925 681777581 671036064 364396821 93738895 +65513 4289527819 6291180 21562023 4236770582 1150020 316385 308149 883601 732922 564066 1075922 1039924 1053202 622549 209050 157421 166225 146061 111410 19144 14585 50612 23202 8138 53716 102772 65305 10049 2906 7813 8749 6970 2665 2521 3601 3392 13282 15293 29341 15380 5714 6824 3920 481 349 949 14065 42877 31058 23357 54085 
68202 43541 15464 15188 19816 997 3613 45 9490 4885 2965 29609 34949 5330 1629 10898 26690 36178 15080 4481 3250 1429 7120 24197 24466 14050 2545 1618 2813 6500 18944 25992 27169 11700 4985 3988 2581 2512 3560 9461 12005 27233 31120 41480 42466 25805 39002 45853 39572 56653 24848 11290 22786 12725 305 6506 7585 8489 2689 1157 6664 3001 1877 2425 964 680 17 1802 2952 1466 1638402 4294443014 65535 4294836226 131070 0 4294901762 4294967295 65537 0 0 4294901759 131071 4294901760 4294967295 65536 4294901760 196607 4294836225 4294967294 4294901761 131068 4294901762 131071 4294836226 4294901758 4294967292 262146 4294770689 4294967295 131073 4294901757 4294901760 0 65535 0 0 2 65533 4294836227 4294967292 196609 4294901762 4294901759 196607 4294836224 0 131070 65537 4294770690 196606 3 4294836222 131072 4294836224 65535 4294836225 65533 0 131068 65539 4294901758 262143 4294901763 4294836226 4294901760 65534 4294901762 4294836220 262144 4294901759 131071 1 4294901759 65534 0 4294901760 0 196607 4294901760 4294901762 65534 65537 65535 65539 4294836224 4294967295 1 4294901762 4294967294 4294836226 65534 4294770690 65534 4294967295 131070 65536 4294901761 4294967294 0 4294901761 4294836224 65534 65537 65533 2 0 1 4294967293 131071 0 1 65536 4294967295 65536 131073 4294705152 131069 3 4294901760 65533 4294901762 4294967295 131069 2 4294901760 65536 131072 4294836226 131070 65538 4294836226 131068 1 1 +4157140845 2613794305 4727677279 5266829776 7424920178 5789172423 1898804582 1268984326 589508951 284955983 330923704 652302163 117550766 71398485 47435034 192953592 149161984 37318450 352312438 682325331 291320265 96148797 293130744 328654162 209549663 211642083 327489326 178558175 766989134 843444145 208471186 70954403 +65474 7471075 4285071296 31391550 5768153 1783700 799396 204802 669652 1018018 1155050 259117 1089 65344 75272 230816 156404 26442 49562 359585 701026 573604 273325 62297 80272 101881 137252 126338 25625 6058 2426 2601 7309 17680 31385 36809 30925 12665 2305 80 5393 10625 3042 2650 12965 9649 10436 25250 23593 3593 11464 46649 60229 24392 19645 37873 20752 720 11765 22345 28250 34933 16712 349 18421 19385 8820 27050 91165 89828 43205 31841 20890 4420 5202 24804 22129 6445 30004 30042 15457 22266 16000 21530 22501 1781 461 4240 19946 26948 28537 21572 14841 14365 12346 4586 221 2180 10309 30025 43316 28881 9386 2329 7129 13241 21305 21805 4525 6597 31316 24425 1508 7865 11405 2237 477 2250 1621 81 520 1508 4293984281 786422 4294377478 262143 4294901759 65535 0 65535 131071 1 0 0 4294967295 65536 65535 65536 65536 0 0 4294901761 196606 4294901761 131071 4294967294 196607 65536 65538 4294967295 1 4294901762 4294901757 196607 4294901762 0 4294901760 65535 4294836224 131072 0 4294901758 131071 2 65533 65535 327680 4294836227 65533 131075 65535 65535 4294901763 4294967293 65539 65535 4294901760 65536 4294901761 4294967294 262145 4294836224 0 131071 65537 1 4294770689 131071 4294770689 4294967293 65536 4294901759 4294901761 196604 65537 4294901760 131072 65537 65535 4294836227 4294901760 4294901760 4294901761 4294901762 4294901758 131073 1 65535 4294901760 4294901761 131073 4294836223 65534 3 4294770684 65536 4294967295 131072 65537 4294901759 196607 1 4294901761 65535 131071 65535 196610 4294836225 131071 4294901762 4294901760 4294901760 4294901760 4294901761 65535 1 131072 65537 4294901761 4294967294 4294901761 4294967295 65537 4294967294 4294901760 65537 0 4294901760 131071 4294901762 4294901760 4294967294 131073 65537 4294639617 4294901760 65532 +7180286108 2837910580 4805969914 6663145588 
692560277 633585785 1288025150 782157731 4189382490 3312477086 958640458 1070047065 191834520 110954995 347219876 127320436 74010293 134611189 251041074 517901069 373619273 346336747 309180068 973624603 406885805 444251827 334254365 409321913 282700762 478584273 384632376 199537764 +65527 4287627376 720645 7143573 4270718896 757145 550817 4756 110245 627298 2308393 3108865 1528705 394837 22105 77480 128825 28885 64786 356546 427561 136421 219413 394501 101677 10324 97448 164900 80532 3474 4205 6376 3194 24336 28793 16978 39217 62442 41893 34033 29768 21305 25000 39546 41922 22181 7748 8082 44314 59252 16609 9872 19269 33172 20354 29530 19058 6586 185 2600 2665 8770 25640 34101 14837 1801 5545 13225 2725 60586 65185 17101 3796 4450 841 11041 14836 2768 13840 35537 48501 48365 30717 6928 5634 17405 12753 2501 4226 5077 6109 2628 2818 6290 612 10250 16034 11080 18020 23857 10933 4754 2386 2378 617 2501 4097 850 4745 6925 14717 24905 15424 13428 12629 2873 2402 914 1394 2000 2113 2290 4293197850 4294508530 262138 196610 4294901760 0 4294901760 0 65534 1 1 65535 4294901760 4294967295 4294901761 65533 65536 0 4294901759 0 4294901763 4294967292 196607 1 65534 196607 4294901761 65534 131073 4294770688 131070 131074 4294770688 0 1 4294967295 4294836223 131071 4294967295 131071 4294901761 2 4294901756 196607 4294967295 196608 4294901760 0 65534 0 131074 4294967294 131073 131071 4294901760 131073 4294770689 4294901759 0 65536 4294770689 131069 65537 131071 4294836225 4294901760 131071 131072 4294967295 65537 131072 4294901763 4294705149 262143 4294836228 4294967293 65536 2 4294901760 131068 3 4294901759 131072 4294967295 2 4294901758 4294967295 65537 65536 4294836223 4294967294 131071 196607 131071 65538 4294901760 196606 4294836224 1 196607 65535 131074 4294901760 131072 4294901763 4294705152 65535 4294967294 65536 1 65535 3 4294901758 4294967295 65536 4294836225 65534 4294836225 131072 4294705152 262141 4294901761 131071 4294770689 131069 0 4294901761 4294967292 131072 4294901760 4294901760 0 1 4294901760 65535 +3549752231 1081587661 2098594318 15670333735 13125036779 2212510201 668787910 799489594 2728346648 2295605347 1496766054 1019414618 350942674 121873407 341801137 545001242 382478865 397069631 417857927 361768822 317470503 130730389 284446221 477789783 266571294 486338168 433798147 131472826 231804649 215843507 191568207 245515551 +65510 4294901712 3276886 2293679 4285661173 156157 269009 831353 2287433 5069896 5219405 1429909 523673 282761 22160 7156 35410 43040 4797 22697 55777 17650 43880 94032 53312 10404 4493 1354 8164 1813 1789 4426 2692 1600 3796 1616 3601 7218 11489 8425 6185 10693 8593 2837 2210 1217 1508 5617 9577 18778 24293 9850 2209 1249 10769 8389 1745 1028 1945 1165 4 185 26 601 1066 985 1697 4580 7957 5725 2813 1585 1885 5113 4797 185 1989 1445 356 149 3092 6400 2722 2061 937 521 3577 5085 5917 7400 4513 980 596 1521 274 2969 4356 2276 3961 5924 2020 149 596 1373 2861 2624 2609 3209 1865 584 13 514 2141 3764 3233 697 205 569 505 272 73 50 4294311938 65532 327679 0 0 4294901762 4294901760 65535 0 0 65535 65536 4294967295 0 0 0 4294901759 65536 65535 4294901761 4294967295 0 65535 1 4294901761 4294901760 131072 4294901760 4294901760 131070 0 4294901760 4294901760 4294901760 0 4294901758 65536 4294901760 4294967295 65536 4294967295 1 4294901760 0 65535 4294967295 65536 0 65536 0 4294836225 4294967294 65535 4294901760 65535 4294836224 65535 65534 65536 65535 65536 4294901761 65535 65537 4294836224 65536 4294901760 0 4294901761 4294967295 4294967295 4294901759 131071 4294901760 
4294967295 4294967295 65535 4294901762 4294901760 65535 131071 4294901761 65535 4294901761 4294901759 4294901759 65535 65534 1 4294901760 4294967295 65535 4294901760 65535 0 65535 131072 65537 4294901760 0 4294967295 1 65536 1 4294901761 4294967295 65535 4294967295 0 1 4294901760 65537 65535 0 0 4294836224 65535 0 4294967295 65536 4294901760 4294836225 65535 65535 65536 4294901759 4294901760 65535 65536 0 4294901760 0 4294901760 0 0 +1100895391 5944158185 20634198057 31279035181 5532838495 1211751434 185620946 218129583 278239137 438555499 472347464 49821426 37765394 32961789 36121470 99356229 107249759 41443901 150329006 156190804 76305527 13528800 19747772 80944325 59395556 38588595 59382503 92873214 60702491 59337446 51312578 39023889 +65492 2293792 4293328932 4283367266 39714803 780930 883885 1639913 4933325 10029193 6855281 991978 493637 432425 155513 56900 21289 33800 20813 1513 29236 14314 2594 18577 3301 4321 13456 12209 16677 13141 1780 49 148 2304 2785 113 1181 1241 1765 7748 8522 9457 9421 1853 11882 20736 14594 3700 1625 3730 9377 10196 4421 1453 1097 5213 8840 6498 2701 4321 4717 3466 5380 4265 2705 2074 2450 1746 202 242 689 674 2081 3978 576 5185 13274 13549 6154 778 2770 4825 2729 730 493 2080 3617 2500 1602 1450 3005 5737 4394 2834 2213 2340 3029 3869 9601 18005 14837 5300 745 580 2601 3874 3076 1088 58 565 130 169 1069 1769 125 292 229 82 65 104 360 208 65537 4294967290 65539 0 0 0 1 0 4294901760 65537 4294836224 131070 65538 4294967295 65536 65535 0 4294967295 131071 1 0 0 65534 4294901759 131072 65536 4294901760 65535 65537 65535 65536 4294967295 65536 4294967295 196609 4294836224 131071 0 65535 65537 4294901760 4294901760 65536 4294901759 65535 0 0 65536 65535 4294901761 4294901760 4294967294 0 131071 4294901760 65536 2 4294901760 65535 4294901760 4294901760 65535 2 4294901761 4294967295 0 4294836224 65535 0 65535 131072 1 0 65537 4294836224 65535 65536 4294901760 4294836225 0 65534 4294901761 4294901760 0 4294967295 4294967294 2 4294901760 65535 65537 65535 65535 4294967295 65534 4294967295 1 0 4294901761 4294901759 131071 4294901761 4294967295 65537 65536 4294901759 1 65535 196606 65536 65537 65534 0 4294901760 1 0 0 65536 4294901760 0 1 65536 4294901761 4294901760 4294967295 65536 65535 0 4294836224 4294836224 65535 0 4294901760 4294967295 65535 0 +4410573765 12774598301 41832160677 44700663352 4576701622 2082746085 429967465 197563025 141399631 109501830 78255428 121860269 105534550 11302932 16973718 40400621 107411346 162682307 97826762 89032476 78784929 77709024 59426154 19965316 74563247 131305094 47049398 66518581 117846439 170567788 35402405 12946196 +65492 4286382203 27655901 4254794552 8977691 282146 867609 445460 620330 3301829 8228340 3806288 130792 1093 165818 354960 243236 152921 47888 47185 161114 124813 14425 20266 34450 1557 27145 32317 41288 54521 47858 23725 10000 4586 1225 5273 6658 27154 59077 65620 32825 4265 18581 33529 10760 7045 16666 113845 126565 33524 4033 7025 13005 6884 7045 31568 91177 126340 63268 5576 16976 37234 17245 2720 18770 12050 1042 2098 6125 3026 1865 24946 52105 34330 5512 1625 11969 6250 1313 12994 7090 58 4545 10996 4493 2009 7652 7240 689 104 6442 24037 16025 2074 386 1417 2509 548 6084 9857 5770 4724 9512 4369 13021 14733 5125 1153 3730 14482 17161 9365 5441 9745 9745 6737 6805 6373 5809 4329 2434 2417 2490393 4293066754 589816 65539 4294836223 0 0 65534 65536 2 131071 0 65536 65538 4294836224 4294901759 65537 4294967295 4294967295 0 4294836225 131071 65536 4294901760 4294901760 327679 4294836225 65534 65536 4294901761 
65535 4294967295 196610 4294901761 65536 4294836222 65537 131071 4294901761 196607 4294901762 131070 65535 4294901762 4294967295 131072 4294836224 196610 4294901758 1 0 4294901761 131071 4294901761 131071 1 4294770689 4294967294 4294901761 65534 0 0 65535 4294836224 4294967295 65536 4294901759 131070 4294901761 1 65535 4294836225 131068 131076 4294770685 196608 4294901760 65536 196607 1 131073 4294836224 65535 196605 4294901763 65534 0 0 0 4294901759 196607 4294836225 65533 196605 65539 65536 4294836224 4294836224 65534 65536 65537 131070 65540 4294836222 4294901760 131070 65538 65537 4294770689 65535 4294836227 4294901756 65534 2 65534 0 0 4294901757 131071 131071 4294836225 65534 65536 65533 65539 4294967293 65536 4294901762 1 4294901759 65536 4294770688 0 4294836221 2 +2894552609 3797055728 11070337016 44830023179 9421062938 919576239 2136647181 971272589 885414244 579265818 215974286 277805290 496340961 184461986 73794839 510567707 352635930 304997437 990823923 206599052 856982848 658699988 232404631 156340838 418513647 131715390 125235512 160984806 125341342 184518063 235497079 227972411 +65513 2686852 24182636 4286252162 4233886599 773152 171296 834637 2947113 3480034 181789 1832648 2328178 688005 524232 505921 232360 128578 78073 100369 14324 70402 97316 10141 54557 143226 60328 5668 3653 5524 6929 1017 914 226 1940 21061 73540 196180 213970 53146 4505 26469 18756 12868 10312 6184 4457 28442 56009 45764 37264 31378 11700 841 4706 5617 333 13625 27200 4717 712 1525 21290 27044 5202 85 586 7450 5625 5458 11965 7433 1000 17117 11945 2258 6689 14501 7893 1088 11357 9572 7328 16865 20800 17728 19172 21365 10674 5096 12944 20717 12506 7300 2858 4036 11257 10984 5485 6409 14625 27437 40690 40954 23329 4645 290 436 954 2340 15250 15220 11729 10634 6786 5746 4768 2080 884 545 1637 1108 4294770699 327684 4294443012 4294901753 65537 65535 4294901762 65534 4294901762 4294836223 131071 0 4294770690 131071 4294901761 4294967293 131071 1 4294967294 65536 4294836224 65536 65535 0 262143 4294836227 131070 1 0 4294901761 65534 2 131071 65536 4294901762 4294967294 65534 65537 4294901762 4294901758 4294836225 1 1 4294901759 4294901759 65534 65536 4294836226 4294901757 262143 4294836225 65535 0 65535 65534 131075 4294901758 65536 4294901760 1 4294967294 65538 4294967295 196607 4294836226 131071 4294901761 131072 4294836222 196610 4294770688 0 4294967294 4294836225 4294967295 65535 4294967295 131071 65536 4294901763 65532 131071 2 4294901757 393213 65538 4294901764 4294967295 65538 4294836225 4294967295 4294901758 131073 4294901758 0 4294967295 131073 4294901758 65538 4294901759 131071 1 4294901759 65535 4294967295 65537 4294901760 65535 1 4294770688 65536 1 4294836224 131069 4294836225 0 131071 4294967294 131073 65538 65534 4294901762 65536 65539 4294770689 65533 4294836226 131069 65536 262143 4294770688 1 4294967294 196607 3 +2696561632 6478771567 18237271645 9249167455 13258896558 5487310594 2927509458 1023239367 538600627 584901084 696905960 447012548 54857462 18338811 432082218 1686129018 366641126 156393143 512191027 344969883 115854742 173670715 179416736 110651092 181075210 171004213 331721282 316314710 208810463 532662227 243854811 196314357 +65483 983067 4288937820 34537729 4249485354 79637 37885 459316 1148458 897056 1375700 1389818 619805 169177 29097 30280 99257 49985 14330 13637 4745 2421 2465 13621 45890 17316 6370 11905 9530 9090 13901 21908 16040 5128 4097 17674 31520 21460 5186 8026 5834 954 225 2069 4469 3258 522 1250 1421 2930 8874 17352 15490 4658 250 778 3060 5402 2473 853 80 1445 
5716 5701 1213 232 2890 6749 7193 1700 125 116 2248 1521 2125 2452 1649 466 794 2689 2080 530 197 2725 3650 985 657 2340 2125 410 1424 4105 4040 2704 1409 298 569 1105 514 281 629 698 2448 7688 4437 405 125 725 2169 5252 4925 3204 3610 1332 361 709 800 544 580 689 298 25 4294574087 65530 1 4294967295 0 0 0 0 4294901761 0 0 65536 0 65536 1 0 4294901760 131071 0 196607 4294901761 4294901759 65535 65535 0 65536 4294901760 65536 4294901760 4294967294 65536 4294901760 131071 4294901761 0 65535 4294901760 4294836224 131071 4294901760 0 65535 65536 4294901760 4294901760 131071 4294901760 0 1 0 65535 4294836225 131071 1 65535 4294967295 65535 0 4294901761 4294901760 0 4294967295 131071 131071 1 4294967295 65536 131072 65536 4294836224 65535 65535 65535 0 65535 1 0 131071 4294901760 131074 4294770688 4294901760 0 4294901761 4294901760 65537 4294901760 65536 0 4294967294 131071 4294901762 0 4294901760 4294967295 0 65535 131071 4294901760 65535 1 4294901760 65536 65535 0 65535 4294901760 4294901760 0 4294967295 0 4294901761 4294967295 0 1 4294901761 0 0 65534 131072 0 4294901760 4294901761 4294967295 4294901761 65535 4294901760 131072 4294901761 4294901761 65534 0 4294836225 131071 1 +325785899 2972654488 6067265796 9334371356 5637012374 984307289 458104498 335088999 76713832 50761382 262959506 105377715 131984032 159794061 181939775 177845178 41791465 35482416 42788660 156130189 48261205 38297859 56378337 62910556 31262894 30750286 39552841 47077650 36096826 49911884 74330384 43967527 +12 4294770566 4276617394 53607463 5900119 3021346 860317 1050113 6327085 8662600 5575498 1084240 450617 65349 308768 487565 146690 15140 29674 59357 93901 243828 383162 194192 370 85418 105370 65938 26093 27770 50404 30545 6341 38281 60370 38186 40450 33293 857 13320 13481 12946 29709 45530 21325 900 12928 21892 30250 34229 22061 15178 22250 47673 48313 45841 28372 3332 488 3418 9613 1913 3616 5098 281 4594 1181 785 490 12676 32058 27898 14618 8845 15073 35285 29993 3988 1394 1040 449 2624 3697 2305 4733 1994 680 8321 10900 5653 10880 5018 1930 6442 6917 5525 6100 15769 13513 2925 7346 22932 32689 17429 5968 11125 6905 3730 365 3929 6100 3434 6920 569 3600 4040 3778 2848 1877 1300 328 178 4294836225 4294770695 4294967288 196610 4294836225 196606 4294836226 0 65534 4294836224 327679 4294901761 65535 65536 4294901760 196606 65537 1 4294901760 65533 1 65535 2 4294967292 1 4294967295 131072 4294705153 65531 196612 4294836223 4294967294 4294901763 65532 0 4294901760 131073 4294967294 65535 2 4294901760 4294901760 4294967294 196607 4294901762 4294967294 131070 4294901761 196607 196607 4294836223 65537 0 4294901760 4294901758 196608 4294901761 65535 4294836225 131070 2 0 4294770688 196606 1 4294901760 4294901760 131072 4294836226 65535 3 4294836222 0 4294967292 0 131072 4294770690 4294967293 65538 4294836225 4294901759 4294967295 4294836224 4294967293 262143 4294901762 4294770689 131070 1 4294836224 4294967292 131073 0 65536 4294901761 196607 0 4294901761 65537 4294836224 4294967293 4294967294 196608 4294770688 131070 4294901759 65536 65534 4294901760 196607 131072 65536 2 4294901759 131071 4294836227 0 65535 4294901760 131073 4294967295 65536 1 0 4294770688 4294901759 4294967295 65535 131070 4294901761 196606 4294901762 4294967294 131072 2 +10990759093 12003943325 42266037627 37612699642 4202785560 1862249317 2302618798 370490871 847458520 2504998573 918153876 744446242 396277496 330378407 518987036 245698150 264961315 293261687 345452014 442180495 506957636 87733807 52827940 199969695 396467384 174260642 71157899 153940723 
181696593 384841219 165598855 94858503 +65514 4293001326 4287430923 4251581358 105251650 1535482 1575605 4387666 8959808 2184674 1582309 4062725 977993 383440 336232 52666 31729 7481 65572 55764 50989 178949 367033 398890 237653 51641 4608 10525 2410 2378 1597 1601 4849 6961 14473 41810 95778 41876 5648 30202 14365 2873 14386 3748 38440 42289 16666 23965 45245 26570 9448 9549 17000 13138 8986 11817 421 11290 14885 9061 7857 941 10980 23165 12773 2260 1105 3890 8356 4148 3445 9901 6173 2000 4885 12674 26261 17044 1405 5392 5290 1800 290 1156 8485 23113 35685 36497 17485 4973 10730 15332 3757 169 4450 25930 43154 16936 3373 8 4253 1570 5122 7673 1538 1460 1378 5930 4052 145 6673 12752 9986 3140 1642 449 2089 3341 40 1753 2105 1024 4294377458 458750 65540 4294901762 65534 3 4294967295 131071 65536 131074 4294901761 1 4294901759 4294836224 131071 4294901760 4294967295 0 4294967295 2 4294901758 131071 131073 4294770687 65536 65535 2 4294967295 65535 65538 131071 3 4294901757 4294836226 65534 131069 1 0 4294967295 196607 4294836224 196606 131073 65534 0 65538 4294836225 65534 65537 196607 4294901761 0 65536 4294967294 196610 4294836223 131071 4294836228 0 4294967295 3 4294836222 65536 4294901760 4294967295 131071 1 4294901759 65535 4294901759 262143 4294901763 4294967294 65536 65536 3 4294836222 131071 131074 65535 65537 4294770686 131072 4294967295 4294967295 65537 4294901762 0 4294901759 65535 65538 4294967294 262143 4294901761 196609 4294901761 4294967295 65537 65535 4294705152 196608 4294836225 65534 65537 4294901761 65532 2 4294836224 65538 4294770686 0 4294967295 262142 4294770688 131074 4294770689 196607 4294770687 65537 4294967295 131070 1 4294967294 4294901762 4294967294 0 4294967295 0 65537 4294901759 65534 2 131073 4294705152 2 +8287008349 28381126978 34695907392 16789000355 13437895853 2788548039 607776570 312623176 651528790 2640181090 2139524770 135776980 29481891 55807068 467957774 433989861 161592354 364352505 413919453 218426838 154740493 156825765 176803172 97899529 165406399 184670220 259085635 365699794 354694505 138039161 121835372 103399287 +65485 4325420 262178 4274520333 4286577800 1028681 785525 1233296 2705690 2584465 738433 148096 109172 22345 19233 57797 45769 8273 3625 3809 7913 8593 170 452 13124 21250 8605 9953 5769 925 392 2050 2320 1412 1609 4122 13568 19885 23104 16712 5905 8945 12589 8738 9333 4545 797 338 3033 1666 613 605 857 2405 136 3145 3517 197 1445 2965 3616 2896 1921 1924 490 250 109 1250 2858 1301 45 305 292 180 1025 1445 2152 1261 1037 1445 2621 2466 1777 298 1220 493 193 73 2152 2740 1421 3274 2906 169 313 2777 8594 6865 2509 1201 1097 325 125 820 673 226 25 218 1810 3497 1549 100 1105 629 65 29 442 1138 1105 425 36 50 65528 131074 4294901760 196606 1 65537 4294901760 0 0 0 0 0 131071 4294967295 1 4294901757 0 4294901758 0 4294967295 4294901760 0 131071 0 4294901760 4294967295 131071 65535 1 0 4294901762 131071 0 65535 1 0 4294901760 65538 4294901759 65536 4294967295 4294901760 131071 4294901760 4294901760 65535 131071 0 65537 4294901760 4294901761 4294967295 65536 65535 131071 0 131073 4294901761 65534 0 4294901760 4294967295 65536 0 4294967295 65537 131071 0 1 0 0 2 4294770688 131071 2 4294901759 65536 0 4294901760 0 131071 4294901761 4294901760 65536 0 4294967295 4294901761 4294901760 1 4294967295 4294901760 1 4294967295 4294901760 4294901760 0 131070 2 65535 4294901760 0 65535 65535 0 0 65536 0 4294901760 65538 4294901759 65535 0 65535 65536 0 65535 65535 4294967295 131071 4294901761 4294901761 4294967295 4294836225 65534 65537 4294967294 0 65535 
4294901760 65535 0 4294967295 4294836224 65535 1 +4911003947 8763526232 15394600326 7189649512 756198362 243463683 343585242 95431383 52850061 33645734 114133620 104520383 28547201 20236364 72365414 221040839 139421779 94663512 24179563 17080685 32425849 42222208 22805983 20806474 15004565 34699445 25032637 39707359 78535598 35508375 28240865 18823022 +65448 4282515537 28245573 1376977 4283170590 154760 387700 1140560 1980212 1208146 432692 341002 244325 58760 40842 261416 376081 233837 66617 26485 55130 111332 110752 79253 87165 56405 1460 42365 28900 11161 16861 9605 5200 5617 24457 20392 1717 149 3488 32485 32786 6205 1685 19890 38440 38545 14689 4633 1730 3341 25700 48544 35528 15129 15385 15028 8842 3796 53 10000 22688 12970 1937 890 4490 4162 725 2385 9685 10413 2465 898 1165 1037 7508 11090 8552 3442 442 592 11909 39257 34389 8138 5384 13050 7841 1346 7625 5569 9650 31117 35869 12170 1565 1125 9985 14625 4770 53 1517 3706 1994 4493 6130 2938 2624 2450 362 970 800 2080 5794 6472 1802 898 1825 1514 269 274 34 293 655358 4294836227 4294967295 131071 4294901761 65535 131074 4294770687 196606 4294901761 196607 0 0 4294836225 131069 65536 65535 2 65537 4294967294 131070 2 131071 4294836223 131073 4294901761 65534 4294901761 131070 4294770687 262143 65536 0 65538 4294967295 196605 1 4294901762 65534 0 65538 4294901761 131070 4294836225 65533 131074 4294901759 4294836225 196606 1 4294901759 4294901761 131074 4294836223 4294836224 196607 4294836225 4294770686 131073 65533 65539 4294901760 65536 131071 4294836224 65537 131071 131070 4294901761 4294901760 65536 65534 0 65538 4294901760 4294967292 2 0 4294836226 65532 4294901760 0 131073 4294901762 4294836224 0 4294770688 65533 4294770689 65533 262145 4294836225 4294967295 262143 4294836226 4294901759 131072 65537 2 4294836225 65534 65536 4294901762 4294967293 1 131071 0 4294836224 131071 4294901762 196606 65537 4294901761 4294901760 4294967293 2 65536 4294836225 4294967295 1 4294901760 4294967293 4294967295 65537 1 4294836222 65533 2 0 65536 4294836224 4294901760 196605 4294836225 65534 +1378411220 6974660084 9598764672 4186331074 1725406180 684780427 2169545646 1424156315 484405791 914588364 717986872 286024031 202642971 107504739 166271847 138646137 222384425 358232022 159562359 433089923 190166981 166600545 76685655 96984659 90062497 199844955 324996331 286756381 255877843 100836359 68013968 64490643 +80 2031338 24772768 4269998602 5176605 862930 1767033 603850 25448 115985 289748 382852 481618 435425 63620 91802 9250 104905 223577 132625 37000 6730 10809 19720 40153 71968 58049 8842 3700 6037 6826 3060 925 1220 6656 17492 23060 22529 44325 49810 19732 11689 29224 21250 530 3625 6185 15426 22882 47497 80789 50660 3922 1285 1124 1753 4068 3393 2512 3257 4868 65 8258 15578 10169 5513 2245 3904 2665 533 11920 11945 468 8541 23445 22049 24445 27585 13729 4628 18269 34340 14978 1885 1570 5153 15921 17581 14737 12610 9553 15976 35325 19325 1440 10949 12296 11090 24197 18169 2074 1405 2410 4777 5008 1730 3562 2216 3490 5057 3560 2069 3133 5584 4000 1360 1082 2504 4385 3016 522 729 2031618 4293656576 524289 4294770688 4294967294 131070 4294901764 65532 196607 4294770691 65533 131070 65537 4294901759 196607 4294901760 262143 4294836225 65535 2 4294836223 131069 131073 4294901761 1 4294705153 65535 65535 65537 65535 1 4294901760 2 4294770685 65533 4294836225 196603 4294901762 65533 131073 4294901759 0 131071 1 196609 4294770688 65535 4294901760 131070 1 4294901757 131076 4294901758 327679 4294901760 1 65535 65535 4294901761 65534 131075 4294836220 0 
131069 65537 4294967295 65534 131071 4294901761 131074 4294770688 4294836225 65534 65536 4294901759 131070 4294836224 4294967295 65535 65535 131072 65534 65535 131071 65534 0 131071 1 65536 65535 65536 4294967295 65540 4294901758 4294901760 4294901760 196607 4294770690 327679 4294770687 131071 131070 65537 2 4294836224 65535 65537 4294836224 131071 4294705153 65536 131070 2 4294901758 2 65534 4294836224 4294967295 131072 65535 4294901760 65535 65537 2 0 4294901760 4294836228 131070 131070 0 0 4294901759 1 4294967295 0 +6749167969 5310966082 625348962 2011130207 3103917658 1833241905 473889147 1147073253 789219547 132072786 439624698 338110434 61639154 30999599 160385005 424786885 317599916 137921697 425942590 486475842 40957736 68557134 141457430 89658927 280279769 399481450 251894730 354942073 352755460 181417864 93692656 86388788 +65302 12583204 4263378570 63177125 4212719771 260000 614681 631045 439353 4258321 6956797 4609834 2110429 449210 141661 13130 47122 164482 267361 238212 83233 46681 52285 46258 12424 4985 12260 25120 18265 10709 5141 4660 13250 19981 12577 17221 5480 14650 37060 13465 15353 19682 27220 22130 9050 765 7949 28549 17953 7757 12010 52993 51400 13025 725 8017 21008 18314 5553 725 1874 2146 20050 31946 18148 12800 13648 7880 5608 4441 10249 10282 10025 2938 9445 18280 4121 17629 15956 3368 3845 19429 13320 4149 3650 5716 2762 820 1277 4810 3361 164 8125 13226 5524 680 22237 29549 3573 4093 5185 3365 8621 17866 17905 5305 541 877 3785 11314 9908 2330 4 1924 3700 3092 3706 2834 1130 1865 2377 1825 2555900 4294443026 4294639614 262142 65537 4294836224 0 131071 65536 1 1 196606 4 4294901760 2 4294901759 196607 4294705152 327679 4294770689 131071 0 4294967295 65538 4294967294 0 65536 65537 4294770689 131070 4294770690 196606 4294836225 65536 4294901762 1 4294901761 0 4294770689 65534 0 0 131071 4294901761 262142 4294836226 1 65535 4294967295 2 4294967295 65535 131073 4294901759 131069 4294901761 196604 4294901764 4294967295 65535 4294901760 65534 131074 1 4294967295 4 4294901759 1 4294967294 4294901763 4294967295 4294967294 4294901764 4294836224 65535 131074 4294770689 4294967294 4294967295 4294901760 4294967295 4294967295 131073 4294836224 65534 4294770688 262142 4294967295 1 4294901758 131069 131071 2 4294901759 0 4294836226 131070 4294967293 196612 4294770688 65533 196608 4294901762 4294901759 196607 4294901761 4294901759 65536 65535 65536 4294770689 65534 196607 4294836225 131072 4294836224 65536 4294967294 4294901761 262143 3 4294967293 2 131070 65537 4294967295 196609 4294770690 4294967292 131073 4294770689 4294901759 4294901760 4294836224 65534 +2228758013 3860922313 12958321825 42569535724 18753064984 3033056480 465171772 1621218248 1366368893 479180367 199993446 164284814 124447333 130884997 166063678 249608692 268469255 176808652 241549506 453941159 209476733 129777923 308329916 161704970 186483300 240014684 146700237 87424784 247414711 239903892 163911599 89934460 +33 4283563984 12779434 7929744 2621936 165610 207073 290825 385097 1760429 2839745 1609760 442034 32013 6925 8042 3712 13172 27833 44500 21898 6410 1508 746 2257 4820 10853 11889 1625 2980 2561 2626 6337 977 6354 14004 9089 3001 2309 4789 8068 8500 5213 4082 1305 2340 6884 2389 2628 4505 1874 7250 9965 2980 610 353 2404 1885 13 1684 3645 1810 3033 7060 5341 4292 6154 6353 3604 3393 4360 3725 2305 136 5490 11773 5186 773 2305 4349 6849 1189 1898 1730 2041 4178 4057 2725 1657 2738 4489 2257 200 245 85 905 2610 1325 1090 985 34 1586 4034 1460 585 2333 1289 490 985 1348 488 221 900 709 901 905 221 296 
520 680 962 740 655370 4294967295 4294836224 196607 1 131071 2 65535 0 4294901760 65536 2 4294836224 4294967295 131071 4294836225 131071 4294901760 1 4294770688 131070 131072 0 65535 131072 4294967295 4294901760 4294901761 65535 0 65535 65535 1 4294967295 4294967295 1 65536 4294901760 65535 0 4294901761 131071 1 4294967294 131074 4294901761 4294967295 4294901760 0 0 65535 131071 4294836224 4294901760 4294967294 65535 2 4294967295 0 4294967294 4294901761 4294901760 4294901760 65535 65535 0 131071 4294901760 4294967295 65535 0 4294901759 131071 1 4294901761 131071 131071 0 4294901761 131071 4294901761 4294901760 4294967295 1 4294967295 65535 0 0 4294901760 0 4294967294 2 65534 65538 4294836224 0 65534 65536 0 4294836224 4294967295 65536 0 65535 1 4294967295 65536 65535 1 65535 65537 4294836225 4294901760 4294901760 1 4294901759 131070 1 0 0 0 4294967294 131073 65537 4294836224 4294901760 65535 4294836224 0 65536 4294901760 0 1 65535 1 +981921049 1839318665 5986324757 16937531804 5370394950 390524988 54784028 174597656 247498023 44105349 32766124 85536044 34912885 43770492 97878196 60989880 86880344 47567967 52628401 81227357 27662193 38248479 89976008 83498668 82354097 84771869 61738901 59775987 27305025 40147973 29121503 18911904 +65478 18677585 4285203159 4259118296 52887581 528292 1368869 2140069 2163869 2178589 1239745 55700 21456 231844 204530 207274 433280 484520 175972 73861 62900 14692 25636 84317 96106 90100 114777 121757 60065 16465 1258 8333 15761 13690 67141 70600 33245 29725 28649 24917 25514 57170 65905 20858 50 9653 51482 93600 76861 46196 45037 47952 22480 3236 1018 997 2601 8501 2245 2725 23589 54869 62050 35858 23333 23530 11930 2417 6005 5641 4297 7765 2458 3114 16372 41218 28496 6628 87785 121064 66344 42026 25442 10729 7753 1444 7012 9572 16160 13120 39317 50881 21445 4633 1933 2885 3497 360 4013 20557 27880 9616 5066 10413 1186 2554 3373 2626 3112 5389 9700 4013 450 900 1160 530 362 2329 2504 4138 4106 1233 1179682 4293066743 917503 0 65538 4294901763 4294836224 65536 65538 4294705152 196606 4294901760 0 4294901757 65538 65534 4294901760 262143 4294901760 131075 4294901757 131074 65534 65537 4294836224 2 4294967294 65536 131072 4294901763 4294901759 262143 4294836227 0 4294836225 196606 4294836225 1 4294967295 65535 4294901760 65534 65536 131072 4294770690 131068 65538 4294901759 0 196607 3 4294967294 131074 4294901761 4294836222 131076 4294836222 0 65535 2 4294770688 4294967294 196607 65536 0 1 4294901758 131071 0 131070 4294901763 4294967295 4294901759 0 131071 65536 131073 1 131070 4294901762 4294901760 131070 0 131073 4294901761 0 2 4294770689 131069 4294770689 131071 4294836224 65535 65535 3 4294770686 262143 4294901762 65533 1 65535 4294901760 196607 4294770689 131072 4294967293 131072 4294901760 131072 65535 65536 4294836228 4294967293 4294901763 1 4294770687 131071 4294901760 196606 4294901761 4294901761 4294901761 4294967293 65535 131073 4294901760 4294770690 65534 4294901760 4294836225 131071 4294901760 4294967294 2 65532 +4813127041 12566699389 13082844505 8427270364 484645604 1504024072 2547359144 2673686483 648288796 357668585 907410788 1024591769 314612570 182934563 599268946 378911578 561754031 366772503 928286985 469398985 59436565 379515144 561681954 135133180 264077498 1209598503 474263768 468533054 246433937 261913996 109527904 61620491 +65419 4292149514 4282777493 4291166055 18677710 75917 631325 1619386 1522196 2086625 3683809 2709986 694564 92410 148733 386720 324090 42089 58640 114613 106353 70130 67113 89905 46594 6205 1125 19573 26154 5746 2020 
4381 18056 34625 55456 79061 56360 6125 2693 2273 6833 4160 9629 1745 1709 3697 21074 24938 2600 1690 5508 19637 26725 8725 37 5245 10440 1424 11245 19242 10818 26996 4949 7157 1885 234 7681 20673 7673 289 5417 7717 2900 122 2581 13757 69277 110717 69953 27565 10629 5017 5537 13705 8941 1802 2250 7069 21460 20725 7177 49066 44789 17125 17389 12049 9256 1730 4925 14906 5945 10993 23285 16160 3250 1184 7146 2858 1557 11197 10825 4106 170 1369 2692 905 500 5785 5570 338 1658 3425 2752511 4294377489 4294508540 65532 65537 65534 65536 65536 0 1 4294836225 65532 65541 4294705152 131069 131073 4294836229 4294836221 196604 4 4294967294 4294901760 4294901761 65535 65533 1 4294967295 65534 4294901760 65536 4294901761 131070 2 4294901757 131071 4294967295 65536 4294770690 262141 3 4294901758 131073 65537 65535 131073 4294705154 131070 0 2 65537 4294901760 65535 65537 65537 4294836226 65535 4294967294 4294901761 4294836226 131069 4294901761 196607 0 65536 4294901762 4294836221 65537 0 131070 65539 4294770689 4294901759 65533 65536 4294901761 196606 1 65536 4294770691 65534 4294967295 4294967295 4294836226 131070 4294901760 131070 65536 131071 131075 4294901760 65537 4294967295 4294901761 4294901762 65532 1 65534 131074 4294836223 196609 4294770689 4294967295 65534 0 65535 0 4294901760 65537 131073 4294836225 4294901760 4294901760 4294967293 0 1 4294967295 3 4294836224 131070 1 65535 65538 4294901755 0 65538 4294967295 131073 65535 4294901762 65533 4294901763 4294901758 4294836225 4294967295 65535 +1723000379 8692041694 10853116140 22752064976 8914343954 1503676193 2359816603 809163005 855538770 703174981 433704754 124524739 110495301 220092074 660779188 179784049 70522154 99030830 161603184 204224364 113447459 231388479 132881881 138814588 294283155 971842590 183845225 459888531 399329293 271372669 157609500 84971990 +50 4291821357 23331131 4259839737 35848569 622505 1732802 2197666 1785928 3900884 6969748 3622169 574577 570314 359170 183569 294113 82053 55368 18421 29860 100026 106945 84505 24210 11560 17417 42913 47016 11412 797 6401 2050 4705 7604 10805 3893 4768 34957 55997 52040 38308 22345 10256 5162 8276 10985 2132 6029 11185 1744 256 3625 13320 21305 23825 9677 585 11565 6698 41130 105536 90340 33937 4330 1684 13352 8545 72 9608 21493 20137 6857 2309 11593 73181 106013 38281 21402 29608 34000 43216 19202 4804 4637 15242 24113 11026 4253 13474 18530 27833 22725 3209 11848 8712 1908 13472 6714 1060 13725 20474 9410 178 7114 2228 1538 5013 2080 425 628 117 521 544 1021 584 757 4181 2801 1044 340 2026 2162719 4293722105 524295 4294836222 0 0 4294836226 65533 4294901761 65535 2 4294967295 4294967294 131071 4294901760 65534 196606 2 65534 65537 3 65535 4294901761 65535 131074 4294770690 65532 131072 4294967295 0 131072 4294836225 65536 65537 65535 4294836225 65537 4294770688 4294967293 65536 4294901761 65534 4294901762 196605 4294901761 4294836227 4294901760 4294967291 196610 4294770688 131072 4294836222 65535 65535 196606 65535 65535 131075 131071 4294770690 4294639616 327678 4294836227 4294967295 65536 65535 0 4294967294 65537 4294901760 65535 4294901760 131071 65533 4294901761 131070 2 131071 4294836226 65532 0 131072 4294901762 65535 3 131071 4294836225 4294967295 65536 65535 3 4294836222 4294836224 196609 4294770688 4294901761 4294901759 0 196607 65538 4294901758 2 65535 0 4294901760 65537 4294901761 4294967294 4294901762 4294967294 65535 4294967295 65535 1 65536 4294967295 131069 2 131071 4294770689 65536 65537 4294705152 262143 4294836225 131074 4294836222 131073 0 1 4294901760 4294836224 
4294967293 4294901762 65532 +5955798956 13011487852 16747084566 40355942156 11218700845 3274420136 1875658878 795480823 350443640 849401228 378564042 300207510 199351197 45863545 85186913 346101031 492575079 134782785 93329725 104266023 226234829 646316968 585231526 186119288 548549905 882535759 379305301 357415627 254453355 225306409 61600250 32835119 +65405 10682477 4286447735 4268425200 12058093 756640 4044410 3447860 2286585 2516978 2042708 324250 457210 997450 253933 124937 502605 594578 203508 116821 96730 125 87938 47545 8068 47666 43940 52240 41821 5780 12025 20450 8537 10049 6525 1570 3506 208 7045 8685 3925 3125 11108 6098 7361 8473 8765 30685 30293 7497 3365 12554 16801 11258 9973 2549 10980 21697 28013 15208 7417 76729 72497 6245 11089 33797 32833 8212 1602 937 3194 6290 7381 13032 20644 9722 20808 25657 4005 4981 8746 2665 25940 25985 13346 5954 3217 6442 9418 1025 1556 4808 10484 4573 3925 4973 650 19210 36522 31714 17009 6784 13009 6757 7274 2978 3330 2005 185 260 1352 809 74 937 965 4513 4225 100 1508 1250 740 200 1441796 4294508562 4294508538 65535 65533 262145 4294901760 1 65535 196608 4294901761 4294901759 131071 4294836224 4294901760 131071 4294836225 131069 2 131072 1 4294901760 0 4294901761 65534 0 4294967294 65536 4294967295 262144 4294901761 0 2 4294901760 65535 4294836224 4294901757 196608 4294770689 2 4294901759 4294901760 1 65535 4294836224 4294901759 4294836225 4294770685 196605 131071 65537 4294901760 4294967294 196608 4294836225 65534 65537 65535 4294901762 4294967295 4294967295 131071 65533 2 4294901758 262143 4294836224 65533 131073 4294770688 262142 4294901761 4294967294 1 196606 4294901762 131073 4294901760 4294967295 65536 4294901760 2 4294901757 131072 65535 65538 4294836222 131073 4294901760 4294967295 4294967295 65538 4294901760 65534 65534 65536 4294901760 131070 1 65538 2 65535 4294770688 65537 4294967294 131070 1 131070 2 4294967295 131073 4294901759 65537 4294967294 65536 65538 4294967295 65533 65536 65537 131070 131069 65540 4294836224 196606 1 65537 4294901761 4294901761 4294901759 4294901761 131070 131074 4294705154 65533 +11838552610 22156984835 14878391780 12647758850 3556377710 4060740550 2633194175 3236337077 900648528 494806198 336715032 464102756 219720549 138303264 53837886 58682030 82108336 129712051 272164505 174576002 200090207 520329331 541018536 170864530 246263075 260575469 289063917 119565437 255126675 369452892 63912690 47084642 +65511 4286709780 13041463 5439927 4251909637 1557200 1182649 1541682 1988416 763001 590229 686545 92218 115117 346912 395585 264721 90266 18181 56645 132545 100256 45044 94649 93098 34421 6137 11093 617 10009 27242 20609 5490 488 7496 13858 12500 15317 15524 12193 7290 2738 8969 23962 11745 6586 28642 41269 17425 12456 41984 65509 34600 1517 31842 44210 26960 22817 21605 4453 10280 53365 57812 15725 3029 12020 12260 1370 4453 14800 18250 13450 7688 3721 9792 13261 12658 6056 2960 6613 8452 13441 20250 24336 17597 6066 1440 1856 12017 12836 1618 4346 1864 6436 16660 29993 35789 13241 1066 3673 3464 35793 65000 51400 39821 36650 30218 24712 20660 13250 5050 7650 18530 23225 21402 20813 20308 20392 16145 6100 1229 250 4294377476 458751 4294901762 4294836225 131071 4294836224 65535 65536 65536 4294901760 131072 1 65535 0 4294967295 196607 1 65534 65536 4294967295 131073 4294836224 131072 3 4294967294 4294901765 4294967294 4294901761 4294836225 65534 4294901761 4294901758 4294901761 65533 131073 4294901762 4294901759 4294901761 4294901756 196609 4294836224 65534 1 65536 1 4294967295 4294901760 65535 327679 
4294770690 65533 65536 131074 4294770688 4294836222 196608 1 4294901759 65537 1 4294770687 131071 4294901761 4294901761 4294967294 4 65534 4294836224 4294967295 4294901760 65535 65533 2 4294901759 65534 0 131069 3 131069 4294901763 65535 4294901761 65533 131074 4294901761 4294901758 2 4294836222 0 0 4294901761 4294901758 131071 1 65534 4294967294 262146 4294705154 4294967294 4294967292 262146 4294901760 4294901759 131071 65537 65536 4294901760 0 65535 0 0 4294967294 196610 4294705152 65534 131071 4294901760 65536 4294901759 131070 65537 4294901759 131072 4294901760 65537 4294901757 131074 4294836221 131072 4294901761 65534 4294901760 0 0 65535 +7418852477 9841116714 8646445852 4740775472 2076656057 1820878906 2458052915 715842561 756044873 743352930 694224940 113629248 148593534 126128948 119001275 171717464 121341830 232640909 394904848 550424478 456127913 409460680 359755392 189997607 200565690 188135115 298011472 154573244 371304148 773153329 661894937 487793218 +65415 5373654 1507908 21822767 4294312464 827713 2307780 822274 3331850 4827226 4347146 6191993 4376770 1055629 582317 151682 281538 356425 526525 112361 63125 293213 419525 273440 65650 86816 102589 69290 15668 34821 25605 725 13225 20693 11978 32162 34180 25636 82640 170953 111033 108496 51985 1853 35513 8513 13285 21001 22753 34120 9594 95042 62600 66664 62145 8552 52994 108980 63169 13850 36104 81325 37825 18866 20714 9945 10210 16829 12308 10525 1913 21841 40525 62900 59810 58066 58564 22805 40621 46196 18234 2745 925 11221 23764 16648 3442 8077 27325 21613 51080 72676 63522 33581 8641 25045 15794 8081 46490 18685 109234 160450 22736 20610 18317 32356 67477 41704 68725 53512 26792 26165 42797 26050 7328 4825 2005 1341 17722 15650 3285 2738 4291166223 1114088 65539 4294901761 4294836231 4294901753 262143 4294901765 65533 131073 4294836224 131072 7 4294705148 196604 4294901762 393217 4294770685 4294836226 196606 4294836223 4294967294 4294901760 131072 131071 4294836230 4294901754 196613 4294967291 65542 4294836221 4294901757 196607 65534 196609 4294705153 65536 65536 4294770688 2 4294967295 65534 196607 3 4294705153 196603 4294836230 4294836222 196607 65533 4 4294836225 131068 4294901758 4294901761 262143 4294770686 196606 4294901762 4294967295 65533 4294967295 262141 4294901763 262142 4294901759 65541 4294836223 65531 131073 1 4294901759 131071 2 65529 65544 65533 4294901763 4294705152 4294967294 65535 131071 65538 4294770691 4294770686 393215 4294705153 196604 4294967294 196607 327677 4294639620 262142 262145 4294770694 4294705146 262145 1 4294901762 4294770686 131072 262143 4294836226 4294901761 4294836222 131073 4294967295 4294967295 4294967294 262147 4294639616 65532 262144 0 131076 4294705149 131073 4294836224 4294901759 65536 4294705155 4294967289 196611 4294967292 262147 4294705156 4294705150 196606 4294639616 196604 65532 262149 4294836221 262144 5 +7928047846 10514725198 23134328842 35679202060 31108429793 7651737428 2201803923 3158697181 1540626881 2900683119 1372586694 731920790 298172871 148676784 295422333 990824747 1235861224 304410678 338340361 891490880 886560293 855966951 455502558 252757660 908360053 732981696 284796896 775448158 769196362 1512979353 1164291045 482140784 +65429 3801197 4285989046 4274519333 82379462 3714793 5318305 6057085 3584025 373061 153365 201140 1314965 534645 15809 87754 206081 362925 234664 35821 7585 1640 9665 24125 93229 55189 30421 11812 12010 22093 5904 545 298 545 904 19477 47605 57493 60205 31898 2257 2885 5050 13185 1117 5929 5002 1469 3973 17162 38216 29314 12253 28305 
35122 12610 4468 2113 2693 1370 10309 31700 72080 45833 8720 3121 1609 1249 3098 4058 4132 8000 14642 21825 11986 4477 20168 4765 2165 4453 5986 13185 17924 10949 3221 3338 4133 12373 12890 9530 2770 178 801 2125 1762 1970 14625 16025 2225 16144 18650 2953 1576 16209 26960 10418 1450 13780 9425 1201 482 1657 6676 11765 5668 4050 7522 7325 6725 2690 122 292 4293328920 786407 4294770698 4294967294 131071 0 0 1 65535 2 1 4294967295 4294901760 65535 65536 4294770686 65536 65537 4294901757 0 131071 65538 65535 1 4294901762 131069 4294836225 131072 4294901762 65537 4294901763 4294901757 131073 0 4294770689 4294967294 0 65535 4294901760 65536 2 4294901760 4294901761 4294770688 196605 4294836224 131074 4294836220 196608 0 65535 196608 1 65537 4294770689 65535 4294901761 131071 4294901762 65534 131074 4294901758 131073 4294836226 4294836221 196608 4294836223 0 4294901760 1 4294836223 196606 65536 4294901761 4294770689 131070 65535 65536 4294967294 196607 131071 4294901759 196607 65538 1 0 0 65534 4294901761 4294705152 4294967295 65536 4294901759 196607 4294770688 131070 65536 0 2 4294901760 4294901759 4294967295 4294901761 131069 131072 1 65536 4294836226 4294901757 196608 4294836227 4294967295 0 2 4294901759 4294836222 196607 1 4294901759 65535 1 65535 65536 4294836226 4294967294 65537 4294836224 65535 4294901762 4294967293 131072 4294901763 4294901758 4294901761 65535 +23623554631 35394494905 14210622249 1539047576 5700295980 2476863507 1209012726 2147737903 402298794 102639732 593830974 248993803 141217279 12980440 236394340 578069988 130111427 76563384 153297318 392535717 224287771 282915574 439071281 77078682 252028480 166151812 195283824 144397132 153003714 293391894 220106443 162080020 +33 4289462225 21627058 4245946514 8584601 90473 414976 2950690 2608733 648369 1751165 2881090 1745605 308125 14368 20500 11629 25749 75025 16154 24677 39794 25253 78498 191105 105268 4616 1090 21425 75301 66260 29812 7157 485 17914 37409 15668 656 1241 11216 7865 12869 43681 66170 50641 33293 17370 8656 1521 8450 47125 49365 19828 7298 7400 7808 11629 9925 6121 2169 13460 25378 29160 17396 1565 1229 4040 5429 2866 3232 7432 10853 3805 884 2920 14530 26773 10676 3716 4973 4450 3370 11281 10865 6176 16741 13658 5837 1217 2285 2665 10093 12157 8693 4850 11281 26689 20978 5741 5545 1649 293 377 5273 9113 256 5328 4253 2197 3645 3490 1961 2186 6554 2117 2965 3610 1025 1973 4385 3328 1604 4294311966 4294115315 458751 65536 0 1 0 131069 131074 4294901762 4294901759 196606 4294901760 131072 4294901758 196609 0 4294901761 0 1 4294836225 4294967293 3 4294770689 4294967293 131071 196608 4294705154 65533 65536 0 4294967295 65533 1 4294836226 131069 131071 131072 4294901761 131069 4294901763 65534 4294901760 65536 4294967295 4294901762 4294967294 196607 4294901760 262142 4294901761 65535 0 0 4294836224 65535 4294901760 131070 65536 1 4294967295 1 131069 65537 4294836225 196606 2 196605 4294836227 262141 65537 4294901761 2 4294901761 4294901759 65537 4294836224 0 2 4294967295 4294901758 262143 2 65535 131074 4294836224 65538 4294967295 1 65535 1 2 4294967295 65533 131074 4294836225 65536 4294836226 4294967295 65534 4294836226 4294967295 65533 3 4294967293 65538 4294770691 4294967291 65538 4294967293 131073 131071 4294836224 4294901760 4294901760 131072 4294901762 4294836224 131075 4294705153 65533 1 0 65535 65536 0 4294901762 4294836223 196607 4294770690 131070 1 4294770689 196608 65535 +1252719074 14398893403 10859527460 13061193324 13240512230 2017557642 135854050 335685158 267622253 392171933 1215484130 233473405 
564412739 222547375 231821509 87021685 318629949 580571438 219511774 412018020 147011999 220643777 203455138 89939285 154581398 189620851 202916053 151894800 292746372 130303163 99560020 87839523 +65419 4849684 6225992 4279697779 4288412792 1554085 1490450 696392 641842 1639834 7010125 6496241 949645 154244 61605 65425 34625 24106 38800 75490 59450 67589 197341 319465 186337 16981 22340 52825 78773 67769 13162 15665 15586 265 14569 10777 4772 33989 56394 51124 26597 8765 14948 19449 5402 3529 11701 6653 170 5938 5044 17357 23981 7381 2650 1546 3418 3005 19265 12064 829 89 45 1973 6010 2498 2960 9754 7913 3065 4133 5309 785 3988 14741 28580 18005 1396 3305 2248 405 1060 5045 21685 25000 15850 7720 7253 14261 17092 8116 145 3392 5602 7085 11540 32525 33597 10970 493 1402 2057 373 4360 5513 1058 5305 11861 9881 4933 7178 16385 14578 2770 340 2165 2048 1233 1033 740 386 1 720890 4294180871 262142 65534 65539 4294901759 2 4294967295 4294967295 65538 4294901760 131073 4294836223 131071 65537 131071 4294836224 131074 4294901759 65534 4294901759 131072 3 4294901756 131071 1 131070 196607 65536 65539 4294967295 4294901762 4294836221 65536 1 1 0 4294836225 4294901760 65537 4294836222 4294901759 131071 4294901760 65535 131071 65536 1 1 4294901759 4294967295 65534 196607 65537 0 4294901761 4294901757 131075 4294836223 0 131072 4294901762 4294901760 0 4294967295 131073 4294836226 196604 3 1 4294836224 4294836224 65534 65536 4294836224 131070 131071 131075 4294901758 1 65537 4294705152 196607 4294836225 4294901760 131073 4294770688 65534 65535 65535 4294901760 4294901761 65534 131076 4294705152 4294901757 262143 4294901764 4294967294 196610 4294836224 4294901761 65536 4294901759 131073 4294836225 131069 65539 4294836224 4294967295 65537 4294836224 4294967295 4294901760 0 131073 4294836222 2 0 4294967294 131073 4294836226 65535 131070 4294836226 65534 65537 4294967294 131073 65534 65538 4294901761 65536 4294901759 65535 +8140037620 5823570578 6890255178 42057838819 18794511313 1274369067 392094795 310414680 558606828 1571754616 1607541006 421514817 548857322 137486958 131860091 480050694 294248226 134600860 81883582 189721940 90830919 109094917 56525695 101515509 190477468 141464979 275775163 228652878 320289931 149924776 207833913 123357813 +65501 4294770691 3735530 1900546 4391180 259400 441338 555300 713410 1031220 2042969 2008480 1052861 426178 130010 29600 12554 3026 1961 2353 970 12850 54673 74762 15300 2925 6098 320 3370 2009 1073 8929 9928 1640 724 1332 2665 18820 23850 6890 85 2117 2965 1125 68 1202 5524 6277 3860 6340 13850 24418 19213 2809 965 2938 5057 18397 29489 18945 8020 5113 3541 961 164 1213 1585 5 2626 4090 2312 1684 4122 772 4100 5760 904 6464 9488 4625 533 1241 3809 3029 242 136 289 1745 4129 7396 3364 740 857 1865 2258 6500 14381 6800 562 2834 4450 4420 3133 1565 1217 3065 4057 2169 218 340 801 293 328 1921 2225 1517 2045 2729 1801 1258 1636 1872 1179622 4294901773 4294705152 65534 65536 65536 1 65535 65536 65535 0 0 0 0 65535 0 0 4294901760 65534 1 131071 65537 4294836224 65537 4294967294 131070 1 4294967294 0 131071 131071 0 4294901760 1 4294901760 4294967295 4294967295 65536 65535 0 65536 0 65537 4294967295 4294901761 4294901760 0 65535 1 4294901759 131072 4294836224 196607 2 4294967294 4294901760 65535 65535 4294901760 65534 1 131072 4294836224 4294901760 0 4294901760 4294836224 131071 65536 4294901760 0 4294967295 4294901760 65536 0 0 4294967295 131072 4294901761 4294967295 4294967294 65536 4294901761 65536 0 65535 4294901761 4294901760 65536 4294967295 4294901761 65536 0 
4294901760 4294901760 1 65535 4294901760 65534 65536 1 65534 4294901762 65535 4294901760 0 0 65535 0 0 1 4294901760 65536 4294901760 4294967295 65535 4294901760 0 4294967295 65536 1 65536 0 4294967295 0 4294901760 65535 65537 131071 1 196607 4294901761 65535 0 1 +1815637874 3568197906 5166037614 13304675121 8912509802 2301628493 265287052 32813507 29952031 375597467 265789246 31561726 29903417 69841397 35983956 169608506 39637086 27584505 96971589 210476099 139632021 233024928 32581846 41560165 62014558 97974207 37350504 68202916 118092531 91633746 42079014 44716264 +65516 327653 6946909 4278845492 4294180669 51538 222037 1028845 2040116 2852545 2398964 827300 409253 249026 136197 258505 544085 567890 255986 61073 17960 23162 110548 112945 47170 37636 4849 97 3577 23090 19465 1490 18560 30152 16642 21352 9605 3589 18281 9469 12337 9620 23173 22280 5281 5785 14504 11450 2260 5840 33085 73341 16081 10361 23337 13621 4925 6793 45821 33541 14689 25604 21290 6093 5501 16153 16776 7712 11713 13138 17914 30308 30440 23656 7396 13653 54613 44740 5000 9077 6148 14081 2512 1553 153 9026 28786 19865 2329 13000 10882 1985 149 2873 3985 13730 23578 5300 2785 2938 410 745 937 901 1413 7345 4000 2330 4325 2858 1825 2362 1153 225 1258 541 2920 4397 2952 661 1377 5364 3932140 4294246420 196604 65537 4294770688 65537 0 0 0 2 0 4294967295 4294836223 65537 4294901760 4294967295 4294901760 4294901758 65535 0 65533 2 4294836224 262143 4294836224 0 262143 65537 4294901761 4294901759 1 4294836222 196607 4294901760 65536 1 4294967295 4294836225 65533 131073 4294901760 4294836224 65533 131073 4294901761 4294901759 65534 131074 4294901759 196608 4294770689 4294901759 1 4294967293 4294836222 262145 2 65534 2 4294836225 0 4294901760 4294901760 4294901759 131072 4294967295 4294901761 196606 65535 65534 196609 4294836224 131070 65538 65534 2 0 65534 65536 65536 65535 4294836226 131069 65537 65537 4294901760 1 1 4294967294 4294901762 131070 4294836227 4294770688 65533 65539 4294901761 4294836225 196606 4294901762 4294836225 4294901759 1 65536 4294770688 65535 131070 4294901762 0 4294836224 65534 196607 4294901762 4294901758 131069 65536 65536 65536 65536 4294836227 4294967294 196608 4294770689 4294967295 0 4294836222 196609 4294967295 65539 4294836223 131072 4294770688 65535 131069 4294901760 2 +679549357 6337421806 14084578207 15472174351 3674276710 1626583953 3035532487 3298124769 566591630 696815515 589970900 85196334 154178937 189888941 211903465 141773909 193961282 169966663 181384300 477779183 252552003 419142260 235109221 295002447 489444336 400512313 179675619 229045914 190122325 71261613 81524660 52752179 +65427 4292411402 11337817 4281466899 4294770478 509690 2183017 1860665 3012128 3056941 1360372 2811194 177973 626585 588209 550049 866489 1068112 523162 143953 128745 99713 32525 154970 90621 129380 315418 350370 231025 67709 10829 13274 85933 111722 85453 160285 346853 339524 158506 58378 19345 60257 228653 301597 167873 64501 44186 43682 73757 131762 87905 55738 154404 73124 9314 28970 7250 66610 128330 47240 6464 10242 65969 76661 83653 24466 8306 26729 28629 27289 45370 68276 66085 71833 26690 75242 264500 337301 303572 254228 199445 78160 7605 53685 115825 136953 61322 12053 36868 13345 7369 30821 29090 34885 25994 27514 32449 12805 33850 42664 14261 2738 16596 13837 2960 2690 14705 4141 464 260 3490 4868 4786 9505 5617 12148 12545 10553 7058 2257 2612 7297 4718604 4293459993 196609 4294443006 262143 4294705153 65537 131068 1 65537 65533 65540 4294901759 458745 4294901770 4294901759 4294901761 131068 
4294836228 4294901760 262141 131075 4294705154 4294967293 196608 4294901762 4294836224 4294770688 196604 2 4294770688 262141 4294770687 327679 4 4294967295 131071 4294574080 196607 327679 4294901762 4294967294 4 65534 196611 4294639614 131070 196608 196607 4294836224 65541 4294836221 4294901763 65533 4294639619 65533 4294901759 458748 4294836226 65537 4294901761 65536 4294901761 4294901759 3 131068 4294770688 458751 4294770691 65536 4294836223 327679 131071 4294901761 1 0 131071 4294836227 4294770688 262143 4294901759 65536 262147 4294639614 0 2 4294508542 262141 0 196606 4294836224 196607 65535 131074 65536 65534 4294901761 262147 4294639616 327674 4294901769 131070 131071 4294901763 4294770688 4294901765 4294967291 4294705155 4294901755 196607 4294705155 131065 1 196608 4294836227 4294836222 393214 4294836228 4294836220 131074 196607 65539 4294836223 196611 4294901758 196605 65538 4294836231 4294770684 4294901760 196602 196613 4294705149 327684 65533 +6690001121 13819641770 17821382115 15052593019 7993930918 4237686504 5792575305 6170956221 1583154888 804851022 1317521490 2799342211 1129755519 730293398 2170310099 2523714349 1547582881 2105923327 1085263785 1374868672 735135906 849156531 891953600 647572760 1777103546 4637709535 1880820792 772903882 686462170 420434578 139411965 216279535 +65492 7405523 4286120072 10289061 4282974052 459409 512464 642445 4412500 5739506 2705588 328481 119144 238132 155860 87074 25285 21041 15112 5650 845 35802 153949 169069 78805 14449 21053 37865 14290 5917 2180 208 6322 19600 17812 11861 6113 1921 13320 8065 2813 365 6994 3816 7985 4849 193 4105 8744 16945 8993 640 13793 35125 43018 16369 2500 13297 7226 1280 5960 19396 33813 37925 31905 10400 673 2612 3541 1945 1802 7241 17210 4157 8473 20602 9265 3536 12365 2836 24065 55145 25748 3065 1665 7265 6905 4612 1018 4160 25873 35890 33218 37757 18805 2080 5524 3357 773 3362 7301 9970 3770 1205 3970 7709 2152 3161 4925 4392 2836 730 4276 5248 3202 2341 1234 18 505 997 629 26 1048563 4294639634 131068 4294836225 65535 65536 0 65537 131069 65539 4294901760 65536 4294901760 4294770688 262143 4294901760 65536 1 4294836223 262146 4294836225 4294836224 0 0 4294901758 131070 131075 4294770688 0 65535 1 0 4294836224 1 65535 65535 1 4294901760 65535 65536 2 4294901759 131074 4294770688 131069 2 4294901759 4294901759 196609 4294770690 4294967295 65535 0 327679 4294901763 65535 131071 0 4294901762 4294901759 131072 3 4294901759 131071 65539 4294836225 4294967293 4294836224 327678 4294901762 4294901759 65536 65534 65539 4294901758 1 4294967294 1 65535 4294836224 65537 4294901757 131074 4294901761 4294967292 131073 4294836225 4294967293 1 131070 65538 4294901761 4294967295 131070 0 4294901760 196607 4294901761 4294967294 262143 65538 65539 4294770688 0 65536 4294705151 131071 262142 4294901761 4294901762 4294967295 131069 65539 4294901759 196607 4294901762 4294901759 4294901759 131071 4294901761 65535 4294901760 4294901761 4294901760 131071 4294836225 0 4294967295 4294836224 131069 65536 4294836222 196609 4294967295 2 +2576846530 7887106348 28590018627 20259554678 1475143403 1321388518 526756454 148376018 85610426 982241714 816609030 242390414 92548948 91539878 150145675 93874171 55265478 69845679 110407413 247911019 298851808 199202026 405268852 82416156 191720992 323411760 303399863 360609094 362141232 126549461 102223705 68019463 +65483 2162739 4271833091 46988971 4266656233 57940 1235853 2511769 3794173 1355680 594596 350498 170420 420593 452930 327245 96122 67525 27040 5729 6385 62753 96829 42772 86225 54305 4820 
4100 13058 22786 25877 14906 6442 17666 25121 21650 14033 18521 64621 95044 40840 3617 1361 925 6730 12802 5620 4820 18000 29045 6176 9945 16936 11185 24698 32996 22873 5218 2344 1669 5882 15325 11345 6125 3457 401 3985 4797 4948 4122 1690 1813 1576 1289 21704 33709 23741 20072 11866 1858 10682 20098 8676 482 40 2522 10037 11978 16010 16425 5309 1825 15140 12770 389 13325 18306 6697 2845 4645 6445 5380 2810 4100 13690 26064 19060 5140 641 1225 1565 2810 4176 1165 97 274 242 565 109 250 136 170 4294246375 1245186 4294574088 4294967292 0 0 65536 4294901761 196606 4294836227 4294967294 65537 4294901759 65534 2 4294901757 65535 4294967294 65536 4294901762 0 4294967294 1 131069 2 131071 65536 1 65535 131071 4294967295 65539 4294901761 4294836226 0 4294901762 4294901758 0 4294967294 196607 0 4294967295 196608 4294901760 4294836224 131073 4294901761 0 131071 4294836224 65535 4294901759 196605 2 4294901760 4294901760 4294770689 131066 65538 1 4294901760 65534 0 4294967294 65538 4294967294 65534 131073 4294901762 4294967294 1 4294967295 65535 65536 4294901761 4294836224 65535 131070 4294836227 4294967292 0 4294836224 131069 65537 4294836223 262144 4294836225 4294901760 4294836224 131070 0 4294901760 1 4294901760 0 4294967294 65535 65535 4294901759 65534 131074 0 4294901762 4294901757 65538 4294770689 65535 131072 4294901761 4294967295 196606 4294901760 4294967295 131071 65536 4294901760 4294901760 4294901760 131071 0 4294705154 196605 4294901759 65537 4294901758 65536 65533 4294901760 262145 4294836224 0 4294901760 4294901759 327681 3 +3104297449 15424324525 16041853687 5081039884 1925059696 3015870954 1837919511 417683449 153565466 619680784 623833410 140973309 212287710 169246675 238904272 603095298 332614046 86200723 198568905 229155020 316020800 119910581 105160177 66207829 232660094 321630478 148565277 244482037 224465013 200539505 201191098 28990018 +65522 3211288 4276813907 14023975 58852322 2819042 2126165 1389449 1344773 2036288 2569274 2177653 1127444 357850 286661 468305 278437 34697 129650 86074 5792 23090 57649 44690 38792 29817 8297 6445 13381 32045 19786 8722 17209 28746 4801 898 18580 57205 150858 230089 136465 33408 9028 2336 2610 16029 15794 14165 26561 13194 7265 67777 51169 4234 169 674 145 4153 6362 3445 562 776 7345 5882 50 1217 409 1165 2205 11428 10282 5536 2290 8210 35620 39656 7978 4936 8720 3460 7373 4849 1604 1082 6740 19450 18757 4777 405 17 3250 2025 784 2048 1160 23432 40445 15560 3025 8768 5813 785 170 450 3313 9700 6845 3733 4937 3188 4768 7897 7760 4905 2165 808 1413 2057 365 205 1033 580 4294836216 0 65532 262145 65535 65537 0 131072 65536 4294901761 0 65534 65538 4294967294 4294901761 262141 1 196607 4294705153 196607 65535 0 65537 65536 4294770688 196608 4294901759 0 1 0 65534 65538 4294901760 1 131071 4294901761 4294836225 65534 196607 4294901762 4294901758 131072 4294836224 0 65535 65536 4294836225 196607 4294770689 131066 131075 4294901760 4294901762 4294901758 4294901759 0 131070 4294967295 196607 131075 4294770685 196608 4294901761 65535 65536 4294836225 4294967294 131072 4294770689 65536 65535 4294901761 65534 131074 4294836224 131071 4294836224 4294836225 4294901760 65533 4294967295 131070 196607 4294967295 131070 65537 65535 65534 131076 4294967294 1 196609 4294770690 65536 4294901762 4294836225 4294901759 4294836224 4294967292 0 65534 131071 1 4294901760 0 4294836224 131070 4294901760 4294967295 131073 4294770688 65534 196609 4294901760 65535 1 4294836225 0 4294901763 4294836221 131074 4294901759 2 4294836223 131071 65536 4294901763 4294836224 131070 3 
4294770687 65536 4294901759 131071 0 +13395391949 10226868149 10021641587 17258278094 9488778299 3034874960 2619684142 912831554 461552070 365750733 362883574 123507344 227194881 191310363 167188786 1468932321 1024710335 127877534 271661823 481507642 63021840 56963550 46330541 92130609 310777888 204309345 161588975 98490936 306449383 143066969 139612668 88350626 +65406 4291690812 4287692516 4277600504 4289002969 405313 1816490 2052801 1161541 315200 468161 2304938 1816490 709013 404473 204802 179600 729293 484706 544745 272884 87805 6970 104498 164305 200345 236980 229762 72641 8410 61924 119185 173330 194825 268112 258025 162586 81229 6800 234740 283450 73933 27529 16393 34985 97109 61385 94117 134865 50626 83104 101313 73908 127465 87481 58753 59732 38921 7330 3146 9745 17306 49540 63040 21024 12680 36788 50425 25625 16425 22313 9113 5317 30277 18100 22282 5330 70217 146353 48322 15881 18545 27010 23801 25010 22725 10421 4628 25636 22181 9605 26914 37636 18472 12629 80000 171522 132229 48061 31061 47545 36473 25408 13165 9857 23581 20386 12953 27229 9178 7709 15850 5013 1913 7684 7412 6130 9764 9050 4594 610 464 2162684 4293722120 524278 262146 4294770694 65531 4 131071 4294967293 262151 4294705146 196612 4294836227 4294836218 131067 393220 4294836228 4294967289 5 0 4294705152 65537 4294770684 65536 4294901761 4294901755 196615 4294508543 4294574073 524287 262140 4294705155 65536 262142 4294836226 65533 1 4294967293 262142 327677 7 4294836224 4294901761 131069 4294639619 65533 327678 4294836226 131069 327683 4294508544 196608 196604 131075 4294770687 131071 131076 4294901759 4294836229 4294770684 196610 65532 5 65537 4294377476 262139 4294770689 4294639613 262139 3 4294770681 393214 196606 131079 4294770686 0 4294770690 4294967290 327678 131073 131074 4294901762 4294705151 262143 2 262140 4294705158 196608 4294443008 262137 4294901763 131072 4294967295 4294836217 131076 327676 4294836224 196607 262145 4294901764 2 4294639616 196604 3 4294901759 196608 4294574080 196610 4294770685 4294967292 196609 4294770688 262143 4294836225 2 131069 262148 4294574080 131076 131070 4294639619 4294967292 131074 4294901764 4294770684 4294836224 262142 4294836226 4294967292 4294967295 4294967293 393215 65536 4294901764 65532 +5511067876 11964126519 5165671177 6379383989 12612646756 4591002424 2215408965 4557488349 3220687086 722144762 1597221408 1973353097 695914636 1813036564 2608078656 1561284647 1651988109 732998875 1346887344 1397611074 1031393610 341035614 625553176 481900370 399485779 1153064652 477281099 456673035 1626740414 1044344727 419127730 208348104 +15 4288085911 34602698 4264625254 1112742 828125 755417 419341 51521 573049 469849 269954 256418 358793 654418 1105364 1441105 1178973 485357 126482 121970 203284 451733 557120 175474 65050 63314 15588 170505 154568 111524 61000 14130 3005 15578 79924 72769 56125 29461 8090 54288 6178 10312 77650 222868 171601 32425 44532 30497 36945 68129 62901 42881 49349 48740 14985 16250 2173 8109 19972 31865 43049 23329 4420 13885 15650 14132 28913 54161 39538 23677 31904 53477 27425 24804 112781 250900 263885 97994 16160 19072 18245 33002 62441 29060 15266 20353 5284 6841 10489 14625 42986 54341 34336 53045 4817 12385 109 35578 32193 4941 4986 6856 25000 24650 31121 20090 3620 3701 22961 19093 22100 31905 17953 13364 16757 14761 9874 2516 277 1417 4469 3080193 4294574085 4294705163 4294770679 196609 131072 65534 5 65531 262147 4294836230 4294770680 131074 65535 131072 4294770687 196609 131071 4294705152 131071 65541 131073 4294508542 524286 4294508548 262138 
3 4294836222 4294967294 393213 65538 4294770687 131075 4294967292 4294901764 4294967290 65539 65531 262146 65535 4294836227 262140 131072 4294705155 458750 1 4294705153 196608 65535 65536 4294901756 131075 196608 4294901761 4294705156 131071 4294901761 65535 4294836223 327679 4294705153 262144 4294770686 327683 4294770690 4294770689 4294901759 131067 262144 4294770692 262142 65536 0 4294836227 65534 327679 4294705152 327679 131073 131074 4294836228 4294770691 131069 4294901761 2 196607 4294901763 4294639619 196607 4294836225 4294836224 4294574080 327677 65535 4294639618 262142 4 4294901754 196611 4294770691 4294770686 65538 131066 4294770692 65532 65538 4294836225 196604 4294836229 4294967295 65534 196610 4294770689 131066 6 4294836221 196605 4294836222 196610 327677 4294639618 327678 3 4294967292 4294901762 131072 4294836225 4294967291 262147 4294836224 262140 131075 4294639619 2 65531 +4245510791 2911364353 1817951281 3240079600 1960884004 4224929249 9393275262 7078446278 1604866995 3363966833 2369526478 678503798 1401714121 405013220 567057034 533074256 406335772 1720259922 723708675 804565052 380546833 373741765 330089746 647287167 1433271445 2249083105 625932431 494033574 650545524 430181062 481972882 436360489 +65351 10354765 4289658527 55640882 4171890983 4160322 1481065 3805525 4670237 1296985 246065 815509 1193609 1022992 634661 231386 475306 925922 446536 35620 44197 57322 175240 340033 24417 109609 110804 41725 49306 30625 20624 19801 35810 72626 173385 139880 69098 39940 67762 130385 165458 110410 46925 2857 53829 69536 2789 27637 42017 48938 41092 43664 106301 138010 102493 29521 47378 81242 61924 19066 11729 3298 5441 4724 521 1537 185 1780 6032 29277 33826 55098 94250 57698 19930 59881 57850 17 46026 57089 36242 32749 31588 17957 22826 40609 62946 39780 6833 2180 1553 5072 11306 15705 71045 193825 196313 114010 106577 135720 108450 78849 56785 14701 1088 14500 6953 2050 13649 12625 1394 23101 15545 2041 6084 3028 3025 6010 3293 58 1553 2980 2752528 4293525524 4294901753 196606 3 4294639621 65530 131076 4294574080 4294901755 262141 2 4294836223 196607 4294508545 393210 4294901763 4294836224 4294901756 262143 131074 4294901760 4294639619 65531 65537 65534 4294836224 65539 4294836220 1 4294901758 196603 65537 65536 4294770691 4294836220 65533 65539 4294901757 196605 262146 4294901763 4294967295 4294967292 196611 4294967294 262145 65541 4294443008 4294967293 65538 4294770687 4294836221 262143 131071 4294770688 131069 4294836225 4294901757 3 4294574072 393213 131073 4294836221 196607 196607 4294770690 196604 65537 4294967291 2 327677 131074 4294705153 196608 4294901760 262143 65536 196611 4294770688 65536 65538 131067 65539 196607 4294901764 4294901760 4294836224 0 4294901761 196608 4294836225 262147 4294574081 4294901760 196606 0 4294574081 327679 4294901757 2 65536 4294639616 262138 65538 4294836225 196606 196612 4294639613 262144 4294901760 131074 4294770689 4294901759 4294901759 4294639616 196599 262141 131078 4294967293 196607 131075 4294705152 327680 4294770692 4294967294 65538 4294901758 4294901760 131072 1 4294836225 4294901764 4294901753 1 +15837441009 21566240869 19034108281 4396065580 7311081053 5853188690 3565186420 4912226756 834113530 1468120149 1309321896 760213287 349693107 561354554 1383019501 1055481694 1323427950 547426640 533256372 1195722014 1140699492 473574750 51822514 399148385 1051651936 822357546 731652471 421976218 2511470853 1910720663 289675973 183460534 +65500 4288217109 3407684 17891396 4284612746 233797 2654450 5277581 2268832 483025 1000004 
1080400 564580 223434 51012 19925 50994 113920 40050 5545 6613 49320 110402 49041 28781 50725 27241 9540 5141 6242 8357 7093 7794 27565 47008 23530 11861 5410 200 1186 265 8000 37960 27077 3986 4909 3226 601 401 2080 18477 19009 6725 13858 10305 3898 1618 3001 148 884 6250 32308 54557 45124 21605 11026 11405 11509 7025 1289 641 9792 15650 4122 4457 7921 5017 2440 17357 14258 5337 3653 520 1073 2696 2088 10249 14669 6596 2205 1421 1949 1394 1165 1872 24698 31585 7858 1930 3626 90 5498 2393 628 2053 520 5017 2545 260 1060 7709 10565 1762 617 1781 1168 1154 1877 2465 1189 145 153 4294770675 327690 4294836221 1 65535 65538 4294836224 65534 65539 65535 65536 4294705152 196608 4294901759 196607 1 1 4294836227 65533 1 4294836224 131072 4294770687 4294967294 65533 4294901763 4294901758 4294901761 4294967293 131072 65535 65535 65537 4294836224 131070 65536 65535 196609 4294836226 196605 4294836226 131071 4294967295 65537 131070 4294836224 4294901760 131070 4294836225 262142 4294836228 65533 2 4294967294 2 131069 4294901760 0 65537 4294967294 4294901760 196608 4294770688 0 65536 4294901760 65537 4294901762 1 4294967293 4294836225 4294901758 262143 4294639616 131068 4294901762 131073 4294705150 131070 131073 1 4294967295 4294901759 327682 4294705152 4294901758 2 4294967292 1 65536 4294836222 131070 131071 4294967295 65535 196607 4294836224 65536 4294901761 4294901757 131072 1 4294836225 65536 0 1 131070 2 65535 4294836224 0 0 0 4294901761 65536 4294770686 65535 2 4294967295 196606 4294836227 65536 1 4294836224 65535 4294901761 65535 4294901760 4294901760 65534 4294901761 4294901757 196606 65535 0 +6991516564 26536810676 10241189823 6648423391 4781035548 1165600805 362196314 552572883 150283520 636935013 426124828 209705222 75465686 172375881 314739611 53009594 176027830 164570405 57387671 191428320 83954446 232355962 496243174 140564769 138010160 175709611 95524615 109665385 249133993 96933615 81765835 68175275 +65449 4293591003 12452007 4273995365 34669050 293746 1307921 4117220 6014125 3617786 883732 107549 515849 348125 18421 28561 171565 370773 309322 189800 166721 80650 55681 150820 120938 48905 17753 7120 1165 7488 2312 3725 7274 25034 39701 18685 8149 25226 39185 29682 30964 40277 25009 1369 6617 18122 25666 27136 22385 29585 54977 65237 29125 4549 2797 10889 10433 1952 928 3541 11714 13049 5693 9602 15145 17186 22565 15885 10250 19700 36853 76036 88520 41444 15325 18980 14705 296 17585 15250 3380 9860 22930 10820 2594 7058 8324 493 3385 6772 6586 7082 5024 4468 2906 9325 24797 17800 5553 3825 4090 392 776 1460 3218 5949 4936 4129 3305 4625 9076 8829 3860 2925 1669 1636 1706 1069 2061 1700 625 296 4294901747 1 262144 65536 4294770688 0 0 131071 4294901760 131071 0 131071 131073 4294901760 1 0 65538 0 0 4294901762 4294836224 4294836224 4294967294 131071 4294901759 196607 4294770687 196607 4294901760 131072 4294836224 0 65535 4294967294 65536 131073 4294901759 131072 4294836225 131068 0 131072 4294901760 4294836224 65536 65537 4294836223 327679 0 65535 65535 65537 65535 131071 1 4294836226 65533 2 4294836223 262144 4294836225 4294901760 65536 65535 4294836223 196610 4294901760 4294836224 4294901759 65538 4294901760 4294967293 4294901761 4294967295 131071 4294901760 131071 65538 4294901760 131070 4294901761 131071 4294901761 4294967295 1 65533 4294901764 4294770686 131072 65534 4294901763 65534 0 65533 1 1 4294836226 4294967294 0 4294836224 131069 65537 4294901760 4294967295 65537 0 4294901759 65533 0 1 4294967295 131073 4294901761 4294836224 4294967293 131072 4294836224 65534 4294901761 131072 
4294967294 1 65538 4294901758 65535 65536 0 4294705152 262142 4294836225 131069 4294901761 0 1 65535 +3973772185 23832149477 29242277876 9286521854 2469973759 1404243484 929912962 2509864028 1578640392 866309862 981761046 161403460 48533456 139368762 279478811 352157635 388221921 201220879 437065939 547789365 117623820 116492343 247752466 531476687 825205459 255474844 200465233 124280351 233825955 113745496 131328950 85255791 +65499 4294049730 16515340 4238868491 47381576 564845 53509 1035680 5521952 4709412 856801 996404 669170 72826 40706 53329 72740 94945 85193 7813 102829 34388 148705 310833 121460 25705 35269 61069 58376 41248 10898 2176 10322 30242 27136 11245 15760 35920 12829 1394 3673 389 5512 15325 8089 3240 15577 5162 3076 404 5008 9802 46058 51157 14474 1469 24922 22100 13284 17425 8010 2005 6425 5380 2066 4477 8608 9778 180 2482 8633 6833 4637 14557 15776 30674 68986 61466 21904 3744 4441 4736 6964 14585 29725 14408 200 8788 6698 481 1042 3497 5513 5581 6322 6565 9490 3881 493 1152 5197 14401 14170 7321 3389 1873 936 1256 2693 3229 2029 4628 5693 5066 1433 1037 5429 4138 2305 3600 2281 1060 65508 655377 4294508537 262142 4 65535 4294967295 0 196608 1 1 4294901758 196607 2 2 4294967293 65536 4294901760 4294901759 131071 4294836224 131070 0 4294901760 65535 65537 3 4294967294 4294901760 131071 65536 4294901760 131070 65538 4294901758 131070 65538 4294836225 65535 131071 4294901762 131070 65536 4294901760 4294901762 4294967294 131070 4294836226 65533 4294705152 196605 65537 196607 4294836225 65534 0 2 4294901758 65536 4294901758 196607 65540 4294967294 65537 4294967295 131073 4294901760 65537 4294836227 4294901759 4294901761 4294901759 4294901760 65535 4294967295 65536 4294967295 2 4294901759 131069 65539 4294901760 65533 4294901760 131072 0 65538 4294967295 4294901760 131071 65537 4294967294 131074 4294901764 4294901757 131073 65536 4294705152 65533 65534 4294836227 65533 0 65535 0 196607 65537 4294705153 262142 2 0 4294901759 4294901761 131070 65535 4294901762 196606 4294901761 4294836227 4294967292 0 65535 65536 4294967294 65535 131071 1 4294901759 4294967294 65533 65537 65537 4294836223 65536 1 +1800047747 9718070402 29359056370 12395903961 4766794892 792553273 504749245 653093277 496990989 1318353341 1341130282 465051857 380015840 158772520 249779474 202245705 59025271 124720402 77848585 352377572 323251868 203443632 91429727 100783885 402239094 560877295 263946069 114086007 129673752 176203486 84561493 97937251 +65486 8060932 4521990 4272095628 21298750 868361 4563865 9422753 5610020 20817 3731193 1875700 285156 956801 547912 96032 101666 125933 121288 82820 86080 13394 99450 307682 200489 62101 22052 3560 1108 1108 5066 16385 28328 17492 1808 1205 2036 23624 92660 83344 47056 27882 6617 229 101 3400 11237 14930 13396 7778 3240 1205 13617 23236 22321 32842 18037 4861 8957 13690 11493 4178 10225 7445 6713 5645 2225 157 3242 10202 16354 15786 4157 845 6877 12370 4372 458 3665 11250 10730 5165 1618 725 7177 10121 3281 3636 4264 1737 1930 2837 3701 9928 13625 19240 26793 16609 4969 4640 2873 21618 28264 8082 1681 2048 2152 3085 7685 8090 1168 2210 2329 9605 4709 738 5760 2561 981 7937 7685 2890 4292804583 1703938 4294705153 65539 65535 4294901760 65537 1 4294901759 65536 65538 131069 3 65535 4294901760 4294901760 65535 4294901761 65535 65536 65536 4294901762 65535 4294901760 4294901760 131070 0 131070 1 131070 4294901760 65536 4294901760 4294967294 262143 4294836225 65534 0 1 4294836226 65532 4294901761 65536 131069 2 196607 2 1 4294901759 65536 4294901761 131073 4294705152 0 
4294967294 1 65535 131073 4294901759 4294770691 196605 4294836224 131069 3 4294836224 4294967294 262142 4294901763 4294770689 196608 4294967295 4294770690 131070 65537 4294836223 65536 131069 131072 4294901759 262144 65537 4294901760 131072 1 4294836222 196608 0 4294901760 4294901760 131072 1 4294836225 4294901760 65535 4294901760 2 4294836223 131071 65538 4294705152 4294901758 65536 131071 1 4294901759 65536 4294836224 262141 1 4294901761 4294967294 131070 1 65535 4294901761 131071 4294967294 262144 4294901763 4294901758 65537 4294901759 0 65537 65534 3 65534 4294901762 4294967293 131076 4294901759 65534 1 65536 0 +13402136927 48709925606 21007888885 18328565424 6561402118 4716959753 1266761327 970492734 686687816 1070863377 1705636192 183156179 43429471 206120034 73900592 670786295 449233059 60188238 152700208 171596362 323884691 168642878 111671088 134338999 146821033 129244189 114201224 94745153 316889572 299713247 129524976 120777699 +65458 4290117758 4587104 28902152 4202889018 2449010 531754 1882337 6339853 6293842 2859986 997853 936281 141538 116516 99626 127474 163445 68053 64426 115300 294856 349073 69898 25636 55274 30529 17140 3385 4525 5525 10501 6362 85 5825 6354 8933 42260 104200 78065 16825 3226 6273 26962 16025 2468 1433 850 2378 1192 1241 6893 16564 13780 7650 8100 9065 1469 586 5444 14225 14965 5626 1585 5450 8468 4058 941 1098 13780 20621 2498 3490 7834 11861 12244 6514 4010 3700 5330 14125 13577 6701 2770 2260 6760 3908 40 1268 980 689 4226 13653 4420 197 7345 26186 9925 1160 1170 16714 11520 3293 8989 5021 2197 949 754 1480 1346 2645 7633 11602 12085 12393 7720 1714 29 1810 3946 3730 676 4294180864 131055 655360 4294836228 131069 1 65536 4294901761 65534 2 131072 4294901760 4294901760 4294967295 65535 4294836225 65535 4294901759 131071 4294770690 131068 0 4294967295 65538 4294901759 4294901761 4294967293 131074 4294967294 4294901763 65534 65536 4294836226 131070 131073 4294770690 4294836224 65534 0 4294901760 196606 4294770690 131071 4294901761 131071 4294836225 65536 4294901760 131072 4294836221 2 4294967294 131072 4294836224 4294901760 4294901759 65532 262145 4294836227 65533 65536 4294901761 131072 4294836224 4294836225 196606 4294901761 65535 65535 131072 0 1 4294901761 4294836223 1 4294901761 4294836223 4294901761 131072 4294836224 65534 2 4294967293 131072 65534 1 65535 4294967295 65537 65535 65535 131071 4294901764 65532 131071 65539 4294901762 4294901758 1 4294770688 262142 4294705154 65533 2 4294967293 196610 4294836225 0 131071 65538 4294770688 65534 131071 4294836225 0 1 4294901762 4294836222 65534 65537 4294967292 131069 65537 2 131069 131073 131074 4294901761 131074 4294901758 4294901762 65534 1 4294836222 0 +8515819862 14540741423 36370976979 22895019916 5783948057 1429973621 947504119 945703100 1044158335 2312106327 584813150 249467236 66850260 67985421 102639420 741690894 299005196 153894601 25276178 134107247 122617892 129071743 105794167 135314407 173076680 159820686 124146574 71055562 206248309 198451358 95266876 187817401 +65481 3342126 130952 54788365 4251582455 901105 2226409 3811258 686137 1108544 213097 1569626 2326421 895410 734705 643373 337570 188026 153865 119641 67509 356425 913040 623493 139525 59170 29705 2746 19237 29320 10244 33826 39085 2677 34821 39962 28340 3349 6317 36397 67204 94568 179650 181458 44305 6554 100269 182565 140450 53505 29185 92434 64994 49684 84250 81440 91009 71865 18824 10817 25826 25733 86729 171496 113101 24098 9881 13898 18874 67869 110925 87874 19897 1600 4874 20386 477 11925 24826 29185 18701 7738 3809 1373 
22061 54920 62185 20933 10468 39722 44370 58021 90425 42601 4210 60201 92233 65896 77617 89245 43489 30244 21960 5953 12708 4469 5825 6212 6610 4132 2525 845 7925 9265 10865 10996 6740 6957 2929 2637 2701 100 1048593 4293853188 393205 4294443017 196606 4294508544 327678 4294901758 131073 4294836221 2 65533 4294639616 196604 131074 65535 65539 4294901756 393220 4294574087 4294967293 4294508545 4294967291 4294967292 327679 4294770691 65533 4294836226 196605 4294836226 1 4294705153 4294705150 131071 4294705149 196606 4294901759 65530 131074 131066 262149 4294770689 4294967293 262142 65534 327680 4294508546 327675 65538 65537 4294901761 65537 4294770685 4294901760 262141 4294770689 196609 131071 4294705158 65532 4294836226 65534 4294836226 4294901755 65537 4294901755 4294901760 131070 65534 4294967291 196610 262142 4294967295 196606 131073 4294770691 327677 4294836226 4294705154 131069 4294901756 458754 4294770687 393217 4294901761 4294901760 4294967294 65539 65532 4294901761 393216 4294639620 131072 4294967295 131072 4294770689 65535 65537 65536 4294639621 4294901758 4294967294 4294901760 131070 65533 4294770691 196603 327680 4294574082 393211 196610 4294836230 4294901759 4294836222 131072 2 4294901759 131070 4294901760 196606 327683 4294639617 0 4294901762 4294770683 589823 4294705157 131068 131076 4294901763 4294639618 4294967287 65540 393214 5 +7952341567 18699651433 6407428608 5286897917 12882655656 6860628001 3935796828 1583823195 1241106085 5530728622 2544684008 236455785 232020003 300962187 337663281 263258899 1391711140 1289186636 1575390634 1018595248 1173824760 689757438 1402138169 882143674 569554204 341740699 492750798 986031996 1465790708 996319288 166286506 197567807 +65448 4291821677 4292214650 10682097 21627631 914729 669474 1351962 3447890 4118101 2913829 1803961 799417 208800 56693 35744 56169 27914 1181 4628 4761 28417 48442 213453 191561 86161 23153 1737 13 2308 8893 7508 1217 5821 20241 18125 3805 15354 36073 43173 21818 10996 3242 17450 6084 932 9061 9472 7186 28037 60917 59348 10193 32569 42890 6617 6597 1768 2650 10370 11840 3545 3970 18772 15661 9396 20329 28970 7312 641 2293 8720 549 8200 29860 43210 29045 14800 4688 2 1458 3880 2768 7250 8712 3204 3940 16325 26129 13060 9722 14213 10301 53 12938 20261 15053 15325 23524 21386 11689 6001 2245 1537 5652 4505 2465 5002 12553 18580 22797 15490 4489 1354 1037 1053 2601 1256 32 178 1220 1053 65549 4294836218 655364 4294574082 65535 0 131073 4294901760 4294901760 65535 131071 196607 4294901763 65537 65535 65535 4294836224 262143 4294836224 65536 1 65535 0 65537 4294901760 4294901760 4294836224 65536 4294901757 196607 1 65535 4294901760 4294836225 65533 1 4294901758 131073 65537 4294836223 65537 65535 4294901760 196606 4294901759 196606 196609 4294836226 131072 4294836224 4294967293 65536 1 131069 131072 0 4294901761 196607 4294836224 4294967293 131073 131071 0 65536 4294901762 131070 4294901761 131071 4294901759 131072 196607 4294836226 131071 65537 4294967294 0 131073 4294901760 0 65536 4294901763 4294967295 196607 4294901760 65536 4294901759 131072 4294901760 196608 4294901761 4294836224 262141 131075 4294967294 3 196607 4294901762 4294836223 196608 4294836224 4294901758 131070 131072 4294836226 65536 65535 2 4294967295 65538 4294836226 4294836223 4294836224 131070 65537 4294901761 65536 4294901760 131072 4294901759 1 65536 4294901760 0 65537 4294901761 4294967293 196607 2 4294901760 4 4294836221 4294901759 65537 4294901758 2 +4298089100 9781258002 21778093844 21183521812 7286073417 1298309836 358888939 164751882 69292545 
678522232 1497486092 216862219 43780399 71713908 162851148 323325669 232708549 104643346 272595917 588707153 238758774 122915021 249006695 195893186 326774777 246569211 137556615 299235358 349218187 245990662 257059234 124676392 +65518 4294049741 15138933 4261478404 11730623 255025 396164 487346 1001125 650600 5393 30740 482569 195236 47457 33122 125728 125501 36505 62281 104948 63433 280957 428909 122777 2285 10762 14050 8177 4437 452 7684 6196 421 5674 9522 3077 18081 83745 148384 148825 79186 14522 2197 8420 2097 1282 3485 13514 9610 1906 39641 73049 55080 18650 12724 5389 3034 2665 10280 20261 18121 17498 13000 9041 13369 16153 22250 20785 9360 6218 9332 20018 40409 35554 11752 1256 1808 745 1445 12965 32976 39889 26042 11909 16273 16634 10196 2665 2336 6218 7072 9673 13673 3445 386 545 612 25 2545 5309 6857 13658 14929 9685 5476 2420 2605 8545 14922 18720 12130 2330 1138 1930 1192 193 1460 3301 3177 1961 1469 4294508520 262160 4294836218 4294836224 196608 1 4294901760 4294967295 131072 65535 1 4294770689 262143 4294770688 0 65535 65534 1 4294770686 262144 4294836222 4294967293 131073 4294901760 65535 4294901761 131071 4294901760 65535 131073 1 4294770688 65536 4294901759 196606 65538 4294770686 65537 4294967295 131071 4294770688 65533 262144 4294836227 131071 4294770688 65537 4294967293 131072 4294901763 4294901761 0 4294901760 65535 3 4294901760 4294901760 4294901758 65536 4294901760 65537 4294901759 1 196606 0 131074 4294836223 131071 65538 4294770688 131071 4294770688 131070 4294836223 131072 0 131070 65539 65534 4294836227 131071 4294770689 196606 4294901761 65536 4294901760 65536 4294901764 4294770685 196605 2 0 65535 4294836225 4294901758 262142 4294901760 4294901760 131071 4294901760 0 65536 65533 1 0 196607 4294836224 196609 4294836222 196609 4294836225 4294901760 4294901758 196607 4294770690 65531 196609 4294901760 4294836224 65536 4294967295 65536 4294901762 4294967295 4294901760 196609 4294836225 65537 4294836225 4294967293 4294901760 4294967293 65537 0 0 +1695481222 3539400635 4906576856 1071284597 1989301906 1035261490 628215813 725394761 655757131 2100035882 1591483374 99973002 58239475 52926950 78621570 891923191 1089873680 84544173 113100295 582127301 272099009 208699166 263583472 281747152 416193689 206480599 458784717 181142870 98525525 177207041 234923919 109191855 +65456 4293656646 17301351 4259971616 19266566 1365757 1881477 2187140 2412436 2883409 3185012 2682153 1279682 159080 7786 43045 65601 130325 129389 81625 106373 187029 303364 338720 150853 49185 49130 29857 20417 21037 8768 6976 3042 5057 6778 24674 25469 14130 2925 7250 4900 153 6058 5525 7393 9512 6514 3380 2997 17533 3428 6673 8177 20561 35746 19133 5657 1658 6410 9800 2434 15844 14933 5482 8693 12325 11912 11885 3769 1781 5140 5125 7930 7250 1037 1921 2665 8845 5585 1205 1872 2722 19649 41490 29824 10693 1325 377 5545 10330 3697 5785 18496 11920 137 7108 11889 6805 1697 4325 12832 2837 6569 4500 180 373 1810 1517 1864 1165 200 1341 8468 12546 11168 4181 290 290 626 4680 3778 1556 4294377452 1703933 4294574086 4294836227 0 65534 131073 4294901760 0 0 65534 65539 4294901760 131070 3 4294967294 131073 4294770689 4294967294 4294967294 4 4294967294 4294967295 65536 0 1 4294901762 4294901756 196608 131073 65538 4294770688 4294967295 65539 4294901758 131071 4294836224 4294901760 4294901757 196607 4294967295 131072 4294901762 131070 131073 0 4294836225 4294901760 65535 196607 4294770689 196605 65537 4294901760 65538 4294901763 4294705152 196607 4294836223 4294967295 196608 4294770689 65535 4294901761 65536 
4294901761 196607 4294967295 65537 65535 4294901761 1 4294967295 4294770688 65537 65535 4294836223 131071 2 4294836222 65534 4294967295 65535 4294967295 65534 4294901761 4294901760 4294967295 65536 4294901758 4294901761 4294967293 0 4294901756 196608 4294836225 65534 4294967295 65536 4294770688 65535 4294967294 0 1 4294836224 65538 4294836222 65533 4294901756 196607 262143 4294901761 131071 65536 4294770692 4294967291 131073 0 4294901761 131071 65536 4294836225 4294967294 4294901762 65535 4294967295 65537 4294901759 4294967295 131070 4294901761 65535 4294967295 4294967295 65534 +8510117155 13884387566 15794051016 21913858747 10968655927 1303076227 448448334 964848503 992064497 2357517463 1662721684 374439955 186519371 62314872 191854669 135480078 54909501 97244482 99910699 191353961 246353442 137257224 189199967 126966954 96601459 107821342 374125964 170153263 188516188 130001722 61100510 141178549 +65476 4294574048 19005239 2949971 4246207825 778933 1021621 474305 108445 194545 988117 317396 139805 377705 60125 2410 15866 45530 13705 15925 23049 17824 32416 94466 52730 2845 4610 1165 24208 14330 5477 8125 9325 32906 125018 169025 106101 77857 66305 27506 19144 35645 44433 33056 21177 8105 2437 1865 4537 12805 29745 66592 92362 55301 3482 24004 39049 15385 3362 4973 21501 39641 30610 9529 113 3253 765 2957 15705 48341 52229 18517 4589 2125 1378 11101 5626 10242 33949 26977 9412 6565 18248 24457 6309 17 2900 7405 17474 20177 7717 10457 43037 46665 45506 83749 74945 38338 31025 23972 16925 16729 5261 698 746 5706 17568 27842 20917 10504 5328 10169 14824 5794 1325 5684 5578 4384 4330 7202 8973 4985 4294246445 4294574046 720910 4294836221 3 4294901757 196609 4294901763 65534 1 65535 4294901762 65535 4294967295 4294901762 4294901758 131073 4294901759 0 1 4294967294 65536 4294770689 4294967292 196609 65534 4294901761 4294901760 131070 0 4294770690 196607 4294901759 0 4294901761 196607 4294901761 4294901759 4294901761 131071 4294836225 4294901757 131074 4294901761 4294901758 2 4294901759 131071 65535 4294901761 4294836224 65533 196609 4294836225 4294901760 4294901760 4294901759 131070 2 4294901758 65536 65536 4294770688 2 65532 3 4294967295 65538 4294901759 0 4294967295 65535 4294901761 4294770687 196606 4294901759 4294836225 4294901760 131069 65537 4294901760 4294901759 262143 4294836224 4294967294 131071 131071 1 0 4294836222 131071 131071 4294901758 131073 4294836225 65535 4294836224 4294967295 1 65535 4294770688 4294967293 131073 65534 196607 65537 65539 4294836223 65535 0 4294901762 4294836222 131070 65537 65534 65536 1 4294967294 4294901762 131071 4294770688 4294967295 65536 4294901759 196607 4294836225 131070 1 4294967294 1 4294901760 0 4294901759 131073 0 +4731506179 3633393437 1076228277 4831240828 1688829220 1365181475 154342268 222601007 162634693 369155009 446761288 67374098 139402621 264561946 1381869817 845826015 443191697 270175876 145373315 803874910 427878214 314165876 200651750 412663185 229836836 355944100 249233074 392082238 1193399118 439470232 353505302 187884713 +65480 4292476891 7864184 19923208 4273471621 10765 343012 801169 591005 199970 1308985 2177800 956530 168013 170804 413401 434314 230633 65426 17170 18005 35641 94784 154037 109800 93850 98321 66322 42517 21845 7105 6570 19125 48593 135505 188725 150484 194548 342101 230933 122996 81157 49946 24525 38169 28570 9157 5746 7720 5234 7218 8245 269 21677 10225 15509 73778 52610 15237 7957 3250 2920 10036 22562 48050 36073 12937 6466 5002 22856 30929 10420 8333 512 10216 11912 22373 31333 6833 5840 32770 44138 47588 
30825 11188 9605 11618 2986 3893 13402 4580 5465 36922 40976 30673 34173 32850 20578 17488 17506 22698 35269 31588 14920 2664 11225 40522 35657 13757 18504 10309 2465 7218 3217 628 218 68 205 1040 3050 2210 394 458763 4294574085 4294180858 524284 0 65537 65538 4294705152 131072 131072 4294967295 65538 1 4294901759 0 0 196607 4294770689 65536 4294901761 131071 4294901758 4294836225 196606 4294901760 65535 65535 65537 65535 0 0 65536 0 4294901760 131071 4294967295 65537 65534 1 4294836223 196607 4294901758 262144 4294836225 131068 65538 4294901760 65535 65535 65535 65537 0 196607 4294901760 65536 4294901760 65536 65534 4294901760 262144 4294901760 4294836226 131070 65539 4294770685 196608 4294836226 131071 65540 4294770688 4294967294 131072 1 4294967294 131073 131074 4294836223 2 4294967294 4294901760 4294836224 65534 65540 4294836222 2 4294901760 4294901759 196606 65536 4294836224 131071 65536 4294836224 65536 4294967294 196609 4294836226 65535 65535 4294901761 4294901760 65538 4294967295 4294901760 65535 4294967295 4294967295 0 0 4294901760 4294967292 196611 4294705153 0 4294901760 4294836225 65534 131074 4294967295 65536 4294901762 4294901758 327679 4294836224 3 196605 4294770691 65536 4294901758 4294967295 131073 4294639616 131071 4294705153 65532 +845853406 4183388227 2707347057 9369426635 8665134206 1974409414 2951152667 1469653952 232863088 757908143 1145007316 763564862 265183947 347274196 1724893842 2920507584 1400764934 409551488 123205194 130226254 522627637 263670779 432783097 304345277 243052060 453694500 517002629 286026232 687333067 592265999 490494808 83790335 +65481 4652989 4285268185 24182335 4275503728 78557 4930 741940 3588525 3005972 342225 584449 294545 23120 125050 99985 6250 148850 97234 17000 3609 12868 9748 60265 142085 45370 18173 10877 12721 1810 2977 13273 9873 37745 29844 35173 24466 45065 211149 175396 54314 26073 38036 14578 19973 28450 8548 22765 9610 1040 7433 11009 1737 8093 48997 52634 9640 820 5402 7229 12553 27985 25457 18666 17945 13061 4033 3041 6370 18868 54641 74170 59624 30641 17725 9685 2785 17170 40226 13844 11860 33125 23248 4981 1088 4176 9650 4825 6529 16040 14293 20513 22877 4205 272 125 5065 20213 31460 48529 60266 86877 55690 7748 3109 10705 965 7492 8477 1801 17069 4933 1810 6596 4225 1322 281 58 650 3796 4041 785 1179638 4293853199 458736 196617 4294836224 4294901759 131072 0 1 65536 4294901762 4294770688 327677 4294901763 4294901760 0 65535 0 4294901763 4294967293 4294901759 262141 4294901763 65532 196609 4294901760 2 65533 1 4294901760 4294836224 196606 4294901759 65536 131074 1 4294901760 1 4294967294 65536 0 65536 4294901759 4294901760 65535 65535 0 4294901760 131071 2 4294901761 4294901760 4294967293 131074 4294901760 4294836225 65535 65535 131071 2 65534 131070 65536 4294836225 65534 1 4294901759 65536 65537 4294836222 131075 4294967295 4294901759 65534 196609 4294705152 196606 65537 131071 1 65534 65538 4294967294 65536 4294901758 131071 1 65535 4294901763 4294836223 65536 4294967295 0 4294901761 4294967292 65538 4294836225 131069 2 65535 4294901761 65535 4294836224 131070 65539 4294967294 1 0 65536 0 4294836225 4294901760 4294901760 65536 4294901760 3 4294967294 4294901761 4294901758 196608 4294901761 4294901759 131071 196609 4294836227 4294836223 65535 65538 4294901759 65534 4294901762 131069 65536 4294836224 65535 +244384724 6519756415 18951886278 6997908917 2430951505 723179230 591564660 793629576 191314080 203414463 833506224 188712224 69883722 198596324 386655634 1458767816 779506395 298904637 175224914 171820974 393318006 
218727549 297609728 449642487 691285330 417317673 241413949 277117073 339922202 1038640357 210184981 99559177 +65459 1638523 4286513164 4273470962 83951950 1973450 1244377 782681 656848 2508941 2248097 254266 45121 66250 78440 16642 58141 168680 78754 5204 89 15706 55709 22409 9809 49733 62201 75685 68581 35984 25700 18530 10114 12490 33713 38509 9773 15533 4621 6001 8077 5098 685 10760 61417 102605 52900 3188 2885 1864 1753 9778 4148 14945 29786 5501 2817 7045 6602 9441 586 10282 17965 10049 11492 12688 8273 6437 2637 2138 31001 51592 10484 1746 5825 9610 7825 23477 27866 11258 1530 6660 11245 7402 10634 18868 18421 3133 8725 7605 725 1010 7488 15354 17642 18440 21760 21130 38084 41956 19637 10361 1021 13241 4420 677 6410 12500 8801 8258 19620 10026 962 10485 10154 3380 850 409 205 1261 1313 481 65519 1048578 4294705155 4294901761 0 131070 4294901760 65537 4294967295 131071 0 131071 65537 131072 4294836226 4294901761 0 4294770688 4294836224 4294967294 4294901756 262143 4294836224 65535 65534 131073 4294770688 196607 4294901760 1 65535 0 65535 4294836225 196605 1 0 0 196607 4294770685 327681 4294836225 131069 4294836226 65534 4294836224 4294901760 0 4294901759 1 65534 65536 1 4294836225 196605 4294901761 1 4294967293 1 4294901760 0 1 1 4294967295 131070 0 4294967295 65537 4294836224 4294967294 65536 131071 4294770688 131072 4294836223 65537 0 4294901760 65537 4294770688 262143 4294836225 65535 196610 4294836224 65537 0 0 4294901760 4294967295 65536 1 4294901757 4294836227 65533 65535 131071 4294901759 65537 0 65534 131071 1 131070 4294901760 131075 1 4294901761 4294901760 4294901760 1 4294967293 4294901761 65534 196606 2 4294901760 4294967295 1 65532 65539 4294967295 4294901761 196607 4294705154 65532 1 4294836226 65534 4294770688 0 4294901759 65534 2 65535 +8798265521 5735848922 8883138653 13341194324 835720736 475950697 447822965 852267978 126619296 288415958 293376646 655463999 454927615 189400270 307085520 124264660 88501554 696557793 243305601 135931388 169666588 130904022 200894138 250163429 289420997 295275696 231188891 181742549 468613279 419030949 237842735 156690188 +4 4287561751 26083170 4254008079 4294114173 183325 255994 723713 2018029 4682345 4273165 2847312 1695913 499285 21701 15658 86321 164194 66258 14677 11194 2720 73405 115333 23666 1037 3917 24634 10205 14501 38345 12602 980 313 2633 34000 76457 44370 5125 5570 18269 23810 3706 666 6817 27925 1193 14885 11156 1585 5081 19130 19813 3044 11105 4537 4212 8356 349 1525 6401 16866 15170 15013 26762 16580 425 3361 5161 7272 15700 23717 9802 1060 27530 50570 37549 8477 3889 5314 6057 265 8381 8705 8180 37874 41936 14453 3985 2637 170 6813 4786 2834 11185 23530 13896 9125 16241 14242 4058 11252 33490 22052 3221 3280 15146 22117 23605 44100 61865 36941 7093 325 800 1481 1322 1189 677 754 1018 1405 4293263355 786429 4294508545 393216 4294836223 131071 65537 65535 0 1 4294901762 65534 4294901761 131070 1 4294901762 4294901759 1 4294836224 1 4294901760 65534 4294901759 65533 0 196607 131071 65539 65537 4294967295 65538 4294901759 1 131070 4294836225 196606 2 4294967295 65535 65537 4294705152 196606 4294901762 131068 4294901761 65537 131071 4294836225 4294836224 196607 65536 4294901760 65538 4294901758 4294967294 196611 4294770689 196607 65534 4294770689 0 65538 4294967292 3 131073 4294836224 131069 4294836225 65536 4294836225 4294901760 4294901758 196606 2 65536 4294967295 1 65536 0 4294967294 4294901760 65536 4294901760 131070 65537 4294836225 0 4294967294 65537 4294901759 196607 4294901760 4294901761 4294901760 131071 4294836225 
196607 1 4294901760 196607 1 2 65535 4294836224 65536 1 131069 2 65536 4294901762 4294836223 131073 4294705153 4294967294 65534 1 4294967294 65536 65535 196608 4294836226 131068 131074 4294836225 4294967295 65535 4294901760 4294836224 4294967294 65536 65535 131071 4294967294 131073 65534 +1150482412 5240120687 18715695941 29339574860 13217513048 2558449603 465258907 843663909 159753138 501362980 391765242 116127711 219926547 95960499 348195558 347009665 172370545 139689998 140508943 181054683 100535513 123970868 275750410 172411007 406900444 283127115 324490029 232234315 285149674 372863656 670202407 256518156 +65433 6225865 8519656 13566393 4231200833 1690970 801721 879057 5570410 6702733 3377569 2337138 1780616 366925 126341 149409 161764 389925 391906 150641 117845 99970 104080 155684 88805 15977 5101 51284 78417 16645 4802 15385 10693 11080 19890 37280 107425 188050 110357 25250 13945 29170 54730 51749 23533 26912 67834 57125 21034 7025 4082 500 2873 22916 37505 27389 17170 10322 3922 3172 6553 9248 6514 10984 10265 3200 3457 7497 9000 10289 10125 3109 4196 4229 7298 40729 53978 33556 29101 29237 5954 7565 15133 1690 19940 18245 1546 1241 1625 1940 7349 9673 5050 3725 5625 8221 14705 9544 1417 28250 47153 37090 10512 8980 14656 15185 5689 4589 31601 37802 22900 7954 2122 1405 1224 269 245 661 1028 2340 3281 2061 1572868 4293459961 983038 3 4294836224 65538 0 65535 1 131071 131073 4294770690 4294967295 4294967295 131071 0 4294836224 196606 65536 65535 65537 4294836224 131071 4294901761 4294967295 131072 4294836225 4294901756 65538 131070 0 65537 65537 0 4294836224 1 65534 4294901761 1 4294770688 65535 65535 0 262142 4294770690 4294967291 196611 4294901760 1 131070 65538 4294901762 4294836221 131073 4294967295 65535 65536 4294901760 1 4294836225 4294967294 4294836224 196607 4294836226 65533 131070 196611 0 4294770686 196607 2 65538 4294836224 4294770689 4294836224 131071 4294836225 4294770688 131072 4294770689 196606 4294770689 131069 131072 4294836224 131071 4294901761 131073 4294967295 0 4294967295 196609 4294770688 4294967295 2 65533 0 196608 4294836227 4294901759 65534 2 4294967295 131072 4294836223 65536 4294901760 0 2 4294901760 65534 4294836228 4294901760 4294967294 4294967295 65535 0 65533 4294836225 1 4294967294 4294967294 4294901760 196607 4294901761 4294967295 4294901762 131072 3 4294836224 4294967293 65537 131074 4294705152 65535 +6911137073 10475842416 34724015477 27936729844 12152231216 2672219979 1411389097 2765172797 1420661856 1056972746 793556340 350130233 322606370 137295615 638432326 1290859573 471390741 575175189 461554633 153369634 336576098 116798871 138012201 141557225 305736088 572470445 231253659 119128938 238810745 545930325 440451728 121617948 +65491 4288610312 5046237 7012199 4292673756 338266 2176850 1564202 341042 366458 228176 334080 95434 305201 74609 240461 472761 825497 449881 122138 50653 33245 17069 200 41650 57906 24608 82856 111938 57250 11554 1066 7361 10597 9760 5490 4624 13105 23725 50625 46405 33437 14692 27389 53885 66449 53210 12410 3250 2173 7565 5877 7225 5650 18850 17000 11545 11485 6530 3025 5732 3485 2273 9434 1665 433 689 1553 5794 29968 25856 6553 5480 6565 965 6409 4552 1769 2722 9005 27197 45945 36297 6401 6970 7396 578 5450 3492 257 2745 5812 2440 1685 1586 881 4010 10205 8857 802 5245 10496 10490 10625 4097 2929 12506 16160 8905 3889 3121 4069 5965 4293 490 2 557 2626 1220 178 293 346 2424825 4294443028 4294508543 65531 65538 4294901759 65536 1 4294967295 65534 131073 4294967295 1 4294967295 4294901760 65535 4294836225 65533 65535 
65534 131071 1 4294836225 4294967295 65534 65535 196610 4294836225 4294967294 196607 1 65533 65538 131071 4294901762 131071 4294901761 4294901760 65535 65537 0 4294836224 65536 65537 65535 0 4294967295 1 131071 2 65534 4294836225 131075 4294770686 1 0 4294836224 131069 4294901762 4294836224 4294967294 4294901760 65536 4294770688 4294967293 65536 2 4294901758 131073 4294901760 65533 131074 0 131073 0 4294901761 4294967295 131072 4294705154 131070 65537 4294836224 4294901761 65536 4294901757 65538 4294836224 196606 4294901762 4294901759 131071 4294901761 4294901758 65537 4294836224 4294967293 0 4294967295 4294967294 65535 65536 4294967295 196607 4294901761 65534 4294901760 131071 4294901762 196607 4294836224 65538 4294836221 196609 4294967295 1 131070 4294901760 0 4294967295 65537 65535 4294967294 131070 131074 4294901760 4294836223 131073 4294967295 0 4294836225 65534 65536 65536 4294967295 1 +6167608942 9911924498 2593243310 2061667502 1486292926 1391770569 2930714578 4682576166 1092543821 203525367 340754446 648659988 587112632 81721049 93387502 311141876 458755528 630752888 249233952 114323857 206781150 95880852 58775090 215914131 146588629 262694353 345415316 73247387 91862262 197031781 210122569 62984707 +65487 4587455 7602449 4258070014 62783880 217178 784601 1571600 2149517 2567258 1478088 405514 334112 193714 211973 155777 250937 501290 122869 22625 17602 47137 57469 107785 94637 47365 16354 2561 25480 30848 17730 23816 10125 4797 1125 5569 6605 4537 18020 42601 19913 360 9250 22658 16021 11600 31300 35261 9189 8852 35221 55210 31642 7081 2617 65 5986 4869 485 2329 677 5945 23504 32977 18490 7380 8801 13192 11530 3474 3673 11981 11680 6250 1762 9700 21425 20872 14753 5809 234 857 2218 16538 17945 9634 10498 7528 1168 10949 16642 13768 17890 25909 32977 30545 15817 3805 2084 3589 4397 1306 809 1753 324 7880 9425 6029 13540 15665 7744 2885 901 821 197 1289 1521 293 1576 3908 2842 698 131072 589828 4294574084 196608 4294836224 4294967295 65536 0 65536 4294770688 131071 65537 4294901759 131069 1 131071 0 65538 4294836224 65536 65537 0 131071 4294901762 262143 1 4294836224 0 131072 4294836225 4294901760 65535 0 2 4294836225 4294967294 131072 2 4294967295 4294901760 65535 65539 4294836222 0 65537 4294967294 0 65536 4294901759 65534 131073 4294836225 65533 327681 4294901763 65535 1 196607 4294836225 0 4294901760 262143 4294770689 196607 4294770690 0 4294770688 0 4294901760 1 4294967295 4294967294 4294901760 65535 65536 4294901761 65534 4294836224 262142 0 65537 4294836224 262141 65537 0 4294901761 0 0 4294901760 65536 4294770688 196606 4294836225 196606 4294901760 4294901760 4294836225 4294901759 65536 4294836225 4294901759 65535 65535 4294901760 4294901760 0 4294836221 327678 1 65539 4294770688 196607 4294836226 4294901759 65536 65537 196607 4294901763 4294836224 262143 4294836224 4294836224 131070 2 4294967295 2 4294967294 4294967295 131072 1 262141 4294901762 4294967293 131073 1 +2505139409 9435406421 13833653936 10591282910 2352544130 1559712740 1874707651 2302922325 328881483 565672104 776814926 185025692 260892744 144244449 53960005 218503995 202958384 261774586 330038711 437503813 72863013 99926767 308641829 168217792 183606421 224884523 203196718 287669468 456498379 87883789 205202657 63473057 +65435 4293197975 13238059 4253221591 7272916 2028546 1334885 1179421 2212520 4556357 3669049 1042513 161765 25085 69997 103685 306865 514865 186482 360 63205 343250 572765 314338 68897 17602 8765 5380 9800 46404 57010 25538 7274 4517 22664 31025 30050 28288 14096 369 15626 20290 15629 11296 
[Omitted: contiguous blocks of raw, whitespace-separated integer data (apparently 32-bit values) added by this diff — generated test-vector files with hundreds of values per line. The numeric payload was line-wrapped during extraction and is elided here.]
1936272237 1517385845 2149905238 5604540659 4456645633 546039872 1142483135 2620204651 457615013 332270054 387560577 221048132 797480291 1027414868 763837324 1178612421 575232148 728347389 820289944 1067788799 410298531 1347049446 618702844 146953926 +9 4292738965 6750192 32899242 4232970860 1130600 414457 366813 709300 387778 219706 769298 300341 93053 6953 39994 175738 676993 347945 120409 165665 190082 108425 12532 141200 227396 62389 46457 195066 402769 230281 33490 11149 11432 1153 850 63485 225938 152897 52225 57377 42989 4885 13949 40309 29125 17690 11674 28793 58133 41425 21536 16000 28885 66541 54980 17216 128090 223145 124580 41885 18320 19597 39125 47048 25673 9805 6437 5069 28745 37637 13513 9146 12532 9250 16138 43325 31477 2041 64420 84317 49040 69170 74629 89037 207205 288137 304036 152937 163280 278900 181058 54125 5760 5585 20485 21970 13189 7748 965 2329 24128 61568 43528 8665 6253 4346 12893 25040 13325 3466 9469 28085 23049 5330 794 2633 4258 7540 7993 2978 404 262115 589834 2 4294901760 4294967295 65537 65536 0 65536 0 0 0 4294901760 65535 0 4294836224 65536 1 4294901760 65535 4294967295 65536 4294901761 4294967293 65536 4294901759 131071 4294836224 131071 4294901759 65535 65536 65535 4294836224 196607 4294901762 65535 65537 4294836224 0 131072 4294901761 0 0 65537 0 65535 131073 4294901759 131072 4294901760 4294901760 65537 65536 4294901761 4294901761 65536 4294901761 0 4294901760 0 4294901761 4294967295 0 4294967295 4294901761 65535 0 4294901760 4294967295 131072 4294967295 131071 131073 1 65535 4294901760 0 0 65535 1 4294901760 131071 65535 131072 0 0 65536 65536 4294967295 131071 4294901760 65538 65535 0 65534 65535 4294901761 65534 0 1 1 65535 65537 4294901760 65536 65537 0 4294836224 65536 4294901761 65535 65536 4294836225 196607 4294901761 4294967295 0 4294967295 65535 262143 4294901761 0 65534 0 65536 65535 4294901761 65535 65533 2 4294901760 4294836224 0 1 +4332343661 2817941910 3311199167 2817462112 2970535222 510272771 1231608931 3547933821 1553161029 1102581064 1265403206 1116133007 2749984636 483694422 382260155 1583576695 535394932 342644355 439256217 509798231 1169193597 1551288598 518829590 329655443 357735633 866583714 2856188825 4384512991 832189766 537900762 413652286 268099507 +65480 3211290 4282974352 1113491 39649824 337682 60701 118629 332681 173585 297785 684745 298961 47965 77300 247985 515201 527210 220301 58228 15677 36125 221245 124525 199172 186005 12965 19098 35789 49124 33589 23725 13121 14689 49681 130373 241090 190994 66985 11029 9376 2152 29153 10217 6025 31460 23236 29933 13988 965 7844 9898 25385 14810 1753 56205 123509 85433 3041 36488 18369 2689 30848 62197 41485 4745 109 1717 9736 7082 7092 7250 24085 42682 44701 36517 44234 66258 54730 35005 7272 73789 132977 25225 34425 61540 106756 344084 262216 57329 39962 98845 19825 18666 16052 22321 6530 8066 17540 7105 25345 47700 17485 1609 10202 11705 3050 530 1465 2756 4369 4405 1972 4840 2000 610 2250 1480 1961 2125 1685 1970 4294311899 851983 4294705148 65536 2 0 0 65535 0 0 65536 4294901761 4294967294 4294901760 0 65535 65535 65536 4294901760 65536 4294967295 0 4294901760 4294901760 65536 0 4294967295 4294901761 65535 0 4294836224 4294967295 0 4294967295 0 4294967295 131071 4294901760 0 4294901760 0 65536 4294967295 65536 4294901760 65535 65534 0 4294901760 131069 2 65536 4294901762 4294967295 4294967295 65535 65536 4294901760 131071 0 4294901761 0 0 4294901760 4294901760 0 4294967295 0 0 4294967295 65536 4294967295 4294836224 65536 0 0 4294901761 131071 4294901760 0 65535 65538 
4294967294 1 4294836224 0 4294967295 0 0 4294901759 131071 1 0 4294901760 65535 65537 0 0 65535 0 4294901760 4294901760 4294901760 65535 0 4294901761 65536 4294901760 4294901760 0 0 4294901760 65534 1 65537 4294967295 65535 0 4294901760 4294967295 65536 4294836224 131071 65537 4294836223 131071 4294901759 4294901760 4294967294 0 4294770688 65535 4294901760 0 65534 +1144257557 893194977 1524172345 2656990700 2712406451 786734909 2822954323 3023929466 527666841 1172738236 1651594914 468094942 413036746 247222724 1385679116 1385735570 176945160 261067824 249261965 205523600 898302221 540325841 482074263 137198138 655537414 1053681721 1905781387 2813846616 519180249 467040454 115316630 70053256 +34 4291690466 917838 4264033800 26935101 141800 466 570089 581810 776945 1786580 920692 74797 133645 4660 19885 80648 396938 106985 14449 132130 446573 392389 714845 2216080 1828741 412090 51637 91621 85145 42653 16025 15913 32810 247122 552773 203636 3250 38993 100708 387413 549664 296162 21658 14152 56250 137233 162370 102394 82105 282685 329818 97973 63233 186386 81497 5965 119197 237137 198760 82490 10917 4570 5353 5485 110260 202786 228218 230845 151786 122138 122266 60932 40225 303400 198356 39274 87541 77930 118408 163108 46489 52205 338689 609986 477937 64250 166906 658010 499613 145637 85005 74836 95300 132625 44228 237133 434180 200485 12337 133709 58984 45693 47417 25325 56225 49202 23341 11393 11268 11050 5498 4453 9050 7154 7225 14473 9657 5017 5200 2977 400 4293066743 2293725 262175 4294705142 196612 0 1 4294901759 131070 196610 4294836225 262143 4294836224 65538 4294836225 131070 4294836225 4294770688 196604 65538 4294901759 131071 65537 4294901761 4294901756 196611 4294901760 4294967292 65539 131071 65535 3 65536 4294901758 4 4294705153 65534 4294901761 4294901760 4294967293 65536 4294901760 4294967295 4294901761 65534 1 65535 4294901760 196605 4294901762 4294901759 196606 4294901761 4294836225 131070 1 4294770689 196606 4294770688 131069 131074 4294770688 131071 0 4294836223 196605 131073 65534 3 65535 65535 4294901761 4294901759 65536 4294967295 131072 4294967295 1 65533 65537 4294967295 196607 4294967295 65535 2 4294836224 196606 4294901762 4294967294 4294901760 196605 2 196607 4294901758 65536 4294901760 0 4294901760 1 65535 196606 2 4294901761 131071 4294901760 4294836225 4294967295 4294836225 65532 65536 65538 4294770686 131071 65538 196607 4294705152 131072 65536 4294967293 65539 0 4294836223 196605 65536 0 1 131071 4294836226 0 65535 4294967294 131073 4294770691 4294967295 65534 +421117418 2726127890 4101702227 9989637695 2589613292 464808813 646168314 1709204951 1193406951 4368908721 15543123258 4558462527 727446184 480111150 3321290976 1432573949 4334614036 1206612778 2062663595 2846829181 1697932639 1917744656 1153653438 3229768059 2571119827 2310793826 6307997684 6761925771 3750077475 2473878001 656529551 229061538 +65451 11468838 4280615122 4283105055 30080683 969970 1168164 192770 344209 1811809 1740724 586130 305045 17981 169273 103321 85277 529901 524660 116609 29653 64345 13121 273956 593748 463432 348169 77545 2117 19277 13837 85 16322 1325 69584 233930 253273 94180 8642 104245 151937 231970 90405 95849 106285 81938 76226 103337 303250 337705 75140 43874 33730 44761 117608 57322 9945 19816 48674 19465 15696 52229 103738 102541 126421 133157 33680 7738 45029 57122 61605 60138 26170 41444 210994 238338 74324 21460 33125 3940 15481 7709 24929 17714 170993 91978 45965 214133 96832 1800 16717 42341 87421 114341 65161 26260 135077 245000 122117 81188 107753 74125 86432 91460 
29648 2069 3161 5002 784 1152 1424 277 452 2696 3610 2120 2617 562 1153 4610 2906 65 786450 4293722107 0 393208 2 196608 65539 65537 4294967295 65537 4294901762 65535 4294770689 65533 131073 4294901761 131071 131071 65535 4294901764 4294967295 0 4294901758 4294901761 196607 4294836224 131070 4294901762 4294901759 196608 4294901761 131071 131070 65538 0 4294901761 65536 4294836225 65536 4294836225 4294901757 65535 4294901760 196607 4294967295 0 131071 4294901762 4294836221 196608 65536 1 131074 4294770690 4294967291 196611 4294770687 262143 4294901759 131071 1 4294901759 196607 4294705153 196607 4294967294 65536 1 4294901759 65536 0 65536 65537 4294901759 65536 3 4294836222 65534 65536 65538 4294836225 4294901760 4294901761 4294967293 0 65534 131070 65537 4294901762 65536 2 4294836224 1 4294901760 0 4294967294 131073 4294901760 65535 131073 4294836224 65536 4294901760 4294901760 4294967295 4294901761 131071 4294901760 131071 4294836224 2 4294901758 4294967292 131073 0 65535 0 4294901760 4294967295 131073 4294901764 4294836223 4294967295 65535 65537 65533 3 131072 4294836224 65537 1 4294967293 196611 4294901759 1 +5645104312 3087527471 5883346002 10807122775 2465336182 853935004 1206905130 3485828019 1134726750 832725041 4442875878 2122064365 178483220 151483681 1742660113 1309124116 1954305288 1323857378 2750861165 1340947482 792522630 669257768 1659699572 940652856 1957080469 1034203505 1541805928 1710818326 2432875876 2254719438 275379184 57293224 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/testdata/framer_test1.txt b/python/tflite_micro/signal/ops/testdata/framer_test1.txt new file mode 100644 index 00000000000..3c814620675 --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/framer_test1.txt @@ -0,0 +1,556 @@ +400 160 0 +1 1 0 -2 -1 2 2 -1 -3 0 3 3 -2 -2 1 3 1 -3 -2 2 2 -1 -1 1 2 1 -2 0 1 0 -2 -2 2 3 -2 -3 1 4 1 -5 -4 1 3 -1 -3 -1 3 3 -2 -3 1 2 -1 -3 -1 2 2 0 -1 0 0 -1 -1 -1 2 2 -1 -1 0 1 0 -2 -1 2 2 -1 -2 -1 1 2 -1 -2 1 2 0 -2 1 2 -1 -5 -1 17 32 31 18 7 5 9 10 8 11 19 26 23 12 0 -11 -25 -34 -31 -19 -14 -22 -29 -11 24 42 27 -1 -11 2 14 10 4 11 26 32 22 6 -11 -22 -30 -31 -30 -26 -26 -27 -33 -45 -59 -63 -58 -50 -48 -48 -40 -26 -16 -17 -23 -21 -11 0 9 18 28 37 44 55 65 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +66 56 40 22 4 -13 -28 -33 -29 -25 -28 -35 -40 -38 -31 -30 -36 -40 -32 -13 7 18 20 28 45 59 61 54 50 57 72 89 101 97 80 65 65 65 50 29 31 50 53 25 -3 -3 10 8 -13 -23 -15 -10 -18 -22 -18 -22 -39 -45 -27 -7 -11 -16 5 26 1 -50 -59 -15 5 -38 -81 -54 0 -15 -86 -108 -46 -3 -50 -113 -85 -4 7 -57 -77 -6 53 10 -60 -37 44 56 -12 -33 41 99 52 -18 6 77 62 -28 -55 20 73 20 -38 12 95 68 -51 -94 -17 43 -7 -63 -7 88 78 -19 -47 34 84 11 -85 -73 14 47 8 -2 58 99 55 -15 -24 9 1 -52 -70 -20 34 30 -11 -27 -8 -3 -40 -81 -80 
-38 4 18 21 35 53 48 16 -13 -16 -5 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +-4 -6 5 29 39 24 11 20 33 17 -27 -57 -55 -41 -36 -36 -24 -5 -4 -26 -53 -67 -69 -77 -79 -56 -8 40 63 66 70 83 88 72 51 53 77 91 84 73 85 108 115 96 80 78 75 53 28 23 34 38 33 32 35 25 -3 -31 -48 -63 -86 -105 -106 -92 -85 -90 -100 -115 -138 -161 -170 -168 -168 -179 -178 -157 -127 -110 -107 -102 -88 -78 -78 -82 -75 -61 -44 -28 -17 -8 2 12 21 22 18 19 28 39 43 44 47 47 40 27 16 12 13 14 10 -4 -29 -48 -48 -31 -19 -17 -19 -12 -5 -12 -27 -31 -21 -4 5 -1 -9 -10 -2 14 32 43 52 60 70 73 66 54 52 69 98 125 142 146 151 164 182 195 203 207 218 238 263 293 315 327 325 320 314 307 +1 1 0 -2 -1 2 2 -1 -3 0 3 3 -2 -2 1 3 1 -3 -2 2 2 -1 -1 1 2 1 -2 0 1 0 -2 -2 2 3 -2 -3 1 4 1 -5 -4 1 3 -1 -3 -1 3 3 -2 -3 1 2 -1 -3 -1 2 2 0 -1 0 0 -1 -1 -1 2 2 -1 -1 0 1 0 -2 -1 2 2 -1 -2 -1 1 2 -1 -2 1 2 0 -2 1 2 -1 -5 -1 17 32 31 18 7 5 9 10 8 11 19 26 23 12 0 -11 -25 -34 -31 -19 -14 -22 -29 -11 24 42 27 -1 -11 2 14 10 4 11 26 32 22 6 -11 -22 -30 -31 -30 -26 -26 -27 -33 -45 -59 -63 -58 -50 -48 -48 -40 -26 -16 -17 -23 -21 -11 0 9 18 28 37 44 55 65 66 56 40 22 4 -13 -28 -33 -29 -25 -28 -35 -40 -38 -31 -30 -36 -40 -32 -13 7 18 20 28 45 59 61 54 50 57 72 89 101 97 80 65 65 65 50 29 31 50 53 25 -3 -3 10 8 -13 -23 -15 -10 -18 -22 -18 -22 -39 -45 -27 -7 -11 -16 5 26 1 -50 -59 -15 5 -38 -81 -54 0 -15 -86 -108 -46 -3 -50 -113 -85 -4 7 -57 -77 -6 53 10 -60 -37 44 56 -12 -33 41 99 52 -18 6 77 62 -28 -55 20 73 20 -38 12 95 68 -51 -94 -17 43 -7 -63 -7 88 78 -19 -47 34 84 11 -85 -73 14 47 8 -2 58 99 55 -15 -24 9 1 -52 -70 -20 34 30 -11 -27 -8 -3 -40 -81 -80 -38 4 18 21 35 53 48 16 -13 -16 -5 -4 -6 5 29 39 24 11 20 33 17 -27 -57 -55 -41 -36 -36 -24 -5 -4 -26 -53 -67 -69 -77 -79 -56 -8 40 63 66 70 83 88 72 51 53 77 91 84 73 85 108 115 96 80 78 75 53 28 23 34 38 33 32 35 25 -3 -31 -48 -63 -86 -105 -106 -92 -85 -90 -100 -115 -138 -161 -170 -168 -168 -179 -178 -157 -127 -110 -107 -102 +1 +292 275 268 270 269 251 226 203 181 157 130 112 103 93 72 46 21 1 -23 -49 -80 -116 -156 -196 -225 -245 -264 -292 -333 -375 -409 -433 -456 -486 -516 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -419 -392 -355 -306 -257 -217 -178 -129 -69 -10 43 96 155 220 278 324 357 378 391 401 407 411 411 410 401 384 359 335 313 287 257 229 206 185 163 140 123 109 98 89 87 83 75 64 62 67 63 51 44 55 73 82 79 85 102 116 116 112 116 124 128 123 118 107 91 75 74 74 53 7 -40 -69 -87 -113 -144 -165 -178 -201 -238 -273 -302 -333 -375 -411 -418 -402 -392 -401 -409 -401 -387 -383 -384 -366 -327 -284 -251 -216 -171 -122 -82 -42 8 66 110 133 152 184 226 259 283 304 320 323 +66 56 40 22 4 -13 -28 -33 -29 -25 -28 -35 -40 -38 -31 -30 -36 -40 -32 -13 7 18 20 28 45 59 61 54 50 57 72 89 101 97 80 65 65 65 50 29 31 50 53 25 -3 -3 10 8 -13 -23 -15 -10 -18 
-22 -18 -22 -39 -45 -27 -7 -11 -16 5 26 1 -50 -59 -15 5 -38 -81 -54 0 -15 -86 -108 -46 -3 -50 -113 -85 -4 7 -57 -77 -6 53 10 -60 -37 44 56 -12 -33 41 99 52 -18 6 77 62 -28 -55 20 73 20 -38 12 95 68 -51 -94 -17 43 -7 -63 -7 88 78 -19 -47 34 84 11 -85 -73 14 47 8 -2 58 99 55 -15 -24 9 1 -52 -70 -20 34 30 -11 -27 -8 -3 -40 -81 -80 -38 4 18 21 35 53 48 16 -13 -16 -5 -4 -6 5 29 39 24 11 20 33 17 -27 -57 -55 -41 -36 -36 -24 -5 -4 -26 -53 -67 -69 -77 -79 -56 -8 40 63 66 70 83 88 72 51 53 77 91 84 73 85 108 115 96 80 78 75 53 28 23 34 38 33 32 35 25 -3 -31 -48 -63 -86 -105 -106 -92 -85 -90 -100 -115 -138 -161 -170 -168 -168 -179 -178 -157 -127 -110 -107 -102 -88 -78 -78 -82 -75 -61 -44 -28 -17 -8 2 12 21 22 18 19 28 39 43 44 47 47 40 27 16 12 13 14 10 -4 -29 -48 -48 -31 -19 -17 -19 -12 -5 -12 -27 -31 -21 -4 5 -1 -9 -10 -2 14 32 43 52 60 70 73 66 54 52 69 98 125 142 146 151 164 182 195 203 207 218 238 263 293 315 327 325 320 314 307 292 275 268 270 269 251 226 203 181 157 130 112 103 93 72 46 21 1 -23 -49 -80 -116 -156 -196 -225 -245 -264 -292 -333 -375 -409 -433 -456 -486 -516 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -419 -392 -355 -306 -257 -217 -178 -129 -69 -10 43 96 155 220 278 324 357 378 391 401 407 411 411 410 401 384 359 335 313 287 257 229 206 185 +1 +310 294 290 294 288 262 225 187 158 130 99 61 22 -11 -34 -46 -58 -81 -115 -149 -171 -179 -180 -185 -189 -179 -155 -135 -130 -127 -107 -68 -31 -14 -6 16 53 87 108 123 150 185 214 226 226 222 224 228 232 233 230 229 235 246 244 224 188 155 124 88 44 5 -20 -36 -59 -96 -139 -180 -215 -251 -290 -328 -357 -371 -379 -381 -383 -385 -382 -374 -361 -345 -326 -304 -283 -263 -239 -195 -134 -75 -34 -2 41 96 140 153 156 180 235 294 328 347 368 396 417 422 422 426 424 415 408 409 407 392 367 335 288 221 148 94 57 15 -42 -101 -156 -210 -256 -270 -263 -289 -364 -428 -424 -390 -405 -458 -456 -375 -321 -360 -405 -345 -228 -190 -232 -205 -58 65 42 -19 55 224 295 223 193 313 440 409 295 294 +-4 -6 5 29 39 24 11 20 33 17 -27 -57 -55 -41 -36 -36 -24 -5 -4 -26 -53 -67 -69 -77 -79 -56 -8 40 63 66 70 83 88 72 51 53 77 91 84 73 85 108 115 96 80 78 75 53 28 23 34 38 33 32 35 25 -3 -31 -48 -63 -86 -105 -106 -92 -85 -90 -100 -115 -138 -161 -170 -168 -168 -179 -178 -157 -127 -110 -107 -102 -88 -78 -78 -82 -75 -61 -44 -28 -17 -8 2 12 21 22 18 19 28 39 43 44 47 47 40 27 16 12 13 14 10 -4 -29 -48 -48 -31 -19 -17 -19 -12 -5 -12 -27 -31 -21 -4 5 -1 -9 -10 -2 14 32 43 52 60 70 73 66 54 52 69 98 125 142 146 151 164 182 195 203 207 218 238 263 293 315 327 325 320 314 307 292 275 268 270 269 251 226 203 181 157 130 112 103 93 72 46 21 1 -23 -49 -80 -116 -156 -196 -225 -245 -264 -292 -333 -375 -409 -433 -456 -486 -516 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -419 -392 -355 -306 -257 -217 -178 -129 -69 -10 43 96 155 220 278 324 357 378 391 401 407 411 411 410 401 384 359 335 313 287 257 229 206 185 163 140 123 109 98 89 87 83 75 64 62 67 63 51 44 55 73 82 79 85 102 116 116 112 116 124 128 123 118 107 91 75 74 74 53 7 -40 -69 -87 -113 -144 -165 -178 -201 -238 -273 -302 -333 -375 -411 -418 -402 -392 -401 -409 -401 -387 -383 -384 -366 -327 -284 -251 -216 -171 -122 -82 -42 8 66 110 133 152 184 226 259 283 304 320 323 310 294 290 294 288 262 225 187 158 130 99 61 22 -11 -34 -46 -58 -81 -115 -149 -171 -179 -180 -185 -189 -179 -155 -135 -130 -127 -107 -68 -31 -14 -6 16 53 87 108 123 150 185 214 226 226 222 224 228 232 233 230 229 235 246 244 224 188 155 124 88 44 5 -20 -36 -59 -96 -139 -180 -215 -251 -290 -328 -357 -371 -379 -381 -383 -385 -382 -374 +1 +407 446 
345 265 324 414 380 269 243 303 301 190 100 123 158 87 -34 -68 -18 -18 -111 -190 -182 -159 -207 -284 -291 -237 -225 -282 -322 -291 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 1 28 35 29 31 43 61 88 118 135 129 115 119 139 152 139 111 92 85 80 62 37 25 31 43 39 13 -13 -18 -5 6 6 0 -3 -3 -8 -16 -26 -35 -43 -46 -47 -53 -61 -62 -51 -41 -47 -63 -68 -53 -40 -44 -57 -62 -59 -61 -67 -70 -67 -68 -72 -58 -30 -13 -22 -40 -33 -7 9 3 -4 8 31 46 47 47 48 44 38 37 39 40 42 62 90 109 107 98 103 116 119 113 112 123 137 147 150 145 127 100 77 65 51 30 4 -14 -28 -45 -72 -92 +292 275 268 270 269 251 226 203 181 157 130 112 103 93 72 46 21 1 -23 -49 -80 -116 -156 -196 -225 -245 -264 -292 -333 -375 -409 -433 -456 -486 -516 -536 -548 -550 -544 -539 -537 -539 -534 -516 -483 -447 -419 -392 -355 -306 -257 -217 -178 -129 -69 -10 43 96 155 220 278 324 357 378 391 401 407 411 411 410 401 384 359 335 313 287 257 229 206 185 163 140 123 109 98 89 87 83 75 64 62 67 63 51 44 55 73 82 79 85 102 116 116 112 116 124 128 123 118 107 91 75 74 74 53 7 -40 -69 -87 -113 -144 -165 -178 -201 -238 -273 -302 -333 -375 -411 -418 -402 -392 -401 -409 -401 -387 -383 -384 -366 -327 -284 -251 -216 -171 -122 -82 -42 8 66 110 133 152 184 226 259 283 304 320 323 310 294 290 294 288 262 225 187 158 130 99 61 22 -11 -34 -46 -58 -81 -115 -149 -171 -179 -180 -185 -189 -179 -155 -135 -130 -127 -107 -68 -31 -14 -6 16 53 87 108 123 150 185 214 226 226 222 224 228 232 233 230 229 235 246 244 224 188 155 124 88 44 5 -20 -36 -59 -96 -139 -180 -215 -251 -290 -328 -357 -371 -379 -381 -383 -385 -382 -374 -361 -345 -326 -304 -283 -263 -239 -195 -134 -75 -34 -2 41 96 140 153 156 180 235 294 328 347 368 396 417 422 422 426 424 415 408 409 407 392 367 335 288 221 148 94 57 15 -42 -101 -156 -210 -256 -270 -263 -289 -364 -428 -424 -390 -405 -458 -456 -375 -321 -360 -405 -345 -228 -190 -232 -205 -58 65 42 -19 55 224 295 223 193 313 440 409 295 294 407 446 345 265 324 414 380 269 243 303 301 190 100 123 158 87 -34 -68 -18 -18 -111 -190 -182 -159 -207 -284 -291 -237 -225 -282 -322 -291 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 1 28 35 29 31 43 61 88 118 135 129 115 119 139 152 139 111 92 85 80 62 37 25 31 43 39 13 -13 -18 -5 6 +1 +-102 -114 -137 -160 -169 -161 -154 -158 -168 -172 -167 -160 -157 -157 -154 -148 -133 -119 -113 -120 -131 -138 -132 -118 -102 -89 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 5 35 61 74 74 80 107 142 164 165 161 164 172 179 182 185 194 201 202 197 182 157 127 101 90 89 91 92 99 115 120 94 34 -32 -78 -101 -110 -110 -83 -26 42 86 93 68 22 -36 -99 -139 -130 -73 13 98 165 206 223 205 151 71 1 -28 -7 42 101 175 269 354 375 311 197 85 -1 -67 -111 -110 -55 27 97 138 149 126 55 -59 -178 -261 -295 -294 -267 -215 -145 -77 -39 -46 -92 -167 -251 -324 -368 -373 -344 -284 -203 -116 -46 -12 -17 -39 -67 -97 -129 -151 -143 -103 -39 32 92 132 142 126 95 59 14 -43 -97 -129 +310 294 290 294 288 262 225 187 158 130 99 61 22 -11 -34 -46 -58 -81 -115 -149 -171 -179 -180 -185 -189 -179 -155 -135 -130 -127 -107 -68 -31 -14 -6 16 53 87 108 123 150 185 214 226 226 222 224 228 232 233 230 229 235 246 244 224 188 155 124 88 44 5 -20 -36 -59 -96 -139 -180 -215 -251 -290 -328 -357 -371 -379 -381 -383 -385 -382 -374 -361 -345 -326 -304 -283 -263 -239 -195 -134 -75 -34 -2 41 96 140 153 156 180 235 294 328 347 368 396 417 422 422 426 424 415 408 409 407 392 367 335 288 221 148 94 57 15 -42 -101 -156 -210 -256 -270 -263 -289 -364 -428 -424 -390 -405 -458 -456 -375 -321 -360 -405 -345 -228 -190 -232 -205 
-58 65 42 -19 55 224 295 223 193 313 440 409 295 294 407 446 345 265 324 414 380 269 243 303 301 190 100 123 158 87 -34 -68 -18 -18 -111 -190 -182 -159 -207 -284 -291 -237 -225 -282 -322 -291 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 1 28 35 29 31 43 61 88 118 135 129 115 119 139 152 139 111 92 85 80 62 37 25 31 43 39 13 -13 -18 -5 6 6 0 -3 -3 -8 -16 -26 -35 -43 -46 -47 -53 -61 -62 -51 -41 -47 -63 -68 -53 -40 -44 -57 -62 -59 -61 -67 -70 -67 -68 -72 -58 -30 -13 -22 -40 -33 -7 9 3 -4 8 31 46 47 47 48 44 38 37 39 40 42 62 90 109 107 98 103 116 119 113 112 123 137 147 150 145 127 100 77 65 51 30 4 -14 -28 -45 -72 -92 -102 -114 -137 -160 -169 -161 -154 -158 -168 -172 -167 -160 -157 -157 -154 -148 -133 -119 -113 -120 -131 -138 -132 -118 -102 -89 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 5 35 61 74 74 80 107 142 164 165 161 164 172 179 182 185 194 201 202 197 182 157 127 101 90 89 91 92 99 115 120 94 34 -32 -78 -101 -110 -110 -83 -26 42 86 93 68 +1 +-129 -106 -73 -38 -12 -8 -33 -79 -124 -158 -181 -198 -205 -184 -130 -57 11 58 80 77 53 13 -29 -56 -58 -35 5 57 118 182 229 242 216 168 116 77 54 47 61 92 135 174 198 197 174 138 99 56 12 -20 -25 -2 36 75 119 156 171 143 88 43 25 17 4 -4 22 79 135 158 151 136 121 105 76 45 22 17 33 62 88 97 90 69 40 4 -33 -67 -97 -128 -156 -166 -149 -118 -101 -112 -147 -185 -214 -232 -242 -248 -253 -246 -226 -196 -165 -150 -150 -155 -152 -141 -124 -110 -95 -78 -53 -34 -27 -33 -35 -25 -11 -8 -11 -10 0 13 21 27 42 64 79 80 72 66 69 82 96 105 107 105 108 116 124 126 122 117 116 116 111 101 94 95 97 84 53 20 7 12 12 0 +407 446 345 265 324 414 380 269 243 303 301 190 100 123 158 87 -34 -68 -18 -18 -111 -190 -182 -159 -207 -284 -291 -237 -225 -282 -322 -291 -242 -238 -261 -248 -203 -180 -191 -191 -156 -122 -113 -109 -81 -44 -32 -37 -29 1 28 35 29 31 43 61 88 118 135 129 115 119 139 152 139 111 92 85 80 62 37 25 31 43 39 13 -13 -18 -5 6 6 0 -3 -3 -8 -16 -26 -35 -43 -46 -47 -53 -61 -62 -51 -41 -47 -63 -68 -53 -40 -44 -57 -62 -59 -61 -67 -70 -67 -68 -72 -58 -30 -13 -22 -40 -33 -7 9 3 -4 8 31 46 47 47 48 44 38 37 39 40 42 62 90 109 107 98 103 116 119 113 112 123 137 147 150 145 127 100 77 65 51 30 4 -14 -28 -45 -72 -92 -102 -114 -137 -160 -169 -161 -154 -158 -168 -172 -167 -160 -157 -157 -154 -148 -133 -119 -113 -120 -131 -138 -132 -118 -102 -89 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 5 35 61 74 74 80 107 142 164 165 161 164 172 179 182 185 194 201 202 197 182 157 127 101 90 89 91 92 99 115 120 94 34 -32 -78 -101 -110 -110 -83 -26 42 86 93 68 22 -36 -99 -139 -130 -73 13 98 165 206 223 205 151 71 1 -28 -7 42 101 175 269 354 375 311 197 85 -1 -67 -111 -110 -55 27 97 138 149 126 55 -59 -178 -261 -295 -294 -267 -215 -145 -77 -39 -46 -92 -167 -251 -324 -368 -373 -344 -284 -203 -116 -46 -12 -17 -39 -67 -97 -129 -151 -143 -103 -39 32 92 132 142 126 95 59 14 -43 -97 -129 -129 -106 -73 -38 -12 -8 -33 -79 -124 -158 -181 -198 -205 -184 -130 -57 11 58 80 77 53 13 -29 -56 -58 -35 5 57 118 182 229 242 216 168 116 77 54 47 61 92 135 174 198 197 174 138 99 56 12 -20 -25 -2 36 75 119 156 171 143 88 43 25 17 4 -4 22 79 135 158 151 136 121 105 76 45 22 17 33 62 88 97 +1 +-14 -19 -22 -44 -81 -100 -85 -55 -39 -43 -42 -24 -6 -8 -27 -42 -38 -22 -5 2 0 -3 1 16 28 34 28 19 10 -3 -16 -21 -8 16 35 46 53 66 79 82 75 72 79 90 96 95 97 102 102 90 65 38 11 -14 -35 -50 -57 -60 -67 -77 -85 -94 -106 -118 -128 -132 -130 -125 -115 -94 -60 -26 -2 7 8 12 22 31 42 58 86 122 154 174 184 190 192 187 178 174 178 183 180 175 171 171 162 141 112 85 53 9 -40 -80 -103 -117 -136 -162 -187 
-203 -220 -244 -271 -292 -295 -286 -270 -257 -256 -266 -273 -263 -237 -212 -196 -186 -166 -133 -97 -63 -31 -2 22 39 58 79 98 109 113 119 126 130 131 136 149 163 164 148 122 97 83 78 80 80 82 92 110 122 117 96 +-102 -114 -137 -160 -169 -161 -154 -158 -168 -172 -167 -160 -157 -157 -154 -148 -133 -119 -113 -120 -131 -138 -132 -118 -102 -89 -77 -63 -48 -40 -51 -67 -76 -66 -44 -20 5 35 61 74 74 80 107 142 164 165 161 164 172 179 182 185 194 201 202 197 182 157 127 101 90 89 91 92 99 115 120 94 34 -32 -78 -101 -110 -110 -83 -26 42 86 93 68 22 -36 -99 -139 -130 -73 13 98 165 206 223 205 151 71 1 -28 -7 42 101 175 269 354 375 311 197 85 -1 -67 -111 -110 -55 27 97 138 149 126 55 -59 -178 -261 -295 -294 -267 -215 -145 -77 -39 -46 -92 -167 -251 -324 -368 -373 -344 -284 -203 -116 -46 -12 -17 -39 -67 -97 -129 -151 -143 -103 -39 32 92 132 142 126 95 59 14 -43 -97 -129 -129 -106 -73 -38 -12 -8 -33 -79 -124 -158 -181 -198 -205 -184 -130 -57 11 58 80 77 53 13 -29 -56 -58 -35 5 57 118 182 229 242 216 168 116 77 54 47 61 92 135 174 198 197 174 138 99 56 12 -20 -25 -2 36 75 119 156 171 143 88 43 25 17 4 -4 22 79 135 158 151 136 121 105 76 45 22 17 33 62 88 97 90 69 40 4 -33 -67 -97 -128 -156 -166 -149 -118 -101 -112 -147 -185 -214 -232 -242 -248 -253 -246 -226 -196 -165 -150 -150 -155 -152 -141 -124 -110 -95 -78 -53 -34 -27 -33 -35 -25 -11 -8 -11 -10 0 13 21 27 42 64 79 80 72 66 69 82 96 105 107 105 108 116 124 126 122 117 116 116 111 101 94 95 97 84 53 20 7 12 12 0 -14 -19 -22 -44 -81 -100 -85 -55 -39 -43 -42 -24 -6 -8 -27 -42 -38 -22 -5 2 0 -3 1 16 28 34 28 19 10 -3 -16 -21 -8 16 35 46 53 66 79 82 75 72 79 90 96 95 97 102 102 90 65 38 11 -14 -35 -50 -57 -60 -67 -77 -85 -94 -106 -118 -128 -132 -130 -125 -115 -94 -60 -26 -2 7 8 12 22 31 42 58 +1 +67 35 6 -21 -38 -42 -36 -31 -34 -42 -55 -72 -93 -125 -155 -169 -158 -132 -112 -104 -94 -75 -52 -37 -27 -15 4 25 45 69 98 127 150 169 186 205 223 239 248 245 228 205 188 178 170 160 148 133 112 88 68 51 29 0 -23 -31 -29 -30 -42 -57 -71 -90 -123 -163 -195 -217 -234 -250 -265 -275 -282 -290 -291 -286 -276 -271 -268 -262 -251 -241 -238 -230 -208 -171 -133 -97 -62 -24 10 37 58 80 102 119 134 149 171 198 224 244 250 248 242 231 220 205 188 168 145 123 109 101 94 77 53 27 3 -21 -44 -63 -71 -72 -74 -78 -83 -87 -96 -114 -139 -162 -178 -186 -186 -178 -164 -147 -133 -119 -102 -80 -57 -40 -26 -6 26 65 100 119 125 130 135 134 129 127 131 134 137 141 152 167 +-129 -106 -73 -38 -12 -8 -33 -79 -124 -158 -181 -198 -205 -184 -130 -57 11 58 80 77 53 13 -29 -56 -58 -35 5 57 118 182 229 242 216 168 116 77 54 47 61 92 135 174 198 197 174 138 99 56 12 -20 -25 -2 36 75 119 156 171 143 88 43 25 17 4 -4 22 79 135 158 151 136 121 105 76 45 22 17 33 62 88 97 90 69 40 4 -33 -67 -97 -128 -156 -166 -149 -118 -101 -112 -147 -185 -214 -232 -242 -248 -253 -246 -226 -196 -165 -150 -150 -155 -152 -141 -124 -110 -95 -78 -53 -34 -27 -33 -35 -25 -11 -8 -11 -10 0 13 21 27 42 64 79 80 72 66 69 82 96 105 107 105 108 116 124 126 122 117 116 116 111 101 94 95 97 84 53 20 7 12 12 0 -14 -19 -22 -44 -81 -100 -85 -55 -39 -43 -42 -24 -6 -8 -27 -42 -38 -22 -5 2 0 -3 1 16 28 34 28 19 10 -3 -16 -21 -8 16 35 46 53 66 79 82 75 72 79 90 96 95 97 102 102 90 65 38 11 -14 -35 -50 -57 -60 -67 -77 -85 -94 -106 -118 -128 -132 -130 -125 -115 -94 -60 -26 -2 7 8 12 22 31 42 58 86 122 154 174 184 190 192 187 178 174 178 183 180 175 171 171 162 141 112 85 53 9 -40 -80 -103 -117 -136 -162 -187 -203 -220 -244 -271 -292 -295 -286 -270 -257 -256 -266 -273 -263 -237 -212 -196 -186 -166 -133 -97 -63 -31 -2 22 39 58 79 98 109 113 119 126 
130 131 136 149 163 164 148 122 97 83 78 80 80 82 92 110 122 117 96 67 35 6 -21 -38 -42 -36 -31 -34 -42 -55 -72 -93 -125 -155 -169 -158 -132 -112 -104 -94 -75 -52 -37 -27 -15 4 25 45 69 98 127 150 169 186 205 223 239 248 245 228 205 188 178 170 160 148 133 112 88 68 51 29 0 -23 -31 -29 -30 -42 -57 -71 -90 -123 -163 -195 -217 -234 -250 -265 -275 -282 -290 -291 -286 -276 -271 -268 -262 -251 -241 +1 +173 171 165 161 156 154 161 178 192 192 183 177 175 169 155 138 125 113 97 83 80 80 69 43 9 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -104 -103 -102 -106 -110 -114 -119 -125 -128 -119 -104 -92 -86 -82 -69 -51 -37 -31 -27 -21 -7 14 37 51 50 41 36 37 32 8 -21 -37 -33 -29 -42 -65 -83 -89 -98 -124 -158 -184 -197 -205 -212 -213 -205 -191 -177 -166 -149 -133 -116 -96 -69 -39 -12 9 31 61 95 127 153 172 180 180 175 173 179 193 211 220 219 216 218 231 247 257 261 260 260 263 275 291 300 291 268 244 230 221 206 185 165 153 148 134 109 77 52 39 31 15 -2 -13 -19 -26 -44 +-14 -19 -22 -44 -81 -100 -85 -55 -39 -43 -42 -24 -6 -8 -27 -42 -38 -22 -5 2 0 -3 1 16 28 34 28 19 10 -3 -16 -21 -8 16 35 46 53 66 79 82 75 72 79 90 96 95 97 102 102 90 65 38 11 -14 -35 -50 -57 -60 -67 -77 -85 -94 -106 -118 -128 -132 -130 -125 -115 -94 -60 -26 -2 7 8 12 22 31 42 58 86 122 154 174 184 190 192 187 178 174 178 183 180 175 171 171 162 141 112 85 53 9 -40 -80 -103 -117 -136 -162 -187 -203 -220 -244 -271 -292 -295 -286 -270 -257 -256 -266 -273 -263 -237 -212 -196 -186 -166 -133 -97 -63 -31 -2 22 39 58 79 98 109 113 119 126 130 131 136 149 163 164 148 122 97 83 78 80 80 82 92 110 122 117 96 67 35 6 -21 -38 -42 -36 -31 -34 -42 -55 -72 -93 -125 -155 -169 -158 -132 -112 -104 -94 -75 -52 -37 -27 -15 4 25 45 69 98 127 150 169 186 205 223 239 248 245 228 205 188 178 170 160 148 133 112 88 68 51 29 0 -23 -31 -29 -30 -42 -57 -71 -90 -123 -163 -195 -217 -234 -250 -265 -275 -282 -290 -291 -286 -276 -271 -268 -262 -251 -241 -238 -230 -208 -171 -133 -97 -62 -24 10 37 58 80 102 119 134 149 171 198 224 244 250 248 242 231 220 205 188 168 145 123 109 101 94 77 53 27 3 -21 -44 -63 -71 -72 -74 -78 -83 -87 -96 -114 -139 -162 -178 -186 -186 -178 -164 -147 -133 -119 -102 -80 -57 -40 -26 -6 26 65 100 119 125 130 135 134 129 127 131 134 137 141 152 167 173 171 165 161 156 154 161 178 192 192 183 177 175 169 155 138 125 113 97 83 80 80 69 43 9 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -104 -103 -102 -106 -110 -114 -119 -125 -128 -119 -104 -92 -86 -82 -69 -51 -37 -31 -27 -21 -7 14 37 51 50 41 36 +1 +-61 -67 -67 -77 -95 -111 -121 -133 -152 -171 -176 -174 -177 -190 -208 -220 -227 -228 -226 -227 -242 -264 -277 -268 -243 -223 -208 -191 -161 -128 -97 -78 -63 -46 -24 5 33 59 79 94 102 104 102 101 98 86 59 30 7 -8 -24 -43 -58 -72 -96 -134 -169 -186 -188 -193 -208 -223 -230 -227 -222 -210 -193 -180 -180 -190 -190 -177 -160 -143 -120 -81 -37 -8 2 11 30 51 65 72 86 114 149 179 201 218 231 239 240 237 243 260 283 302 308 304 296 286 269 242 208 179 159 147 134 114 90 69 52 34 9 -18 -37 -45 -50 -59 -71 -76 -71 -59 -46 -39 -40 -45 -47 -41 -28 -12 5 24 40 51 58 67 80 95 113 129 137 131 118 110 115 123 120 110 101 99 95 87 78 67 53 28 -5 -34 +67 35 6 -21 -38 -42 -36 -31 -34 -42 -55 -72 -93 -125 -155 -169 -158 -132 -112 -104 -94 -75 -52 -37 -27 -15 4 25 45 69 98 127 150 169 186 205 223 239 248 245 228 205 188 178 170 160 148 133 112 88 68 51 29 0 -23 -31 -29 -30 -42 -57 -71 -90 -123 -163 -195 -217 -234 -250 -265 -275 
-282 -290 -291 -286 -276 -271 -268 -262 -251 -241 -238 -230 -208 -171 -133 -97 -62 -24 10 37 58 80 102 119 134 149 171 198 224 244 250 248 242 231 220 205 188 168 145 123 109 101 94 77 53 27 3 -21 -44 -63 -71 -72 -74 -78 -83 -87 -96 -114 -139 -162 -178 -186 -186 -178 -164 -147 -133 -119 -102 -80 -57 -40 -26 -6 26 65 100 119 125 130 135 134 129 127 131 134 137 141 152 167 173 171 165 161 156 154 161 178 192 192 183 177 175 169 155 138 125 113 97 83 80 80 69 43 9 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -104 -103 -102 -106 -110 -114 -119 -125 -128 -119 -104 -92 -86 -82 -69 -51 -37 -31 -27 -21 -7 14 37 51 50 41 36 37 32 8 -21 -37 -33 -29 -42 -65 -83 -89 -98 -124 -158 -184 -197 -205 -212 -213 -205 -191 -177 -166 -149 -133 -116 -96 -69 -39 -12 9 31 61 95 127 153 172 180 180 175 173 179 193 211 220 219 216 218 231 247 257 261 260 260 263 275 291 300 291 268 244 230 221 206 185 165 153 148 134 109 77 52 39 31 15 -2 -13 -19 -26 -44 -61 -67 -67 -77 -95 -111 -121 -133 -152 -171 -176 -174 -177 -190 -208 -220 -227 -228 -226 -227 -242 -264 -277 -268 -243 -223 -208 -191 -161 -128 -97 -78 -63 -46 -24 5 33 59 79 94 102 104 102 101 98 86 59 30 7 -8 -24 -43 -58 -72 -96 -134 -169 -186 -188 -193 -208 -223 -230 -227 -222 -210 -193 -180 -180 -190 -190 -177 -160 -143 -120 -81 -37 -8 2 11 +1 +-55 -71 -91 -118 -145 -165 -180 -196 -220 -249 -275 -293 -310 -327 -342 -344 -332 -311 -285 -264 -250 -235 -211 -175 -131 -88 -46 -4 33 60 79 98 126 153 170 173 170 166 162 155 146 132 111 91 74 59 42 29 27 29 24 3 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -35 -41 -47 -49 -44 -34 -29 -26 -19 -12 -12 -20 -24 -13 4 18 19 17 18 25 29 29 28 39 58 74 77 78 88 109 124 126 123 129 142 154 156 147 137 126 115 103 89 72 59 52 51 49 41 27 10 -10 -29 -44 -52 -57 -62 -62 -56 -50 -52 -61 -66 -61 -52 -45 -34 -14 5 6 -8 -16 -6 14 29 39 52 74 96 109 110 112 117 123 121 112 106 109 117 116 92 55 17 -11 -36 -66 -97 -131 +173 171 165 161 156 154 161 178 192 192 183 177 175 169 155 138 125 113 97 83 80 80 69 43 9 -18 -28 -31 -33 -42 -57 -72 -79 -83 -87 -95 -108 -119 -123 -117 -105 -97 -98 -106 -109 -103 -93 -87 -85 -84 -85 -92 -100 -104 -103 -102 -106 -110 -114 -119 -125 -128 -119 -104 -92 -86 -82 -69 -51 -37 -31 -27 -21 -7 14 37 51 50 41 36 37 32 8 -21 -37 -33 -29 -42 -65 -83 -89 -98 -124 -158 -184 -197 -205 -212 -213 -205 -191 -177 -166 -149 -133 -116 -96 -69 -39 -12 9 31 61 95 127 153 172 180 180 175 173 179 193 211 220 219 216 218 231 247 257 261 260 260 263 275 291 300 291 268 244 230 221 206 185 165 153 148 134 109 77 52 39 31 15 -2 -13 -19 -26 -44 -61 -67 -67 -77 -95 -111 -121 -133 -152 -171 -176 -174 -177 -190 -208 -220 -227 -228 -226 -227 -242 -264 -277 -268 -243 -223 -208 -191 -161 -128 -97 -78 -63 -46 -24 5 33 59 79 94 102 104 102 101 98 86 59 30 7 -8 -24 -43 -58 -72 -96 -134 -169 -186 -188 -193 -208 -223 -230 -227 -222 -210 -193 -180 -180 -190 -190 -177 -160 -143 -120 -81 -37 -8 2 11 30 51 65 72 86 114 149 179 201 218 231 239 240 237 243 260 283 302 308 304 296 286 269 242 208 179 159 147 134 114 90 69 52 34 9 -18 -37 -45 -50 -59 -71 -76 -71 -59 -46 -39 -40 -45 -47 -41 -28 -12 5 24 40 51 58 67 80 95 113 129 137 131 118 110 115 123 120 110 101 99 95 87 78 67 53 28 -5 -34 -55 -71 -91 -118 -145 -165 -180 -196 -220 -249 -275 -293 -310 -327 -342 -344 -332 -311 -285 -264 -250 -235 -211 -175 -131 -88 -46 -4 33 60 79 98 126 153 170 173 170 166 162 155 146 132 111 91 74 59 42 29 27 29 24 3 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -35 -41 -47 -49 -44 -34 -29 -26 -19 
-12 -12 -20 -24 -13 4 18 19 17 +1 +-165 -199 -225 -244 -259 -276 -289 -290 -283 -275 -273 -268 -253 -226 -194 -166 -144 -127 -110 -90 -67 -45 -25 -6 6 11 12 15 24 28 23 19 29 51 71 79 77 78 80 78 63 44 27 12 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -61 -56 -44 -29 -14 -5 -7 -12 -10 1 12 20 32 54 77 89 95 114 151 182 182 165 152 164 188 208 215 205 188 174 170 165 150 119 90 70 61 53 41 29 19 12 3 -7 -20 -31 -38 -40 -40 -38 -29 -12 4 5 -13 -38 -60 -77 -88 -96 -99 -102 -106 -113 -117 -120 -120 -117 -112 -107 -103 -89 -63 -37 -25 -24 -22 -9 4 6 1 6 25 44 49 42 37 38 39 34 30 32 41 45 43 40 36 31 24 10 -9 -31 -50 -60 +-61 -67 -67 -77 -95 -111 -121 -133 -152 -171 -176 -174 -177 -190 -208 -220 -227 -228 -226 -227 -242 -264 -277 -268 -243 -223 -208 -191 -161 -128 -97 -78 -63 -46 -24 5 33 59 79 94 102 104 102 101 98 86 59 30 7 -8 -24 -43 -58 -72 -96 -134 -169 -186 -188 -193 -208 -223 -230 -227 -222 -210 -193 -180 -180 -190 -190 -177 -160 -143 -120 -81 -37 -8 2 11 30 51 65 72 86 114 149 179 201 218 231 239 240 237 243 260 283 302 308 304 296 286 269 242 208 179 159 147 134 114 90 69 52 34 9 -18 -37 -45 -50 -59 -71 -76 -71 -59 -46 -39 -40 -45 -47 -41 -28 -12 5 24 40 51 58 67 80 95 113 129 137 131 118 110 115 123 120 110 101 99 95 87 78 67 53 28 -5 -34 -55 -71 -91 -118 -145 -165 -180 -196 -220 -249 -275 -293 -310 -327 -342 -344 -332 -311 -285 -264 -250 -235 -211 -175 -131 -88 -46 -4 33 60 79 98 126 153 170 173 170 166 162 155 146 132 111 91 74 59 42 29 27 29 24 3 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -35 -41 -47 -49 -44 -34 -29 -26 -19 -12 -12 -20 -24 -13 4 18 19 17 18 25 29 29 28 39 58 74 77 78 88 109 124 126 123 129 142 154 156 147 137 126 115 103 89 72 59 52 51 49 41 27 10 -10 -29 -44 -52 -57 -62 -62 -56 -50 -52 -61 -66 -61 -52 -45 -34 -14 5 6 -8 -16 -6 14 29 39 52 74 96 109 110 112 117 123 121 112 106 109 117 116 92 55 17 -11 -36 -66 -97 -131 -165 -199 -225 -244 -259 -276 -289 -290 -283 -275 -273 -268 -253 -226 -194 -166 -144 -127 -110 -90 -67 -45 -25 -6 6 11 12 15 24 28 23 19 29 51 71 79 77 78 80 78 63 44 27 12 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -61 -56 -44 -29 -14 -5 -7 -12 -10 1 12 20 32 54 77 89 95 114 151 182 182 165 152 164 +1 +-60 -53 -46 -40 -40 -44 -49 -50 -42 -27 -10 11 31 52 65 72 79 93 113 131 138 132 116 100 94 98 98 85 64 48 45 48 50 53 59 63 58 50 49 59 70 69 59 54 56 59 50 33 19 17 17 13 -2 -13 -13 0 7 1 -12 -22 -29 -37 -41 -41 -42 -54 -80 -107 -123 -130 -138 -147 -152 -155 -159 -167 -177 -190 -200 -204 -191 -163 -126 -92 -66 -53 -54 -60 -56 -37 -21 -25 -48 -71 -87 -99 -104 -96 -79 -68 -67 -62 -34 6 34 51 72 105 131 141 145 157 167 160 139 118 105 86 60 36 18 -2 -35 -75 -95 -86 -67 -57 -61 -61 -51 -34 -30 -35 -34 -13 14 24 12 -2 -10 -16 -31 -43 -34 -4 18 18 3 -1 11 25 30 35 45 64 86 101 105 97 +-55 -71 -91 -118 -145 -165 -180 -196 -220 -249 -275 -293 -310 -327 -342 -344 -332 -311 -285 -264 -250 -235 -211 -175 -131 -88 -46 -4 33 60 79 98 126 153 170 173 170 166 162 155 146 132 111 91 74 59 42 29 27 29 24 3 -18 -25 -18 -12 -13 -17 -19 -21 -25 -30 -35 -41 -47 -49 -44 -34 -29 -26 -19 -12 -12 -20 -24 -13 4 18 19 17 18 25 29 29 28 39 58 74 77 78 88 109 124 126 123 129 142 154 156 147 137 126 115 103 89 72 59 52 51 49 41 27 10 -10 -29 -44 -52 -57 -62 -62 -56 -50 -52 -61 -66 -61 -52 -45 -34 -14 5 6 -8 -16 -6 14 29 39 52 74 96 109 110 112 117 123 121 112 106 109 117 116 92 55 17 -11 -36 -66 -97 -131 -165 -199 -225 -244 -259 -276 -289 -290 -283 -275 -273 -268 -253 -226 -194 -166 -144 -127 -110 -90 -67 -45 -25 -6 6 11 12 15 24 28 23 19 29 51 71 79 77 78 80 78 63 44 27 12 
-5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -61 -56 -44 -29 -14 -5 -7 -12 -10 1 12 20 32 54 77 89 95 114 151 182 182 165 152 164 188 208 215 205 188 174 170 165 150 119 90 70 61 53 41 29 19 12 3 -7 -20 -31 -38 -40 -40 -38 -29 -12 4 5 -13 -38 -60 -77 -88 -96 -99 -102 -106 -113 -117 -120 -120 -117 -112 -107 -103 -89 -63 -37 -25 -24 -22 -9 4 6 1 6 25 44 49 42 37 38 39 34 30 32 41 45 43 40 36 31 24 10 -9 -31 -50 -60 -60 -53 -46 -40 -40 -44 -49 -50 -42 -27 -10 11 31 52 65 72 79 93 113 131 138 132 116 100 94 98 98 85 64 48 45 48 50 53 59 63 58 50 49 59 70 69 59 54 56 59 50 33 19 17 17 13 -2 -13 -13 0 7 1 -12 -22 -29 -37 -41 -41 -42 -54 -80 -107 -123 -130 -138 -147 -152 -155 -159 -167 -177 -190 -200 -204 +1 +78 55 39 27 19 18 24 33 38 38 40 52 71 87 84 61 31 21 46 85 98 72 32 5 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 8 42 75 101 118 136 156 171 172 154 122 92 66 38 -1 -39 -58 -65 -72 -91 -116 -132 -130 -115 -96 -83 -82 -92 -96 -84 -61 -43 -37 -36 -36 -35 -36 -35 -30 -27 -29 -26 -10 10 27 35 37 44 54 68 74 72 68 80 104 121 119 107 109 123 120 91 54 36 34 27 16 13 12 -10 -53 -92 -104 -104 -112 -123 -121 -107 -103 -111 -115 -106 -95 -95 -92 -70 -32 -2 14 27 53 87 118 138 150 154 155 160 168 168 156 141 136 137 127 109 105 123 145 143 121 97 88 82 70 54 41 35 28 +-165 -199 -225 -244 -259 -276 -289 -290 -283 -275 -273 -268 -253 -226 -194 -166 -144 -127 -110 -90 -67 -45 -25 -6 6 11 12 15 24 28 23 19 29 51 71 79 77 78 80 78 63 44 27 12 -5 -23 -33 -33 -27 -28 -41 -55 -61 -58 -56 -58 -61 -56 -44 -29 -14 -5 -7 -12 -10 1 12 20 32 54 77 89 95 114 151 182 182 165 152 164 188 208 215 205 188 174 170 165 150 119 90 70 61 53 41 29 19 12 3 -7 -20 -31 -38 -40 -40 -38 -29 -12 4 5 -13 -38 -60 -77 -88 -96 -99 -102 -106 -113 -117 -120 -120 -117 -112 -107 -103 -89 -63 -37 -25 -24 -22 -9 4 6 1 6 25 44 49 42 37 38 39 34 30 32 41 45 43 40 36 31 24 10 -9 -31 -50 -60 -60 -53 -46 -40 -40 -44 -49 -50 -42 -27 -10 11 31 52 65 72 79 93 113 131 138 132 116 100 94 98 98 85 64 48 45 48 50 53 59 63 58 50 49 59 70 69 59 54 56 59 50 33 19 17 17 13 -2 -13 -13 0 7 1 -12 -22 -29 -37 -41 -41 -42 -54 -80 -107 -123 -130 -138 -147 -152 -155 -159 -167 -177 -190 -200 -204 -191 -163 -126 -92 -66 -53 -54 -60 -56 -37 -21 -25 -48 -71 -87 -99 -104 -96 -79 -68 -67 -62 -34 6 34 51 72 105 131 141 145 157 167 160 139 118 105 86 60 36 18 -2 -35 -75 -95 -86 -67 -57 -61 -61 -51 -34 -30 -35 -34 -13 14 24 12 -2 -10 -16 -31 -43 -34 -4 18 18 3 -1 11 25 30 35 45 64 86 101 105 97 78 55 39 27 19 18 24 33 38 38 40 52 71 87 84 61 31 21 46 85 98 72 32 5 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 8 42 75 101 118 136 156 171 172 154 122 92 66 38 -1 -39 -58 -65 -72 -91 -116 -132 -130 -115 -96 -83 -82 -92 -96 -84 -61 -43 -37 -36 -36 -35 -36 -35 -30 +1 +20 6 -20 -48 -67 -65 -53 -52 -67 -92 -118 -134 -134 -117 -101 -111 -142 -167 -167 -156 -163 -185 -202 -205 -205 -219 -231 -221 -195 -178 -180 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 15 27 31 29 24 29 50 76 87 79 65 55 50 37 12 -13 -32 -44 -50 -52 -50 -47 -50 -55 -63 -72 -78 -72 -51 -24 -7 3 22 60 104 147 183 218 251 275 290 305 323 334 333 330 330 327 306 267 235 227 232 217 171 122 96 94 94 78 50 25 8 -7 -29 -58 -94 -123 -139 -142 -150 -165 -179 -174 -157 -153 -168 -183 -172 -145 -133 -143 -156 -150 -127 -109 -109 -124 -142 -149 -140 -119 -102 -98 -103 -98 -77 -53 -37 -29 -14 7 22 20 3 -21 -42 -56 -66 -78 -99 -126 +-60 -53 -46 -40 -40 -44 -49 -50 -42 -27 -10 11 31 52 65 72 79 93 113 131 138 132 116 100 94 98 98 85 64 48 45 48 50 53 59 
63 58 50 49 59 70 69 59 54 56 59 50 33 19 17 17 13 -2 -13 -13 0 7 1 -12 -22 -29 -37 -41 -41 -42 -54 -80 -107 -123 -130 -138 -147 -152 -155 -159 -167 -177 -190 -200 -204 -191 -163 -126 -92 -66 -53 -54 -60 -56 -37 -21 -25 -48 -71 -87 -99 -104 -96 -79 -68 -67 -62 -34 6 34 51 72 105 131 141 145 157 167 160 139 118 105 86 60 36 18 -2 -35 -75 -95 -86 -67 -57 -61 -61 -51 -34 -30 -35 -34 -13 14 24 12 -2 -10 -16 -31 -43 -34 -4 18 18 3 -1 11 25 30 35 45 64 86 101 105 97 78 55 39 27 19 18 24 33 38 38 40 52 71 87 84 61 31 21 46 85 98 72 32 5 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 8 42 75 101 118 136 156 171 172 154 122 92 66 38 -1 -39 -58 -65 -72 -91 -116 -132 -130 -115 -96 -83 -82 -92 -96 -84 -61 -43 -37 -36 -36 -35 -36 -35 -30 -27 -29 -26 -10 10 27 35 37 44 54 68 74 72 68 80 104 121 119 107 109 123 120 91 54 36 34 27 16 13 12 -10 -53 -92 -104 -104 -112 -123 -121 -107 -103 -111 -115 -106 -95 -95 -92 -70 -32 -2 14 27 53 87 118 138 150 154 155 160 168 168 156 141 136 137 127 109 105 123 145 143 121 97 88 82 70 54 41 35 28 20 6 -20 -48 -67 -65 -53 -52 -67 -92 -118 -134 -134 -117 -101 -111 -142 -167 -167 -156 -163 -185 -202 -205 -205 -219 -231 -221 -195 -178 -180 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 15 27 31 29 24 29 50 76 87 79 65 55 50 37 12 -13 -32 -44 -50 -52 -50 -47 -50 -55 -63 -72 -78 -72 -51 -24 -7 3 +1 +-145 -155 -161 -174 -184 -182 -170 -160 -151 -128 -80 -21 26 62 102 149 182 188 182 187 211 241 260 262 250 232 215 202 188 160 130 115 126 147 165 174 177 173 153 125 106 110 119 117 100 82 74 69 58 40 19 -2 -19 -28 -26 -14 -6 -9 -28 -51 -68 -71 -71 -73 -77 -76 -77 -89 -109 -117 -105 -82 -68 -59 -39 -3 29 38 33 35 53 74 85 82 68 45 15 -16 -44 -60 -59 -40 -22 -33 -75 -114 -111 -68 -25 -21 -53 -79 -75 -51 -30 -19 -12 -8 -15 -39 -70 -88 -85 -70 -58 -54 -47 -34 -20 -14 -17 -16 -7 4 0 -18 -37 -41 -28 -14 -15 -30 -46 -50 -52 -67 -96 -116 -107 -78 -57 -57 -64 -50 -17 6 5 -11 -12 11 41 60 73 86 99 105 102 104 112 123 +78 55 39 27 19 18 24 33 38 38 40 52 71 87 84 61 31 21 46 85 98 72 32 5 -9 -24 -50 -70 -75 -77 -86 -92 -85 -71 -68 -78 -83 -72 -51 -32 -15 8 42 75 101 118 136 156 171 172 154 122 92 66 38 -1 -39 -58 -65 -72 -91 -116 -132 -130 -115 -96 -83 -82 -92 -96 -84 -61 -43 -37 -36 -36 -35 -36 -35 -30 -27 -29 -26 -10 10 27 35 37 44 54 68 74 72 68 80 104 121 119 107 109 123 120 91 54 36 34 27 16 13 12 -10 -53 -92 -104 -104 -112 -123 -121 -107 -103 -111 -115 -106 -95 -95 -92 -70 -32 -2 14 27 53 87 118 138 150 154 155 160 168 168 156 141 136 137 127 109 105 123 145 143 121 97 88 82 70 54 41 35 28 20 6 -20 -48 -67 -65 -53 -52 -67 -92 -118 -134 -134 -117 -101 -111 -142 -167 -167 -156 -163 -185 -202 -205 -205 -219 -231 -221 -195 -178 -180 -183 -167 -135 -109 -101 -99 -95 -88 -81 -74 -64 -53 -46 -43 -37 -23 -3 15 27 31 29 24 29 50 76 87 79 65 55 50 37 12 -13 -32 -44 -50 -52 -50 -47 -50 -55 -63 -72 -78 -72 -51 -24 -7 3 22 60 104 147 183 218 251 275 290 305 323 334 333 330 330 327 306 267 235 227 232 217 171 122 96 94 94 78 50 25 8 -7 -29 -58 -94 -123 -139 -142 -150 -165 -179 -174 -157 -153 -168 -183 -172 -145 -133 -143 -156 -150 -127 -109 -109 -124 -142 -149 -140 -119 -102 -98 -103 -98 -77 -53 -37 -29 -14 7 22 20 3 -21 -42 -56 -66 -78 -99 -126 -145 -155 -161 -174 -184 -182 -170 -160 -151 -128 -80 -21 26 62 102 149 182 188 182 187 211 241 260 262 250 232 215 202 188 160 130 115 126 147 165 174 177 173 153 125 106 110 119 117 100 82 74 69 58 40 19 -2 -19 -28 -26 -14 -6 -9 -28 -51 -68 -71 -71 -73 -77 -76 -77 -89 -109 -117 -105 -82 -68 -59 -39 -3 29 38 
+[data hunk elided: several hundred added lines of space-separated signed integer values, apparently 16-bit audio sample data for test inputs; the extracted text repeated the same value runs verbatim multiple times]
-2213 -4305 -6315 -7567 -7729 -6975 -5949 -5457 -6009 -7469 -9055 -9715 -8798 -6587 -4220 -2891 -2919 -3510 -3465 -2140 167 2644 4616 5859 6470 6697 6901 7520 8786 10386 11518 11493 10406 9118 8505 8673 8927 8490 7188 5486 3980 2940 2245 1582 672 -621 -2205 -3795 -5060 -5832 -6192 -6391 -6720 -7393 -8438 -9586 -10355 -10360 -9623 -8526 -7476 -6700 -6316 -6383 -6692 -6617 -5478 -3218 -592 1403 2274 2236 1915 1998 2984 4869 6973 8278 8225 7183 6125 5858 6537 7687 8494 8243 6819 4885 3434 3010 3322 3636 3390 2478 1154 -142 -978 -1195 -1046 -976 -1227 -1689 -2109 -2317 -2253 -1941 -1510 -1159 -983 -867 -640 -275 111 411 606 725 816 950 1148 1285 1156 690 38 -585 -1085 -1485 -1819 -2136 -2552 -3204 -4104 -5076 -5865 -6285 -6312 -6099 -5918 -5984 -6269 -6507 -6387 -5784 -4827 -3831 -3131 -2886 -2928 -2829 -2202 -1002 434 1608 2188 2197 2032 2217 3029 4263 5383 5946 5892 5498 5148 5141 5543 6114 6384 6003 5030 3906 3102 2816 2907 3004 2690 1779 491 -644 -1209 -1205 -979 -909 -1188 -1817 -2596 -3152 -3157 -2636 -2016 -1761 -1973 -2364 -2576 -2449 -2040 -1525 -1108 -928 -966 -1071 -1099 -1013 -818 -524 -192 18 -17 -200 -258 -22 410 801 1029 1148 1260 1410 1629 1949 2343 2690 2884 2945 2994 3123 3319 3505 3610 3593 3458 3269 3132 3099 3098 2991 2696 2240 1721 1231 818 481 169 -195 +1 +-255 -297 -185 33 252 339 208 -106 -459 -696 -752 -664 -503 -343 -252 -282 -431 -625 -761 -750 -565 -276 -24 67 -18 -160 -204 -66 209 497 673 675 548 417 416 626 1023 1471 1772 1787 1556 1267 1119 1166 1330 1504 1613 1585 1367 999 623 348 139 -120 -451 -763 -980 -1160 -1393 -1686 -1974 -2247 -2546 -2835 -2976 -2875 -2626 -2443 -2462 -2644 -2832 -2868 -2667 -2276 -1889 -1712 -1783 -1923 -1904 -1643 -1218 -760 -399 -260 -394 -665 -797 -602 -150 315 603 663 529 273 12 -110 -26 189 407 530 506 312 13 -216 -188 105 458 631 548 322 125 65 159 358 571 709 732 672 587 514 474 507 668 970 1338 1651 1840 1917 1940 1954 2002 2153 2442 2770 2930 2799 2487 2263 2299 2511 2661 2573 2257 1836 1429 1085 813 606 449 288 64 -247 -584 -838 -941 -934 -931 -1014 -1169 -1305 +-6315 -7567 -7729 -6975 -5949 -5457 -6009 -7469 -9055 -9715 -8798 -6587 -4220 -2891 -2919 -3510 -3465 -2140 167 2644 4616 5859 6470 6697 6901 7520 8786 10386 11518 11493 10406 9118 8505 8673 8927 8490 7188 5486 3980 2940 2245 1582 672 -621 -2205 -3795 -5060 -5832 -6192 -6391 -6720 -7393 -8438 -9586 -10355 -10360 -9623 -8526 -7476 -6700 -6316 -6383 -6692 -6617 -5478 -3218 -592 1403 2274 2236 1915 1998 2984 4869 6973 8278 8225 7183 6125 5858 6537 7687 8494 8243 6819 4885 3434 3010 3322 3636 3390 2478 1154 -142 -978 -1195 -1046 -976 -1227 -1689 -2109 -2317 -2253 -1941 -1510 -1159 -983 -867 -640 -275 111 411 606 725 816 950 1148 1285 1156 690 38 -585 -1085 -1485 -1819 -2136 -2552 -3204 -4104 -5076 -5865 -6285 -6312 -6099 -5918 -5984 -6269 -6507 -6387 -5784 -4827 -3831 -3131 -2886 -2928 -2829 -2202 -1002 434 1608 2188 2197 2032 2217 3029 4263 5383 5946 5892 5498 5148 5141 5543 6114 6384 6003 5030 3906 3102 2816 2907 3004 2690 1779 491 -644 -1209 -1205 -979 -909 -1188 -1817 -2596 -3152 -3157 -2636 -2016 -1761 -1973 -2364 -2576 -2449 -2040 -1525 -1108 -928 -966 -1071 -1099 -1013 -818 -524 -192 18 -17 -200 -258 -22 410 801 1029 1148 1260 1410 1629 1949 2343 2690 2884 2945 2994 3123 3319 3505 3610 3593 3458 3269 3132 3099 3098 2991 2696 2240 1721 1231 818 481 169 -195 -651 -1169 -1687 -2151 -2531 -2809 -3002 -3169 -3387 -3664 -3911 -4012 -3934 -3780 -3716 -3828 -4019 -4065 -3825 -3395 -3042 -2962 -3106 -3255 -3231 -3001 -2626 -2195 -1823 -1624 -1613 -1646 -1531 -1216 -826 -527 
-391 -386 -417 -341 -20 547 1156 1542 1615 1541 1549 1743 2083 2459 2752 2860 2771 2618 2572 2675 2804 2815 2667 2393 2049 1710 1478 1420 1466 1478 1366 1134 827 493 218 95 141 255 321 315 278 216 91 -93 -255 -297 -185 33 252 339 208 -106 -459 -696 -752 -664 -503 -343 -252 -282 -431 -625 -761 -750 -565 -276 -24 67 -18 -160 -204 -66 209 497 673 675 548 417 416 626 1023 1471 1772 1787 1556 1267 1119 1166 1330 1504 1613 1585 1367 999 623 348 139 -120 -451 -763 -980 -1160 -1393 -1686 -1974 -2247 -2546 -2835 -2976 -2875 -2626 -2443 -2462 -2644 -2832 -2868 -2667 -2276 -1889 -1712 -1783 -1923 -1904 -1643 +1 +-1333 -1240 -1073 -906 -786 -716 -665 -589 -436 -188 104 320 362 246 114 121 310 589 802 822 618 268 -66 -253 -272 -217 -234 -420 -791 -1253 -1645 -1838 -1833 -1756 -1725 -1752 -1791 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1134 -1137 -985 -669 -325 -130 -146 -277 -353 -295 -166 -94 -133 -240 -365 -503 -645 -733 -732 -695 -733 -898 -1126 -1288 -1280 -1109 -896 -811 -929 -1143 -1239 -1091 -760 -428 -238 -211 -256 -236 -63 245 583 863 1083 1280 1440 1510 1500 1522 1684 1953 2170 2187 2009 1788 1704 1816 2033 2209 2258 2180 2018 1830 1711 1734 1869 1960 1856 1563 1240 1061 1071 1181 1235 1112 796 398 78 -58 -10 143 275 261 52 -235 -400 -357 -232 -229 -394 -598 -687 -639 -531 -450 -435 -486 -564 -614 -607 -547 -458 -384 -384 -507 -701 -836 -838 -769 -761 -876 -1079 -1306 -1500 -1595 -1548 -1425 +5148 5141 5543 6114 6384 6003 5030 3906 3102 2816 2907 3004 2690 1779 491 -644 -1209 -1205 -979 -909 -1188 -1817 -2596 -3152 -3157 -2636 -2016 -1761 -1973 -2364 -2576 -2449 -2040 -1525 -1108 -928 -966 -1071 -1099 -1013 -818 -524 -192 18 -17 -200 -258 -22 410 801 1029 1148 1260 1410 1629 1949 2343 2690 2884 2945 2994 3123 3319 3505 3610 3593 3458 3269 3132 3099 3098 2991 2696 2240 1721 1231 818 481 169 -195 -651 -1169 -1687 -2151 -2531 -2809 -3002 -3169 -3387 -3664 -3911 -4012 -3934 -3780 -3716 -3828 -4019 -4065 -3825 -3395 -3042 -2962 -3106 -3255 -3231 -3001 -2626 -2195 -1823 -1624 -1613 -1646 -1531 -1216 -826 -527 -391 -386 -417 -341 -20 547 1156 1542 1615 1541 1549 1743 2083 2459 2752 2860 2771 2618 2572 2675 2804 2815 2667 2393 2049 1710 1478 1420 1466 1478 1366 1134 827 493 218 95 141 255 321 315 278 216 91 -93 -255 -297 -185 33 252 339 208 -106 -459 -696 -752 -664 -503 -343 -252 -282 -431 -625 -761 -750 -565 -276 -24 67 -18 -160 -204 -66 209 497 673 675 548 417 416 626 1023 1471 1772 1787 1556 1267 1119 1166 1330 1504 1613 1585 1367 999 623 348 139 -120 -451 -763 -980 -1160 -1393 -1686 -1974 -2247 -2546 -2835 -2976 -2875 -2626 -2443 -2462 -2644 -2832 -2868 -2667 -2276 -1889 -1712 -1783 -1923 -1904 -1643 -1218 -760 -399 -260 -394 -665 -797 -602 -150 315 603 663 529 273 12 -110 -26 189 407 530 506 312 13 -216 -188 105 458 631 548 322 125 65 159 358 571 709 732 672 587 514 474 507 668 970 1338 1651 1840 1917 1940 1954 2002 2153 2442 2770 2930 2799 2487 2263 2299 2511 2661 2573 2257 1836 1429 1085 813 606 449 288 64 -247 -584 -838 -941 -934 -931 -1014 -1169 -1305 -1333 -1240 -1073 -906 -786 -716 -665 -589 -436 -188 104 320 362 246 114 121 310 589 802 822 618 268 -66 -253 -272 -217 -234 -420 -791 -1253 -1645 -1838 -1833 -1756 -1725 -1752 -1791 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1134 -1137 -985 -669 -325 -130 -146 -277 -353 -295 -166 -94 -133 -240 -365 -503 -645 -733 -732 -695 -733 -898 -1126 -1288 -1280 -1109 -896 -811 -929 -1143 -1239 -1091 -760 -428 -238 -211 +1 +-1352 -1393 -1476 -1496 -1421 -1286 -1101 -869 -643 -485 -391 -297 -165 -17 137 312 487 582 551 454 402 431 507 585 632 593 411 109 
-212 -459 -606 -680 -718 -782 -945 -1213 -1484 -1626 -1609 -1517 -1452 -1457 -1527 -1622 -1679 -1636 -1488 -1296 -1131 -1016 -956 -944 -937 -843 -612 -315 -84 29 94 198 371 585 809 1002 1114 1132 1121 1170 1299 1470 1641 1791 1890 1909 1880 1893 2007 2187 2374 2539 2671 2741 2737 2726 2796 2932 3013 2967 2851 2770 2736 2668 2507 2266 2002 1757 1551 1368 1156 860 472 47 -344 -663 -915 -1125 -1326 -1549 -1808 -2078 -2322 -2518 -2658 -2731 -2731 -2676 -2598 -2512 -2410 -2285 -2144 -1983 -1780 -1526 -1264 -1051 -918 -838 -758 -642 -482 -307 -164 -90 -98 -161 -241 -300 -317 -282 -215 -162 -177 -263 -354 -366 -266 -108 31 116 162 198 242 320 465 672 869 972 964 911 +-255 -297 -185 33 252 339 208 -106 -459 -696 -752 -664 -503 -343 -252 -282 -431 -625 -761 -750 -565 -276 -24 67 -18 -160 -204 -66 209 497 673 675 548 417 416 626 1023 1471 1772 1787 1556 1267 1119 1166 1330 1504 1613 1585 1367 999 623 348 139 -120 -451 -763 -980 -1160 -1393 -1686 -1974 -2247 -2546 -2835 -2976 -2875 -2626 -2443 -2462 -2644 -2832 -2868 -2667 -2276 -1889 -1712 -1783 -1923 -1904 -1643 -1218 -760 -399 -260 -394 -665 -797 -602 -150 315 603 663 529 273 12 -110 -26 189 407 530 506 312 13 -216 -188 105 458 631 548 322 125 65 159 358 571 709 732 672 587 514 474 507 668 970 1338 1651 1840 1917 1940 1954 2002 2153 2442 2770 2930 2799 2487 2263 2299 2511 2661 2573 2257 1836 1429 1085 813 606 449 288 64 -247 -584 -838 -941 -934 -931 -1014 -1169 -1305 -1333 -1240 -1073 -906 -786 -716 -665 -589 -436 -188 104 320 362 246 114 121 310 589 802 822 618 268 -66 -253 -272 -217 -234 -420 -791 -1253 -1645 -1838 -1833 -1756 -1725 -1752 -1791 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1134 -1137 -985 -669 -325 -130 -146 -277 -353 -295 -166 -94 -133 -240 -365 -503 -645 -733 -732 -695 -733 -898 -1126 -1288 -1280 -1109 -896 -811 -929 -1143 -1239 -1091 -760 -428 -238 -211 -256 -236 -63 245 583 863 1083 1280 1440 1510 1500 1522 1684 1953 2170 2187 2009 1788 1704 1816 2033 2209 2258 2180 2018 1830 1711 1734 1869 1960 1856 1563 1240 1061 1071 1181 1235 1112 796 398 78 -58 -10 143 275 261 52 -235 -400 -357 -232 -229 -394 -598 -687 -639 -531 -450 -435 -486 -564 -614 -607 -547 -458 -384 -384 -507 -701 -836 -838 -769 -761 -876 -1079 -1306 -1500 -1595 -1548 -1425 -1352 -1393 -1476 -1496 -1421 -1286 -1101 -869 -643 -485 -391 -297 -165 -17 137 312 487 582 551 454 402 431 507 585 632 593 411 109 -212 -459 -606 -680 -718 -782 -945 -1213 -1484 -1626 -1609 -1517 -1452 -1457 -1527 -1622 -1679 -1636 -1488 -1296 -1131 -1016 -956 -944 -937 -843 -612 -315 -84 29 94 198 371 585 809 1002 1114 1132 1121 1170 1299 1470 1641 1791 1890 1909 1880 1893 2007 2187 2374 2539 +1 +878 871 835 714 494 216 -47 -236 -362 -500 -734 -1092 -1520 -1910 -2148 -2201 -2134 -2090 -2174 -2365 -2526 -2524 -2352 -2126 -1983 -1957 -1960 -1887 -1698 -1423 -1105 -773 -461 -203 16 274 625 1026 1381 1637 1829 2027 2269 2539 2774 2916 2964 2990 3049 3102 3068 2930 2759 2605 2454 2311 2230 2223 2208 2104 1932 1765 1618 1482 1385 1344 1284 1142 1010 1018 1109 1057 791 543 585 928 1302 1326 661 -709 -2164 -2682 -1797 -256 607 252 -655 -1236 -1404 -1616 -1998 -2174 -1958 -1720 -1953 -2659 -3310 -3287 -2334 -832 313 274 -968 -2496 -3209 -2701 -1425 -167 548 693 629 705 868 723 128 -436 -350 312 800 607 92 -10 517 1224 1596 1503 1088 536 46 -207 -252 -323 -521 -621 -401 -93 -228 -970 -1782 -1959 -1375 -556 -45 102 130 96 -186 -782 -1451 -1925 -2195 -2408 -2572 -2552 -2341 -2115 -2011 -1969 -1875 -1729 +-1333 -1240 -1073 -906 -786 -716 -665 -589 -436 -188 104 320 362 246 114 121 310 589 802 822 618 268 
-66 -253 -272 -217 -234 -420 -791 -1253 -1645 -1838 -1833 -1756 -1725 -1752 -1791 -1826 -1867 -1877 -1770 -1525 -1265 -1130 -1134 -1137 -985 -669 -325 -130 -146 -277 -353 -295 -166 -94 -133 -240 -365 -503 -645 -733 -732 -695 -733 -898 -1126 -1288 -1280 -1109 -896 -811 -929 -1143 -1239 -1091 -760 -428 -238 -211 -256 -236 -63 245 583 863 1083 1280 1440 1510 1500 1522 1684 1953 2170 2187 2009 1788 1704 1816 2033 2209 2258 2180 2018 1830 1711 1734 1869 1960 1856 1563 1240 1061 1071 1181 1235 1112 796 398 78 -58 -10 143 275 261 52 -235 -400 -357 -232 -229 -394 -598 -687 -639 -531 -450 -435 -486 -564 -614 -607 -547 -458 -384 -384 -507 -701 -836 -838 -769 -761 -876 -1079 -1306 -1500 -1595 -1548 -1425 -1352 -1393 -1476 -1496 -1421 -1286 -1101 -869 -643 -485 -391 -297 -165 -17 137 312 487 582 551 454 402 431 507 585 632 593 411 109 -212 -459 -606 -680 -718 -782 -945 -1213 -1484 -1626 -1609 -1517 -1452 -1457 -1527 -1622 -1679 -1636 -1488 -1296 -1131 -1016 -956 -944 -937 -843 -612 -315 -84 29 94 198 371 585 809 1002 1114 1132 1121 1170 1299 1470 1641 1791 1890 1909 1880 1893 2007 2187 2374 2539 2671 2741 2737 2726 2796 2932 3013 2967 2851 2770 2736 2668 2507 2266 2002 1757 1551 1368 1156 860 472 47 -344 -663 -915 -1125 -1326 -1549 -1808 -2078 -2322 -2518 -2658 -2731 -2731 -2676 -2598 -2512 -2410 -2285 -2144 -1983 -1780 -1526 -1264 -1051 -918 -838 -758 -642 -482 -307 -164 -90 -98 -161 -241 -300 -317 -282 -215 -162 -177 -263 -354 -366 -266 -108 31 116 162 198 242 320 465 672 869 972 964 911 878 871 835 714 494 216 -47 -236 -362 -500 -734 -1092 -1520 -1910 -2148 -2201 -2134 -2090 -2174 -2365 -2526 -2524 -2352 -2126 -1983 -1957 -1960 -1887 -1698 -1423 -1105 -773 -461 -203 16 274 625 1026 1381 1637 1829 2027 2269 2539 2774 2916 2964 2990 3049 3102 3068 2930 2759 2605 2454 2311 2230 2223 2208 2104 1932 1765 1618 1482 1385 1344 1284 1142 1010 1018 1109 1057 791 543 585 928 1302 1326 661 -709 +1 +-1568 -1346 -992 -563 -210 -5 105 184 274 469 888 1489 2014 2203 2032 1704 1434 1325 1390 1628 1990 2378 2676 2806 2772 2648 2500 2331 2097 1780 1411 1041 729 572 685 1052 1487 1770 1822 1673 1346 913 601 674 1120 1587 1769 1706 1648 1681 1662 1482 1168 741 131 -692 -1573 -2278 -2651 -2644 -2304 -1833 -1541 -1609 -1926 -2291 -2666 -3102 -3441 -3371 -2820 -2107 -1543 -1046 -374 400 885 850 477 53 -345 -752 -1057 -1104 -950 -792 -625 -239 352 767 714 447 474 865 1148 964 531 253 100 -282 -920 -1339 -1166 -639 -298 -324 -464 -501 -494 -529 -509 -349 -207 -266 -424 -383 -39 420 798 1057 1185 1070 686 257 33 -38 -237 -650 -1021 -1055 -774 -460 -334 -398 -550 -722 -870 -928 -840 -649 -520 -637 -1043 -1568 -1945 -2026 -1900 -1750 -1636 -1432 -999 -381 187 473 443 315 391 859 1663 2553 +-1352 -1393 -1476 -1496 -1421 -1286 -1101 -869 -643 -485 -391 -297 -165 -17 137 312 487 582 551 454 402 431 507 585 632 593 411 109 -212 -459 -606 -680 -718 -782 -945 -1213 -1484 -1626 -1609 -1517 -1452 -1457 -1527 -1622 -1679 -1636 -1488 -1296 -1131 -1016 -956 -944 -937 -843 -612 -315 -84 29 94 198 371 585 809 1002 1114 1132 1121 1170 1299 1470 1641 1791 1890 1909 1880 1893 2007 2187 2374 2539 2671 2741 2737 2726 2796 2932 3013 2967 2851 2770 2736 2668 2507 2266 2002 1757 1551 1368 1156 860 472 47 -344 -663 -915 -1125 -1326 -1549 -1808 -2078 -2322 -2518 -2658 -2731 -2731 -2676 -2598 -2512 -2410 -2285 -2144 -1983 -1780 -1526 -1264 -1051 -918 -838 -758 -642 -482 -307 -164 -90 -98 -161 -241 -300 -317 -282 -215 -162 -177 -263 -354 -366 -266 -108 31 116 162 198 242 320 465 672 869 972 964 911 878 871 835 714 494 216 -47 -236 -362 -500 -734 -1092 
-1520 -1910 -2148 -2201 -2134 -2090 -2174 -2365 -2526 -2524 -2352 -2126 -1983 -1957 -1960 -1887 -1698 -1423 -1105 -773 -461 -203 16 274 625 1026 1381 1637 1829 2027 2269 2539 2774 2916 2964 2990 3049 3102 3068 2930 2759 2605 2454 2311 2230 2223 2208 2104 1932 1765 1618 1482 1385 1344 1284 1142 1010 1018 1109 1057 791 543 585 928 1302 1326 661 -709 -2164 -2682 -1797 -256 607 252 -655 -1236 -1404 -1616 -1998 -2174 -1958 -1720 -1953 -2659 -3310 -3287 -2334 -832 313 274 -968 -2496 -3209 -2701 -1425 -167 548 693 629 705 868 723 128 -436 -350 312 800 607 92 -10 517 1224 1596 1503 1088 536 46 -207 -252 -323 -521 -621 -401 -93 -228 -970 -1782 -1959 -1375 -556 -45 102 130 96 -186 -782 -1451 -1925 -2195 -2408 -2572 -2552 -2341 -2115 -2011 -1969 -1875 -1729 -1568 -1346 -992 -563 -210 -5 105 184 274 469 888 1489 2014 2203 2032 1704 1434 1325 1390 1628 1990 2378 2676 2806 2772 2648 2500 2331 2097 1780 1411 1041 729 572 685 1052 1487 1770 1822 1673 1346 913 601 674 1120 1587 1769 1706 1648 1681 1662 1482 1168 741 131 -692 -1573 -2278 -2651 -2644 -2304 -1833 -1541 -1609 -1926 -2291 -2666 -3102 -3441 -3371 -2820 -2107 -1543 -1046 -374 400 885 850 477 53 +1 +3237 3535 3475 3287 3259 3490 3777 3793 3405 2801 2265 1928 1748 1691 1773 1964 2128 2124 1920 1571 1142 697 328 93 -69 -307 -684 -1073 -1285 -1296 -1284 -1443 -1768 -2103 -2331 -2436 -2383 -2067 -1515 -1010 -847 -955 -938 -571 -141 -109 -503 -832 -650 -55 448 513 260 49 79 246 360 386 428 494 422 132 -144 -29 515 1031 1034 542 4 -272 -433 -772 -1272 -1574 -1395 -849 -343 -200 -377 -576 -585 -515 -620 -911 -1082 -861 -339 125 303 262 201 225 288 263 26 -426 -921 -1265 -1446 -1590 -1697 -1606 -1321 -1178 -1494 -2110 -2510 -2436 -2162 -2113 -2386 -2745 -2961 -2969 -2789 -2459 -2070 -1737 -1488 -1244 -945 -628 -362 -133 158 602 1173 1727 2133 2450 2896 3573 4258 4607 4523 4252 4070 4041 4113 4266 4452 4498 4277 3905 3623 3482 3306 2976 2569 2188 1798 1338 889 581 401 236 66 -42 -95 -232 +878 871 835 714 494 216 -47 -236 -362 -500 -734 -1092 -1520 -1910 -2148 -2201 -2134 -2090 -2174 -2365 -2526 -2524 -2352 -2126 -1983 -1957 -1960 -1887 -1698 -1423 -1105 -773 -461 -203 16 274 625 1026 1381 1637 1829 2027 2269 2539 2774 2916 2964 2990 3049 3102 3068 2930 2759 2605 2454 2311 2230 2223 2208 2104 1932 1765 1618 1482 1385 1344 1284 1142 1010 1018 1109 1057 791 543 585 928 1302 1326 661 -709 -2164 -2682 -1797 -256 607 252 -655 -1236 -1404 -1616 -1998 -2174 -1958 -1720 -1953 -2659 -3310 -3287 -2334 -832 313 274 -968 -2496 -3209 -2701 -1425 -167 548 693 629 705 868 723 128 -436 -350 312 800 607 92 -10 517 1224 1596 1503 1088 536 46 -207 -252 -323 -521 -621 -401 -93 -228 -970 -1782 -1959 -1375 -556 -45 102 130 96 -186 -782 -1451 -1925 -2195 -2408 -2572 -2552 -2341 -2115 -2011 -1969 -1875 -1729 -1568 -1346 -992 -563 -210 -5 105 184 274 469 888 1489 2014 2203 2032 1704 1434 1325 1390 1628 1990 2378 2676 2806 2772 2648 2500 2331 2097 1780 1411 1041 729 572 685 1052 1487 1770 1822 1673 1346 913 601 674 1120 1587 1769 1706 1648 1681 1662 1482 1168 741 131 -692 -1573 -2278 -2651 -2644 -2304 -1833 -1541 -1609 -1926 -2291 -2666 -3102 -3441 -3371 -2820 -2107 -1543 -1046 -374 400 885 850 477 53 -345 -752 -1057 -1104 -950 -792 -625 -239 352 767 714 447 474 865 1148 964 531 253 100 -282 -920 -1339 -1166 -639 -298 -324 -464 -501 -494 -529 -509 -349 -207 -266 -424 -383 -39 420 798 1057 1185 1070 686 257 33 -38 -237 -650 -1021 -1055 -774 -460 -334 -398 -550 -722 -870 -928 -840 -649 -520 -637 -1043 -1568 -1945 -2026 -1900 -1750 -1636 -1432 -999 -381 187 473 443 315 391 859 1663 
2553 3237 3535 3475 3287 3259 3490 3777 3793 3405 2801 2265 1928 1748 1691 1773 1964 2128 2124 1920 1571 1142 697 328 93 -69 -307 -684 -1073 -1285 -1296 -1284 -1443 -1768 -2103 -2331 -2436 -2383 -2067 -1515 -1010 -847 -955 -938 -571 -141 -109 -503 -832 -650 -55 448 513 260 49 79 246 360 386 428 494 422 132 -144 -29 515 1031 1034 542 4 -272 -433 -772 -1272 -1574 -1395 -849 -343 -200 -377 -576 +1 +-578 -1096 -1631 -2015 -2170 -2152 -2143 -2268 -2434 -2447 -2292 -2179 -2241 -2301 -2082 -1594 -1128 -913 -891 -887 -833 -787 -794 -821 -775 -538 -38 607 1093 1178 903 544 335 316 417 578 740 836 834 761 639 448 196 -30 -137 -130 -97 -75 -35 38 115 149 102 -56 -278 -394 -244 97 309 182 -131 -311 -274 -221 -333 -540 -691 -755 -773 -724 -581 -470 -586 -951 -1407 -1815 -2151 -2383 -2411 -2255 -2154 -2323 -2686 -2984 -3120 -3243 -3462 -3658 -3632 -3373 -3057 -2850 -2765 -2680 -2431 -1968 -1440 -1061 -819 -421 357 1338 2127 2604 3037 3647 4282 4642 4716 4795 5091 5516 5840 5966 5958 5915 5861 5750 5527 5194 4831 4520 4218 3793 3167 2403 1630 929 339 -117 -463 -767 -1074 -1374 -1687 -2110 -2685 -3273 -3693 -3964 -4243 -4516 -4525 -4118 -3540 -3228 -3364 -3721 -3939 -3812 -3343 -2672 -2003 -1480 -1072 -613 -25 586 1083 1473 +-1568 -1346 -992 -563 -210 -5 105 184 274 469 888 1489 2014 2203 2032 1704 1434 1325 1390 1628 1990 2378 2676 2806 2772 2648 2500 2331 2097 1780 1411 1041 729 572 685 1052 1487 1770 1822 1673 1346 913 601 674 1120 1587 1769 1706 1648 1681 1662 1482 1168 741 131 -692 -1573 -2278 -2651 -2644 -2304 -1833 -1541 -1609 -1926 -2291 -2666 -3102 -3441 -3371 -2820 -2107 -1543 -1046 -374 400 885 850 477 53 -345 -752 -1057 -1104 -950 -792 -625 -239 352 767 714 447 474 865 1148 964 531 253 100 -282 -920 -1339 -1166 -639 -298 -324 -464 -501 -494 -529 -509 -349 -207 -266 -424 -383 -39 420 798 1057 1185 1070 686 257 33 -38 -237 -650 -1021 -1055 -774 -460 -334 -398 -550 -722 -870 -928 -840 -649 -520 -637 -1043 -1568 -1945 -2026 -1900 -1750 -1636 -1432 -999 -381 187 473 443 315 391 859 1663 2553 3237 3535 3475 3287 3259 3490 3777 3793 3405 2801 2265 1928 1748 1691 1773 1964 2128 2124 1920 1571 1142 697 328 93 -69 -307 -684 -1073 -1285 -1296 -1284 -1443 -1768 -2103 -2331 -2436 -2383 -2067 -1515 -1010 -847 -955 -938 -571 -141 -109 -503 -832 -650 -55 448 513 260 49 79 246 360 386 428 494 422 132 -144 -29 515 1031 1034 542 4 -272 -433 -772 -1272 -1574 -1395 -849 -343 -200 -377 -576 -585 -515 -620 -911 -1082 -861 -339 125 303 262 201 225 288 263 26 -426 -921 -1265 -1446 -1590 -1697 -1606 -1321 -1178 -1494 -2110 -2510 -2436 -2162 -2113 -2386 -2745 -2961 -2969 -2789 -2459 -2070 -1737 -1488 -1244 -945 -628 -362 -133 158 602 1173 1727 2133 2450 2896 3573 4258 4607 4523 4252 4070 4041 4113 4266 4452 4498 4277 3905 3623 3482 3306 2976 2569 2188 1798 1338 889 581 401 236 66 -42 -95 -232 -578 -1096 -1631 -2015 -2170 -2152 -2143 -2268 -2434 -2447 -2292 -2179 -2241 -2301 -2082 -1594 -1128 -913 -891 -887 -833 -787 -794 -821 -775 -538 -38 607 1093 1178 903 544 335 316 417 578 740 836 834 761 639 448 196 -30 -137 -130 -97 -75 -35 38 115 149 102 -56 -278 -394 -244 97 309 182 -131 -311 -274 -221 -333 -540 -691 -755 -773 -724 -581 -470 -586 -951 -1407 -1815 -2151 -2383 -2411 -2255 +1 +1839 2193 2476 2687 2893 3124 3353 3577 3809 3959 3868 3517 3126 2908 2820 2656 2355 2065 1866 1606 1107 459 -55 -291 -386 -533 -748 -924 -1033 -1159 -1371 -1645 -1946 -2272 -2577 -2747 -2708 -2547 -2425 -2415 -2475 -2591 -2800 -3068 -3254 -3257 -3153 -3107 -3187 -3350 -3570 -3857 -4159 -4341 -4325 -4178 -4018 -3905 -3849 -3875 -3986 -4094 
-4090 -3963 -3766 -3489 -3048 -2402 -1619 -820 -94 501 967 1406 1976 2733 3567 4350 5072 5772 6408 6915 7325 7710 7999 8003 7706 7373 7272 7360 7330 6986 6412 5823 5301 4733 3971 3039 2149 1487 978 356 -503 -1382 -2019 -2479 -3089 -3963 -4795 -5219 -5221 -5071 -4983 -5011 -5166 -5424 -5616 -5548 -5242 -4920 -4650 -4221 -3462 -2523 -1674 -971 -318 235 561 784 1259 2094 2937 3435 3704 4133 4796 5339 5461 5296 5199 5312 5456 5370 4990 4510 4239 4285 4367 4001 3012 1775 837 340 -10 -476 -997 -1411 -1759 -2191 +3237 3535 3475 3287 3259 3490 3777 3793 3405 2801 2265 1928 1748 1691 1773 1964 2128 2124 1920 1571 1142 697 328 93 -69 -307 -684 -1073 -1285 -1296 -1284 -1443 -1768 -2103 -2331 -2436 -2383 -2067 -1515 -1010 -847 -955 -938 -571 -141 -109 -503 -832 -650 -55 448 513 260 49 79 246 360 386 428 494 422 132 -144 -29 515 1031 1034 542 4 -272 -433 -772 -1272 -1574 -1395 -849 -343 -200 -377 -576 -585 -515 -620 -911 -1082 -861 -339 125 303 262 201 225 288 263 26 -426 -921 -1265 -1446 -1590 -1697 -1606 -1321 -1178 -1494 -2110 -2510 -2436 -2162 -2113 -2386 -2745 -2961 -2969 -2789 -2459 -2070 -1737 -1488 -1244 -945 -628 -362 -133 158 602 1173 1727 2133 2450 2896 3573 4258 4607 4523 4252 4070 4041 4113 4266 4452 4498 4277 3905 3623 3482 3306 2976 2569 2188 1798 1338 889 581 401 236 66 -42 -95 -232 -578 -1096 -1631 -2015 -2170 -2152 -2143 -2268 -2434 -2447 -2292 -2179 -2241 -2301 -2082 -1594 -1128 -913 -891 -887 -833 -787 -794 -821 -775 -538 -38 607 1093 1178 903 544 335 316 417 578 740 836 834 761 639 448 196 -30 -137 -130 -97 -75 -35 38 115 149 102 -56 -278 -394 -244 97 309 182 -131 -311 -274 -221 -333 -540 -691 -755 -773 -724 -581 -470 -586 -951 -1407 -1815 -2151 -2383 -2411 -2255 -2154 -2323 -2686 -2984 -3120 -3243 -3462 -3658 -3632 -3373 -3057 -2850 -2765 -2680 -2431 -1968 -1440 -1061 -819 -421 357 1338 2127 2604 3037 3647 4282 4642 4716 4795 5091 5516 5840 5966 5958 5915 5861 5750 5527 5194 4831 4520 4218 3793 3167 2403 1630 929 339 -117 -463 -767 -1074 -1374 -1687 -2110 -2685 -3273 -3693 -3964 -4243 -4516 -4525 -4118 -3540 -3228 -3364 -3721 -3939 -3812 -3343 -2672 -2003 -1480 -1072 -613 -25 586 1083 1473 1839 2193 2476 2687 2893 3124 3353 3577 3809 3959 3868 3517 3126 2908 2820 2656 2355 2065 1866 1606 1107 459 -55 -291 -386 -533 -748 -924 -1033 -1159 -1371 -1645 -1946 -2272 -2577 -2747 -2708 -2547 -2425 -2415 -2475 -2591 -2800 -3068 -3254 -3257 -3153 -3107 -3187 -3350 -3570 -3857 -4159 -4341 -4325 -4178 -4018 -3905 -3849 -3875 -3986 -4094 -4090 -3963 -3766 -3489 -3048 -2402 -1619 -820 -94 501 967 1406 1976 2733 3567 4350 5072 5772 +1 +-2677 -3027 -3173 -3272 -3480 -3762 -3953 -3968 -3847 -3685 -3558 -3488 -3427 -3290 -3087 -2954 -2994 -3097 -3024 -2686 -2281 -2097 -2214 -2459 -2630 -2713 -2834 -3066 -3300 -3381 -3313 -3283 -3445 -3721 -3893 -3858 -3726 -3619 -3513 -3304 -2996 -2674 -2328 -1800 -990 -55 707 1152 1433 1832 2490 3334 4210 5053 5880 6673 7324 7734 7923 8019 8123 8226 8238 8102 7844 7577 7401 7311 7162 6768 6022 4948 3673 2404 1346 569 -106 -961 -2080 -3241 -4167 -4813 -5319 -5784 -6201 -6603 -7074 -7571 -7890 -7861 -7514 -6955 -6232 -5436 -4784 -4410 -4121 -3587 -2761 -1928 -1282 -674 130 1132 2181 3216 4204 4970 5348 5490 5767 6287 6756 6903 6863 6926 7108 7168 6965 6555 5948 5061 3968 2969 2286 1824 1381 926 476 -142 -1156 -2406 -3356 -3661 -3580 -3662 -4149 -4815 -5308 -5462 -5295 -4935 -4615 -4575 -4805 -4992 -4817 -4322 -3817 -3464 -3086 -2463 -1696 -1154 -1081 -1346 -1590 -1559 -1288 -1021 +-578 -1096 -1631 -2015 -2170 -2152 -2143 -2268 -2434 -2447 -2292 -2179 -2241 -2301 
-2082 -1594 -1128 -913 -891 -887 -833 -787 -794 -821 -775 -538 -38 607 1093 1178 903 544 335 316 417 578 740 836 834 761 639 448 196 -30 -137 -130 -97 -75 -35 38 115 149 102 -56 -278 -394 -244 97 309 182 -131 -311 -274 -221 -333 -540 -691 -755 -773 -724 -581 -470 -586 -951 -1407 -1815 -2151 -2383 -2411 -2255 -2154 -2323 -2686 -2984 -3120 -3243 -3462 -3658 -3632 -3373 -3057 -2850 -2765 -2680 -2431 -1968 -1440 -1061 -819 -421 357 1338 2127 2604 3037 3647 4282 4642 4716 4795 5091 5516 5840 5966 5958 5915 5861 5750 5527 5194 4831 4520 4218 3793 3167 2403 1630 929 339 -117 -463 -767 -1074 -1374 -1687 -2110 -2685 -3273 -3693 -3964 -4243 -4516 -4525 -4118 -3540 -3228 -3364 -3721 -3939 -3812 -3343 -2672 -2003 -1480 -1072 -613 -25 586 1083 1473 1839 2193 2476 2687 2893 3124 3353 3577 3809 3959 3868 3517 3126 2908 2820 2656 2355 2065 1866 1606 1107 459 -55 -291 -386 -533 -748 -924 -1033 -1159 -1371 -1645 -1946 -2272 -2577 -2747 -2708 -2547 -2425 -2415 -2475 -2591 -2800 -3068 -3254 -3257 -3153 -3107 -3187 -3350 -3570 -3857 -4159 -4341 -4325 -4178 -4018 -3905 -3849 -3875 -3986 -4094 -4090 -3963 -3766 -3489 -3048 -2402 -1619 -820 -94 501 967 1406 1976 2733 3567 4350 5072 5772 6408 6915 7325 7710 7999 8003 7706 7373 7272 7360 7330 6986 6412 5823 5301 4733 3971 3039 2149 1487 978 356 -503 -1382 -2019 -2479 -3089 -3963 -4795 -5219 -5221 -5071 -4983 -5011 -5166 -5424 -5616 -5548 -5242 -4920 -4650 -4221 -3462 -2523 -1674 -971 -318 235 561 784 1259 2094 2937 3435 3704 4133 4796 5339 5461 5296 5199 5312 5456 5370 4990 4510 4239 4285 4367 4001 3012 1775 837 340 -10 -476 -997 -1411 -1759 -2191 -2677 -3027 -3173 -3272 -3480 -3762 -3953 -3968 -3847 -3685 -3558 -3488 -3427 -3290 -3087 -2954 -2994 -3097 -3024 -2686 -2281 -2097 -2214 -2459 -2630 -2713 -2834 -3066 -3300 -3381 -3313 -3283 -3445 -3721 -3893 -3858 -3726 -3619 -3513 -3304 -2996 -2674 -2328 -1800 -990 -55 707 1152 1433 1832 2490 3334 4210 5053 5880 6673 7324 7734 7923 8019 8123 8226 8238 8102 7844 7577 7401 7311 7162 6768 6022 4948 3673 2404 1346 569 -106 -961 -2080 -3241 +1 +-1010 -1330 -1842 -2317 -2619 -2780 -2892 -2979 -3043 -3183 -3534 -4044 -4418 -4405 -4073 -3688 -3364 -2961 -2381 -1761 -1257 -766 -56 863 1696 2250 2694 3324 4201 5128 5944 6642 7250 7728 8046 8247 8358 8337 8175 7952 7735 7463 7045 6529 6041 5585 4995 4128 3050 1972 1040 244 -515 -1304 -2096 -2815 -3441 -4041 -4712 -5474 -6247 -6922 -7388 -7537 -7338 -6939 -6586 -6376 -6156 -5776 -5290 -4820 -4283 -3463 -2350 -1201 -199 750 1763 2736 3515 4173 4886 5596 6077 6349 6705 7263 7721 7762 7494 7250 7085 6756 6157 5455 4799 4127 3388 2685 2046 1290 322 -622 -1287 -1826 -2614 -3631 -4399 -4618 -4574 -4690 -4951 -5053 -4964 -4964 -5153 -5251 -5007 -4562 -4201 -3947 -3598 -3079 -2534 -2085 -1695 -1317 -1017 -860 -806 -783 -788 -821 -793 -625 -418 -402 -686 -1144 -1599 -2011 -2444 -2867 -3151 -3258 -3349 -3603 -3981 -4245 -4218 -3970 -3706 -3543 -3462 -3381 -3208 -2830 -2166 -1279 +1839 2193 2476 2687 2893 3124 3353 3577 3809 3959 3868 3517 3126 2908 2820 2656 2355 2065 1866 1606 1107 459 -55 -291 -386 -533 -748 -924 -1033 -1159 -1371 -1645 -1946 -2272 -2577 -2747 -2708 -2547 -2425 -2415 -2475 -2591 -2800 -3068 -3254 -3257 -3153 -3107 -3187 -3350 -3570 -3857 -4159 -4341 -4325 -4178 -4018 -3905 -3849 -3875 -3986 -4094 -4090 -3963 -3766 -3489 -3048 -2402 -1619 -820 -94 501 967 1406 1976 2733 3567 4350 5072 5772 6408 6915 7325 7710 7999 8003 7706 7373 7272 7360 7330 6986 6412 5823 5301 4733 3971 3039 2149 1487 978 356 -503 -1382 -2019 -2479 -3089 -3963 -4795 -5219 -5221 -5071 -4983 -5011 -5166 
-5424 -5616 -5548 -5242 -4920 -4650 -4221 -3462 -2523 -1674 -971 -318 235 561 784 1259 2094 2937 3435 3704 4133 4796 5339 5461 5296 5199 5312 5456 5370 4990 4510 4239 4285 4367 4001 3012 1775 837 340 -10 -476 -997 -1411 -1759 -2191 -2677 -3027 -3173 -3272 -3480 -3762 -3953 -3968 -3847 -3685 -3558 -3488 -3427 -3290 -3087 -2954 -2994 -3097 -3024 -2686 -2281 -2097 -2214 -2459 -2630 -2713 -2834 -3066 -3300 -3381 -3313 -3283 -3445 -3721 -3893 -3858 -3726 -3619 -3513 -3304 -2996 -2674 -2328 -1800 -990 -55 707 1152 1433 1832 2490 3334 4210 5053 5880 6673 7324 7734 7923 8019 8123 8226 8238 8102 7844 7577 7401 7311 7162 6768 6022 4948 3673 2404 1346 569 -106 -961 -2080 -3241 -4167 -4813 -5319 -5784 -6201 -6603 -7074 -7571 -7890 -7861 -7514 -6955 -6232 -5436 -4784 -4410 -4121 -3587 -2761 -1928 -1282 -674 130 1132 2181 3216 4204 4970 5348 5490 5767 6287 6756 6903 6863 6926 7108 7168 6965 6555 5948 5061 3968 2969 2286 1824 1381 926 476 -142 -1156 -2406 -3356 -3661 -3580 -3662 -4149 -4815 -5308 -5462 -5295 -4935 -4615 -4575 -4805 -4992 -4817 -4322 -3817 -3464 -3086 -2463 -1696 -1154 -1081 -1346 -1590 -1559 -1288 -1021 -1010 -1330 -1842 -2317 -2619 -2780 -2892 -2979 -3043 -3183 -3534 -4044 -4418 -4405 -4073 -3688 -3364 -2961 -2381 -1761 -1257 -766 -56 863 1696 2250 2694 3324 4201 5128 5944 6642 7250 7728 8046 8247 8358 8337 8175 7952 7735 7463 7045 6529 6041 5585 4995 4128 3050 1972 1040 244 -515 -1304 -2096 -2815 -3441 -4041 -4712 -5474 -6247 -6922 -7388 -7537 -7338 -6939 -6586 -6376 -6156 -5776 -5290 -4820 -4283 -3463 -2350 -1201 -199 750 1763 2736 +1 +-352 504 1349 2246 3093 3747 4247 4801 5499 6195 6747 7220 7743 8263 8569 8575 8419 8236 8003 7634 7149 6621 6051 5400 4729 4132 3558 2812 1817 737 -232 -1096 -1992 -2925 -3750 -4417 -5063 -5774 -6391 -6687 -6685 -6638 -6712 -6806 -6759 -6538 -6159 -5564 -4723 -3787 -2964 -2230 -1380 -365 595 1322 1927 2614 3343 3934 4386 4891 5500 6026 6342 6572 6836 6985 6804 6361 5920 5548 5048 4325 3579 3013 2523 1875 1054 250 -447 -1135 -1886 -2615 -3244 -3820 -4387 -4847 -5068 -5072 -4997 -4922 -4834 -4731 -4649 -4570 -4401 -4109 -3768 -3439 -3070 -2598 -2086 -1665 -1352 -1015 -570 -123 122 90 -114 -314 -406 -394 -351 -386 -603 -1031 -1573 -2072 -2455 -2788 -3172 -3608 -4011 -4320 -4533 -4643 -4597 -4404 -4187 -4057 -3950 -3682 -3181 -2558 -1930 -1287 -570 182 889 1602 2447 3421 4323 4990 5473 5941 6479 7056 7617 8119 8475 8599 8548 8500 8552 8573 8348 7809 +-2677 -3027 -3173 -3272 -3480 -3762 -3953 -3968 -3847 -3685 -3558 -3488 -3427 -3290 -3087 -2954 -2994 -3097 -3024 -2686 -2281 -2097 -2214 -2459 -2630 -2713 -2834 -3066 -3300 -3381 -3313 -3283 -3445 -3721 -3893 -3858 -3726 -3619 -3513 -3304 -2996 -2674 -2328 -1800 -990 -55 707 1152 1433 1832 2490 3334 4210 5053 5880 6673 7324 7734 7923 8019 8123 8226 8238 8102 7844 7577 7401 7311 7162 6768 6022 4948 3673 2404 1346 569 -106 -961 -2080 -3241 -4167 -4813 -5319 -5784 -6201 -6603 -7074 -7571 -7890 -7861 -7514 -6955 -6232 -5436 -4784 -4410 -4121 -3587 -2761 -1928 -1282 -674 130 1132 2181 3216 4204 4970 5348 5490 5767 6287 6756 6903 6863 6926 7108 7168 6965 6555 5948 5061 3968 2969 2286 1824 1381 926 476 -142 -1156 -2406 -3356 -3661 -3580 -3662 -4149 -4815 -5308 -5462 -5295 -4935 -4615 -4575 -4805 -4992 -4817 -4322 -3817 -3464 -3086 -2463 -1696 -1154 -1081 -1346 -1590 -1559 -1288 -1021 -1010 -1330 -1842 -2317 -2619 -2780 -2892 -2979 -3043 -3183 -3534 -4044 -4418 -4405 -4073 -3688 -3364 -2961 -2381 -1761 -1257 -766 -56 863 1696 2250 2694 3324 4201 5128 5944 6642 7250 7728 8046 8247 8358 8337 8175 7952 7735 7463 7045 
6529 6041 5585 4995 4128 3050 1972 1040 244 -515 -1304 -2096 -2815 -3441 -4041 -4712 -5474 -6247 -6922 -7388 -7537 -7338 -6939 -6586 -6376 -6156 -5776 -5290 -4820 -4283 -3463 -2350 -1201 -199 750 1763 2736 3515 4173 4886 5596 6077 6349 6705 7263 7721 7762 7494 7250 7085 6756 6157 5455 4799 4127 3388 2685 2046 1290 322 -622 -1287 -1826 -2614 -3631 -4399 -4618 -4574 -4690 -4951 -5053 -4964 -4964 -5153 -5251 -5007 -4562 -4201 -3947 -3598 -3079 -2534 -2085 -1695 -1317 -1017 -860 -806 -783 -788 -821 -793 -625 -418 -402 -686 -1144 -1599 -2011 -2444 -2867 -3151 -3258 -3349 -3603 -3981 -4245 -4218 -3970 -3706 -3543 -3462 -3381 -3208 -2830 -2166 -1279 -352 504 1349 2246 3093 3747 4247 4801 5499 6195 6747 7220 7743 8263 8569 8575 8419 8236 8003 7634 7149 6621 6051 5400 4729 4132 3558 2812 1817 737 -232 -1096 -1992 -2925 -3750 -4417 -5063 -5774 -6391 -6687 -6685 -6638 -6712 -6806 -6759 -6538 -6159 -5564 -4723 -3787 -2964 -2230 -1380 -365 595 1322 1927 2614 3343 3934 4386 4891 5500 6026 6342 6572 6836 6985 6804 6361 5920 5548 5048 4325 3579 3013 2523 1875 1054 250 +1 +7112 6448 5848 5167 4253 3113 1942 952 178 -536 -1350 -2280 -3232 -4135 -4971 -5702 -6269 -6635 -6809 -6803 -6659 -6481 -6365 -6251 -5952 -5378 -4680 -4069 -3582 -3079 -2453 -1692 -797 228 1253 2058 2576 3013 3599 4309 4921 5314 5569 5802 6025 6215 6380 6466 6311 5826 5146 4510 4001 3512 2950 2350 1770 1175 517 -162 -771 -1310 -1870 -2499 -3149 -3733 -4192 -4477 -4549 -4456 -4342 -4329 -4411 -4501 -4538 -4488 -4265 -3781 -3109 -2485 -2078 -1806 -1462 -984 -500 -133 113 244 186 -64 -296 -253 36 221 -9 -631 -1394 -2113 -2741 -3229 -3521 -3688 -3941 -4389 -4886 -5184 -5222 -5152 -5117 -5115 -5074 -4943 -4666 -4149 -3365 -2451 -1613 -913 -225 583 1490 2374 3192 4014 4905 5825 6702 7516 8277 8922 9327 9450 9422 9423 9483 9464 9251 8881 8438 7908 7208 6365 5515 4740 3939 2972 1824 620 -523 -1547 -2424 -3163 -3865 -4667 -5575 -6415 -6996 -7305 -7461 -7561 +-1010 -1330 -1842 -2317 -2619 -2780 -2892 -2979 -3043 -3183 -3534 -4044 -4418 -4405 -4073 -3688 -3364 -2961 -2381 -1761 -1257 -766 -56 863 1696 2250 2694 3324 4201 5128 5944 6642 7250 7728 8046 8247 8358 8337 8175 7952 7735 7463 7045 6529 6041 5585 4995 4128 3050 1972 1040 244 -515 -1304 -2096 -2815 -3441 -4041 -4712 -5474 -6247 -6922 -7388 -7537 -7338 -6939 -6586 -6376 -6156 -5776 -5290 -4820 -4283 -3463 -2350 -1201 -199 750 1763 2736 3515 4173 4886 5596 6077 6349 6705 7263 7721 7762 7494 7250 7085 6756 6157 5455 4799 4127 3388 2685 2046 1290 322 -622 -1287 -1826 -2614 -3631 -4399 -4618 -4574 -4690 -4951 -5053 -4964 -4964 -5153 -5251 -5007 -4562 -4201 -3947 -3598 -3079 -2534 -2085 -1695 -1317 -1017 -860 -806 -783 -788 -821 -793 -625 -418 -402 -686 -1144 -1599 -2011 -2444 -2867 -3151 -3258 -3349 -3603 -3981 -4245 -4218 -3970 -3706 -3543 -3462 -3381 -3208 -2830 -2166 -1279 -352 504 1349 2246 3093 3747 4247 4801 5499 6195 6747 7220 7743 8263 8569 8575 8419 8236 8003 7634 7149 6621 6051 5400 4729 4132 3558 2812 1817 737 -232 -1096 -1992 -2925 -3750 -4417 -5063 -5774 -6391 -6687 -6685 -6638 -6712 -6806 -6759 -6538 -6159 -5564 -4723 -3787 -2964 -2230 -1380 -365 595 1322 1927 2614 3343 3934 4386 4891 5500 6026 6342 6572 6836 6985 6804 6361 5920 5548 5048 4325 3579 3013 2523 1875 1054 250 -447 -1135 -1886 -2615 -3244 -3820 -4387 -4847 -5068 -5072 -4997 -4922 -4834 -4731 -4649 -4570 -4401 -4109 -3768 -3439 -3070 -2598 -2086 -1665 -1352 -1015 -570 -123 122 90 -114 -314 -406 -394 -351 -386 -603 -1031 -1573 -2072 -2455 -2788 -3172 -3608 -4011 -4320 -4533 -4643 -4597 -4404 -4187 -4057 -3950 -3682 -3181 
-2558 -1930 -1287 -570 182 889 1602 2447 3421 4323 4990 5473 5941 6479 7056 7617 8119 8475 8599 8548 8500 8552 8573 8348 7809 7112 6448 5848 5167 4253 3113 1942 952 178 -536 -1350 -2280 -3232 -4135 -4971 -5702 -6269 -6635 -6809 -6803 -6659 -6481 -6365 -6251 -5952 -5378 -4680 -4069 -3582 -3079 -2453 -1692 -797 228 1253 2058 2576 3013 3599 4309 4921 5314 5569 5802 6025 6215 6380 6466 6311 5826 5146 4510 4001 3512 2950 2350 1770 1175 517 -162 -771 -1310 -1870 -2499 -3149 -3733 -4192 -4477 -4549 -4456 -4342 -4329 -4411 -4501 -4538 -4488 -4265 -3781 -3109 -2485 +1 +-7621 -7635 -7587 -7386 -6901 -6115 -5201 -4366 -3667 -3011 -2310 -1555 -758 99 1005 1880 2637 3270 3846 4420 4979 5448 5743 5831 5768 5674 5627 5591 5475 5242 4935 4574 4103 3459 2678 1890 1197 597 25 -585 -1233 -1866 -2421 -2872 -3237 -3557 -3835 -4027 -4088 -4027 -3907 -3782 -3660 -3509 -3293 -3007 -2675 -2314 -1937 -1558 -1198 -862 -526 -172 144 319 307 191 121 147 166 39 -259 -629 -972 -1312 -1751 -2348 -3034 -3659 -4112 -4415 -4686 -5017 -5368 -5604 -5647 -5567 -5497 -5471 -5380 -5094 -4594 -3952 -3247 -2516 -1773 -1035 -265 606 1598 2645 3654 4615 5559 6472 7275 7927 8473 8979 9436 9777 9958 9986 9881 9658 9340 8962 8530 7997 7311 6483 5577 4649 3687 2630 1455 246 -858 -1772 -2571 -3400 -4318 -5228 -5994 -6576 -7020 -7365 -7616 -7783 -7873 -7858 -7687 -7362 -6934 -6423 -5799 -5074 -4344 -3669 -2983 -2182 -1280 -381 476 1347 2255 3109 3837 4467 +-352 504 1349 2246 3093 3747 4247 4801 5499 6195 6747 7220 7743 8263 8569 8575 8419 8236 8003 7634 7149 6621 6051 5400 4729 4132 3558 2812 1817 737 -232 -1096 -1992 -2925 -3750 -4417 -5063 -5774 -6391 -6687 -6685 -6638 -6712 -6806 -6759 -6538 -6159 -5564 -4723 -3787 -2964 -2230 -1380 -365 595 1322 1927 2614 3343 3934 4386 4891 5500 6026 6342 6572 6836 6985 6804 6361 5920 5548 5048 4325 3579 3013 2523 1875 1054 250 -447 -1135 -1886 -2615 -3244 -3820 -4387 -4847 -5068 -5072 -4997 -4922 -4834 -4731 -4649 -4570 -4401 -4109 -3768 -3439 -3070 -2598 -2086 -1665 -1352 -1015 -570 -123 122 90 -114 -314 -406 -394 -351 -386 -603 -1031 -1573 -2072 -2455 -2788 -3172 -3608 -4011 -4320 -4533 -4643 -4597 -4404 -4187 -4057 -3950 -3682 -3181 -2558 -1930 -1287 -570 182 889 1602 2447 3421 4323 4990 5473 5941 6479 7056 7617 8119 8475 8599 8548 8500 8552 8573 8348 7809 7112 6448 5848 5167 4253 3113 1942 952 178 -536 -1350 -2280 -3232 -4135 -4971 -5702 -6269 -6635 -6809 -6803 -6659 -6481 -6365 -6251 -5952 -5378 -4680 -4069 -3582 -3079 -2453 -1692 -797 228 1253 2058 2576 3013 3599 4309 4921 5314 5569 5802 6025 6215 6380 6466 6311 5826 5146 4510 4001 3512 2950 2350 1770 1175 517 -162 -771 -1310 -1870 -2499 -3149 -3733 -4192 -4477 -4549 -4456 -4342 -4329 -4411 -4501 -4538 -4488 -4265 -3781 -3109 -2485 -2078 -1806 -1462 -984 -500 -133 113 244 186 -64 -296 -253 36 221 -9 -631 -1394 -2113 -2741 -3229 -3521 -3688 -3941 -4389 -4886 -5184 -5222 -5152 -5117 -5115 -5074 -4943 -4666 -4149 -3365 -2451 -1613 -913 -225 583 1490 2374 3192 4014 4905 5825 6702 7516 8277 8922 9327 9450 9422 9423 9483 9464 9251 8881 8438 7908 7208 6365 5515 4740 3939 2972 1824 620 -523 -1547 -2424 -3163 -3865 -4667 -5575 -6415 -6996 -7305 -7461 -7561 -7621 -7635 -7587 -7386 -6901 -6115 -5201 -4366 -3667 -3011 -2310 -1555 -758 99 1005 1880 2637 3270 3846 4420 4979 5448 5743 5831 5768 5674 5627 5591 5475 5242 4935 4574 4103 3459 2678 1890 1197 597 25 -585 -1233 -1866 -2421 -2872 -3237 -3557 -3835 -4027 -4088 -4027 -3907 -3782 -3660 -3509 -3293 -3007 -2675 -2314 -1937 -1558 -1198 -862 -526 -172 144 319 307 191 121 147 166 39 -259 -629 -972 -1312 
-1751 -2348 -3034 -3659 +1 +5042 5518 5831 6040 6251 6438 6465 6294 6051 5836 5572 5131 4535 3904 3270 2577 1861 1240 712 112 -652 -1399 -1891 -2149 -2423 -2831 -3220 -3406 -3415 -3396 -3389 -3307 -3135 -2954 -2811 -2644 -2411 -2175 -1984 -1775 -1458 -1083 -805 -692 -651 -568 -436 -310 -229 -223 -359 -680 -1115 -1527 -1875 -2223 -2623 -3020 -3362 -3704 -4139 -4656 -5134 -5483 -5708 -5845 -5894 -5850 -5749 -5603 -5334 -4850 -4180 -3464 -2805 -2188 -1540 -827 -43 830 1801 2803 3733 4561 5374 6265 7208 8070 8720 9134 9374 9542 9703 9843 9876 9732 9425 9031 8581 8021 7308 6497 5674 4819 3817 2649 1469 450 -441 -1395 -2500 -3605 -4504 -5178 -5792 -6453 -7092 -7583 -7896 -8094 -8205 -8187 -8001 -7661 -7215 -6715 -6213 -5704 -5098 -4304 -3365 -2446 -1645 -863 59 1113 2121 2948 3643 4322 4985 5519 5872 6102 6300 6494 6661 6756 6717 6480 6046 5515 4997 4503 3974 3377 2754 2150 1544 899 +7112 6448 5848 5167 4253 3113 1942 952 178 -536 -1350 -2280 -3232 -4135 -4971 -5702 -6269 -6635 -6809 -6803 -6659 -6481 -6365 -6251 -5952 -5378 -4680 -4069 -3582 -3079 -2453 -1692 -797 228 1253 2058 2576 3013 3599 4309 4921 5314 5569 5802 6025 6215 6380 6466 6311 5826 5146 4510 4001 3512 2950 2350 1770 1175 517 -162 -771 -1310 -1870 -2499 -3149 -3733 -4192 -4477 -4549 -4456 -4342 -4329 -4411 -4501 -4538 -4488 -4265 -3781 -3109 -2485 -2078 -1806 -1462 -984 -500 -133 113 244 186 -64 -296 -253 36 221 -9 -631 -1394 -2113 -2741 -3229 -3521 -3688 -3941 -4389 -4886 -5184 -5222 -5152 -5117 -5115 -5074 -4943 -4666 -4149 -3365 -2451 -1613 -913 -225 583 1490 2374 3192 4014 4905 5825 6702 7516 8277 8922 9327 9450 9422 9423 9483 9464 9251 8881 8438 7908 7208 6365 5515 4740 3939 2972 1824 620 -523 -1547 -2424 -3163 -3865 -4667 -5575 -6415 -6996 -7305 -7461 -7561 -7621 -7635 -7587 -7386 -6901 -6115 -5201 -4366 -3667 -3011 -2310 -1555 -758 99 1005 1880 2637 3270 3846 4420 4979 5448 5743 5831 5768 5674 5627 5591 5475 5242 4935 4574 4103 3459 2678 1890 1197 597 25 -585 -1233 -1866 -2421 -2872 -3237 -3557 -3835 -4027 -4088 -4027 -3907 -3782 -3660 -3509 -3293 -3007 -2675 -2314 -1937 -1558 -1198 -862 -526 -172 144 319 307 191 121 147 166 39 -259 -629 -972 -1312 -1751 -2348 -3034 -3659 -4112 -4415 -4686 -5017 -5368 -5604 -5647 -5567 -5497 -5471 -5380 -5094 -4594 -3952 -3247 -2516 -1773 -1035 -265 606 1598 2645 3654 4615 5559 6472 7275 7927 8473 8979 9436 9777 9958 9986 9881 9658 9340 8962 8530 7997 7311 6483 5577 4649 3687 2630 1455 246 -858 -1772 -2571 -3400 -4318 -5228 -5994 -6576 -7020 -7365 -7616 -7783 -7873 -7858 -7687 -7362 -6934 -6423 -5799 -5074 -4344 -3669 -2983 -2182 -1280 -381 476 1347 2255 3109 3837 4467 5042 5518 5831 6040 6251 6438 6465 6294 6051 5836 5572 5131 4535 3904 3270 2577 1861 1240 712 112 -652 -1399 -1891 -2149 -2423 -2831 -3220 -3406 -3415 -3396 -3389 -3307 -3135 -2954 -2811 -2644 -2411 -2175 -1984 -1775 -1458 -1083 -805 -692 -651 -568 -436 -310 -229 -223 -359 -680 -1115 -1527 -1875 -2223 -2623 -3020 -3362 -3704 -4139 -4656 -5134 -5483 -5708 -5845 -5894 -5850 -5749 -5603 -5334 -4850 -4180 -3464 -2805 -2188 -1540 -827 -43 830 +1 +232 -399 -966 -1492 -1996 -2467 -2881 -3229 -3489 -3628 -3630 -3547 -3470 -3448 -3450 -3408 -3280 -3077 -2828 -2549 -2261 -1980 -1717 -1469 -1233 -1029 -875 -772 -717 -723 -814 -969 -1109 -1183 -1234 -1371 -1665 -2079 -2530 -2963 -3355 -3687 -3955 -4190 -4450 -4727 -4936 -5009 -4975 -4915 -4857 -4736 -4475 -4069 -3561 -2989 -2358 -1669 -924 -131 701 1547 2382 3196 3996 4782 5526 6198 6788 7304 7755 8132 8418 8615 8748 8834 8851 8743 8473 8067 7594 7096 6550 5912 5178 4389 3586 2769 1917 
1031 137 -741 -1605 -2456 -3271 -3998 -4602 -5093 -5499 -5841 -6116 -6321 -6464 -6536 -6503 -6343 -6080 -5780 -5480 -5137 -4655 -3989 -3201 -2433 -1777 -1189 -537 242 1057 1754 2294 2789 3365 4000 4543 4874 5015 5070 5099 5079 4959 4732 4450 4165 3868 3495 3000 2428 1877 1387 914 403 -125 -598 -988 -1336 -1684 -2008 -2252 -2391 -2459 -2524 -2635 -2790 -2920 -2956 -2887 -2757 -2602 -2419 +-7621 -7635 -7587 -7386 -6901 -6115 -5201 -4366 -3667 -3011 -2310 -1555 -758 99 1005 1880 2637 3270 3846 4420 4979 5448 5743 5831 5768 5674 5627 5591 5475 5242 4935 4574 4103 3459 2678 1890 1197 597 25 -585 -1233 -1866 -2421 -2872 -3237 -3557 -3835 -4027 -4088 -4027 -3907 -3782 -3660 -3509 -3293 -3007 -2675 -2314 -1937 -1558 -1198 -862 -526 -172 144 319 307 191 121 147 166 39 -259 -629 -972 -1312 -1751 -2348 -3034 -3659 -4112 -4415 -4686 -5017 -5368 -5604 -5647 -5567 -5497 -5471 -5380 -5094 -4594 -3952 -3247 -2516 -1773 -1035 -265 606 1598 2645 3654 4615 5559 6472 7275 7927 8473 8979 9436 9777 9958 9986 9881 9658 9340 8962 8530 7997 7311 6483 5577 4649 3687 2630 1455 246 -858 -1772 -2571 -3400 -4318 -5228 -5994 -6576 -7020 -7365 -7616 -7783 -7873 -7858 -7687 -7362 -6934 -6423 -5799 -5074 -4344 -3669 -2983 -2182 -1280 -381 476 1347 2255 3109 3837 4467 5042 5518 5831 6040 6251 6438 6465 6294 6051 5836 5572 5131 4535 3904 3270 2577 1861 1240 712 112 -652 -1399 -1891 -2149 -2423 -2831 -3220 -3406 -3415 -3396 -3389 -3307 -3135 -2954 -2811 -2644 -2411 -2175 -1984 -1775 -1458 -1083 -805 -692 -651 -568 -436 -310 -229 -223 -359 -680 -1115 -1527 -1875 -2223 -2623 -3020 -3362 -3704 -4139 -4656 -5134 -5483 -5708 -5845 -5894 -5850 -5749 -5603 -5334 -4850 -4180 -3464 -2805 -2188 -1540 -827 -43 830 1801 2803 3733 4561 5374 6265 7208 8070 8720 9134 9374 9542 9703 9843 9876 9732 9425 9031 8581 8021 7308 6497 5674 4819 3817 2649 1469 450 -441 -1395 -2500 -3605 -4504 -5178 -5792 -6453 -7092 -7583 -7896 -8094 -8205 -8187 -8001 -7661 -7215 -6715 -6213 -5704 -5098 -4304 -3365 -2446 -1645 -863 59 1113 2121 2948 3643 4322 4985 5519 5872 6102 6300 6494 6661 6756 6717 6480 6046 5515 4997 4503 3974 3377 2754 2150 1544 899 232 -399 -966 -1492 -1996 -2467 -2881 -3229 -3489 -3628 -3630 -3547 -3470 -3448 -3450 -3408 -3280 -3077 -2828 -2549 -2261 -1980 -1717 -1469 -1233 -1029 -875 -772 -717 -723 -814 -969 -1109 -1183 -1234 -1371 -1665 -2079 -2530 -2963 -3355 -3687 -3955 -4190 -4450 -4727 -4936 -5009 -4975 -4915 -4857 -4736 -4475 -4069 -3561 -2989 -2358 -1669 -924 -131 701 1547 2382 3196 3996 4782 5526 6198 6788 7304 7755 8132 8418 8615 8748 8834 8851 8743 8473 8067 +1 +-2206 -2004 -1840 -1680 -1474 -1256 -1114 -1069 -1022 -902 -778 -787 -937 -1072 -1077 -1031 -1119 -1426 -1837 -2174 -2379 -2545 -2799 -3165 -3541 -3792 -3874 -3855 -3845 -3898 -3968 -3970 -3866 -3700 -3530 -3336 -3022 -2530 -1931 -1357 -859 -350 279 1012 1742 2398 3004 3604 4186 4726 5254 5816 6393 6880 7207 7393 7519 7632 7712 7711 7590 7344 6996 6594 6187 5780 5329 4792 4160 3452 2690 1888 1070 279 -463 -1169 -1860 -2519 -3090 -3542 -3917 -4278 -4643 -4959 -5172 -5284 -5347 -5378 -5351 -5226 -5011 -4740 -4437 -4096 -3694 -3212 -2673 -2147 -1696 -1304 -889 -382 182 707 1153 1563 1983 2383 2711 2962 3160 3291 3311 3249 3196 3213 3237 3164 2969 2716 2463 2192 1874 1521 1183 902 687 513 329 88 -199 -463 -650 -793 -971 -1198 -1398 -1497 -1510 -1508 -1525 -1542 -1538 -1530 -1547 -1598 -1673 -1757 -1828 -1864 -1865 -1857 -1858 -1847 -1816 -1791 -1823 -1920 -2023 -2085 +5042 5518 5831 6040 6251 6438 6465 6294 6051 5836 5572 5131 4535 3904 3270 2577 1861 1240 712 112 -652 -1399 
-1891 -2149 -2423 -2831 -3220 -3406 -3415 -3396 -3389 -3307 -3135 -2954 -2811 -2644 -2411 -2175 -1984 -1775 -1458 -1083 -805 -692 -651 -568 -436 -310 -229 -223 -359 -680 -1115 -1527 -1875 -2223 -2623 -3020 -3362 -3704 -4139 -4656 -5134 -5483 -5708 -5845 -5894 -5850 -5749 -5603 -5334 -4850 -4180 -3464 -2805 -2188 -1540 -827 -43 830 1801 2803 3733 4561 5374 6265 7208 8070 8720 9134 9374 9542 9703 9843 9876 9732 9425 9031 8581 8021 7308 6497 5674 4819 3817 2649 1469 450 -441 -1395 -2500 -3605 -4504 -5178 -5792 -6453 -7092 -7583 -7896 -8094 -8205 -8187 -8001 -7661 -7215 -6715 -6213 -5704 -5098 -4304 -3365 -2446 -1645 -863 59 1113 2121 2948 3643 4322 4985 5519 5872 6102 6300 6494 6661 6756 6717 6480 6046 5515 4997 4503 3974 3377 2754 2150 1544 899 232 -399 -966 -1492 -1996 -2467 -2881 -3229 -3489 -3628 -3630 -3547 -3470 -3448 -3450 -3408 -3280 -3077 -2828 -2549 -2261 -1980 -1717 -1469 -1233 -1029 -875 -772 -717 -723 -814 -969 -1109 -1183 -1234 -1371 -1665 -2079 -2530 -2963 -3355 -3687 -3955 -4190 -4450 -4727 -4936 -5009 -4975 -4915 -4857 -4736 -4475 -4069 -3561 -2989 -2358 -1669 -924 -131 701 1547 2382 3196 3996 4782 5526 6198 6788 7304 7755 8132 8418 8615 8748 8834 8851 8743 8473 8067 7594 7096 6550 5912 5178 4389 3586 2769 1917 1031 137 -741 -1605 -2456 -3271 -3998 -4602 -5093 -5499 -5841 -6116 -6321 -6464 -6536 -6503 -6343 -6080 -5780 -5480 -5137 -4655 -3989 -3201 -2433 -1777 -1189 -537 242 1057 1754 2294 2789 3365 4000 4543 4874 5015 5070 5099 5079 4959 4732 4450 4165 3868 3495 3000 2428 1877 1387 914 403 -125 -598 -988 -1336 -1684 -2008 -2252 -2391 -2459 -2524 -2635 -2790 -2920 -2956 -2887 -2757 -2602 -2419 -2206 -2004 -1840 -1680 -1474 -1256 -1114 -1069 -1022 -902 -778 -787 -937 -1072 -1077 -1031 -1119 -1426 -1837 -2174 -2379 -2545 -2799 -3165 -3541 -3792 -3874 -3855 -3845 -3898 -3968 -3970 -3866 -3700 -3530 -3336 -3022 -2530 -1931 -1357 -859 -350 279 1012 1742 2398 3004 3604 4186 4726 5254 5816 6393 6880 7207 7393 7519 7632 7712 7711 7590 7344 6996 6594 6187 5780 5329 4792 4160 3452 2690 1888 1070 279 -463 -1169 -1860 -2519 -3090 -3542 +1 +-2121 -2184 -2291 -2398 -2459 -2485 -2512 -2551 -2581 -2592 -2595 -2598 -2573 -2488 -2346 -2192 -2060 -1928 -1749 -1505 -1217 -924 -649 -377 -79 271 672 1094 1507 1901 2290 2672 3021 3324 3612 3913 4219 4495 4733 4951 5154 5287 5307 5242 5174 5145 5104 4959 4671 4302 3942 3632 3321 2935 2454 1944 1486 1093 695 232 -274 -739 -1104 -1398 -1682 -1974 -2220 -2380 -2483 -2603 -2771 -2933 -3022 -3016 -2964 -2912 -2877 -2830 -2729 -2552 -2326 -2117 -1963 -1820 -1611 -1317 -1014 -764 -542 -277 38 326 540 715 904 1111 1293 1427 1524 1590 1621 1631 1639 1643 1624 1584 1551 1522 1446 1296 1120 991 913 829 686 500 314 147 -18 -200 -413 -633 -819 -936 -1018 -1139 -1318 -1474 -1524 -1496 -1517 -1651 -1831 -1948 -1979 -1977 -1986 -2001 -2010 -2036 -2092 -2152 -2180 -2165 -2129 -2086 -2035 -1981 -1922 -1841 -1712 -1544 -1379 -1255 -1164 -1077 -979 -879 -788 -706 +232 -399 -966 -1492 -1996 -2467 -2881 -3229 -3489 -3628 -3630 -3547 -3470 -3448 -3450 -3408 -3280 -3077 -2828 -2549 -2261 -1980 -1717 -1469 -1233 -1029 -875 -772 -717 -723 -814 -969 -1109 -1183 -1234 -1371 -1665 -2079 -2530 -2963 -3355 -3687 -3955 -4190 -4450 -4727 -4936 -5009 -4975 -4915 -4857 -4736 -4475 -4069 -3561 -2989 -2358 -1669 -924 -131 701 1547 2382 3196 3996 4782 5526 6198 6788 7304 7755 8132 8418 8615 8748 8834 8851 8743 8473 8067 7594 7096 6550 5912 5178 4389 3586 2769 1917 1031 137 -741 -1605 -2456 -3271 -3998 -4602 -5093 -5499 -5841 -6116 -6321 -6464 -6536 -6503 -6343 -6080 -5780 -5480 -5137 
[diff hunk elided: several thousand raw int16 audio waveform samples in a generated test-data file are removed and re-added rewrapped, with `1` marker lines interleaved between the sample blocks in the new version; the numeric payload is machine-generated data and is omitted here]
47 7 -1 13 -7 -76 -150 -187 -192 -209 -242 -269 -268 -249 -236 -237 -248 -255 -259 -264 -277 -290 -288 -267 -240 -226 -228 -223 -189 -135 -88 -58 -30 23 89 137 141 124 126 162 213 252 266 271 273 277 285 286 271 241 217 216 227 221 199 191 218 254 246 179 90 31 13 14 8 -10 -32 -51 -67 -86 -110 -142 -168 -173 -149 -119 -112 -130 -143 -118 -60 -3 27 32 43 69 99 113 113 119 149 199 241 250 227 202 195 195 173 123 79 71 96 108 77 22 -20 -32 -33 -45 -64 -85 -110 -150 -199 -228 -220 -198 -198 -225 -249 -249 -233 -226 -235 -246 -247 -243 -236 -218 -169 -98 -38 -21 -42 -65 -53 -5 49 72 60 36 29 51 94 127 130 101 77 86 123 149 +174 187 180 147 107 80 74 77 74 68 64 66 57 26 -22 -64 -92 -109 -115 -113 -110 -114 -123 -126 -120 -116 -121 -125 -111 -87 -74 -70 -48 -1 44 54 37 19 21 31 45 64 87 103 99 82 69 66 73 85 90 80 56 42 51 65 53 21 1 16 45 63 71 78 74 50 18 6 14 7 -30 -69 -86 -104 -154 -221 -257 -243 -223 -236 -270 -285 -276 -277 -301 -323 -308 -265 -233 -234 -255 -267 -251 -215 -172 -130 -98 -79 -71 -60 -34 0 31 60 98 146 188 208 211 224 250 268 263 246 242 258 270 265 241 214 194 180 172 169 166 165 166 165 151 127 107 106 118 115 90 60 48 56 58 34 -10 -51 -74 -79 -72 -53 -29 -12 -17 -37 -44 -27 2 20 26 33 44 44 29 13 13 19 13 -7 -31 -46 -56 -63 -65 -70 -81 -95 -99 -93 -100 -126 -153 -157 -139 -125 -122 -120 -109 -94 -83 -73 -64 -65 -76 -86 -83 -72 -61 -52 -28 16 64 99 115 117 108 93 85 88 94 83 47 3 -24 -31 -31 -46 -77 -111 -132 -136 -125 -107 -93 -92 -104 -118 -117 -101 -89 -91 -104 -110 -105 -94 -82 -73 -61 -50 -39 -32 -33 -41 -39 -13 18 27 6 -14 -2 30 47 41 39 58 82 100 124 173 226 243 224 211 225 244 236 211 210 237 256 240 206 185 179 165 131 94 68 51 30 3 -16 -21 -28 -55 -92 -110 -100 -87 -95 -124 -143 -140 -128 -120 -116 -102 -84 -73 -68 -67 -71 -82 -85 -60 -21 -12 -38 -64 -52 -20 -12 -39 -70 -68 -34 6 33 43 48 71 120 171 182 157 142 169 203 189 132 89 84 82 47 7 -1 13 -7 -76 -150 -187 -192 -209 -242 -269 -268 -249 -236 -237 -248 -255 -259 -264 -277 -290 -288 -267 -240 -226 -228 -223 -189 -135 -88 -58 -30 23 89 137 141 124 126 162 213 252 266 271 273 277 285 286 271 241 217 216 227 221 199 191 218 254 246 179 90 31 13 14 8 -10 -32 -51 -67 -86 -110 -142 +1 +138 108 85 69 47 25 34 65 65 -8 -113 -170 -141 -70 -25 -47 -114 -182 -199 -140 -31 60 81 44 -3 -23 -12 23 73 117 130 104 63 44 55 80 94 85 61 41 46 61 53 15 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -88 -94 -75 -54 -51 -51 -27 13 29 8 -16 -15 -3 -3 -6 1 10 1 -11 7 54 93 98 92 97 101 86 65 69 92 96 68 36 28 34 35 38 54 67 51 8 -14 10 58 80 60 26 17 42 70 62 19 -23 -23 6 25 16 4 18 40 34 -1 -26 -9 33 55 35 -9 -54 -92 -120 -135 -127 -99 -74 -83 -127 -171 -169 -116 -59 -50 -85 -119 -115 -90 -85 -109 -127 -112 -75 -51 -47 -37 -4 33 35 4 -19 9 78 141 167 162 +13 19 13 -7 -31 -46 -56 -63 -65 -70 -81 -95 -99 -93 -100 -126 -153 -157 -139 -125 -122 -120 -109 -94 -83 -73 -64 -65 -76 -86 -83 -72 -61 -52 -28 16 64 99 115 117 108 93 85 88 94 83 47 3 -24 -31 -31 -46 -77 -111 -132 -136 -125 -107 -93 -92 -104 -118 -117 -101 -89 -91 -104 -110 -105 -94 -82 -73 -61 -50 -39 -32 -33 -41 -39 -13 18 27 6 -14 -2 30 47 41 39 58 82 100 124 173 226 243 224 211 225 244 236 211 210 237 256 240 206 185 179 165 131 94 68 51 30 3 -16 -21 -28 -55 -92 -110 -100 -87 -95 -124 -143 -140 -128 -120 -116 -102 -84 -73 -68 -67 -71 -82 -85 -60 -21 -12 -38 -64 -52 -20 -12 -39 -70 -68 -34 6 33 43 48 71 120 171 182 157 142 169 203 189 132 89 84 82 47 7 -1 13 -7 -76 -150 -187 -192 -209 -242 -269 -268 -249 -236 -237 -248 -255 -259 -264 -277 -290 -288 -267 -240 -226 -228 -223 
-189 -135 -88 -58 -30 23 89 137 141 124 126 162 213 252 266 271 273 277 285 286 271 241 217 216 227 221 199 191 218 254 246 179 90 31 13 14 8 -10 -32 -51 -67 -86 -110 -142 -168 -173 -149 -119 -112 -130 -143 -118 -60 -3 27 32 43 69 99 113 113 119 149 199 241 250 227 202 195 195 173 123 79 71 96 108 77 22 -20 -32 -33 -45 -64 -85 -110 -150 -199 -228 -220 -198 -198 -225 -249 -249 -233 -226 -235 -246 -247 -243 -236 -218 -169 -98 -38 -21 -42 -65 -53 -5 49 72 60 36 29 51 94 127 130 101 77 86 123 149 138 108 85 69 47 25 34 65 65 -8 -113 -170 -141 -70 -25 -47 -114 -182 -199 -140 -31 60 81 44 -3 -23 -12 23 73 117 130 104 63 44 55 80 94 85 61 41 46 61 53 15 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -88 -94 -75 -54 -51 -51 -27 13 29 8 -16 -15 -3 -3 -6 1 10 1 -11 7 54 93 98 92 +1 +145 123 105 110 139 159 133 69 30 57 118 142 107 60 56 105 169 206 202 166 122 89 74 61 31 -16 -60 -97 -142 -197 -231 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -158 -189 -186 -137 -69 -23 -18 -38 -35 20 102 133 76 0 0 76 126 78 5 29 155 258 236 137 92 148 242 287 271 238 225 231 236 228 205 173 139 113 95 75 43 4 -20 -15 9 30 32 22 5 -21 -55 -67 -37 10 11 -57 -148 -193 -181 -164 -191 -247 -290 -305 -311 -329 -350 -350 -325 -298 -282 -265 -228 -182 -157 -159 -148 -84 -1 31 -6 -40 -1 85 122 81 23 17 49 71 81 114 160 168 133 120 172 230 206 111 55 93 156 148 79 47 82 113 79 +142 169 203 189 132 89 84 82 47 7 -1 13 -7 -76 -150 -187 -192 -209 -242 -269 -268 -249 -236 -237 -248 -255 -259 -264 -277 -290 -288 -267 -240 -226 -228 -223 -189 -135 -88 -58 -30 23 89 137 141 124 126 162 213 252 266 271 273 277 285 286 271 241 217 216 227 221 199 191 218 254 246 179 90 31 13 14 8 -10 -32 -51 -67 -86 -110 -142 -168 -173 -149 -119 -112 -130 -143 -118 -60 -3 27 32 43 69 99 113 113 119 149 199 241 250 227 202 195 195 173 123 79 71 96 108 77 22 -20 -32 -33 -45 -64 -85 -110 -150 -199 -228 -220 -198 -198 -225 -249 -249 -233 -226 -235 -246 -247 -243 -236 -218 -169 -98 -38 -21 -42 -65 -53 -5 49 72 60 36 29 51 94 127 130 101 77 86 123 149 138 108 85 69 47 25 34 65 65 -8 -113 -170 -141 -70 -25 -47 -114 -182 -199 -140 -31 60 81 44 -3 -23 -12 23 73 117 130 104 63 44 55 80 94 85 61 41 46 61 53 15 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -88 -94 -75 -54 -51 -51 -27 13 29 8 -16 -15 -3 -3 -6 1 10 1 -11 7 54 93 98 92 97 101 86 65 69 92 96 68 36 28 34 35 38 54 67 51 8 -14 10 58 80 60 26 17 42 70 62 19 -23 -23 6 25 16 4 18 40 34 -1 -26 -9 33 55 35 -9 -54 -92 -120 -135 -127 -99 -74 -83 -127 -171 -169 -116 -59 -50 -85 -119 -115 -90 -85 -109 -127 -112 -75 -51 -47 -37 -4 33 35 4 -19 9 78 141 167 162 145 123 105 110 139 159 133 69 30 57 118 142 107 60 56 105 169 206 202 166 122 89 74 61 31 -16 -60 -97 -142 -197 -231 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -158 -189 -186 -137 -69 -23 -18 -38 -35 20 102 133 76 0 0 76 126 78 5 29 155 258 236 137 92 148 242 287 271 +1 +22 19 63 68 -3 -79 -79 -24 14 11 19 61 89 53 -11 -21 40 94 77 12 -34 -43 -52 -71 -59 -1 53 43 -9 -35 -3 44 63 72 96 117 98 50 29 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 19 0 -8 37 101 126 106 80 66 54 28 9 9 20 24 20 19 21 17 4 -9 -9 6 34 62 72 46 -11 -65 -78 -48 -8 -5 -42 -83 -86 -59 -48 -76 -115 -120 -91 -66 -76 -100 -108 -101 -112 -145 -162 -142 -105 -90 -99 -96 -59 -11 10 -6 -31 -27 14 62 85 80 77 104 146 159 133 113 132 160 138 71 33 71 129 129 69 25 32 48 24 -17 -19 10 18 -11 -34 -16 11 -6 -58 -85 -65 -36 -45 -85 -111 -98 -63 -39 -31 -23 2 29 29 -4 -38 +138 108 85 69 47 25 34 65 65 
-8 -113 -170 -141 -70 -25 -47 -114 -182 -199 -140 -31 60 81 44 -3 -23 -12 23 73 117 130 104 63 44 55 80 94 85 61 41 46 61 53 15 -24 -30 -13 -14 -52 -95 -112 -99 -73 -53 -49 -64 -88 -94 -75 -54 -51 -51 -27 13 29 8 -16 -15 -3 -3 -6 1 10 1 -11 7 54 93 98 92 97 101 86 65 69 92 96 68 36 28 34 35 38 54 67 51 8 -14 10 58 80 60 26 17 42 70 62 19 -23 -23 6 25 16 4 18 40 34 -1 -26 -9 33 55 35 -9 -54 -92 -120 -135 -127 -99 -74 -83 -127 -171 -169 -116 -59 -50 -85 -119 -115 -90 -85 -109 -127 -112 -75 -51 -47 -37 -4 33 35 4 -19 9 78 141 167 162 145 123 105 110 139 159 133 69 30 57 118 142 107 60 56 105 169 206 202 166 122 89 74 61 31 -16 -60 -97 -142 -197 -231 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -158 -189 -186 -137 -69 -23 -18 -38 -35 20 102 133 76 0 0 76 126 78 5 29 155 258 236 137 92 148 242 287 271 238 225 231 236 228 205 173 139 113 95 75 43 4 -20 -15 9 30 32 22 5 -21 -55 -67 -37 10 11 -57 -148 -193 -181 -164 -191 -247 -290 -305 -311 -329 -350 -350 -325 -298 -282 -265 -228 -182 -157 -159 -148 -84 -1 31 -6 -40 -1 85 122 81 23 17 49 71 81 114 160 168 133 120 172 230 206 111 55 93 156 148 79 47 82 113 79 22 19 63 68 -3 -79 -79 -24 14 11 19 61 89 53 -11 -21 40 94 77 12 -34 -43 -52 -71 -59 -1 53 43 -9 -35 -3 44 63 72 96 117 98 50 29 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 19 0 -8 37 101 126 106 80 66 54 28 9 9 20 24 20 19 21 17 4 -9 -9 6 34 62 72 46 -11 -65 -78 +1 +-34 11 60 80 84 91 93 71 36 19 30 44 34 10 -5 9 58 130 185 171 81 -10 -19 47 88 37 -63 -105 -54 14 12 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -164 -188 -137 -56 -24 -43 -43 11 74 99 96 113 152 167 143 120 137 169 161 94 14 -17 4 38 50 44 45 57 59 45 34 51 81 94 84 73 66 40 -17 -70 -75 -48 -42 -84 -128 -131 -95 -68 -69 -77 -62 -30 3 23 36 53 72 87 90 77 55 30 5 -7 1 23 33 17 -14 -34 -38 -37 -28 -5 23 21 -18 -45 -10 59 83 26 -44 -50 3 42 30 4 11 39 40 -2 -43 -42 -8 21 30 31 44 61 68 63 57 53 45 31 20 18 22 20 +145 123 105 110 139 159 133 69 30 57 118 142 107 60 56 105 169 206 202 166 122 89 74 61 31 -16 -60 -97 -142 -197 -231 -219 -179 -157 -169 -175 -140 -81 -44 -51 -76 -88 -84 -80 -84 -102 -127 -143 -140 -126 -127 -158 -189 -186 -137 -69 -23 -18 -38 -35 20 102 133 76 0 0 76 126 78 5 29 155 258 236 137 92 148 242 287 271 238 225 231 236 228 205 173 139 113 95 75 43 4 -20 -15 9 30 32 22 5 -21 -55 -67 -37 10 11 -57 -148 -193 -181 -164 -191 -247 -290 -305 -311 -329 -350 -350 -325 -298 -282 -265 -228 -182 -157 -159 -148 -84 -1 31 -6 -40 -1 85 122 81 23 17 49 71 81 114 160 168 133 120 172 230 206 111 55 93 156 148 79 47 82 113 79 22 19 63 68 -3 -79 -79 -24 14 11 19 61 89 53 -11 -21 40 94 77 12 -34 -43 -52 -71 -59 -1 53 43 -9 -35 -3 44 63 72 96 117 98 50 29 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 19 0 -8 37 101 126 106 80 66 54 28 9 9 20 24 20 19 21 17 4 -9 -9 6 34 62 72 46 -11 -65 -78 -48 -8 -5 -42 -83 -86 -59 -48 -76 -115 -120 -91 -66 -76 -100 -108 -101 -112 -145 -162 -142 -105 -90 -99 -96 -59 -11 10 -6 -31 -27 14 62 85 80 77 104 146 159 133 113 132 160 138 71 33 71 129 129 69 25 32 48 24 -17 -19 10 18 -11 -34 -16 11 -6 -58 -85 -65 -36 -45 -85 -111 -98 -63 -39 -31 -23 2 29 29 -4 -38 -34 11 60 80 84 91 93 71 36 19 30 44 34 10 -5 9 58 130 185 171 81 -10 -19 47 88 37 -63 -105 -54 14 12 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -164 -188 -137 -56 -24 -43 -43 11 74 99 96 113 152 167 143 120 137 169 161 94 14 -17 4 38 50 44 45 57 59 45 +1 +16 18 26 36 47 58 64 46 2 -37 -40 -7 26 30 -2 -42 -64 
-62 -51 -56 -77 -96 -92 -64 -44 -44 -52 -48 -30 -12 2 10 5 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 36 61 62 46 42 54 63 61 64 82 89 62 23 4 12 12 -17 -50 -59 -51 -61 -101 -136 -125 -71 -21 -8 -34 -61 -64 -45 -31 -41 -58 -57 -33 -6 3 5 21 51 68 60 53 72 106 123 112 96 91 87 70 51 55 77 81 45 -8 -40 -44 -49 -75 -101 -104 -82 -67 -84 -109 -102 -61 -28 -42 -88 -114 -97 -68 -76 -113 -127 -87 -19 20 14 -6 -6 21 48 49 38 42 76 107 94 38 -6 8 57 70 19 -44 -58 -22 13 8 -19 -31 -17 -5 -12 -31 -35 -11 23 29 -5 -49 +22 19 63 68 -3 -79 -79 -24 14 11 19 61 89 53 -11 -21 40 94 77 12 -34 -43 -52 -71 -59 -1 53 43 -9 -35 -3 44 63 72 96 117 98 50 29 39 28 -33 -91 -80 -24 -8 -60 -109 -85 -16 19 0 -8 37 101 126 106 80 66 54 28 9 9 20 24 20 19 21 17 4 -9 -9 6 34 62 72 46 -11 -65 -78 -48 -8 -5 -42 -83 -86 -59 -48 -76 -115 -120 -91 -66 -76 -100 -108 -101 -112 -145 -162 -142 -105 -90 -99 -96 -59 -11 10 -6 -31 -27 14 62 85 80 77 104 146 159 133 113 132 160 138 71 33 71 129 129 69 25 32 48 24 -17 -19 10 18 -11 -34 -16 11 -6 -58 -85 -65 -36 -45 -85 -111 -98 -63 -39 -31 -23 2 29 29 -4 -38 -34 11 60 80 84 91 93 71 36 19 30 44 34 10 -5 9 58 130 185 171 81 -10 -19 47 88 37 -63 -105 -54 14 12 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -164 -188 -137 -56 -24 -43 -43 11 74 99 96 113 152 167 143 120 137 169 161 94 14 -17 4 38 50 44 45 57 59 45 34 51 81 94 84 73 66 40 -17 -70 -75 -48 -42 -84 -128 -131 -95 -68 -69 -77 -62 -30 3 23 36 53 72 87 90 77 55 30 5 -7 1 23 33 17 -14 -34 -38 -37 -28 -5 23 21 -18 -45 -10 59 83 26 -44 -50 3 42 30 4 11 39 40 -2 -43 -42 -8 21 30 31 44 61 68 63 57 53 45 31 20 18 22 20 16 18 26 36 47 58 64 46 2 -37 -40 -7 26 30 -2 -42 -64 -62 -51 -56 -77 -96 -92 -64 -44 -44 -52 -48 -30 -12 2 10 5 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 36 61 62 46 42 54 63 61 64 82 89 62 23 4 12 12 -17 -50 -59 -51 -61 -101 -136 -125 -71 -21 -8 -34 -61 -64 -45 -31 -41 -58 -57 -33 +1 +-56 -22 18 29 25 43 86 111 94 52 38 68 105 113 95 85 101 133 145 125 90 66 49 30 10 9 30 48 34 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 46 87 101 92 77 69 71 83 99 114 117 94 56 28 19 19 6 -25 -51 -52 -35 -19 -23 -47 -74 -90 -91 -86 -89 -97 -101 -97 -96 -106 -122 -127 -124 -123 -116 -91 -56 -48 -76 -112 -116 -89 -67 -71 -88 -95 -94 -97 -107 -110 -98 -72 -40 -14 -5 -8 -5 19 56 82 91 88 83 88 105 139 168 157 101 48 50 99 134 120 81 65 70 65 37 20 31 53 58 49 55 73 69 31 -4 1 33 51 32 1 -9 -7 -13 -25 -27 -18 -22 -49 -72 -64 -35 -16 -15 -12 13 43 +-34 11 60 80 84 91 93 71 36 19 30 44 34 10 -5 9 58 130 185 171 81 -10 -19 47 88 37 -63 -105 -54 14 12 -60 -125 -132 -102 -90 -118 -158 -176 -172 -170 -163 -137 -101 -94 -128 -160 -146 -110 -115 -164 -188 -137 -56 -24 -43 -43 11 74 99 96 113 152 167 143 120 137 169 161 94 14 -17 4 38 50 44 45 57 59 45 34 51 81 94 84 73 66 40 -17 -70 -75 -48 -42 -84 -128 -131 -95 -68 -69 -77 -62 -30 3 23 36 53 72 87 90 77 55 30 5 -7 1 23 33 17 -14 -34 -38 -37 -28 -5 23 21 -18 -45 -10 59 83 26 -44 -50 3 42 30 4 11 39 40 -2 -43 -42 -8 21 30 31 44 61 68 63 57 53 45 31 20 18 22 20 16 18 26 36 47 58 64 46 2 -37 -40 -7 26 30 -2 -42 -64 -62 -51 -56 -77 -96 -92 -64 -44 -44 -52 -48 -30 -12 2 10 5 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 36 61 62 46 42 54 63 61 64 82 89 62 23 4 12 12 -17 -50 -59 -51 -61 -101 -136 -125 -71 -21 -8 -34 -61 -64 -45 -31 -41 -58 -57 -33 -6 3 5 21 51 68 60 53 72 106 123 112 96 91 87 70 51 55 77 81 45 -8 -40 -44 -49 -75 -101 -104 -82 -67 -84 -109 -102 -61 -28 -42 -88 -114 -97 -68 -76 -113 -127 -87 -19 20 14 -6 -6 21 48 49 38 42 76 
107 94 38 -6 8 57 70 19 -44 -58 -22 13 8 -19 -31 -17 -5 -12 -31 -35 -11 23 29 -5 -49 -56 -22 18 29 25 43 86 111 94 52 38 68 105 113 95 85 101 133 145 125 90 66 49 30 10 9 30 48 34 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 46 87 101 92 77 69 71 83 99 114 117 94 56 28 19 19 6 -25 -51 -52 -35 -19 -23 -47 -74 -90 -91 -86 -89 -97 -101 -97 -96 -106 +1 +57 58 61 67 65 60 71 93 87 32 -39 -63 -26 18 18 -15 -32 -14 5 -17 -70 -102 -79 -23 19 23 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 31 50 34 25 54 100 114 90 67 73 90 80 37 0 -7 8 19 10 -8 -11 14 58 94 104 97 86 78 66 60 78 116 139 114 49 -18 -53 -47 -10 29 37 4 -46 -66 -45 -8 17 31 44 49 39 32 52 77 63 -1 -72 -95 -77 -62 -68 -74 -61 -44 -43 -50 -34 7 37 36 25 32 54 60 42 24 24 20 -13 -57 -73 -55 -42 -55 -76 -75 -58 -52 -59 -56 -42 -38 -52 -64 -54 -38 -38 -36 -12 31 50 31 15 41 102 146 143 116 +16 18 26 36 47 58 64 46 2 -37 -40 -7 26 30 -2 -42 -64 -62 -51 -56 -77 -96 -92 -64 -44 -44 -52 -48 -30 -12 2 10 5 -13 -30 -28 -13 -7 -26 -51 -62 -56 -36 -3 36 61 62 46 42 54 63 61 64 82 89 62 23 4 12 12 -17 -50 -59 -51 -61 -101 -136 -125 -71 -21 -8 -34 -61 -64 -45 -31 -41 -58 -57 -33 -6 3 5 21 51 68 60 53 72 106 123 112 96 91 87 70 51 55 77 81 45 -8 -40 -44 -49 -75 -101 -104 -82 -67 -84 -109 -102 -61 -28 -42 -88 -114 -97 -68 -76 -113 -127 -87 -19 20 14 -6 -6 21 48 49 38 42 76 107 94 38 -6 8 57 70 19 -44 -58 -22 13 8 -19 -31 -17 -5 -12 -31 -35 -11 23 29 -5 -49 -56 -22 18 29 25 43 86 111 94 52 38 68 105 113 95 85 101 133 145 125 90 66 49 30 10 9 30 48 34 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 46 87 101 92 77 69 71 83 99 114 117 94 56 28 19 19 6 -25 -51 -52 -35 -19 -23 -47 -74 -90 -91 -86 -89 -97 -101 -97 -96 -106 -122 -127 -124 -123 -116 -91 -56 -48 -76 -112 -116 -89 -67 -71 -88 -95 -94 -97 -107 -110 -98 -72 -40 -14 -5 -8 -5 19 56 82 91 88 83 88 105 139 168 157 101 48 50 99 134 120 81 65 70 65 37 20 31 53 58 49 55 73 69 31 -4 1 33 51 32 1 -9 -7 -13 -25 -27 -18 -22 -49 -72 -64 -35 -16 -15 -12 13 43 57 58 61 67 65 60 71 93 87 32 -39 -63 -26 18 18 -15 -32 -14 5 -17 -70 -102 -79 -23 19 23 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 31 50 34 25 54 100 114 90 67 73 90 80 37 0 -7 8 19 10 -8 -11 14 58 94 104 97 86 78 +1 +99 105 121 135 150 161 149 100 34 1 18 47 46 13 -15 -8 16 22 2 -28 -46 -47 -34 -18 -21 -46 -63 -43 0 15 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 31 47 38 10 -8 7 37 62 80 97 113 122 127 125 106 62 13 -21 -32 -36 -50 -68 -88 -118 -152 -174 -165 -128 -92 -79 -86 -94 -94 -92 -101 -114 -113 -83 -41 -16 -24 -56 -78 -76 -59 -42 -34 -32 -25 -5 32 74 114 139 153 161 168 179 191 193 168 120 69 41 35 39 35 21 -3 -24 -34 -25 -16 -34 -73 -100 -94 -68 -56 -63 -64 -48 -29 -26 -35 -40 -37 -31 -25 -17 -8 -8 -13 -13 -5 5 7 2 -7 -16 -16 6 45 75 71 46 34 43 48 28 2 -1 13 19 1 -20 -30 -39 -59 +-56 -22 18 29 25 43 86 111 94 52 38 68 105 113 95 85 101 133 145 125 90 66 49 30 10 9 30 48 34 -3 -28 -31 -34 -55 -74 -67 -44 -39 -58 -75 -66 -44 -38 -45 -42 -7 46 87 101 92 77 69 71 83 99 114 117 94 56 28 19 19 6 -25 -51 -52 -35 -19 -23 -47 -74 -90 -91 -86 -89 -97 -101 -97 -96 -106 -122 -127 -124 -123 -116 -91 -56 -48 -76 -112 -116 -89 -67 -71 -88 -95 -94 -97 -107 -110 -98 -72 -40 -14 -5 -8 -5 19 56 82 91 88 83 88 105 139 168 157 101 48 50 99 134 120 81 65 70 65 37 20 31 53 58 49 55 73 69 31 -4 1 33 51 32 1 -9 -7 -13 -25 -27 -18 -22 -49 -72 -64 -35 -16 -15 -12 13 43 57 58 61 67 65 60 
71 93 87 32 -39 -63 -26 18 18 -15 -32 -14 5 -17 -70 -102 -79 -23 19 23 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 31 50 34 25 54 100 114 90 67 73 90 80 37 0 -7 8 19 10 -8 -11 14 58 94 104 97 86 78 66 60 78 116 139 114 49 -18 -53 -47 -10 29 37 4 -46 -66 -45 -8 17 31 44 49 39 32 52 77 63 -1 -72 -95 -77 -62 -68 -74 -61 -44 -43 -50 -34 7 37 36 25 32 54 60 42 24 24 20 -13 -57 -73 -55 -42 -55 -76 -75 -58 -52 -59 -56 -42 -38 -52 -64 -54 -38 -38 -36 -12 31 50 31 15 41 102 146 143 116 99 105 121 135 150 161 149 100 34 1 18 47 46 13 -15 -8 16 22 2 -28 -46 -47 -34 -18 -21 -46 -63 -43 0 15 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 31 47 38 10 -8 7 37 62 80 97 113 122 127 125 106 62 13 -21 -32 -36 -50 -68 -88 -118 -152 -174 -165 -128 -92 -79 -86 -94 -94 -92 -101 -114 -113 -83 -41 +1 +-71 -55 -28 -25 -50 -63 -43 -23 -51 -120 -170 -162 -120 -89 -79 -65 -43 -35 -55 -81 -76 -35 24 65 78 70 63 77 105 124 119 103 101 119 141 150 144 132 118 110 109 116 128 142 164 188 202 189 154 116 96 86 71 39 -7 -46 -71 -84 -91 -104 -129 -170 -217 -248 -250 -237 -241 -267 -283 -263 -226 -216 -244 -271 -257 -217 -199 -212 -221 -194 -147 -113 -92 -60 -2 68 133 187 229 256 274 299 337 372 391 401 419 440 439 403 355 315 274 218 156 118 108 96 57 1 -42 -66 -94 -138 -185 -212 -220 -233 -257 -286 -299 -298 -300 -319 -349 -363 -351 -316 -280 -256 -241 -220 -176 -111 -37 30 85 125 148 159 175 212 262 306 330 347 378 420 448 441 405 356 316 293 287 283 257 199 127 67 +57 58 61 67 65 60 71 93 87 32 -39 -63 -26 18 18 -15 -32 -14 5 -17 -70 -102 -79 -23 19 23 0 -18 -24 -28 -34 -47 -60 -78 -100 -120 -128 -128 -134 -150 -171 -181 -170 -147 -122 -102 -81 -56 -40 -44 -60 -56 -18 31 50 34 25 54 100 114 90 67 73 90 80 37 0 -7 8 19 10 -8 -11 14 58 94 104 97 86 78 66 60 78 116 139 114 49 -18 -53 -47 -10 29 37 4 -46 -66 -45 -8 17 31 44 49 39 32 52 77 63 -1 -72 -95 -77 -62 -68 -74 -61 -44 -43 -50 -34 7 37 36 25 32 54 60 42 24 24 20 -13 -57 -73 -55 -42 -55 -76 -75 -58 -52 -59 -56 -42 -38 -52 -64 -54 -38 -38 -36 -12 31 50 31 15 41 102 146 143 116 99 105 121 135 150 161 149 100 34 1 18 47 46 13 -15 -8 16 22 2 -28 -46 -47 -34 -18 -21 -46 -63 -43 0 15 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 31 47 38 10 -8 7 37 62 80 97 113 122 127 125 106 62 13 -21 -32 -36 -50 -68 -88 -118 -152 -174 -165 -128 -92 -79 -86 -94 -94 -92 -101 -114 -113 -83 -41 -16 -24 -56 -78 -76 -59 -42 -34 -32 -25 -5 32 74 114 139 153 161 168 179 191 193 168 120 69 41 35 39 35 21 -3 -24 -34 -25 -16 -34 -73 -100 -94 -68 -56 -63 -64 -48 -29 -26 -35 -40 -37 -31 -25 -17 -8 -8 -13 -13 -5 5 7 2 -7 -16 -16 6 45 75 71 46 34 43 48 28 2 -1 13 19 1 -20 -30 -39 -59 -71 -55 -28 -25 -50 -63 -43 -23 -51 -120 -170 -162 -120 -89 -79 -65 -43 -35 -55 -81 -76 -35 24 65 78 70 63 77 105 124 119 103 101 119 141 150 144 132 118 110 109 116 128 142 164 188 202 189 154 116 96 86 71 39 -7 -46 -71 -84 -91 -104 -129 -170 -217 -248 -250 -237 -241 -267 -283 -263 -226 -216 -244 -271 -257 -217 -199 -212 -221 -194 +1 +28 -10 -63 -129 -189 -235 -259 -267 -279 -312 -362 -400 -403 -379 -362 -369 -385 -380 -358 -348 -358 -362 -329 -266 -219 -211 -211 -169 -84 -11 2 -28 -34 18 91 121 108 97 127 181 214 218 218 232 254 264 262 264 282 305 314 305 290 279 270 250 214 176 158 158 153 127 90 69 69 65 35 -8 -39 -55 -81 -127 -173 -194 -191 -182 -181 -191 -200 -208 -209 -200 -182 -163 -146 -126 -100 -67 -37 -14 -1 2 -8 -22 -17 16 65 99 105 101 108 126 122 80 33 31 84 144 160 124 77 43 21 -3 -22 -26 -32 -62 -116 -162 -172 -147 -118 -113 -139 -178 -209 -215 
-203 -183 -154 -113 -68 -41 -35 -35 -29 -24 -27 -16 27 87 120 106 78 66 70 68 63 75 105 131 138 131 130 138 146 155 164 163 +99 105 121 135 150 161 149 100 34 1 18 47 46 13 -15 -8 16 22 2 -28 -46 -47 -34 -18 -21 -46 -63 -43 0 15 -15 -54 -64 -54 -58 -75 -73 -39 -3 10 14 31 47 38 10 -8 7 37 62 80 97 113 122 127 125 106 62 13 -21 -32 -36 -50 -68 -88 -118 -152 -174 -165 -128 -92 -79 -86 -94 -94 -92 -101 -114 -113 -83 -41 -16 -24 -56 -78 -76 -59 -42 -34 -32 -25 -5 32 74 114 139 153 161 168 179 191 193 168 120 69 41 35 39 35 21 -3 -24 -34 -25 -16 -34 -73 -100 -94 -68 -56 -63 -64 -48 -29 -26 -35 -40 -37 -31 -25 -17 -8 -8 -13 -13 -5 5 7 2 -7 -16 -16 6 45 75 71 46 34 43 48 28 2 -1 13 19 1 -20 -30 -39 -59 -71 -55 -28 -25 -50 -63 -43 -23 -51 -120 -170 -162 -120 -89 -79 -65 -43 -35 -55 -81 -76 -35 24 65 78 70 63 77 105 124 119 103 101 119 141 150 144 132 118 110 109 116 128 142 164 188 202 189 154 116 96 86 71 39 -7 -46 -71 -84 -91 -104 -129 -170 -217 -248 -250 -237 -241 -267 -283 -263 -226 -216 -244 -271 -257 -217 -199 -212 -221 -194 -147 -113 -92 -60 -2 68 133 187 229 256 274 299 337 372 391 401 419 440 439 403 355 315 274 218 156 118 108 96 57 1 -42 -66 -94 -138 -185 -212 -220 -233 -257 -286 -299 -298 -300 -319 -349 -363 -351 -316 -280 -256 -241 -220 -176 -111 -37 30 85 125 148 159 175 212 262 306 330 347 378 420 448 441 405 356 316 293 287 283 257 199 127 67 28 -10 -63 -129 -189 -235 -259 -267 -279 -312 -362 -400 -403 -379 -362 -369 -385 -380 -358 -348 -358 -362 -329 -266 -219 -211 -211 -169 -84 -11 2 -28 -34 18 91 121 108 97 127 181 214 218 218 232 254 264 262 264 282 305 314 305 290 279 270 250 214 176 158 158 153 127 90 69 69 65 35 -8 -39 -55 -81 -127 -173 -194 -191 -182 -181 -191 -200 -208 +1 +138 97 61 45 47 50 44 31 23 21 17 -2 -33 -59 -72 -80 -91 -106 -117 -127 -141 -154 -159 -151 -137 -110 -72 -42 -44 -68 -79 -59 -31 -16 9 58 110 118 87 74 96 116 103 88 120 184 221 216 201 197 181 136 93 88 107 98 51 7 -3 6 -1 -25 -46 -47 -38 -34 -41 -55 -65 -63 -55 -50 -60 -72 -68 -41 -8 5 -3 -13 -9 11 30 28 -2 -42 -66 -58 -26 6 20 17 1 -30 -64 -82 -62 -19 16 20 9 7 17 20 10 -2 -13 -34 -66 -90 -92 -94 -117 -155 -174 -159 -139 -142 -159 -172 -171 -161 -152 -137 -118 -104 -95 -82 -64 -52 -45 -31 -7 12 10 -8 -16 -8 9 24 42 70 105 133 141 132 119 111 109 100 82 67 71 94 109 99 74 49 25 -10 +-71 -55 -28 -25 -50 -63 -43 -23 -51 -120 -170 -162 -120 -89 -79 -65 -43 -35 -55 -81 -76 -35 24 65 78 70 63 77 105 124 119 103 101 119 141 150 144 132 118 110 109 116 128 142 164 188 202 189 154 116 96 86 71 39 -7 -46 -71 -84 -91 -104 -129 -170 -217 -248 -250 -237 -241 -267 -283 -263 -226 -216 -244 -271 -257 -217 -199 -212 -221 -194 -147 -113 -92 -60 -2 68 133 187 229 256 274 299 337 372 391 401 419 440 439 403 355 315 274 218 156 118 108 96 57 1 -42 -66 -94 -138 -185 -212 -220 -233 -257 -286 -299 -298 -300 -319 -349 -363 -351 -316 -280 -256 -241 -220 -176 -111 -37 30 85 125 148 159 175 212 262 306 330 347 378 420 448 441 405 356 316 293 287 283 257 199 127 67 28 -10 -63 -129 -189 -235 -259 -267 -279 -312 -362 -400 -403 -379 -362 -369 -385 -380 -358 -348 -358 -362 -329 -266 -219 -211 -211 -169 -84 -11 2 -28 -34 18 91 121 108 97 127 181 214 218 218 232 254 264 262 264 282 305 314 305 290 279 270 250 214 176 158 158 153 127 90 69 69 65 35 -8 -39 -55 -81 -127 -173 -194 -191 -182 -181 -191 -200 -208 -209 -200 -182 -163 -146 -126 -100 -67 -37 -14 -1 2 -8 -22 -17 16 65 99 105 101 108 126 122 80 33 31 84 144 160 124 77 43 21 -3 -22 -26 -32 -62 -116 -162 -172 -147 -118 -113 -139 -178 -209 -215 -203 -183 -154 -113 -68 -41 -35 -35 
-29 -24 -27 -16 27 87 120 106 78 66 70 68 63 75 105 131 138 131 130 138 146 155 164 163 138 97 61 45 47 50 44 31 23 21 17 -2 -33 -59 -72 -80 -91 -106 -117 -127 -141 -154 -159 -151 -137 -110 -72 -42 -44 -68 -79 -59 -31 -16 9 58 110 118 87 74 96 116 103 88 120 184 221 216 201 197 181 136 93 88 107 98 51 7 -3 6 -1 -25 -46 -47 -38 -34 -41 -55 -65 -63 -55 -50 -60 -72 -68 -41 -8 5 -3 -13 +1 +-39 -46 -33 -27 -38 -44 -26 -6 -18 -56 -76 -58 -38 -49 -79 -78 -32 13 16 -9 -12 30 89 124 125 116 125 160 198 201 163 116 100 123 149 147 127 115 118 105 65 21 7 15 15 -3 -9 7 17 -6 -42 -41 0 34 17 -26 -39 -11 14 5 -16 -9 23 47 42 24 11 -1 -21 -42 -58 -82 -123 -168 -183 -163 -125 -101 -99 -115 -138 -157 -158 -149 -148 -163 -172 -154 -109 -68 -49 -43 -45 -48 -54 -52 -32 -8 1 -8 -10 13 58 98 116 127 145 170 179 158 125 104 96 83 56 38 46 71 80 61 33 14 0 -22 -49 -59 -46 -25 -21 -36 -56 -63 -56 -45 -43 -56 -83 -117 -158 -193 -204 -189 -157 -130 -111 -83 -41 -5 10 16 35 66 89 95 104 132 170 199 223 251 +28 -10 -63 -129 -189 -235 -259 -267 -279 -312 -362 -400 -403 -379 -362 -369 -385 -380 -358 -348 -358 -362 -329 -266 -219 -211 -211 -169 -84 -11 2 -28 -34 18 91 121 108 97 127 181 214 218 218 232 254 264 262 264 282 305 314 305 290 279 270 250 214 176 158 158 153 127 90 69 69 65 35 -8 -39 -55 -81 -127 -173 -194 -191 -182 -181 -191 -200 -208 -209 -200 -182 -163 -146 -126 -100 -67 -37 -14 -1 2 -8 -22 -17 16 65 99 105 101 108 126 122 80 33 31 84 144 160 124 77 43 21 -3 -22 -26 -32 -62 -116 -162 -172 -147 -118 -113 -139 -178 -209 -215 -203 -183 -154 -113 -68 -41 -35 -35 -29 -24 -27 -16 27 87 120 106 78 66 70 68 63 75 105 131 138 131 130 138 146 155 164 163 138 97 61 45 47 50 44 31 23 21 17 -2 -33 -59 -72 -80 -91 -106 -117 -127 -141 -154 -159 -151 -137 -110 -72 -42 -44 -68 -79 -59 -31 -16 9 58 110 118 87 74 96 116 103 88 120 184 221 216 201 197 181 136 93 88 107 98 51 7 -3 6 -1 -25 -46 -47 -38 -34 -41 -55 -65 -63 -55 -50 -60 -72 -68 -41 -8 5 -3 -13 -9 11 30 28 -2 -42 -66 -58 -26 6 20 17 1 -30 -64 -82 -62 -19 16 20 9 7 17 20 10 -2 -13 -34 -66 -90 -92 -94 -117 -155 -174 -159 -139 -142 -159 -172 -171 -161 -152 -137 -118 -104 -95 -82 -64 -52 -45 -31 -7 12 10 -8 -16 -8 9 24 42 70 105 133 141 132 119 111 109 100 82 67 71 94 109 99 74 49 25 -10 -39 -46 -33 -27 -38 -44 -26 -6 -18 -56 -76 -58 -38 -49 -79 -78 -32 13 16 -9 -12 30 89 124 125 116 125 160 198 201 163 116 100 123 149 147 127 115 118 105 65 21 7 15 15 -3 -9 7 17 -6 -42 -41 0 34 17 -26 -39 -11 14 5 -16 -9 23 47 42 24 11 -1 -21 -42 -58 -82 -123 -168 -183 -163 -125 -101 -99 -115 +1 +278 285 268 247 241 247 250 244 231 208 174 133 96 74 68 68 54 12 -51 -109 -134 -140 -157 -196 -229 -229 -197 -158 -138 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 1 23 42 64 74 64 58 68 79 64 41 43 75 91 66 25 9 14 7 -19 -29 -4 22 4 -47 -80 -78 -72 -94 -125 -128 -106 -91 -103 -121 -119 -98 -83 -77 -74 -65 -61 -63 -58 -34 -2 11 -6 -36 -51 -43 -24 -5 12 31 45 41 21 -2 -16 -20 -24 -35 -55 -83 -103 -97 -69 -45 -43 -42 -17 25 48 30 5 13 46 60 46 35 59 104 131 126 120 132 154 168 168 173 193 218 231 226 204 176 153 142 145 152 147 125 95 79 77 77 60 24 -6 -13 3 23 29 19 -1 +138 97 61 45 47 50 44 31 23 21 17 -2 -33 -59 -72 -80 -91 -106 -117 -127 -141 -154 -159 -151 -137 -110 -72 -42 -44 -68 -79 -59 -31 -16 9 58 110 118 87 74 96 116 103 88 120 184 221 216 201 197 181 136 93 88 107 98 51 7 -3 6 -1 -25 -46 -47 -38 -34 -41 -55 -65 -63 -55 -50 -60 -72 -68 -41 -8 5 -3 -13 -9 11 30 28 -2 -42 -66 -58 -26 6 20 17 1 -30 -64 -82 -62 -19 16 20 9 7 17 20 10 -2 -13 -34 -66 -90 -92 -94 -117 
-155 -174 -159 -139 -142 -159 -172 -171 -161 -152 -137 -118 -104 -95 -82 -64 -52 -45 -31 -7 12 10 -8 -16 -8 9 24 42 70 105 133 141 132 119 111 109 100 82 67 71 94 109 99 74 49 25 -10 -39 -46 -33 -27 -38 -44 -26 -6 -18 -56 -76 -58 -38 -49 -79 -78 -32 13 16 -9 -12 30 89 124 125 116 125 160 198 201 163 116 100 123 149 147 127 115 118 105 65 21 7 15 15 -3 -9 7 17 -6 -42 -41 0 34 17 -26 -39 -11 14 5 -16 -9 23 47 42 24 11 -1 -21 -42 -58 -82 -123 -168 -183 -163 -125 -101 -99 -115 -138 -157 -158 -149 -148 -163 -172 -154 -109 -68 -49 -43 -45 -48 -54 -52 -32 -8 1 -8 -10 13 58 98 116 127 145 170 179 158 125 104 96 83 56 38 46 71 80 61 33 14 0 -22 -49 -59 -46 -25 -21 -36 -56 -63 -56 -45 -43 -56 -83 -117 -158 -193 -204 -189 -157 -130 -111 -83 -41 -5 10 16 35 66 89 95 104 132 170 199 223 251 278 285 268 247 241 247 250 244 231 208 174 133 96 74 68 68 54 12 -51 -109 -134 -140 -157 -196 -229 -229 -197 -158 -138 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 1 23 42 64 74 64 58 68 79 64 41 43 75 91 66 25 9 14 7 -19 -29 -4 22 4 -47 -80 -78 -72 -94 -125 -128 -106 -91 -103 -121 -119 +1 +-15 -15 -5 -3 -19 -47 -74 -100 -128 -158 -175 -173 -169 -182 -216 -243 -244 -225 -197 -173 -153 -135 -121 -109 -98 -90 -90 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 5 37 68 96 115 117 99 76 70 85 100 97 85 81 83 79 54 13 -30 -61 -81 -86 -78 -66 -60 -70 -94 -128 -152 -152 -136 -127 -128 -115 -74 -32 -24 -49 -58 -25 16 18 -9 -14 23 63 61 29 16 38 68 78 77 94 140 194 236 255 266 282 303 317 310 281 249 225 204 180 161 164 179 176 140 90 59 43 19 -22 -63 -86 -99 -112 -122 -123 -129 -155 -192 -207 -193 -186 -206 -240 -254 -243 -235 -229 -197 -130 -64 -39 -43 -31 15 70 101 106 103 101 97 94 102 120 134 134 126 120 115 105 89 70 55 52 63 +-39 -46 -33 -27 -38 -44 -26 -6 -18 -56 -76 -58 -38 -49 -79 -78 -32 13 16 -9 -12 30 89 124 125 116 125 160 198 201 163 116 100 123 149 147 127 115 118 105 65 21 7 15 15 -3 -9 7 17 -6 -42 -41 0 34 17 -26 -39 -11 14 5 -16 -9 23 47 42 24 11 -1 -21 -42 -58 -82 -123 -168 -183 -163 -125 -101 -99 -115 -138 -157 -158 -149 -148 -163 -172 -154 -109 -68 -49 -43 -45 -48 -54 -52 -32 -8 1 -8 -10 13 58 98 116 127 145 170 179 158 125 104 96 83 56 38 46 71 80 61 33 14 0 -22 -49 -59 -46 -25 -21 -36 -56 -63 -56 -45 -43 -56 -83 -117 -158 -193 -204 -189 -157 -130 -111 -83 -41 -5 10 16 35 66 89 95 104 132 170 199 223 251 278 285 268 247 241 247 250 244 231 208 174 133 96 74 68 68 54 12 -51 -109 -134 -140 -157 -196 -229 -229 -197 -158 -138 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 1 23 42 64 74 64 58 68 79 64 41 43 75 91 66 25 9 14 7 -19 -29 -4 22 4 -47 -80 -78 -72 -94 -125 -128 -106 -91 -103 -121 -119 -98 -83 -77 -74 -65 -61 -63 -58 -34 -2 11 -6 -36 -51 -43 -24 -5 12 31 45 41 21 -2 -16 -20 -24 -35 -55 -83 -103 -97 -69 -45 -43 -42 -17 25 48 30 5 13 46 60 46 35 59 104 131 126 120 132 154 168 168 173 193 218 231 226 204 176 153 142 145 152 147 125 95 79 77 77 60 24 -6 -13 3 23 29 19 -1 -15 -15 -5 -3 -19 -47 -74 -100 -128 -158 -175 -173 -169 -182 -216 -243 -244 -225 -197 -173 -153 -135 -121 -109 -98 -90 -90 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 5 37 68 96 115 117 99 76 70 85 100 97 85 81 83 79 54 13 -30 -61 -81 -86 -78 -66 -60 -70 -94 -128 -152 -152 -136 -127 -128 -115 -74 -32 -24 -49 -58 -25 16 18 -9 +1 +75 73 46 11 -7 5 29 31 -5 -61 -100 -107 -106 -132 -174 -198 -177 -129 -100 -100 -108 -105 -90 -79 -76 -78 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 4 23 33 40 47 52 49 41 30 27 34 52 77 98 108 111 116 130 143 147 146 160 198 
236 241 214 194 202 219 206 168 140 144 157 145 112 85 75 72 68 59 50 29 -2 -29 -33 -23 -13 -15 -34 -68 -113 -149 -158 -141 -122 -115 -113 -105 -90 -79 -83 -98 -107 -102 -84 -68 -65 -64 -48 -14 15 22 16 15 27 39 44 48 52 51 46 53 73 87 78 63 65 72 46 -15 -64 -65 -40 -42 -77 -107 -107 -96 -114 -160 -207 -236 -246 -249 -251 -251 -255 -256 -250 -237 -224 +278 285 268 247 241 247 250 244 231 208 174 133 96 74 68 68 54 12 -51 -109 -134 -140 -157 -196 -229 -229 -197 -158 -138 -140 -151 -153 -135 -103 -80 -77 -83 -78 -64 -67 -88 -100 -78 -36 1 23 42 64 74 64 58 68 79 64 41 43 75 91 66 25 9 14 7 -19 -29 -4 22 4 -47 -80 -78 -72 -94 -125 -128 -106 -91 -103 -121 -119 -98 -83 -77 -74 -65 -61 -63 -58 -34 -2 11 -6 -36 -51 -43 -24 -5 12 31 45 41 21 -2 -16 -20 -24 -35 -55 -83 -103 -97 -69 -45 -43 -42 -17 25 48 30 5 13 46 60 46 35 59 104 131 126 120 132 154 168 168 173 193 218 231 226 204 176 153 142 145 152 147 125 95 79 77 77 60 24 -6 -13 3 23 29 19 -1 -15 -15 -5 -3 -19 -47 -74 -100 -128 -158 -175 -173 -169 -182 -216 -243 -244 -225 -197 -173 -153 -135 -121 -109 -98 -90 -90 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 5 37 68 96 115 117 99 76 70 85 100 97 85 81 83 79 54 13 -30 -61 -81 -86 -78 -66 -60 -70 -94 -128 -152 -152 -136 -127 -128 -115 -74 -32 -24 -49 -58 -25 16 18 -9 -14 23 63 61 29 16 38 68 78 77 94 140 194 236 255 266 282 303 317 310 281 249 225 204 180 161 164 179 176 140 90 59 43 19 -22 -63 -86 -99 -112 -122 -123 -129 -155 -192 -207 -193 -186 -206 -240 -254 -243 -235 -229 -197 -130 -64 -39 -43 -31 15 70 101 106 103 101 97 94 102 120 134 134 126 120 115 105 89 70 55 52 63 75 73 46 11 -7 5 29 31 -5 -61 -100 -107 -106 -132 -174 -198 -177 -129 -100 -100 -108 -105 -90 -79 -76 -78 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 4 23 33 40 47 52 49 41 30 27 34 52 77 98 108 111 116 130 143 147 146 160 198 236 241 214 194 202 219 206 168 140 +1 +-210 -186 -154 -134 -129 -130 -110 -65 -12 28 51 66 83 107 137 167 191 211 230 247 258 251 234 218 208 208 218 239 259 260 235 209 207 223 223 189 144 121 128 142 141 124 105 87 67 35 -6 -52 -88 -106 -112 -119 -132 -141 -129 -111 -109 -132 -159 -167 -162 -164 -181 -202 -204 -183 -157 -144 -144 -134 -110 -85 -82 -100 -113 -93 -54 -24 -12 1 27 52 56 50 52 57 52 32 16 17 26 27 28 42 67 81 75 65 64 59 42 21 17 23 14 -20 -50 -51 -30 -13 -15 -26 -30 -28 -32 -41 -47 -49 -53 -63 -73 -73 -70 -71 -88 -109 -123 -124 -106 -73 -40 -20 -15 -16 -8 16 40 55 66 83 105 115 114 111 122 138 149 149 144 138 117 81 48 40 61 88 97 89 82 85 +-15 -15 -5 -3 -19 -47 -74 -100 -128 -158 -175 -173 -169 -182 -216 -243 -244 -225 -197 -173 -153 -135 -121 -109 -98 -90 -90 -102 -119 -124 -105 -65 -23 -5 -11 -24 -20 5 37 68 96 115 117 99 76 70 85 100 97 85 81 83 79 54 13 -30 -61 -81 -86 -78 -66 -60 -70 -94 -128 -152 -152 -136 -127 -128 -115 -74 -32 -24 -49 -58 -25 16 18 -9 -14 23 63 61 29 16 38 68 78 77 94 140 194 236 255 266 282 303 317 310 281 249 225 204 180 161 164 179 176 140 90 59 43 19 -22 -63 -86 -99 -112 -122 -123 -129 -155 -192 -207 -193 -186 -206 -240 -254 -243 -235 -229 -197 -130 -64 -39 -43 -31 15 70 101 106 103 101 97 94 102 120 134 134 126 120 115 105 89 70 55 52 63 75 73 46 11 -7 5 29 31 -5 -61 -100 -107 -106 -132 -174 -198 -177 -129 -100 -100 -108 -105 -90 -79 -76 -78 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 4 23 33 40 47 52 49 41 30 27 34 52 77 98 108 111 116 130 143 147 146 160 198 236 241 214 194 202 219 206 168 140 144 157 145 112 85 75 72 68 59 50 29 -2 -29 -33 -23 -13 -15 -34 
-68 -113 -149 -158 -141 -122 -115 -113 -105 -90 -79 -83 -98 -107 -102 -84 -68 -65 -64 -48 -14 15 22 16 15 27 39 44 48 52 51 46 53 73 87 78 63 65 72 46 -15 -64 -65 -40 -42 -77 -107 -107 -96 -114 -160 -207 -236 -246 -249 -251 -251 -255 -256 -250 -237 -224 -210 -186 -154 -134 -129 -130 -110 -65 -12 28 51 66 83 107 137 167 191 211 230 247 258 251 234 218 208 208 218 239 259 260 235 209 207 223 223 189 144 121 128 142 141 124 105 87 67 35 -6 -52 -88 -106 -112 -119 -132 -141 -129 -111 -109 -132 -159 -167 -162 -164 -181 -202 -204 -183 -157 -144 -144 -134 -110 -85 -82 -100 -113 -93 -54 -24 -12 1 +1 +86 66 23 -21 -41 -32 -14 -12 -35 -64 -73 -57 -41 -45 -70 -85 -65 -18 20 20 -8 -30 -28 -9 3 0 -10 -13 -7 4 10 0 -23 -45 -53 -43 -27 -5 20 40 42 30 26 47 84 106 103 95 97 103 103 96 92 89 78 58 44 38 24 -9 -42 -49 -28 -8 -8 -19 -18 0 14 11 -3 -12 -11 -2 7 12 9 1 -7 -12 -19 -36 -55 -66 -63 -56 -53 -51 -46 -39 -40 -52 -78 -108 -139 -156 -150 -127 -105 -99 -98 -85 -73 -79 -94 -86 -40 9 17 -16 -46 -35 7 37 33 14 6 15 29 35 32 31 34 36 31 23 20 26 28 18 0 -11 -10 -8 -17 -33 -42 -35 -17 2 11 5 -6 -8 6 26 37 35 31 31 30 19 -5 -35 -58 -55 -25 15 31 10 +75 73 46 11 -7 5 29 31 -5 -61 -100 -107 -106 -132 -174 -198 -177 -129 -100 -100 -108 -105 -90 -79 -76 -78 -74 -63 -42 -26 -32 -60 -90 -98 -92 -100 -131 -161 -161 -132 -98 -88 -96 -109 -111 -94 -64 -27 4 23 33 40 47 52 49 41 30 27 34 52 77 98 108 111 116 130 143 147 146 160 198 236 241 214 194 202 219 206 168 140 144 157 145 112 85 75 72 68 59 50 29 -2 -29 -33 -23 -13 -15 -34 -68 -113 -149 -158 -141 -122 -115 -113 -105 -90 -79 -83 -98 -107 -102 -84 -68 -65 -64 -48 -14 15 22 16 15 27 39 44 48 52 51 46 53 73 87 78 63 65 72 46 -15 -64 -65 -40 -42 -77 -107 -107 -96 -114 -160 -207 -236 -246 -249 -251 -251 -255 -256 -250 -237 -224 -210 -186 -154 -134 -129 -130 -110 -65 -12 28 51 66 83 107 137 167 191 211 230 247 258 251 234 218 208 208 218 239 259 260 235 209 207 223 223 189 144 121 128 142 141 124 105 87 67 35 -6 -52 -88 -106 -112 -119 -132 -141 -129 -111 -109 -132 -159 -167 -162 -164 -181 -202 -204 -183 -157 -144 -144 -134 -110 -85 -82 -100 -113 -93 -54 -24 -12 1 27 52 56 50 52 57 52 32 16 17 26 27 28 42 67 81 75 65 64 59 42 21 17 23 14 -20 -50 -51 -30 -13 -15 -26 -30 -28 -32 -41 -47 -49 -53 -63 -73 -73 -70 -71 -88 -109 -123 -124 -106 -73 -40 -20 -15 -16 -8 16 40 55 66 83 105 115 114 111 122 138 149 149 144 138 117 81 48 40 61 88 97 89 82 85 86 66 23 -21 -41 -32 -14 -12 -35 -64 -73 -57 -41 -45 -70 -85 -65 -18 20 20 -8 -30 -28 -9 3 0 -10 -13 -7 4 10 0 -23 -45 -53 -43 -27 -5 20 40 42 30 26 47 84 106 103 95 97 103 103 96 92 89 78 58 44 38 24 -9 -42 -49 -28 -8 -8 -19 -18 0 14 11 -3 -12 -11 -2 7 12 9 1 -7 -12 +1 +-22 -30 -5 20 16 -10 -26 -14 13 31 27 5 -23 -36 -35 -35 -49 -66 -59 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 13 19 28 38 38 32 40 67 89 87 74 79 105 124 119 108 115 132 142 140 136 135 121 87 45 18 5 -3 0 23 56 71 51 13 -9 -3 6 2 -12 -18 -2 26 42 33 8 -11 -14 -14 -22 -37 -48 -48 -47 -44 -29 2 26 19 -10 -32 -19 20 43 27 -22 -70 -79 -47 -11 -13 -47 -77 -77 -67 -73 -93 -107 -110 -121 -141 -149 -129 -89 -59 -55 -71 -86 -78 -42 -1 13 -4 -16 7 52 78 73 56 52 62 71 73 72 66 57 49 35 13 -10 -8 24 55 62 55 69 96 106 87 75 97 136 155 144 133 135 142 136 120 111 105 +-210 -186 -154 -134 -129 -130 -110 -65 -12 28 51 66 83 107 137 167 191 211 230 247 258 251 234 218 208 208 218 239 259 260 235 209 207 223 223 189 144 121 128 142 141 124 105 87 67 35 -6 -52 -88 -106 -112 -119 -132 -141 -129 -111 -109 -132 -159 -167 -162 -164 -181 -202 -204 -183 -157 -144 -144 -134 -110 -85 -82 
-100 -113 -93 -54 -24 -12 1 27 52 56 50 52 57 52 32 16 17 26 27 28 42 67 81 75 65 64 59 42 21 17 23 14 -20 -50 -51 -30 -13 -15 -26 -30 -28 -32 -41 -47 -49 -53 -63 -73 -73 -70 -71 -88 -109 -123 -124 -106 -73 -40 -20 -15 -16 -8 16 40 55 66 83 105 115 114 111 122 138 149 149 144 138 117 81 48 40 61 88 97 89 82 85 86 66 23 -21 -41 -32 -14 -12 -35 -64 -73 -57 -41 -45 -70 -85 -65 -18 20 20 -8 -30 -28 -9 3 0 -10 -13 -7 4 10 0 -23 -45 -53 -43 -27 -5 20 40 42 30 26 47 84 106 103 95 97 103 103 96 92 89 78 58 44 38 24 -9 -42 -49 -28 -8 -8 -19 -18 0 14 11 -3 -12 -11 -2 7 12 9 1 -7 -12 -19 -36 -55 -66 -63 -56 -53 -51 -46 -39 -40 -52 -78 -108 -139 -156 -150 -127 -105 -99 -98 -85 -73 -79 -94 -86 -40 9 17 -16 -46 -35 7 37 33 14 6 15 29 35 32 31 34 36 31 23 20 26 28 18 0 -11 -10 -8 -17 -33 -42 -35 -17 2 11 5 -6 -8 6 26 37 35 31 31 30 19 -5 -35 -58 -55 -25 15 31 10 -22 -30 -5 20 16 -10 -26 -14 13 31 27 5 -23 -36 -35 -35 -49 -66 -59 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 13 19 28 38 38 32 40 67 89 87 74 79 105 124 119 108 115 132 142 140 136 135 121 87 45 18 5 -3 0 23 56 71 51 13 -9 -3 6 2 -12 -18 -2 26 42 33 8 -11 -14 -14 -22 -37 +1 +97 79 51 24 1 -16 -29 -38 -38 -25 -14 -23 -53 -81 -91 -96 -110 -123 -116 -85 -60 -57 -69 -77 -81 -85 -92 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 27 56 82 97 98 97 107 130 143 127 101 82 73 56 27 2 -4 -10 -36 -82 -118 -133 -137 -149 -159 -152 -135 -141 -183 -231 -240 -206 -168 -158 -166 -166 -153 -141 -142 -145 -129 -85 -34 3 19 34 54 74 82 78 79 95 110 107 89 78 87 105 114 108 96 78 62 54 64 71 47 -5 -49 -60 -52 -57 -80 -99 -101 -105 -127 -151 -152 -129 -109 -107 -118 -123 -122 -119 -114 -101 -76 -40 10 67 116 133 131 133 156 186 199 204 223 252 262 247 235 250 274 272 247 229 233 234 205 161 140 136 119 71 21 -1 -3 -9 +86 66 23 -21 -41 -32 -14 -12 -35 -64 -73 -57 -41 -45 -70 -85 -65 -18 20 20 -8 -30 -28 -9 3 0 -10 -13 -7 4 10 0 -23 -45 -53 -43 -27 -5 20 40 42 30 26 47 84 106 103 95 97 103 103 96 92 89 78 58 44 38 24 -9 -42 -49 -28 -8 -8 -19 -18 0 14 11 -3 -12 -11 -2 7 12 9 1 -7 -12 -19 -36 -55 -66 -63 -56 -53 -51 -46 -39 -40 -52 -78 -108 -139 -156 -150 -127 -105 -99 -98 -85 -73 -79 -94 -86 -40 9 17 -16 -46 -35 7 37 33 14 6 15 29 35 32 31 34 36 31 23 20 26 28 18 0 -11 -10 -8 -17 -33 -42 -35 -17 2 11 5 -6 -8 6 26 37 35 31 31 30 19 -5 -35 -58 -55 -25 15 31 10 -22 -30 -5 20 16 -10 -26 -14 13 31 27 5 -23 -36 -35 -35 -49 -66 -59 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 13 19 28 38 38 32 40 67 89 87 74 79 105 124 119 108 115 132 142 140 136 135 121 87 45 18 5 -3 0 23 56 71 51 13 -9 -3 6 2 -12 -18 -2 26 42 33 8 -11 -14 -14 -22 -37 -48 -48 -47 -44 -29 2 26 19 -10 -32 -19 20 43 27 -22 -70 -79 -47 -11 -13 -47 -77 -77 -67 -73 -93 -107 -110 -121 -141 -149 -129 -89 -59 -55 -71 -86 -78 -42 -1 13 -4 -16 7 52 78 73 56 52 62 71 73 72 66 57 49 35 13 -10 -8 24 55 62 55 69 96 106 87 75 97 136 155 144 133 135 142 136 120 111 105 97 79 51 24 1 -16 -29 -38 -38 -25 -14 -23 -53 -81 -91 -96 -110 -123 -116 -85 -60 -57 -69 -77 -81 -85 -92 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 27 56 82 97 98 97 107 130 143 127 101 82 73 56 27 2 -4 -10 -36 -82 -118 -133 -137 -149 -159 -152 -135 -141 -183 -231 -240 -206 -168 -158 -166 -166 -153 -141 -142 -145 -129 -85 +1 +-35 -71 -108 -141 -176 -210 -250 -284 -295 -270 -220 -176 -163 -175 -190 -193 -181 -155 -120 -91 -77 -74 -68 -50 -19 23 65 90 88 76 81 110 142 151 139 125 116 111 107 113 125 128 118 97 71 48 34 33 42 47 37 12 -16 -39 -54 -63 -76 -99 -122 -130 -120 -114 -126 -141 -127 -89 -59 -57 -63 -48 -13 20 39 56 73 80 81 92 122 146 137 102 77 81 100 110 
101 89 82 87 91 74 34 -2 -7 14 28 16 -6 -12 -2 7 6 -10 -32 -60 -78 -74 -59 -54 -68 -78 -60 -23 6 4 -25 -63 -83 -72 -39 -12 -8 -15 -14 -1 13 28 53 80 83 51 20 26 61 84 63 15 -23 -30 -17 -7 -15 -36 -55 -55 -37 -4 33 54 33 -27 -80 -88 -52 -20 -27 -56 -69 -56 +-22 -30 -5 20 16 -10 -26 -14 13 31 27 5 -23 -36 -35 -35 -49 -66 -59 -29 -8 -21 -49 -58 -44 -35 -40 -43 -26 -1 13 19 28 38 38 32 40 67 89 87 74 79 105 124 119 108 115 132 142 140 136 135 121 87 45 18 5 -3 0 23 56 71 51 13 -9 -3 6 2 -12 -18 -2 26 42 33 8 -11 -14 -14 -22 -37 -48 -48 -47 -44 -29 2 26 19 -10 -32 -19 20 43 27 -22 -70 -79 -47 -11 -13 -47 -77 -77 -67 -73 -93 -107 -110 -121 -141 -149 -129 -89 -59 -55 -71 -86 -78 -42 -1 13 -4 -16 7 52 78 73 56 52 62 71 73 72 66 57 49 35 13 -10 -8 24 55 62 55 69 96 106 87 75 97 136 155 144 133 135 142 136 120 111 105 97 79 51 24 1 -16 -29 -38 -38 -25 -14 -23 -53 -81 -91 -96 -110 -123 -116 -85 -60 -57 -69 -77 -81 -85 -92 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 27 56 82 97 98 97 107 130 143 127 101 82 73 56 27 2 -4 -10 -36 -82 -118 -133 -137 -149 -159 -152 -135 -141 -183 -231 -240 -206 -168 -158 -166 -166 -153 -141 -142 -145 -129 -85 -34 3 19 34 54 74 82 78 79 95 110 107 89 78 87 105 114 108 96 78 62 54 64 71 47 -5 -49 -60 -52 -57 -80 -99 -101 -105 -127 -151 -152 -129 -109 -107 -118 -123 -122 -119 -114 -101 -76 -40 10 67 116 133 131 133 156 186 199 204 223 252 262 247 235 250 274 272 247 229 233 234 205 161 140 136 119 71 21 -1 -3 -9 -35 -71 -108 -141 -176 -210 -250 -284 -295 -270 -220 -176 -163 -175 -190 -193 -181 -155 -120 -91 -77 -74 -68 -50 -19 23 65 90 88 76 81 110 142 151 139 125 116 111 107 113 125 128 118 97 71 48 34 33 42 47 37 12 -16 -39 -54 -63 -76 -99 -122 -130 -120 -114 -126 -141 -127 -89 -59 -57 -63 -48 -13 20 39 56 73 80 81 92 122 146 +1 +-41 -46 -70 -100 -123 -130 -121 -107 -110 -133 -161 -183 -191 -190 -184 -183 -200 -228 -225 -172 -96 -54 -63 -86 -78 -33 19 56 78 99 123 147 155 136 100 72 73 93 108 109 101 89 71 54 53 70 96 115 127 134 133 124 110 99 78 38 -8 -32 -24 4 28 46 60 70 64 39 -1 -43 -74 -71 -34 12 21 -22 -77 -92 -60 -22 -23 -56 -84 -83 -63 -47 -43 -32 -1 37 60 58 45 42 51 65 65 57 50 52 55 55 50 42 40 47 65 78 67 30 -3 -2 27 45 33 14 15 28 33 36 52 70 61 29 17 43 66 47 2 -15 8 28 14 -2 9 33 26 -12 -33 -11 29 38 5 -43 -69 -54 -20 -2 -13 -31 -18 26 62 58 24 -11 -23 -21 -18 -22 -32 -42 +97 79 51 24 1 -16 -29 -38 -38 -25 -14 -23 -53 -81 -91 -96 -110 -123 -116 -85 -60 -57 -69 -77 -81 -85 -92 -99 -109 -118 -112 -79 -33 -7 -11 -25 -24 -3 27 56 82 97 98 97 107 130 143 127 101 82 73 56 27 2 -4 -10 -36 -82 -118 -133 -137 -149 -159 -152 -135 -141 -183 -231 -240 -206 -168 -158 -166 -166 -153 -141 -142 -145 -129 -85 -34 3 19 34 54 74 82 78 79 95 110 107 89 78 87 105 114 108 96 78 62 54 64 71 47 -5 -49 -60 -52 -57 -80 -99 -101 -105 -127 -151 -152 -129 -109 -107 -118 -123 -122 -119 -114 -101 -76 -40 10 67 116 133 131 133 156 186 199 204 223 252 262 247 235 250 274 272 247 229 233 234 205 161 140 136 119 71 21 -1 -3 -9 -35 -71 -108 -141 -176 -210 -250 -284 -295 -270 -220 -176 -163 -175 -190 -193 -181 -155 -120 -91 -77 -74 -68 -50 -19 23 65 90 88 76 81 110 142 151 139 125 116 111 107 113 125 128 118 97 71 48 34 33 42 47 37 12 -16 -39 -54 -63 -76 -99 -122 -130 -120 -114 -126 -141 -127 -89 -59 -57 -63 -48 -13 20 39 56 73 80 81 92 122 146 137 102 77 81 100 110 101 89 82 87 91 74 34 -2 -7 14 28 16 -6 -12 -2 7 6 -10 -32 -60 -78 -74 -59 -54 -68 -78 -60 -23 6 4 -25 -63 -83 -72 -39 -12 -8 -15 -14 -1 13 28 53 80 83 51 20 26 61 84 63 15 -23 -30 -17 -7 -15 -36 -55 -55 -37 -4 33 54 33 -27 -80 
-88 -52 -20 -27 -56 -69 -56 -41 -46 -70 -100 -123 -130 -121 -107 -110 -133 -161 -183 -191 -190 -184 -183 -200 -228 -225 -172 -96 -54 -63 -86 -78 -33 19 56 78 99 123 147 155 136 100 72 73 93 108 109 101 89 71 54 53 70 96 115 127 134 133 124 110 99 78 38 -8 -32 -24 4 28 46 60 70 64 39 -1 -43 -74 -71 -34 12 21 -22 -77 -92 -60 -22 -23 -56 +1 +-45 -41 -33 -37 -66 -110 -151 -173 -170 -152 -138 -143 -163 -172 -154 -119 -84 -48 -3 39 58 52 47 70 109 130 113 82 70 95 134 155 146 128 121 129 142 145 134 109 83 65 64 72 67 42 11 -11 -18 -20 -29 -48 -75 -111 -143 -155 -145 -126 -121 -131 -146 -151 -135 -108 -86 -82 -94 -99 -77 -41 -20 -26 -47 -55 -38 -6 25 37 37 38 45 47 38 30 42 69 81 69 64 90 128 132 100 73 79 97 92 65 39 23 8 -1 5 9 -14 -66 -103 -96 -67 -62 -81 -91 -79 -69 -89 -129 -146 -119 -60 -13 -12 -46 -66 -38 15 39 16 -10 13 71 109 100 71 56 53 45 32 27 33 29 3 -32 -59 -64 -48 -17 8 10 -15 -46 -63 -66 -66 -68 -68 -64 -59 -60 -64 -70 +-35 -71 -108 -141 -176 -210 -250 -284 -295 -270 -220 -176 -163 -175 -190 -193 -181 -155 -120 -91 -77 -74 -68 -50 -19 23 65 90 88 76 81 110 142 151 139 125 116 111 107 113 125 128 118 97 71 48 34 33 42 47 37 12 -16 -39 -54 -63 -76 -99 -122 -130 -120 -114 -126 -141 -127 -89 -59 -57 -63 -48 -13 20 39 56 73 80 81 92 122 146 137 102 77 81 100 110 101 89 82 87 91 74 34 -2 -7 14 28 16 -6 -12 -2 7 6 -10 -32 -60 -78 -74 -59 -54 -68 -78 -60 -23 6 4 -25 -63 -83 -72 -39 -12 -8 -15 -14 -1 13 28 53 80 83 51 20 26 61 84 63 15 -23 -30 -17 -7 -15 -36 -55 -55 -37 -4 33 54 33 -27 -80 -88 -52 -20 -27 -56 -69 -56 -41 -46 -70 -100 -123 -130 -121 -107 -110 -133 -161 -183 -191 -190 -184 -183 -200 -228 -225 -172 -96 -54 -63 -86 -78 -33 19 56 78 99 123 147 155 136 100 72 73 93 108 109 101 89 71 54 53 70 96 115 127 134 133 124 110 99 78 38 -8 -32 -24 4 28 46 60 70 64 39 -1 -43 -74 -71 -34 12 21 -22 -77 -92 -60 -22 -23 -56 -84 -83 -63 -47 -43 -32 -1 37 60 58 45 42 51 65 65 57 50 52 55 55 50 42 40 47 65 78 67 30 -3 -2 27 45 33 14 15 28 33 36 52 70 61 29 17 43 66 47 2 -15 8 28 14 -2 9 33 26 -12 -33 -11 29 38 5 -43 -69 -54 -20 -2 -13 -31 -18 26 62 58 24 -11 -23 -21 -18 -22 -32 -42 -45 -41 -33 -37 -66 -110 -151 -173 -170 -152 -138 -143 -163 -172 -154 -119 -84 -48 -3 39 58 52 47 70 109 130 113 82 70 95 134 155 146 128 121 129 142 145 134 109 83 65 64 72 67 42 11 -11 -18 -20 -29 -48 -75 -111 -143 -155 -145 -126 -121 -131 -146 -151 -135 -108 -86 -82 -94 -99 -77 -41 -20 -26 -47 -55 -38 -6 25 37 37 38 +1 +-74 -65 -40 -11 5 -4 -26 -42 -39 -14 19 44 49 41 40 51 64 67 62 60 65 67 61 63 81 107 122 120 107 95 83 69 53 41 39 49 59 62 54 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -182 -201 -198 -175 -151 -149 -177 -217 -240 -223 -163 -89 -43 -40 -47 -25 36 92 106 84 64 74 116 165 198 207 195 190 209 247 272 267 238 207 191 193 210 228 219 177 125 93 83 70 43 12 -10 -33 -63 -90 -92 -76 -59 -64 -78 -86 -80 -70 -73 -93 -110 -104 -71 -48 -61 -97 -115 -97 -58 -25 -2 10 4 -29 -68 -74 -27 37 69 48 12 6 37 67 59 31 10 12 13 -13 -60 -102 -123 -130 -136 -149 -169 -186 -191 -180 -159 -145 -132 -112 -81 -56 -49 -56 -54 -36 -7 +-41 -46 -70 -100 -123 -130 -121 -107 -110 -133 -161 -183 -191 -190 -184 -183 -200 -228 -225 -172 -96 -54 -63 -86 -78 -33 19 56 78 99 123 147 155 136 100 72 73 93 108 109 101 89 71 54 53 70 96 115 127 134 133 124 110 99 78 38 -8 -32 -24 4 28 46 60 70 64 39 -1 -43 -74 -71 -34 12 21 -22 -77 -92 -60 -22 -23 -56 -84 -83 -63 -47 -43 -32 -1 37 60 58 45 42 51 65 65 57 50 52 55 55 50 42 40 47 65 78 67 30 -3 -2 27 45 33 14 15 28 33 36 52 70 61 29 17 43 66 47 2 -15 8 28 14 -2 9 33 26 -12 -33 -11 29 38 5 -43 
-69 -54 -20 -2 -13 -31 -18 26 62 58 24 -11 -23 -21 -18 -22 -32 -42 -45 -41 -33 -37 -66 -110 -151 -173 -170 -152 -138 -143 -163 -172 -154 -119 -84 -48 -3 39 58 52 47 70 109 130 113 82 70 95 134 155 146 128 121 129 142 145 134 109 83 65 64 72 67 42 11 -11 -18 -20 -29 -48 -75 -111 -143 -155 -145 -126 -121 -131 -146 -151 -135 -108 -86 -82 -94 -99 -77 -41 -20 -26 -47 -55 -38 -6 25 37 37 38 45 47 38 30 42 69 81 69 64 90 128 132 100 73 79 97 92 65 39 23 8 -1 5 9 -14 -66 -103 -96 -67 -62 -81 -91 -79 -69 -89 -129 -146 -119 -60 -13 -12 -46 -66 -38 15 39 16 -10 13 71 109 100 71 56 53 45 32 27 33 29 3 -32 -59 -64 -48 -17 8 10 -15 -46 -63 -66 -66 -68 -68 -64 -59 -60 -64 -70 -74 -65 -40 -11 5 -4 -26 -42 -39 -14 19 44 49 41 40 51 64 67 62 60 65 67 61 63 81 107 122 120 107 95 83 69 53 41 39 49 59 62 54 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -182 -201 -198 -175 -151 -149 -177 -217 -240 -223 -163 -89 -43 -40 -47 -25 36 92 106 84 64 74 116 165 198 207 195 190 209 247 272 +1 +20 42 51 47 38 36 47 58 64 68 70 67 61 64 78 83 60 25 14 41 79 96 87 76 74 82 87 93 103 115 119 115 108 110 118 122 108 77 46 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -129 -143 -144 -136 -137 -148 -150 -137 -121 -115 -117 -112 -90 -57 -32 -27 -37 -43 -31 -7 12 20 32 61 95 101 67 28 22 53 91 110 114 122 136 143 133 122 124 139 152 142 110 68 33 13 11 25 45 51 29 -17 -59 -70 -49 -18 -3 -9 -16 -15 -13 -27 -56 -83 -86 -66 -46 -42 -51 -58 -55 -52 -49 -45 -35 -31 -37 -44 -46 -39 -33 -33 -47 -74 -111 -138 -135 -112 -97 -111 -133 -133 -111 -97 -103 -107 -95 -66 -29 10 48 65 58 58 90 138 164 162 +-45 -41 -33 -37 -66 -110 -151 -173 -170 -152 -138 -143 -163 -172 -154 -119 -84 -48 -3 39 58 52 47 70 109 130 113 82 70 95 134 155 146 128 121 129 142 145 134 109 83 65 64 72 67 42 11 -11 -18 -20 -29 -48 -75 -111 -143 -155 -145 -126 -121 -131 -146 -151 -135 -108 -86 -82 -94 -99 -77 -41 -20 -26 -47 -55 -38 -6 25 37 37 38 45 47 38 30 42 69 81 69 64 90 128 132 100 73 79 97 92 65 39 23 8 -1 5 9 -14 -66 -103 -96 -67 -62 -81 -91 -79 -69 -89 -129 -146 -119 -60 -13 -12 -46 -66 -38 15 39 16 -10 13 71 109 100 71 56 53 45 32 27 33 29 3 -32 -59 -64 -48 -17 8 10 -15 -46 -63 -66 -66 -68 -68 -64 -59 -60 -64 -70 -74 -65 -40 -11 5 -4 -26 -42 -39 -14 19 44 49 41 40 51 64 67 62 60 65 67 61 63 81 107 122 120 107 95 83 69 53 41 39 49 59 62 54 36 15 -5 -19 -28 -39 -58 -86 -119 -153 -182 -201 -198 -175 -151 -149 -177 -217 -240 -223 -163 -89 -43 -40 -47 -25 36 92 106 84 64 74 116 165 198 207 195 190 209 247 272 267 238 207 191 193 210 228 219 177 125 93 83 70 43 12 -10 -33 -63 -90 -92 -76 -59 -64 -78 -86 -80 -70 -73 -93 -110 -104 -71 -48 -61 -97 -115 -97 -58 -25 -2 10 4 -29 -68 -74 -27 37 69 48 12 6 37 67 59 31 10 12 13 -13 -60 -102 -123 -130 -136 -149 -169 -186 -191 -180 -159 -145 -132 -112 -81 -56 -49 -56 -54 -36 -7 20 42 51 47 38 36 47 58 64 68 70 67 61 64 78 83 60 25 14 41 79 96 87 76 74 82 87 93 103 115 119 115 108 110 118 122 108 77 46 29 18 -2 -35 -61 -67 -70 -99 -144 -174 -163 -133 -119 -129 -143 -144 -136 -137 -148 -150 -137 -121 -115 -117 -112 -90 -57 -32 -27 -37 -43 -31 -7 12 20 32 61 95 101 67 28 +1 +164 192 209 180 124 89 93 108 110 108 109 101 78 62 75 105 121 117 107 91 62 37 46 77 76 23 -17 10 61 49 -27 -74 -35 21 7 -61 -92 -61 -32 -50 -76 -62 -33 -42 -83 -95 -59 -25 -36 -71 -86 -73 -62 -68 -67 -43 -8 7 1 -5 8 36 55 43 15 9 37 68 62 32 21 49 75 56 5 -31 -35 -39 -68 -88 -63 0 42 23 -40 -98 -123 -121 -112 -100 -81 -55 -42 -53 -77 -82 -58 -25 -12 -19 -12 20 59 64 35 3 -7 2 12 11 10 12 21 28 24 7 -10 -5 27 60 65 46 32 39 53 54 42 32 31 27 22 26 40 49 31 
[elided diff hunk: raw signed-integer audio sample data — hundreds of added lines of int16-range waveform values for generated test data, with long runs repeated verbatim; no human-readable content]
124 140 114 98 134 179 163 78 7 32 127 182 127 20 -25 26 96 90 8 -65 -62 5 58 27 -76 -162 -155 -78 -38 -98 -175 -147 -14 73 11 -124 -175 -111 -42 -68 -152 -197 -167 -107 -77 -90 -117 -111 -46 44 87 39 -37 -57 -2 53 42 -16 -45 0 92 164 170 116 61 47 54 35 1 14 88 137 83 -14 -11 118 229 177 7 -113 -93 -10 15 -50 -141 -177 -139 -67 -32 -76 -162 -193 -121 -17 -2 +-65 -68 -76 -77 -68 -53 -47 -47 -43 -26 3 31 40 34 29 35 46 52 54 71 97 107 81 36 12 27 55 61 40 14 0 1 11 21 26 19 4 -9 -12 -9 -10 -23 -38 -50 -56 -60 -66 -68 -68 -74 -95 -123 -135 -122 -103 -106 -138 -179 -202 -195 -171 -148 -145 -165 -184 -176 -143 -115 -109 -109 -92 -64 -50 -49 -26 28 82 102 95 106 139 159 145 115 108 130 155 156 140 125 119 119 120 122 120 109 89 75 71 77 80 75 68 67 69 67 55 37 18 0 -12 -13 -1 8 6 -1 -2 4 9 11 12 21 29 31 30 39 50 57 50 35 18 -6 -30 -44 -36 -14 0 -3 -17 -30 -40 -44 -45 -40 -37 -44 -53 -49 -32 -15 -11 -21 -36 -52 -69 -78 -73 -56 -43 -40 -40 -36 -34 -43 -59 -69 -68 -66 -76 -93 -106 -112 -113 -110 -102 -87 -75 -67 -58 -45 -21 7 25 26 15 16 44 84 105 98 85 89 100 98 77 55 45 45 44 37 31 22 4 -25 -52 -63 -59 -47 -36 -29 -22 -14 -9 -10 -16 -21 -28 -36 -36 -24 -9 -12 -32 -38 -10 32 52 38 21 27 50 65 66 58 57 65 75 77 66 51 51 71 91 88 63 41 40 44 39 20 8 10 23 27 10 -18 -25 0 31 23 -30 -68 -47 17 55 25 -47 -91 -52 50 131 110 3 -78 -50 43 79 21 -37 -13 51 47 -24 -46 58 191 186 13 -146 -88 157 323 196 -128 -301 -118 239 373 122 -252 -359 -110 186 177 -126 -381 -291 48 256 94 -261 -427 -256 18 70 -145 -379 -394 -207 -26 -21 -172 -317 -299 -115 83 119 -15 -147 -119 49 194 206 132 82 106 165 199 190 154 139 166 209 207 132 30 -11 42 128 149 79 -23 -68 -37 8 5 -32 -45 -18 -3 -46 -120 -166 -171 -159 -141 -102 -68 -93 -180 -234 -172 -21 92 97 43 16 45 93 116 100 58 20 19 68 124 140 114 98 134 179 163 78 7 32 127 182 127 20 -25 +1 +-99 -196 -170 -39 53 6 -123 -183 -98 50 117 51 -61 -95 -16 109 190 187 124 62 70 178 326 383 286 118 56 174 343 362 186 -13 -38 113 244 200 32 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -222 -230 -255 -268 -226 -134 -58 -63 -156 -257 -270 -161 -16 30 -54 -147 -105 59 177 146 50 43 137 209 180 125 133 182 180 131 120 175 209 152 71 72 142 163 83 -19 -53 -42 -64 -122 -141 -91 -48 -85 -169 -203 -163 -115 -110 -130 -126 -111 -119 -127 -72 51 132 85 -30 -60 58 201 207 74 -40 1 148 244 189 42 -54 -17 116 232 226 89 -87 -175 -119 14 89 18 -145 -254 -202 -32 87 39 -117 -198 -96 89 157 33 -147 -182 -34 144 176 40 -125 -176 -98 13 52 2 -78 +-40 -36 -34 -43 -59 -69 -68 -66 -76 -93 -106 -112 -113 -110 -102 -87 -75 -67 -58 -45 -21 7 25 26 15 16 44 84 105 98 85 89 100 98 77 55 45 45 44 37 31 22 4 -25 -52 -63 -59 -47 -36 -29 -22 -14 -9 -10 -16 -21 -28 -36 -36 -24 -9 -12 -32 -38 -10 32 52 38 21 27 50 65 66 58 57 65 75 77 66 51 51 71 91 88 63 41 40 44 39 20 8 10 23 27 10 -18 -25 0 31 23 -30 -68 -47 17 55 25 -47 -91 -52 50 131 110 3 -78 -50 43 79 21 -37 -13 51 47 -24 -46 58 191 186 13 -146 -88 157 323 196 -128 -301 -118 239 373 122 -252 -359 -110 186 177 -126 -381 -291 48 256 94 -261 -427 -256 18 70 -145 -379 -394 -207 -26 -21 -172 -317 -299 -115 83 119 -15 -147 -119 49 194 206 132 82 106 165 199 190 154 139 166 209 207 132 30 -11 42 128 149 79 -23 -68 -37 8 5 -32 -45 -18 -3 -46 -120 -166 -171 -159 -141 -102 -68 -93 -180 -234 -172 -21 92 97 43 16 45 93 116 100 58 20 19 68 124 140 114 98 134 179 163 78 7 32 127 182 127 20 -25 26 96 90 8 -65 -62 5 58 27 -76 -162 -155 -78 -38 -98 -175 -147 -14 73 11 -124 -175 -111 -42 -68 -152 -197 -167 -107 -77 -90 -117 -111 -46 44 87 39 -37 -57 -2 53 42 -16 -45 
0 92 164 170 116 61 47 54 35 1 14 88 137 83 -14 -11 118 229 177 7 -113 -93 -10 15 -50 -141 -177 -139 -67 -32 -76 -162 -193 -121 -17 -2 -99 -196 -170 -39 53 6 -123 -183 -98 50 117 51 -61 -95 -16 109 190 187 124 62 70 178 326 383 286 118 56 174 343 362 186 -13 -38 113 244 200 32 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -222 -230 -255 -268 -226 -134 -58 -63 -156 -257 -270 -161 -16 30 -54 -147 -105 59 177 146 50 43 137 209 180 125 133 182 180 131 120 +1 +-111 -75 -23 -20 -60 -58 35 148 137 -26 -194 -181 14 200 205 50 -98 -130 -71 -3 39 55 32 -44 -133 -150 -55 77 127 54 -54 -84 -13 74 108 97 98 109 66 -55 -160 -117 85 280 269 44 -173 -152 97 323 302 79 -94 -54 112 191 84 -101 -188 -109 42 106 2 -193 -305 -224 -15 136 100 -80 -248 -270 -130 84 229 200 27 -135 -128 50 240 283 170 19 -57 -40 22 71 72 14 -74 -129 -108 -32 23 -3 -81 -123 -78 1 13 -70 -152 -142 -58 -17 -87 -195 -226 -159 -87 -94 -150 -160 -96 -18 20 19 7 -18 -68 -101 -60 54 138 110 1 -73 -42 54 123 117 66 35 68 142 183 142 61 44 133 242 244 120 -3 4 124 217 176 32 -83 -66 56 152 118 -23 -147 -160 -97 -56 +-21 -172 -317 -299 -115 83 119 -15 -147 -119 49 194 206 132 82 106 165 199 190 154 139 166 209 207 132 30 -11 42 128 149 79 -23 -68 -37 8 5 -32 -45 -18 -3 -46 -120 -166 -171 -159 -141 -102 -68 -93 -180 -234 -172 -21 92 97 43 16 45 93 116 100 58 20 19 68 124 140 114 98 134 179 163 78 7 32 127 182 127 20 -25 26 96 90 8 -65 -62 5 58 27 -76 -162 -155 -78 -38 -98 -175 -147 -14 73 11 -124 -175 -111 -42 -68 -152 -197 -167 -107 -77 -90 -117 -111 -46 44 87 39 -37 -57 -2 53 42 -16 -45 0 92 164 170 116 61 47 54 35 1 14 88 137 83 -14 -11 118 229 177 7 -113 -93 -10 15 -50 -141 -177 -139 -67 -32 -76 -162 -193 -121 -17 -2 -99 -196 -170 -39 53 6 -123 -183 -98 50 117 51 -61 -95 -16 109 190 187 124 62 70 178 326 383 286 118 56 174 343 362 186 -13 -38 113 244 200 32 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -222 -230 -255 -268 -226 -134 -58 -63 -156 -257 -270 -161 -16 30 -54 -147 -105 59 177 146 50 43 137 209 180 125 133 182 180 131 120 175 209 152 71 72 142 163 83 -19 -53 -42 -64 -122 -141 -91 -48 -85 -169 -203 -163 -115 -110 -130 -126 -111 -119 -127 -72 51 132 85 -30 -60 58 201 207 74 -40 1 148 244 189 42 -54 -17 116 232 226 89 -87 -175 -119 14 89 18 -145 -254 -202 -32 87 39 -117 -198 -96 89 157 33 -147 -182 -34 144 176 40 -125 -176 -98 13 52 2 -78 -111 -75 -23 -20 -60 -58 35 148 137 -26 -194 -181 14 200 205 50 -98 -130 -71 -3 39 55 32 -44 -133 -150 -55 77 127 54 -54 -84 -13 74 108 97 98 109 66 -55 -160 -117 85 280 269 44 -173 -152 97 323 302 79 -94 -54 112 191 84 -101 -188 -109 42 106 2 -193 -305 -224 -15 136 100 -80 -248 -270 -130 84 229 200 27 -135 -128 50 +1 +-73 -89 -52 -8 -35 -120 -156 -58 114 219 182 76 21 50 85 63 16 21 89 136 90 -29 -122 -112 -12 85 88 -16 -147 -200 -139 -37 5 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 2 8 -25 -37 1 64 107 103 58 8 2 64 161 219 186 93 28 55 148 218 200 117 42 35 91 153 150 62 -51 -90 -17 84 93 -3 -91 -79 -8 13 -45 -98 -74 -15 -14 -88 -159 -154 -89 -44 -61 -109 -127 -88 -26 2 -23 -62 -64 -25 17 38 43 47 40 14 -9 4 48 85 84 54 18 -7 -8 28 87 117 81 14 -9 40 107 117 62 1 -20 -10 5 11 19 24 18 -4 -24 -30 -20 -10 -13 -29 -44 -42 -28 -27 -55 -85 -81 -39 -8 -28 -81 -113 -100 -68 -50 -43 +-99 -196 -170 -39 53 6 -123 -183 -98 50 117 51 -61 -95 -16 109 190 187 124 62 70 178 326 383 286 118 56 174 343 362 186 -13 -38 113 244 200 32 -68 -8 103 89 -78 -247 -262 -136 -19 -19 -111 -194 -222 -230 -255 -268 -226 -134 -58 -63 -156 -257 -270 -161 -16 30 -54 -147 -105 59 177 146 50 43 137 
209 180 125 133 182 180 131 120 175 209 152 71 72 142 163 83 -19 -53 -42 -64 -122 -141 -91 -48 -85 -169 -203 -163 -115 -110 -130 -126 -111 -119 -127 -72 51 132 85 -30 -60 58 201 207 74 -40 1 148 244 189 42 -54 -17 116 232 226 89 -87 -175 -119 14 89 18 -145 -254 -202 -32 87 39 -117 -198 -96 89 157 33 -147 -182 -34 144 176 40 -125 -176 -98 13 52 2 -78 -111 -75 -23 -20 -60 -58 35 148 137 -26 -194 -181 14 200 205 50 -98 -130 -71 -3 39 55 32 -44 -133 -150 -55 77 127 54 -54 -84 -13 74 108 97 98 109 66 -55 -160 -117 85 280 269 44 -173 -152 97 323 302 79 -94 -54 112 191 84 -101 -188 -109 42 106 2 -193 -305 -224 -15 136 100 -80 -248 -270 -130 84 229 200 27 -135 -128 50 240 283 170 19 -57 -40 22 71 72 14 -74 -129 -108 -32 23 -3 -81 -123 -78 1 13 -70 -152 -142 -58 -17 -87 -195 -226 -159 -87 -94 -150 -160 -96 -18 20 19 7 -18 -68 -101 -60 54 138 110 1 -73 -42 54 123 117 66 35 68 142 183 142 61 44 133 242 244 120 -3 4 124 217 176 32 -83 -66 56 152 118 -23 -147 -160 -97 -56 -73 -89 -52 -8 -35 -120 -156 -58 114 219 182 76 21 50 85 63 16 21 89 136 90 -29 -122 -112 -12 85 88 -16 -147 -200 -139 -37 5 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 2 8 -25 -37 1 64 107 103 58 8 2 64 161 219 186 93 28 55 148 218 200 117 42 35 91 153 150 62 -51 -90 -17 84 93 -3 +1 +-24 6 21 20 27 60 104 128 123 95 55 22 20 60 114 133 99 40 6 21 72 115 109 52 -9 -20 25 70 73 50 50 76 75 21 -34 -28 29 49 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -211 -263 -241 -190 -177 -202 -193 -121 -39 -27 -90 -149 -126 -29 54 51 -18 -63 -21 78 150 143 81 36 50 112 168 168 114 53 42 94 159 183 156 108 73 65 83 116 137 122 82 39 16 11 18 33 41 18 -33 -72 -57 -2 46 52 25 -7 -26 -18 27 82 93 36 -38 -57 -13 28 18 -15 -11 38 79 62 4 -42 -40 -2 29 20 -22 -65 -82 -70 -60 -67 -83 -81 -55 -39 -60 -101 -105 -42 52 99 51 -49 -108 -65 49 139 140 73 23 43 115 165 147 84 +-111 -75 -23 -20 -60 -58 35 148 137 -26 -194 -181 14 200 205 50 -98 -130 -71 -3 39 55 32 -44 -133 -150 -55 77 127 54 -54 -84 -13 74 108 97 98 109 66 -55 -160 -117 85 280 269 44 -173 -152 97 323 302 79 -94 -54 112 191 84 -101 -188 -109 42 106 2 -193 -305 -224 -15 136 100 -80 -248 -270 -130 84 229 200 27 -135 -128 50 240 283 170 19 -57 -40 22 71 72 14 -74 -129 -108 -32 23 -3 -81 -123 -78 1 13 -70 -152 -142 -58 -17 -87 -195 -226 -159 -87 -94 -150 -160 -96 -18 20 19 7 -18 -68 -101 -60 54 138 110 1 -73 -42 54 123 117 66 35 68 142 183 142 61 44 133 242 244 120 -3 4 124 217 176 32 -83 -66 56 152 118 -23 -147 -160 -97 -56 -73 -89 -52 -8 -35 -120 -156 -58 114 219 182 76 21 50 85 63 16 21 89 136 90 -29 -122 -112 -12 85 88 -16 -147 -200 -139 -37 5 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 2 8 -25 -37 1 64 107 103 58 8 2 64 161 219 186 93 28 55 148 218 200 117 42 35 91 153 150 62 -51 -90 -17 84 93 -3 -91 -79 -8 13 -45 -98 -74 -15 -14 -88 -159 -154 -89 -44 -61 -109 -127 -88 -26 2 -23 -62 -64 -25 17 38 43 47 40 14 -9 4 48 85 84 54 18 -7 -8 28 87 117 81 14 -9 40 107 117 62 1 -20 -10 5 11 19 24 18 -4 -24 -30 -20 -10 -13 -29 -44 -42 -28 -27 -55 -85 -81 -39 -8 -28 -81 -113 -100 -68 -50 -43 -24 6 21 20 27 60 104 128 123 95 55 22 20 60 114 133 99 40 6 21 72 115 109 52 -9 -20 25 70 73 50 50 76 75 21 -34 -28 29 49 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -211 -263 -241 -190 -177 -202 -193 -121 -39 -27 -90 -149 -126 -29 54 51 -18 -63 -21 78 150 143 81 36 50 112 168 168 114 53 +1 +37 35 59 69 59 61 81 88 46 -33 -95 -100 -75 -63 -81 -95 -93 -94 -124 -169 -184 -144 -77 -35 -46 -92 -131 -130 -94 -51 -31 -38 -47 -46 -25 6 34 36 -2 -51 -69 -38 17 59 
82 105 132 142 124 101 100 119 142 165 188 203 189 152 123 117 123 120 107 97 82 55 29 34 71 100 84 44 33 63 87 60 3 -34 -22 4 -5 -64 -138 -178 -167 -134 -127 -163 -208 -216 -181 -145 -146 -171 -177 -144 -101 -97 -141 -183 -170 -107 -54 -58 -92 -91 -32 34 50 21 10 49 111 151 151 132 118 124 150 171 151 81 14 7 55 87 56 -12 -47 -23 22 42 30 14 14 26 28 7 -17 -24 -8 10 2 -28 -56 -65 -62 -69 -84 -96 -93 -70 -38 -18 -28 -53 -56 -9 58 88 +-73 -89 -52 -8 -35 -120 -156 -58 114 219 182 76 21 50 85 63 16 21 89 136 90 -29 -122 -112 -12 85 88 -16 -147 -200 -139 -37 5 -37 -101 -127 -131 -150 -181 -178 -143 -122 -144 -170 -142 -63 2 8 -25 -37 1 64 107 103 58 8 2 64 161 219 186 93 28 55 148 218 200 117 42 35 91 153 150 62 -51 -90 -17 84 93 -3 -91 -79 -8 13 -45 -98 -74 -15 -14 -88 -159 -154 -89 -44 -61 -109 -127 -88 -26 2 -23 -62 -64 -25 17 38 43 47 40 14 -9 4 48 85 84 54 18 -7 -8 28 87 117 81 14 -9 40 107 117 62 1 -20 -10 5 11 19 24 18 -4 -24 -30 -20 -10 -13 -29 -44 -42 -28 -27 -55 -85 -81 -39 -8 -28 -81 -113 -100 -68 -50 -43 -24 6 21 20 27 60 104 128 123 95 55 22 20 60 114 133 99 40 6 21 72 115 109 52 -9 -20 25 70 73 50 50 76 75 21 -34 -28 29 49 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -211 -263 -241 -190 -177 -202 -193 -121 -39 -27 -90 -149 -126 -29 54 51 -18 -63 -21 78 150 143 81 36 50 112 168 168 114 53 42 94 159 183 156 108 73 65 83 116 137 122 82 39 16 11 18 33 41 18 -33 -72 -57 -2 46 52 25 -7 -26 -18 27 82 93 36 -38 -57 -13 28 18 -15 -11 38 79 62 4 -42 -40 -2 29 20 -22 -65 -82 -70 -60 -67 -83 -81 -55 -39 -60 -101 -105 -42 52 99 51 -49 -108 -65 49 139 140 73 23 43 115 165 147 84 37 35 59 69 59 61 81 88 46 -33 -95 -100 -75 -63 -81 -95 -93 -94 -124 -169 -184 -144 -77 -35 -46 -92 -131 -130 -94 -51 -31 -38 -47 -46 -25 6 34 36 -2 -51 -69 -38 17 59 82 105 132 142 124 101 100 119 142 165 188 203 189 152 123 117 123 120 107 97 82 55 29 34 71 100 84 44 33 63 87 60 3 -34 -22 4 +1 +56 5 -5 48 115 136 89 10 -51 -61 -24 18 22 -36 -130 -204 -206 -136 -49 -13 -55 -133 -175 -151 -97 -78 -117 -171 -182 -139 -75 -36 -35 -47 -40 2 62 103 101 67 53 91 169 242 288 304 294 259 216 207 241 275 250 169 101 94 129 147 117 51 -10 -35 -22 11 35 33 11 -29 -80 -129 -148 -139 -134 -167 -213 -228 -211 -210 -248 -291 -300 -289 -290 -296 -263 -191 -130 -113 -113 -93 -57 -28 -1 39 83 96 86 97 152 204 200 147 113 131 176 201 187 163 157 175 196 199 180 165 177 209 234 227 196 167 143 111 66 23 2 3 3 -15 -50 -91 -123 -134 -126 -117 -124 -153 -175 -167 -144 -148 -186 -217 -198 -149 -120 -130 -135 -98 -41 -15 -37 -72 -81 -57 -9 49 86 70 23 6 +-24 6 21 20 27 60 104 128 123 95 55 22 20 60 114 133 99 40 6 21 72 115 109 52 -9 -20 25 70 73 50 50 76 75 21 -34 -28 29 49 -31 -166 -253 -229 -140 -90 -132 -215 -241 -187 -123 -135 -211 -263 -241 -190 -177 -202 -193 -121 -39 -27 -90 -149 -126 -29 54 51 -18 -63 -21 78 150 143 81 36 50 112 168 168 114 53 42 94 159 183 156 108 73 65 83 116 137 122 82 39 16 11 18 33 41 18 -33 -72 -57 -2 46 52 25 -7 -26 -18 27 82 93 36 -38 -57 -13 28 18 -15 -11 38 79 62 4 -42 -40 -2 29 20 -22 -65 -82 -70 -60 -67 -83 -81 -55 -39 -60 -101 -105 -42 52 99 51 -49 -108 -65 49 139 140 73 23 43 115 165 147 84 37 35 59 69 59 61 81 88 46 -33 -95 -100 -75 -63 -81 -95 -93 -94 -124 -169 -184 -144 -77 -35 -46 -92 -131 -130 -94 -51 -31 -38 -47 -46 -25 6 34 36 -2 -51 -69 -38 17 59 82 105 132 142 124 101 100 119 142 165 188 203 189 152 123 117 123 120 107 97 82 55 29 34 71 100 84 44 33 63 87 60 3 -34 -22 4 -5 -64 -138 -178 -167 -134 -127 -163 -208 -216 -181 -145 -146 -171 -177 -144 -101 -97 -141 -183 -170 -107 -54 -58 
-92 -91 -32 34 50 21 10 49 111 151 151 132 118 124 150 171 151 81 14 7 55 87 56 -12 -47 -23 22 42 30 14 14 26 28 7 -17 -24 -8 10 2 -28 -56 -65 -62 -69 -84 -96 -93 -70 -38 -18 -28 -53 -56 -9 58 88 56 5 -5 48 115 136 89 10 -51 -61 -24 18 22 -36 -130 -204 -206 -136 -49 -13 -55 -133 -175 -151 -97 -78 -117 -171 -182 -139 -75 -36 -35 -47 -40 2 62 103 101 67 53 91 169 242 288 304 294 259 216 207 241 275 250 169 101 94 129 147 117 51 -10 -35 -22 11 35 33 11 -29 -80 -129 -148 -139 -134 -167 -213 -228 -211 -210 -248 -291 +1 +57 129 142 90 45 65 115 125 88 48 55 94 121 105 58 23 43 104 143 105 28 3 63 135 139 77 29 34 46 12 -60 -118 -135 -130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -117 -106 -76 -49 -36 -41 -59 -71 -58 -25 -4 -7 -6 25 77 100 76 38 36 73 105 90 46 23 50 106 141 125 66 12 6 47 100 123 108 80 67 69 69 64 57 49 35 16 13 31 38 6 -49 -71 -40 2 9 -4 5 35 41 7 -30 -24 17 49 50 35 14 -9 -27 -22 -3 -12 -64 -125 -139 -102 -55 -42 -62 -88 -102 -110 -104 -81 -41 -9 -13 -48 -89 -96 -66 -27 -12 -24 -35 -26 -4 7 10 12 17 13 6 14 44 71 73 56 56 75 +37 35 59 69 59 61 81 88 46 -33 -95 -100 -75 -63 -81 -95 -93 -94 -124 -169 -184 -144 -77 -35 -46 -92 -131 -130 -94 -51 -31 -38 -47 -46 -25 6 34 36 -2 -51 -69 -38 17 59 82 105 132 142 124 101 100 119 142 165 188 203 189 152 123 117 123 120 107 97 82 55 29 34 71 100 84 44 33 63 87 60 3 -34 -22 4 -5 -64 -138 -178 -167 -134 -127 -163 -208 -216 -181 -145 -146 -171 -177 -144 -101 -97 -141 -183 -170 -107 -54 -58 -92 -91 -32 34 50 21 10 49 111 151 151 132 118 124 150 171 151 81 14 7 55 87 56 -12 -47 -23 22 42 30 14 14 26 28 7 -17 -24 -8 10 2 -28 -56 -65 -62 -69 -84 -96 -93 -70 -38 -18 -28 -53 -56 -9 58 88 56 5 -5 48 115 136 89 10 -51 -61 -24 18 22 -36 -130 -204 -206 -136 -49 -13 -55 -133 -175 -151 -97 -78 -117 -171 -182 -139 -75 -36 -35 -47 -40 2 62 103 101 67 53 91 169 242 288 304 294 259 216 207 241 275 250 169 101 94 129 147 117 51 -10 -35 -22 11 35 33 11 -29 -80 -129 -148 -139 -134 -167 -213 -228 -211 -210 -248 -291 -300 -289 -290 -296 -263 -191 -130 -113 -113 -93 -57 -28 -1 39 83 96 86 97 152 204 200 147 113 131 176 201 187 163 157 175 196 199 180 165 177 209 234 227 196 167 143 111 66 23 2 3 3 -15 -50 -91 -123 -134 -126 -117 -124 -153 -175 -167 -144 -148 -186 -217 -198 -149 -120 -130 -135 -98 -41 -15 -37 -72 -81 -57 -9 49 86 70 23 6 57 129 142 90 45 65 115 125 88 48 55 94 121 105 58 23 43 104 143 105 28 3 63 135 139 77 29 34 46 12 -60 -118 -135 -130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -117 -106 -76 -49 -36 -41 -59 -71 -58 -25 -4 -7 -6 25 77 100 76 38 36 73 105 90 46 23 50 106 141 125 +1 +86 63 29 17 38 61 55 27 1 -9 -14 -25 -44 -63 -69 -53 -24 2 4 -13 -27 -12 28 58 53 22 -8 -14 5 35 61 68 59 51 54 68 82 94 107 117 109 83 58 51 56 54 38 25 24 26 17 0 -14 -17 -11 -9 -11 -17 -28 -43 -60 -72 -68 -52 -41 -49 -63 -54 -23 -7 -34 -76 -81 -43 -8 -17 -46 -57 -42 -28 -24 -17 -6 -5 -20 -25 -7 15 17 7 10 29 32 4 -34 -45 -26 -7 -8 -28 -34 -11 34 68 69 51 54 90 133 146 133 127 148 173 172 144 110 89 84 87 89 85 71 49 21 -7 -32 -41 -31 -16 -13 -29 -47 -53 -46 -49 -84 -150 -218 -253 -247 -224 -216 -235 -253 -249 -227 -208 -200 -192 -175 -153 -130 -91 -38 7 23 9 -4 6 +56 5 -5 48 115 136 89 10 -51 -61 -24 18 22 -36 -130 -204 -206 -136 -49 -13 -55 -133 -175 -151 -97 -78 -117 -171 -182 -139 -75 -36 -35 -47 -40 2 62 103 101 67 53 91 169 242 288 304 294 259 216 207 241 275 250 169 101 94 129 147 117 51 -10 -35 -22 11 35 33 11 -29 -80 -129 -148 -139 -134 -167 -213 -228 -211 -210 -248 -291 
-300 -289 -290 -296 -263 -191 -130 -113 -113 -93 -57 -28 -1 39 83 96 86 97 152 204 200 147 113 131 176 201 187 163 157 175 196 199 180 165 177 209 234 227 196 167 143 111 66 23 2 3 3 -15 -50 -91 -123 -134 -126 -117 -124 -153 -175 -167 -144 -148 -186 -217 -198 -149 -120 -130 -135 -98 -41 -15 -37 -72 -81 -57 -9 49 86 70 23 6 57 129 142 90 45 65 115 125 88 48 55 94 121 105 58 23 43 104 143 105 28 3 63 135 139 77 29 34 46 12 -60 -118 -135 -130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -117 -106 -76 -49 -36 -41 -59 -71 -58 -25 -4 -7 -6 25 77 100 76 38 36 73 105 90 46 23 50 106 141 125 66 12 6 47 100 123 108 80 67 69 69 64 57 49 35 16 13 31 38 6 -49 -71 -40 2 9 -4 5 35 41 7 -30 -24 17 49 50 35 14 -9 -27 -22 -3 -12 -64 -125 -139 -102 -55 -42 -62 -88 -102 -110 -104 -81 -41 -9 -13 -48 -89 -96 -66 -27 -12 -24 -35 -26 -4 7 10 12 17 13 6 14 44 71 73 56 56 75 86 63 29 17 38 61 55 27 1 -9 -14 -25 -44 -63 -69 -53 -24 2 4 -13 -27 -12 28 58 53 22 -8 -14 5 35 61 68 59 51 54 68 82 94 107 117 109 83 58 51 56 54 38 25 24 26 17 0 -14 -17 -11 -9 -11 -17 -28 -43 -60 -72 -68 -52 -41 -49 -63 -54 -23 -7 -34 -76 -81 -43 -8 -17 -46 -57 -42 -28 +1 +34 59 71 68 58 48 44 44 43 44 58 85 109 117 117 130 152 156 123 70 35 34 50 53 32 5 -5 5 18 22 15 16 26 31 25 20 31 58 86 99 99 86 62 36 19 7 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 -6 13 7 -11 -3 31 58 53 29 14 11 15 22 31 30 8 -25 -34 -8 28 34 4 -31 -45 -46 -56 -82 -107 -114 -115 -122 -134 -135 -122 -116 -130 -148 -140 -103 -62 -34 -17 5 30 48 60 75 97 113 115 113 117 122 121 113 106 106 104 91 74 70 86 108 120 109 85 66 58 48 25 7 13 37 47 22 -20 -43 -43 -43 -60 -79 -87 -89 -95 -105 -109 -105 -103 -103 -95 -76 -55 -41 -27 -7 13 23 29 39 53 53 44 +57 129 142 90 45 65 115 125 88 48 55 94 121 105 58 23 43 104 143 105 28 3 63 135 139 77 29 34 46 12 -60 -118 -135 -130 -125 -125 -128 -134 -135 -130 -129 -142 -156 -151 -125 -107 -108 -107 -91 -71 -74 -99 -117 -106 -76 -49 -36 -41 -59 -71 -58 -25 -4 -7 -6 25 77 100 76 38 36 73 105 90 46 23 50 106 141 125 66 12 6 47 100 123 108 80 67 69 69 64 57 49 35 16 13 31 38 6 -49 -71 -40 2 9 -4 5 35 41 7 -30 -24 17 49 50 35 14 -9 -27 -22 -3 -12 -64 -125 -139 -102 -55 -42 -62 -88 -102 -110 -104 -81 -41 -9 -13 -48 -89 -96 -66 -27 -12 -24 -35 -26 -4 7 10 12 17 13 6 14 44 71 73 56 56 75 86 63 29 17 38 61 55 27 1 -9 -14 -25 -44 -63 -69 -53 -24 2 4 -13 -27 -12 28 58 53 22 -8 -14 5 35 61 68 59 51 54 68 82 94 107 117 109 83 58 51 56 54 38 25 24 26 17 0 -14 -17 -11 -9 -11 -17 -28 -43 -60 -72 -68 -52 -41 -49 -63 -54 -23 -7 -34 -76 -81 -43 -8 -17 -46 -57 -42 -28 -24 -17 -6 -5 -20 -25 -7 15 17 7 10 29 32 4 -34 -45 -26 -7 -8 -28 -34 -11 34 68 69 51 54 90 133 146 133 127 148 173 172 144 110 89 84 87 89 85 71 49 21 -7 -32 -41 -31 -16 -13 -29 -47 -53 -46 -49 -84 -150 -218 -253 -247 -224 -216 -235 -253 -249 -227 -208 -200 -192 -175 -153 -130 -91 -38 7 23 9 -4 6 34 59 71 68 58 48 44 44 43 44 58 85 109 117 117 130 152 156 123 70 35 34 50 53 32 5 -5 5 18 22 15 16 26 31 25 20 31 58 86 99 99 86 62 36 19 7 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 -6 13 7 -11 -3 31 58 53 29 14 11 15 22 31 30 8 -25 -34 -8 28 34 4 +1 +42 67 102 114 92 59 44 52 63 63 55 52 55 53 40 27 23 19 -2 -39 -69 -70 -55 -53 -69 -79 -69 -48 -45 -63 -87 -101 -101 -88 -66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 2 4 -2 -3 5 15 26 36 49 60 65 58 41 25 22 32 43 42 36 33 37 38 23 -7 -37 -40 -4 43 59 34 5 12 45 53 23 -4 9 41 40 -3 -42 -39 -8 13 16 17 24 28 26 26 34 40 38 39 52 69 78 71 55 40 19 -4 -23 -29 -21 -9 1 2 -7 -30 -57 
-77 -79 -62 -38 -21 -17 -20 -30 -45 -63 -69 -53 -27 -7 0 5 10 -5 -37 -67 -71 -54 -43 -51 -62 -61 -53 -48 -40 -17 21 54 69 74 74 68 56 56 84 132 +86 63 29 17 38 61 55 27 1 -9 -14 -25 -44 -63 -69 -53 -24 2 4 -13 -27 -12 28 58 53 22 -8 -14 5 35 61 68 59 51 54 68 82 94 107 117 109 83 58 51 56 54 38 25 24 26 17 0 -14 -17 -11 -9 -11 -17 -28 -43 -60 -72 -68 -52 -41 -49 -63 -54 -23 -7 -34 -76 -81 -43 -8 -17 -46 -57 -42 -28 -24 -17 -6 -5 -20 -25 -7 15 17 7 10 29 32 4 -34 -45 -26 -7 -8 -28 -34 -11 34 68 69 51 54 90 133 146 133 127 148 173 172 144 110 89 84 87 89 85 71 49 21 -7 -32 -41 -31 -16 -13 -29 -47 -53 -46 -49 -84 -150 -218 -253 -247 -224 -216 -235 -253 -249 -227 -208 -200 -192 -175 -153 -130 -91 -38 7 23 9 -4 6 34 59 71 68 58 48 44 44 43 44 58 85 109 117 117 130 152 156 123 70 35 34 50 53 32 5 -5 5 18 22 15 16 26 31 25 20 31 58 86 99 99 86 62 36 19 7 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 -6 13 7 -11 -3 31 58 53 29 14 11 15 22 31 30 8 -25 -34 -8 28 34 4 -31 -45 -46 -56 -82 -107 -114 -115 -122 -134 -135 -122 -116 -130 -148 -140 -103 -62 -34 -17 5 30 48 60 75 97 113 115 113 117 122 121 113 106 106 104 91 74 70 86 108 120 109 85 66 58 48 25 7 13 37 47 22 -20 -43 -43 -43 -60 -79 -87 -89 -95 -105 -109 -105 -103 -103 -95 -76 -55 -41 -27 -7 13 23 29 39 53 53 44 42 67 102 114 92 59 44 52 63 63 55 52 55 53 40 27 23 19 -2 -39 -69 -70 -55 -53 -69 -79 -69 -48 -45 -63 -87 -101 -101 -88 -66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 2 4 -2 -3 5 15 26 36 49 60 65 58 41 25 22 32 43 42 36 33 37 38 23 -7 -37 -40 -4 43 59 34 5 +1 +167 168 150 139 149 158 148 119 95 93 105 106 78 31 -2 -10 -11 -35 -78 -113 -123 -127 -149 -183 -199 -182 -155 -155 -179 -202 -207 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 6 18 16 17 44 97 147 174 174 158 139 127 138 169 205 215 199 179 168 160 145 134 137 139 116 74 45 53 75 80 67 62 76 86 71 41 23 30 51 66 59 28 -6 -28 -30 -29 -38 -59 -69 -59 -42 -42 -62 -88 -96 -85 -69 -60 -67 -88 -112 -130 -135 -129 -115 -103 -95 -97 -105 -106 -88 -65 -54 -60 -69 -59 -29 3 28 41 41 26 4 -4 9 34 47 38 20 3 -9 -23 -32 -32 -24 -15 -10 -4 1 0 -14 -26 -27 -22 -23 -33 -37 -29 -18 -17 -25 -31 +34 59 71 68 58 48 44 44 43 44 58 85 109 117 117 130 152 156 123 70 35 34 50 53 32 5 -5 5 18 22 15 16 26 31 25 20 31 58 86 99 99 86 62 36 19 7 -11 -30 -34 -21 -16 -39 -75 -89 -74 -56 -46 -33 -6 13 7 -11 -3 31 58 53 29 14 11 15 22 31 30 8 -25 -34 -8 28 34 4 -31 -45 -46 -56 -82 -107 -114 -115 -122 -134 -135 -122 -116 -130 -148 -140 -103 -62 -34 -17 5 30 48 60 75 97 113 115 113 117 122 121 113 106 106 104 91 74 70 86 108 120 109 85 66 58 48 25 7 13 37 47 22 -20 -43 -43 -43 -60 -79 -87 -89 -95 -105 -109 -105 -103 -103 -95 -76 -55 -41 -27 -7 13 23 29 39 53 53 44 42 67 102 114 92 59 44 52 63 63 55 52 55 53 40 27 23 19 -2 -39 -69 -70 -55 -53 -69 -79 -69 -48 -45 -63 -87 -101 -101 -88 -66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 2 4 -2 -3 5 15 26 36 49 60 65 58 41 25 22 32 43 42 36 33 37 38 23 -7 -37 -40 -4 43 59 34 5 12 45 53 23 -4 9 41 40 -3 -42 -39 -8 13 16 17 24 28 26 26 34 40 38 39 52 69 78 71 55 40 19 -4 -23 -29 -21 -9 1 2 -7 -30 -57 -77 -79 -62 -38 -21 -17 -20 -30 -45 -63 -69 -53 -27 -7 0 5 10 -5 -37 -67 -71 -54 -43 -51 -62 -61 -53 -48 -40 -17 21 54 69 74 74 68 56 56 84 132 167 168 150 139 149 158 148 119 95 93 105 106 78 31 -2 -10 -11 -35 -78 -113 -123 -127 -149 -183 -199 -182 -155 -155 -179 -202 -207 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 6 18 16 17 44 97 147 174 174 158 139 127 138 169 205 215 199 
179 168 160 145 134 137 139 116 74 45 53 75 80 67 62 +1 +-26 -14 6 32 57 67 58 52 75 118 155 162 148 136 134 139 141 136 120 91 57 34 27 33 43 47 37 9 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 10 44 53 60 75 79 68 60 73 91 88 58 30 23 31 33 20 1 -7 -2 7 13 14 16 17 16 7 4 21 47 57 36 13 16 37 41 12 -29 -48 -36 -9 15 24 24 21 21 22 18 7 3 9 19 18 6 -17 -48 -77 -85 -62 -25 -2 0 0 5 -1 -35 -76 -96 -85 -66 -63 -73 -80 -75 -63 -53 -57 -77 -105 -119 -112 -98 -91 -88 -78 -56 -34 -18 -6 13 37 53 51 40 31 32 38 37 29 28 47 73 93 100 106 112 106 84 51 28 +42 67 102 114 92 59 44 52 63 63 55 52 55 53 40 27 23 19 -2 -39 -69 -70 -55 -53 -69 -79 -69 -48 -45 -63 -87 -101 -101 -88 -66 -42 -27 -30 -47 -66 -77 -83 -81 -74 -69 -65 -57 -38 -14 2 4 -2 -3 5 15 26 36 49 60 65 58 41 25 22 32 43 42 36 33 37 38 23 -7 -37 -40 -4 43 59 34 5 12 45 53 23 -4 9 41 40 -3 -42 -39 -8 13 16 17 24 28 26 26 34 40 38 39 52 69 78 71 55 40 19 -4 -23 -29 -21 -9 1 2 -7 -30 -57 -77 -79 -62 -38 -21 -17 -20 -30 -45 -63 -69 -53 -27 -7 0 5 10 -5 -37 -67 -71 -54 -43 -51 -62 -61 -53 -48 -40 -17 21 54 69 74 74 68 56 56 84 132 167 168 150 139 149 158 148 119 95 93 105 106 78 31 -2 -10 -11 -35 -78 -113 -123 -127 -149 -183 -199 -182 -155 -155 -179 -202 -207 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 6 18 16 17 44 97 147 174 174 158 139 127 138 169 205 215 199 179 168 160 145 134 137 139 116 74 45 53 75 80 67 62 76 86 71 41 23 30 51 66 59 28 -6 -28 -30 -29 -38 -59 -69 -59 -42 -42 -62 -88 -96 -85 -69 -60 -67 -88 -112 -130 -135 -129 -115 -103 -95 -97 -105 -106 -88 -65 -54 -60 -69 -59 -29 3 28 41 41 26 4 -4 9 34 47 38 20 3 -9 -23 -32 -32 -24 -15 -10 -4 1 0 -14 -26 -27 -22 -23 -33 -37 -29 -18 -17 -25 -31 -26 -14 6 32 57 67 58 52 75 118 155 162 148 136 134 139 141 136 120 91 57 34 27 33 43 47 37 9 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 10 44 53 60 75 79 68 60 73 91 88 58 30 23 31 33 20 1 -7 -2 7 13 14 16 17 16 7 4 21 47 +1 +22 26 30 29 23 15 8 6 4 -3 -16 -29 -41 -57 -81 -95 -89 -73 -67 -87 -112 -122 -117 -114 -119 -121 -106 -87 -86 -109 -133 -123 -75 -13 28 35 22 9 10 24 48 77 100 112 108 102 108 138 177 198 187 157 144 160 185 189 168 146 139 137 125 99 71 51 36 30 30 24 -1 -44 -77 -83 -71 -58 -55 -62 -85 -124 -163 -171 -139 -91 -62 -66 -85 -98 -92 -78 -67 -69 -78 -85 -75 -48 -14 20 41 50 52 57 68 72 56 21 -9 -11 17 55 82 87 80 73 72 74 77 81 88 93 89 72 55 46 51 62 69 75 79 71 46 11 -20 -44 -60 -68 -64 -53 -51 -62 -74 -72 -67 -73 -90 -103 -99 -89 -84 -77 -62 -41 -31 -31 -22 0 23 25 12 7 20 40 +167 168 150 139 149 158 148 119 95 93 105 106 78 31 -2 -10 -11 -35 -78 -113 -123 -127 -149 -183 -199 -182 -155 -155 -179 -202 -207 -197 -181 -161 -138 -122 -125 -142 -148 -137 -117 -104 -103 -98 -83 -60 -39 -16 6 18 16 17 44 97 147 174 174 158 139 127 138 169 205 215 199 179 168 160 145 134 137 139 116 74 45 53 75 80 67 62 76 86 71 41 23 30 51 66 59 28 -6 -28 -30 -29 -38 -59 -69 -59 -42 -42 -62 -88 -96 -85 -69 -60 -67 -88 -112 -130 -135 -129 -115 -103 -95 -97 -105 -106 -88 -65 -54 -60 -69 -59 -29 3 28 41 41 26 4 -4 9 34 47 38 20 3 -9 -23 -32 -32 -24 -15 -10 -4 1 0 -14 -26 -27 -22 -23 -33 -37 -29 -18 -17 -25 -31 -26 -14 6 32 57 67 58 52 75 118 155 162 148 136 134 139 141 136 120 91 57 34 27 33 43 47 37 9 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 10 44 53 60 75 79 68 60 73 91 88 58 30 23 31 33 20 1 -7 -2 7 13 14 16 17 16 7 4 21 47 57 36 13 16 37 41 12 -29 -48 -36 -9 15 24 24 21 
21 22 18 7 3 9 19 18 6 -17 -48 -77 -85 -62 -25 -2 0 0 5 -1 -35 -76 -96 -85 -66 -63 -73 -80 -75 -63 -53 -57 -77 -105 -119 -112 -98 -91 -88 -78 -56 -34 -18 -6 13 37 53 51 40 31 32 38 37 29 28 47 73 93 100 106 112 106 84 51 28 22 26 30 29 23 15 8 6 4 -3 -16 -29 -41 -57 -81 -95 -89 -73 -67 -87 -112 -122 -117 -114 -119 -121 -106 -87 -86 -109 -133 -123 -75 -13 28 35 22 9 10 24 48 77 100 112 108 102 108 138 177 198 187 157 144 160 185 189 168 146 139 137 125 99 71 51 36 30 30 24 -1 -44 -77 -83 -71 -58 -55 -62 -85 -124 -163 -171 +1 +48 44 41 47 54 54 44 31 22 5 -24 -63 -93 -107 -108 -106 -94 -62 -24 -4 -14 -28 -20 2 3 -17 -32 -16 8 6 -16 -21 9 45 48 22 4 13 31 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 8 12 -16 -50 -56 -26 23 61 72 58 41 37 49 62 61 47 37 45 63 74 66 45 20 -1 -14 -22 -29 -38 -53 -68 -75 -72 -58 -43 -28 -15 -6 0 -5 -14 -21 -21 -19 -18 -9 9 30 36 26 12 6 5 3 5 21 39 35 19 23 62 115 146 143 123 112 114 124 125 105 61 27 31 61 77 57 16 -17 -35 -48 -51 -51 -60 -83 -100 -93 -72 -67 -77 -67 -33 -8 -19 -48 -60 -56 -67 -96 -114 -100 -69 -47 -44 -52 -61 -60 -41 -18 -11 -17 -17 +-26 -14 6 32 57 67 58 52 75 118 155 162 148 136 134 139 141 136 120 91 57 34 27 33 43 47 37 9 -26 -46 -44 -32 -33 -51 -71 -81 -84 -94 -104 -99 -84 -79 -89 -99 -94 -85 -91 -106 -96 -50 10 44 53 60 75 79 68 60 73 91 88 58 30 23 31 33 20 1 -7 -2 7 13 14 16 17 16 7 4 21 47 57 36 13 16 37 41 12 -29 -48 -36 -9 15 24 24 21 21 22 18 7 3 9 19 18 6 -17 -48 -77 -85 -62 -25 -2 0 0 5 -1 -35 -76 -96 -85 -66 -63 -73 -80 -75 -63 -53 -57 -77 -105 -119 -112 -98 -91 -88 -78 -56 -34 -18 -6 13 37 53 51 40 31 32 38 37 29 28 47 73 93 100 106 112 106 84 51 28 22 26 30 29 23 15 8 6 4 -3 -16 -29 -41 -57 -81 -95 -89 -73 -67 -87 -112 -122 -117 -114 -119 -121 -106 -87 -86 -109 -133 -123 -75 -13 28 35 22 9 10 24 48 77 100 112 108 102 108 138 177 198 187 157 144 160 185 189 168 146 139 137 125 99 71 51 36 30 30 24 -1 -44 -77 -83 -71 -58 -55 -62 -85 -124 -163 -171 -139 -91 -62 -66 -85 -98 -92 -78 -67 -69 -78 -85 -75 -48 -14 20 41 50 52 57 68 72 56 21 -9 -11 17 55 82 87 80 73 72 74 77 81 88 93 89 72 55 46 51 62 69 75 79 71 46 11 -20 -44 -60 -68 -64 -53 -51 -62 -74 -72 -67 -73 -90 -103 -99 -89 -84 -77 -62 -41 -31 -31 -22 0 23 25 12 7 20 40 48 44 41 47 54 54 44 31 22 5 -24 -63 -93 -107 -108 -106 -94 -62 -24 -4 -14 -28 -20 2 3 -17 -32 -16 8 6 -16 -21 9 45 48 22 4 13 31 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 8 12 -16 -50 -56 -26 23 61 72 58 41 37 49 62 61 47 37 45 63 74 66 45 20 -1 -14 -22 -29 -38 -53 -68 +1 +6 29 28 7 1 23 47 47 28 11 16 30 35 31 30 47 74 82 55 13 -7 9 27 17 -13 -28 -17 7 26 27 12 -14 -46 -70 -82 -80 -65 -43 -29 -42 -70 -74 -26 54 105 96 46 6 4 39 78 92 72 46 43 60 57 14 -50 -86 -71 -31 -3 -5 -28 -46 -45 -25 -2 7 4 -3 -5 -4 -5 -9 -13 -18 -24 -21 -1 34 58 52 24 10 29 60 71 60 58 82 113 121 104 93 96 95 72 48 50 69 75 55 30 18 12 -2 -24 -40 -45 -44 -46 -50 -60 -79 -98 -113 -127 -143 -149 -128 -89 -58 -51 -58 -63 -63 -71 -84 -94 -97 -100 -97 -81 -54 -30 -12 4 16 16 4 3 29 63 77 73 75 92 104 96 83 85 94 88 68 53 55 64 71 +22 26 30 29 23 15 8 6 4 -3 -16 -29 -41 -57 -81 -95 -89 -73 -67 -87 -112 -122 -117 -114 -119 -121 -106 -87 -86 -109 -133 -123 -75 -13 28 35 22 9 10 24 48 77 100 112 108 102 108 138 177 198 187 157 144 160 185 189 168 146 139 137 125 99 71 51 36 30 30 24 -1 -44 -77 -83 -71 -58 -55 -62 -85 -124 -163 -171 -139 -91 -62 -66 -85 -98 -92 -78 -67 -69 -78 -85 -75 -48 -14 20 41 50 52 57 68 72 56 21 -9 -11 17 55 82 87 80 73 72 74 77 81 88 93 89 72 55 46 51 62 69 75 79 71 46 11 -20 -44 -60 -68 -64 -53 -51 -62 -74 -72 -67 -73 -90 -103 
-99 -89 -84 -77 -62 -41 -31 -31 -22 0 23 25 12 7 20 40 48 44 41 47 54 54 44 31 22 5 -24 -63 -93 -107 -108 -106 -94 -62 -24 -4 -14 -28 -20 2 3 -17 -32 -16 8 6 -16 -21 9 45 48 22 4 13 31 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 8 12 -16 -50 -56 -26 23 61 72 58 41 37 49 62 61 47 37 45 63 74 66 45 20 -1 -14 -22 -29 -38 -53 -68 -75 -72 -58 -43 -28 -15 -6 0 -5 -14 -21 -21 -19 -18 -9 9 30 36 26 12 6 5 3 5 21 39 35 19 23 62 115 146 143 123 112 114 124 125 105 61 27 31 61 77 57 16 -17 -35 -48 -51 -51 -60 -83 -100 -93 -72 -67 -77 -67 -33 -8 -19 -48 -60 -56 -67 -96 -114 -100 -69 -47 -44 -52 -61 -60 -41 -18 -11 -17 -17 6 29 28 7 1 23 47 47 28 11 16 30 35 31 30 47 74 82 55 13 -7 9 27 17 -13 -28 -17 7 26 27 12 -14 -46 -70 -82 -80 -65 -43 -29 -42 -70 -74 -26 54 105 96 46 6 4 39 78 92 72 46 43 60 57 14 -50 -86 -71 -31 -3 -5 -28 -46 -45 -25 -2 7 4 -3 -5 -4 -5 -9 -13 -18 -24 -21 +1 +81 92 85 56 22 2 -11 -39 -81 -113 -123 -124 -132 -141 -144 -146 -162 -183 -182 -153 -120 -117 -136 -140 -108 -59 -30 -32 -39 -22 16 48 59 65 76 98 126 160 206 256 288 294 290 289 291 283 266 250 237 217 180 140 113 100 80 36 -19 -56 -57 -47 -61 -104 -147 -170 -178 -193 -210 -212 -198 -194 -219 -248 -248 -219 -195 -196 -209 -211 -200 -185 -171 -149 -127 -110 -105 -97 -78 -49 -27 -24 -29 -24 4 45 80 97 105 108 111 110 114 135 169 199 210 205 201 200 201 199 190 174 156 142 132 126 116 96 67 28 -12 -41 -49 -48 -56 -72 -90 -99 -104 -110 -111 -106 -98 -95 -91 -83 -79 -83 -88 -74 -47 -26 -27 -33 -29 -10 8 19 29 44 54 57 63 80 104 122 124 122 124 128 +48 44 41 47 54 54 44 31 22 5 -24 -63 -93 -107 -108 -106 -94 -62 -24 -4 -14 -28 -20 2 3 -17 -32 -16 8 6 -16 -21 9 45 48 22 4 13 31 28 2 -28 -41 -41 -42 -55 -75 -83 -66 -26 8 12 -16 -50 -56 -26 23 61 72 58 41 37 49 62 61 47 37 45 63 74 66 45 20 -1 -14 -22 -29 -38 -53 -68 -75 -72 -58 -43 -28 -15 -6 0 -5 -14 -21 -21 -19 -18 -9 9 30 36 26 12 6 5 3 5 21 39 35 19 23 62 115 146 143 123 112 114 124 125 105 61 27 31 61 77 57 16 -17 -35 -48 -51 -51 -60 -83 -100 -93 -72 -67 -77 -67 -33 -8 -19 -48 -60 -56 -67 -96 -114 -100 -69 -47 -44 -52 -61 -60 -41 -18 -11 -17 -17 6 29 28 7 1 23 47 47 28 11 16 30 35 31 30 47 74 82 55 13 -7 9 27 17 -13 -28 -17 7 26 27 12 -14 -46 -70 -82 -80 -65 -43 -29 -42 -70 -74 -26 54 105 96 46 6 4 39 78 92 72 46 43 60 57 14 -50 -86 -71 -31 -3 -5 -28 -46 -45 -25 -2 7 4 -3 -5 -4 -5 -9 -13 -18 -24 -21 -1 34 58 52 24 10 29 60 71 60 58 82 113 121 104 93 96 95 72 48 50 69 75 55 30 18 12 -2 -24 -40 -45 -44 -46 -50 -60 -79 -98 -113 -127 -143 -149 -128 -89 -58 -51 -58 -63 -63 -71 -84 -94 -97 -100 -97 -81 -54 -30 -12 4 16 16 4 3 29 63 77 73 75 92 104 96 83 85 94 88 68 53 55 64 71 81 92 85 56 22 2 -11 -39 -81 -113 -123 -124 -132 -141 -144 -146 -162 -183 -182 -153 -120 -117 -136 -140 -108 -59 -30 -32 -39 -22 16 48 59 65 76 98 126 160 206 256 288 294 290 289 291 283 266 250 237 217 180 140 113 100 80 36 -19 -56 -57 -47 -61 -104 -147 -170 -178 -193 -210 -212 -198 -194 -219 -248 -248 -219 -195 -196 -209 -211 -200 -185 +1 +119 92 56 33 30 32 23 2 -21 -36 -48 -60 -66 -67 -73 -94 -118 -130 -126 -120 -123 -122 -111 -97 -94 -101 -101 -88 -77 -76 -71 -49 -17 -4 -15 -22 6 67 118 129 106 83 87 110 127 122 104 97 112 126 118 86 57 56 74 78 54 22 6 2 -8 -29 -47 -56 -64 -84 -106 -117 -118 -123 -140 -159 -171 -179 -185 -184 -168 -149 -137 -123 -86 -28 26 51 53 59 81 110 131 144 154 162 161 153 143 140 146 145 124 89 57 47 56 61 44 12 -12 -21 -21 -28 -41 -55 -68 -77 -78 -70 -63 -71 -94 -118 -133 -144 -154 -165 -161 -142 -121 -108 -101 -88 -65 -41 -24 -19 -13 7 42 81 107 117 118 119 118 112 103 102 116 129 
127 108 94 96 109 117 112 98 79 64 51 41 +6 29 28 7 1 23 47 47 28 11 16 30 35 31 30 47 74 82 55 13 -7 9 27 17 -13 -28 -17 7 26 27 12 -14 -46 -70 -82 -80 -65 -43 -29 -42 -70 -74 -26 54 105 96 46 6 4 39 78 92 72 46 43 60 57 14 -50 -86 -71 -31 -3 -5 -28 -46 -45 -25 -2 7 4 -3 -5 -4 -5 -9 -13 -18 -24 -21 -1 34 58 52 24 10 29 60 71 60 58 82 113 121 104 93 96 95 72 48 50 69 75 55 30 18 12 -2 -24 -40 -45 -44 -46 -50 -60 -79 -98 -113 -127 -143 -149 -128 -89 -58 -51 -58 -63 -63 -71 -84 -94 -97 -100 -97 -81 -54 -30 -12 4 16 16 4 3 29 63 77 73 75 92 104 96 83 85 94 88 68 53 55 64 71 81 92 85 56 22 2 -11 -39 -81 -113 -123 -124 -132 -141 -144 -146 -162 -183 -182 -153 -120 -117 -136 -140 -108 -59 -30 -32 -39 -22 16 48 59 65 76 98 126 160 206 256 288 294 290 289 291 283 266 250 237 217 180 140 113 100 80 36 -19 -56 -57 -47 -61 -104 -147 -170 -178 -193 -210 -212 -198 -194 -219 -248 -248 -219 -195 -196 -209 -211 -200 -185 -171 -149 -127 -110 -105 -97 -78 -49 -27 -24 -29 -24 4 45 80 97 105 108 111 110 114 135 169 199 210 205 201 200 201 199 190 174 156 142 132 126 116 96 67 28 -12 -41 -49 -48 -56 -72 -90 -99 -104 -110 -111 -106 -98 -95 -91 -83 -79 -83 -88 -74 -47 -26 -27 -33 -29 -10 8 19 29 44 54 57 63 80 104 122 124 122 124 128 119 92 56 33 30 32 23 2 -21 -36 -48 -60 -66 -67 -73 -94 -118 -130 -126 -120 -123 -122 -111 -97 -94 -101 -101 -88 -77 -76 -71 -49 -17 -4 -15 -22 6 67 118 129 106 83 87 110 127 122 104 97 112 126 118 86 57 56 74 78 54 22 6 2 -8 -29 -47 -56 -64 -84 -106 -117 -118 -123 -140 -159 -171 -179 -185 -184 -168 -149 -137 -123 +1 +37 31 14 -7 -20 -14 1 3 -16 -37 -41 -26 -17 -28 -51 -62 -50 -30 -21 -35 -57 -65 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 11 34 45 42 37 39 44 40 22 9 15 35 46 35 17 16 37 51 27 -33 -85 -91 -59 -38 -52 -81 -84 -61 -47 -67 -99 -109 -97 -83 -83 -82 -73 -62 -49 -33 -10 6 12 14 26 35 26 4 -5 11 36 48 38 22 21 38 58 62 42 13 2 10 23 25 16 8 1 -12 -28 -27 -2 31 49 42 20 1 -8 -4 3 12 18 21 19 8 -7 -18 -16 -4 5 9 16 33 53 58 48 43 58 83 90 70 48 43 54 61 58 62 77 87 71 41 24 38 63 69 47 16 0 -2 -3 -15 -38 -57 -67 -68 -76 -96 -122 +81 92 85 56 22 2 -11 -39 -81 -113 -123 -124 -132 -141 -144 -146 -162 -183 -182 -153 -120 -117 -136 -140 -108 -59 -30 -32 -39 -22 16 48 59 65 76 98 126 160 206 256 288 294 290 289 291 283 266 250 237 217 180 140 113 100 80 36 -19 -56 -57 -47 -61 -104 -147 -170 -178 -193 -210 -212 -198 -194 -219 -248 -248 -219 -195 -196 -209 -211 -200 -185 -171 -149 -127 -110 -105 -97 -78 -49 -27 -24 -29 -24 4 45 80 97 105 108 111 110 114 135 169 199 210 205 201 200 201 199 190 174 156 142 132 126 116 96 67 28 -12 -41 -49 -48 -56 -72 -90 -99 -104 -110 -111 -106 -98 -95 -91 -83 -79 -83 -88 -74 -47 -26 -27 -33 -29 -10 8 19 29 44 54 57 63 80 104 122 124 122 124 128 119 92 56 33 30 32 23 2 -21 -36 -48 -60 -66 -67 -73 -94 -118 -130 -126 -120 -123 -122 -111 -97 -94 -101 -101 -88 -77 -76 -71 -49 -17 -4 -15 -22 6 67 118 129 106 83 87 110 127 122 104 97 112 126 118 86 57 56 74 78 54 22 6 2 -8 -29 -47 -56 -64 -84 -106 -117 -118 -123 -140 -159 -171 -179 -185 -184 -168 -149 -137 -123 -86 -28 26 51 53 59 81 110 131 144 154 162 161 153 143 140 146 145 124 89 57 47 56 61 44 12 -12 -21 -21 -28 -41 -55 -68 -77 -78 -70 -63 -71 -94 -118 -133 -144 -154 -165 -161 -142 -121 -108 -101 -88 -65 -41 -24 -19 -13 7 42 81 107 117 118 119 118 112 103 102 116 129 127 108 94 96 109 117 112 98 79 64 51 41 37 31 14 -7 -20 -14 1 3 -16 -37 -41 -26 -17 -28 -51 -62 -50 -30 -21 -35 -57 -65 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 11 34 45 42 37 39 44 40 22 9 15 35 46 35 17 16 37 51 27 -33 -85 -91 -59 
-38 -52 -81 -84 -61 -47 -67 -99 -109 -97 -83 -83 -82 -73 -62 -49 -33 -10 6 12 14 26 35 26 +1 +-138 -132 -114 -96 -84 -70 -49 -27 -12 -11 -11 3 34 68 89 94 91 92 94 88 74 55 35 13 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 -141 -158 -158 -139 -119 -120 -128 -117 -83 -48 -36 -33 -15 18 42 48 58 83 110 118 112 118 139 159 160 154 149 148 151 164 182 190 171 141 117 105 91 72 62 61 52 18 -21 -40 -46 -58 -80 -96 -98 -97 -95 -80 -44 -12 -6 -10 -1 31 58 73 92 121 145 137 110 95 97 103 99 89 84 91 100 108 108 98 83 70 60 42 13 -17 -32 -37 -47 -72 -102 -122 -131 -141 -165 -205 -238 -245 -228 -213 -226 -241 -222 -171 -133 -137 -153 -139 -93 -61 -65 -76 -60 -28 -8 -2 12 46 +119 92 56 33 30 32 23 2 -21 -36 -48 -60 -66 -67 -73 -94 -118 -130 -126 -120 -123 -122 -111 -97 -94 -101 -101 -88 -77 -76 -71 -49 -17 -4 -15 -22 6 67 118 129 106 83 87 110 127 122 104 97 112 126 118 86 57 56 74 78 54 22 6 2 -8 -29 -47 -56 -64 -84 -106 -117 -118 -123 -140 -159 -171 -179 -185 -184 -168 -149 -137 -123 -86 -28 26 51 53 59 81 110 131 144 154 162 161 153 143 140 146 145 124 89 57 47 56 61 44 12 -12 -21 -21 -28 -41 -55 -68 -77 -78 -70 -63 -71 -94 -118 -133 -144 -154 -165 -161 -142 -121 -108 -101 -88 -65 -41 -24 -19 -13 7 42 81 107 117 118 119 118 112 103 102 116 129 127 108 94 96 109 117 112 98 79 64 51 41 37 31 14 -7 -20 -14 1 3 -16 -37 -41 -26 -17 -28 -51 -62 -50 -30 -21 -35 -57 -65 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 11 34 45 42 37 39 44 40 22 9 15 35 46 35 17 16 37 51 27 -33 -85 -91 -59 -38 -52 -81 -84 -61 -47 -67 -99 -109 -97 -83 -83 -82 -73 -62 -49 -33 -10 6 12 14 26 35 26 4 -5 11 36 48 38 22 21 38 58 62 42 13 2 10 23 25 16 8 1 -12 -28 -27 -2 31 49 42 20 1 -8 -4 3 12 18 21 19 8 -7 -18 -16 -4 5 9 16 33 53 58 48 43 58 83 90 70 48 43 54 61 58 62 77 87 71 41 24 38 63 69 47 16 0 -2 -3 -15 -38 -57 -67 -68 -76 -96 -122 -138 -132 -114 -96 -84 -70 -49 -27 -12 -11 -11 3 34 68 89 94 91 92 94 88 74 55 35 13 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 -141 -158 -158 -139 -119 -120 -128 -117 -83 -48 -36 -33 -15 18 42 48 58 83 110 118 112 118 139 159 160 154 149 148 151 164 182 190 171 141 +1 +81 99 108 125 147 153 147 145 155 163 153 137 129 130 120 92 60 40 28 15 -12 -41 -65 -81 -93 -106 -124 -141 -151 -152 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 22 65 75 61 57 84 129 165 179 179 179 197 236 279 306 304 279 242 208 184 176 182 187 174 137 91 58 50 46 21 -29 -76 -88 -70 -60 -76 -101 -115 -119 -128 -138 -135 -122 -123 -142 -163 -169 -176 -189 -204 -203 -195 -190 -187 -166 -129 -99 -82 -59 -17 25 39 27 23 51 98 138 157 165 182 208 225 214 177 140 122 117 104 76 54 60 87 109 103 76 53 53 69 75 54 12 -25 -42 -40 -34 -35 -45 -72 -115 -159 -179 -177 -162 -155 -151 -137 -109 -71 -37 -24 -38 -64 +37 31 14 -7 -20 -14 1 3 -16 -37 -41 -26 -17 -28 -51 -62 -50 -30 -21 -35 -57 -65 -51 -25 -10 -15 -39 -60 -70 -63 -50 -35 -14 11 34 45 42 37 39 44 40 22 9 15 35 46 35 17 16 37 51 27 -33 -85 -91 -59 -38 -52 -81 -84 -61 -47 -67 -99 -109 -97 -83 -83 -82 -73 -62 -49 -33 -10 6 12 14 26 35 26 4 -5 11 36 48 38 22 21 38 58 62 42 13 2 10 23 25 16 8 1 -12 -28 -27 -2 31 49 42 20 1 -8 -4 3 12 18 21 19 8 -7 -18 -16 -4 5 9 16 33 53 58 48 43 58 83 90 70 48 43 54 61 58 62 77 87 71 41 24 38 63 69 47 16 0 -2 -3 -15 -38 -57 -67 -68 -76 -96 -122 -138 -132 -114 -96 -84 -70 -49 -27 -12 -11 -11 3 34 68 89 94 91 92 94 88 74 55 35 13 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 
-141 -158 -158 -139 -119 -120 -128 -117 -83 -48 -36 -33 -15 18 42 48 58 83 110 118 112 118 139 159 160 154 149 148 151 164 182 190 171 141 117 105 91 72 62 61 52 18 -21 -40 -46 -58 -80 -96 -98 -97 -95 -80 -44 -12 -6 -10 -1 31 58 73 92 121 145 137 110 95 97 103 99 89 84 91 100 108 108 98 83 70 60 42 13 -17 -32 -37 -47 -72 -102 -122 -131 -141 -165 -205 -238 -245 -228 -213 -226 -241 -222 -171 -133 -137 -153 -139 -93 -61 -65 -76 -60 -28 -8 -2 12 46 81 99 108 125 147 153 147 145 155 163 153 137 129 130 120 92 60 40 28 15 -12 -41 -65 -81 -93 -106 -124 -141 -151 -152 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 22 65 75 61 57 84 129 165 179 179 179 197 236 279 306 304 279 242 208 184 176 182 187 174 137 91 58 50 46 21 -29 -76 +1 +-72 -42 4 29 20 1 1 12 13 0 -8 6 18 9 -18 -30 -12 15 14 -20 -61 -77 -66 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 1 30 64 90 105 110 119 139 167 187 190 182 181 203 232 236 203 158 133 134 140 136 130 132 136 119 79 42 29 37 42 32 22 25 34 23 -14 -66 -106 -129 -143 -152 -158 -161 -166 -175 -184 -189 -187 -172 -147 -127 -121 -127 -121 -90 -51 -38 -51 -61 -43 -11 -3 -25 -45 -36 -9 1 -18 -38 -30 -2 17 11 -3 -3 10 15 4 -4 7 31 40 18 -12 -24 -19 -25 -55 -88 -93 -69 -44 -40 -47 -40 -24 -20 -38 -55 -46 -19 5 12 14 20 32 48 66 82 89 85 88 105 123 122 103 83 72 +-138 -132 -114 -96 -84 -70 -49 -27 -12 -11 -11 3 34 68 89 94 91 92 94 88 74 55 35 13 -10 -28 -38 -40 -41 -50 -66 -81 -83 -78 -73 -76 -84 -90 -89 -84 -83 -90 -102 -112 -117 -124 -141 -158 -158 -139 -119 -120 -128 -117 -83 -48 -36 -33 -15 18 42 48 58 83 110 118 112 118 139 159 160 154 149 148 151 164 182 190 171 141 117 105 91 72 62 61 52 18 -21 -40 -46 -58 -80 -96 -98 -97 -95 -80 -44 -12 -6 -10 -1 31 58 73 92 121 145 137 110 95 97 103 99 89 84 91 100 108 108 98 83 70 60 42 13 -17 -32 -37 -47 -72 -102 -122 -131 -141 -165 -205 -238 -245 -228 -213 -226 -241 -222 -171 -133 -137 -153 -139 -93 -61 -65 -76 -60 -28 -8 -2 12 46 81 99 108 125 147 153 147 145 155 163 153 137 129 130 120 92 60 40 28 15 -12 -41 -65 -81 -93 -106 -124 -141 -151 -152 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 22 65 75 61 57 84 129 165 179 179 179 197 236 279 306 304 279 242 208 184 176 182 187 174 137 91 58 50 46 21 -29 -76 -88 -70 -60 -76 -101 -115 -119 -128 -138 -135 -122 -123 -142 -163 -169 -176 -189 -204 -203 -195 -190 -187 -166 -129 -99 -82 -59 -17 25 39 27 23 51 98 138 157 165 182 208 225 214 177 140 122 117 104 76 54 60 87 109 103 76 53 53 69 75 54 12 -25 -42 -40 -34 -35 -45 -72 -115 -159 -179 -177 -162 -155 -151 -137 -109 -71 -37 -24 -38 -64 -72 -42 4 29 20 1 1 12 13 0 -8 6 18 9 -18 -30 -12 15 14 -20 -61 -77 -66 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 1 30 64 90 105 110 119 139 167 187 190 182 181 203 232 236 203 158 133 134 140 136 130 132 136 119 79 42 29 37 42 32 22 25 34 23 -14 -66 -106 +1 +67 65 72 87 90 70 39 22 25 34 43 53 57 44 14 -2 13 41 51 33 14 18 34 44 38 29 27 32 44 50 46 39 42 63 78 66 28 -5 -15 -15 -32 -57 -63 -44 -26 -39 -70 -89 -76 -55 -49 -58 -62 -55 -50 -69 -101 -118 -108 -88 -83 -90 -84 -57 -38 -47 -73 -75 -48 -20 -22 -41 -50 -36 -17 -10 -13 -21 -32 -34 -16 18 45 43 22 16 34 55 64 65 76 91 93 83 79 91 100 83 44 4 -10 2 24 38 35 16 -10 -28 -28 -19 -10 -14 -26 -35 -39 -34 -15 6 17 0 -38 -69 -68 -37 3 29 30 5 -31 -48 -29 9 25 -11 -74 -117 -115 -74 -33 -27 -58 -98 -111 -88 -54 -36 -37 -32 -9 22 53 79 95 95 85 84 103 126 138 138 +81 99 108 125 147 153 147 145 155 163 153 137 129 130 120 
92 60 40 28 15 -12 -41 -65 -81 -93 -106 -124 -141 -151 -152 -157 -178 -206 -213 -187 -148 -128 -131 -136 -134 -130 -129 -121 -97 -70 -58 -52 -28 22 65 75 61 57 84 129 165 179 179 179 197 236 279 306 304 279 242 208 184 176 182 187 174 137 91 58 50 46 21 -29 -76 -88 -70 -60 -76 -101 -115 -119 -128 -138 -135 -122 -123 -142 -163 -169 -176 -189 -204 -203 -195 -190 -187 -166 -129 -99 -82 -59 -17 25 39 27 23 51 98 138 157 165 182 208 225 214 177 140 122 117 104 76 54 60 87 109 103 76 53 53 69 75 54 12 -25 -42 -40 -34 -35 -45 -72 -115 -159 -179 -177 -162 -155 -151 -137 -109 -71 -37 -24 -38 -64 -72 -42 4 29 20 1 1 12 13 0 -8 6 18 9 -18 -30 -12 15 14 -20 -61 -77 -66 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 1 30 64 90 105 110 119 139 167 187 190 182 181 203 232 236 203 158 133 134 140 136 130 132 136 119 79 42 29 37 42 32 22 25 34 23 -14 -66 -106 -129 -143 -152 -158 -161 -166 -175 -184 -189 -187 -172 -147 -127 -121 -127 -121 -90 -51 -38 -51 -61 -43 -11 -3 -25 -45 -36 -9 1 -18 -38 -30 -2 17 11 -3 -3 10 15 4 -4 7 31 40 18 -12 -24 -19 -25 -55 -88 -93 -69 -44 -40 -47 -40 -24 -20 -38 -55 -46 -19 5 12 14 20 32 48 66 82 89 85 88 105 123 122 103 83 72 67 65 72 87 90 70 39 22 25 34 43 53 57 44 14 -2 13 41 51 33 14 18 34 44 38 29 27 32 44 50 46 39 42 63 78 66 28 -5 -15 -15 -32 -57 -63 -44 -26 -39 -70 -89 -76 -55 -49 -58 -62 -55 -50 -69 -101 -118 -108 -88 -83 -90 -84 -57 -38 -47 -73 -75 -48 -20 -22 -41 -50 -36 -17 -10 -13 -21 -32 -34 +1 +141 144 142 147 171 192 180 127 78 66 77 72 37 2 -13 -18 -31 -49 -51 -35 -11 6 3 -16 -32 -22 6 13 -31 -92 -115 -86 -52 -64 -107 -127 -99 -55 -46 -82 -124 -134 -105 -63 -50 -73 -104 -108 -74 -28 -4 -11 -21 -16 3 19 17 15 28 52 63 38 -7 -41 -38 -10 9 1 -23 -32 -14 20 45 41 10 -28 -57 -66 -61 -62 -77 -97 -107 -102 -87 -70 -52 -39 -42 -56 -71 -61 -32 -7 -4 -20 -34 -36 -31 -21 -18 -20 -22 -16 8 30 31 15 9 31 63 86 105 132 163 170 148 132 154 206 235 212 159 128 139 168 178 157 114 74 49 40 40 42 46 48 44 28 6 -8 -7 0 -6 -34 -72 -100 -101 -77 -59 -76 -125 -165 -159 -120 -89 -86 -92 -72 -21 33 +-72 -42 4 29 20 1 1 12 13 0 -8 6 18 9 -18 -30 -12 15 14 -20 -61 -77 -66 -45 -40 -57 -85 -103 -98 -77 -55 -48 -49 -48 -37 -25 -19 -19 -20 -20 -15 1 30 64 90 105 110 119 139 167 187 190 182 181 203 232 236 203 158 133 134 140 136 130 132 136 119 79 42 29 37 42 32 22 25 34 23 -14 -66 -106 -129 -143 -152 -158 -161 -166 -175 -184 -189 -187 -172 -147 -127 -121 -127 -121 -90 -51 -38 -51 -61 -43 -11 -3 -25 -45 -36 -9 1 -18 -38 -30 -2 17 11 -3 -3 10 15 4 -4 7 31 40 18 -12 -24 -19 -25 -55 -88 -93 -69 -44 -40 -47 -40 -24 -20 -38 -55 -46 -19 5 12 14 20 32 48 66 82 89 85 88 105 123 122 103 83 72 67 65 72 87 90 70 39 22 25 34 43 53 57 44 14 -2 13 41 51 33 14 18 34 44 38 29 27 32 44 50 46 39 42 63 78 66 28 -5 -15 -15 -32 -57 -63 -44 -26 -39 -70 -89 -76 -55 -49 -58 -62 -55 -50 -69 -101 -118 -108 -88 -83 -90 -84 -57 -38 -47 -73 -75 -48 -20 -22 -41 -50 -36 -17 -10 -13 -21 -32 -34 -16 18 45 43 22 16 34 55 64 65 76 91 93 83 79 91 100 83 44 4 -10 2 24 38 35 16 -10 -28 -28 -19 -10 -14 -26 -35 -39 -34 -15 6 17 0 -38 -69 -68 -37 3 29 30 5 -31 -48 -29 9 25 -11 -74 -117 -115 -74 -33 -27 -58 -98 -111 -88 -54 -36 -37 -32 -9 22 53 79 95 95 85 84 103 126 138 138 141 144 142 147 171 192 180 127 78 66 77 72 37 2 -13 -18 -31 -49 -51 -35 -11 6 3 -16 -32 -22 6 13 -31 -92 -115 -86 -52 -64 -107 -127 -99 -55 -46 -82 -124 -134 -105 -63 -50 -73 -104 -108 -74 -28 -4 -11 -21 -16 3 19 17 15 28 52 63 38 -7 -41 -38 -10 9 1 -23 -32 -14 20 45 41 10 -28 -57 -66 -61 -62 +1 +56 41 6 -27 -34 -15 6 5 -12 -17 8 41 
[… elided: remaining '+'-prefixed rows of the preceding testdata file (whitespace-separated integer frames, garbled in extraction) …]
+1
\ No newline at end of file
diff --git a/python/tflite_micro/signal/ops/testdata/pcan_op_test1.txt b/python/tflite_micro/signal/ops/testdata/pcan_op_test1.txt
new file mode 100644
index 00000000000..68da771ef66
--- /dev/null
+++ b/python/tflite_micro/signal/ops/testdata/pcan_op_test1.txt
@@ -0,0 +1,1348 @@
+0.950000 80.000000 21 10 3
+286 298 305 291 290 279 273 257 250 240 240 233 234 230 221 205 183 159 156 188 239 298 345 374 380 369 359 364 372 354 302 243 194 135 64 72 171 245 277 304
[… elided: further '+'-prefixed rows of pcan_op_test1.txt (whitespace-separated integer frames) …]
+7361 9018 8757 8049 7292 7079 7107 7239 6615 6744 6955 6974 7550 7685 7462 7280 7694 7729 7544 7977 7850 8657 9061 9734 9310 9830 9512
9964 9972 9916 10147 10598 10053 10623 9364 9169 8108 8934 8169 6761 +1 3 11 6 12 3 0 7 6 3 9 2 5 2 4 5 12 3 4 5 2 3 3 8 12 9 8 3 6 11 13 12 6 7 5 7 8 9 8 3 +3 4 7 6 11 11 4 3 5 8 6 5 7 7 5 5 8 6 7 6 9 16 10 9 12 13 9 6 7 13 10 13 10 10 11 8 6 9 9 5 +7253 8722 8717 7934 7391 7330 7031 6988 6577 6830 6934 6862 7540 7653 7403 7150 7706 7633 7534 7867 7884 9120 9090 9702 9384 10038 9504 9734 9902 10119 10149 10760 10057 10600 9411 9110 8058 8950 8195 6662 +0 1 3 2 10 10 1 0 2 6 3 2 4 4 2 2 5 3 4 3 6 18 7 5 10 11 5 2 3 11 6 10 6 6 8 4 2 6 6 2 +9 11 12 8 12 7 5 3 3 3 3 6 8 6 7 7 8 7 8 7 11 9 8 11 7 7 10 12 6 9 11 10 8 10 10 7 6 11 10 8 +7302 8874 8806 7949 7513 7320 6983 6753 6489 6604 6837 6818 7556 7562 7397 7151 7718 7605 7550 7825 7968 9125 9067 9795 9328 9865 9522 9887 9808 10064 10176 10728 10010 10578 9431 8993 8010 9088 8246 6753 +7 9 11 5 12 4 2 0 0 0 0 3 5 3 4 4 5 4 5 4 10 6 4 8 3 3 7 9 2 5 8 6 4 6 7 3 2 9 8 6 +9 5 7 9 8 9 9 8 7 6 6 6 5 6 6 9 10 6 6 6 11 10 7 13 12 8 6 9 8 9 12 17 14 11 9 6 8 9 8 7 +7349 8648 8765 8025 7529 7433 7038 6839 6505 6576 6819 6777 7495 7476 7365 7274 7780 7517 7514 7724 8050 9191 9019 10006 9401 9764 9437 9846 9767 10013 10228 11128 10117 10619 9425 8822 8014 9095 8244 6777 +7 1 3 6 5 7 7 5 4 3 3 3 2 3 3 7 8 3 3 3 9 7 3 11 10 4 2 5 4 5 9 17 13 7 5 2 5 6 5 4 +7 4 6 12 8 7 10 6 12 13 14 7 5 4 5 4 7 10 11 5 11 13 8 10 10 10 9 10 9 9 10 11 10 8 9 8 10 10 12 8 +7344 8374 8699 8280 7545 7417 7117 6797 6649 6980 7006 6800 7435 7273 7308 7083 7764 7680 7607 7567 8130 9438 8998 10020 9421 9792 9431 9869 9753 9965 10228 11136 10120 10473 9419 8784 8069 9163 8344 6861 +4 1 2 11 5 4 9 3 13 15 18 4 2 1 2 1 4 8 10 2 9 12 4 6 7 6 5 6 5 5 6 7 6 4 5 4 8 7 11 5 +13 9 8 9 7 11 9 8 9 8 9 5 7 2 4 5 9 7 6 6 11 8 8 7 8 7 10 12 8 13 7 9 10 10 11 9 8 8 11 9 +7492 8424 8686 8336 7535 7647 7169 6880 6713 7052 7061 6699 7428 6959 7227 6965 7800 7649 7570 7481 8208 9363 8977 9848 9390 9634 9451 10014 9714 10165 10151 11020 10122 10459 9465 8809 8072 9104 8416 7002 +14 6 4 6 4 10 7 5 7 5 7 2 4 0 1 2 6 4 3 3 9 4 4 3 4 3 7 9 4 11 3 5 6 6 8 6 5 4 9 7 +5 10 3 1 4 10 7 8 8 6 6 8 5 4 9 8 8 6 7 8 7 6 7 8 8 9 9 9 11 9 7 9 9 11 9 9 8 8 9 6 +7432 8532 8545 7897 7449 7802 7168 6958 6749 6997 7038 6788 7370 6787 7276 7038 7809 7558 7559 7523 8182 9169 8931 9748 9360 9608 9445 9966 9752 10108 10076 10911 10099 10507 9458 8833 8074 9049 8435 6950 +2 7 0 0 1 8 4 5 6 3 3 6 2 1 7 5 5 3 4 5 3 2 3 4 4 5 5 5 8 5 3 5 5 7 5 6 5 4 6 3 +9 13 8 5 5 5 3 4 8 7 4 4 5 6 7 5 7 9 8 7 7 8 7 13 13 8 10 9 14 10 12 16 8 7 8 13 9 11 11 9 +7476 8818 8536 7730 7390 7641 7065 6786 6785 7007 6964 6626 7313 6748 7273 6922 7792 7657 7574 7501 8156 9110 8886 9961 9458 9523 9464 9921 9866 10115 10131 11239 10051 10306 9426 9101 8102 9181 8505 7085 +7 12 5 2 2 2 0 1 6 4 1 1 2 3 4 2 4 6 5 4 3 4 3 11 12 4 7 5 13 6 9 15 4 3 4 12 6 8 9 7 +6 9 7 13 12 6 3 3 7 12 9 7 6 8 8 6 7 11 13 9 8 10 12 10 17 10 9 10 14 9 8 11 13 10 9 9 10 11 10 7 +7442 8841 8501 8064 7512 7551 6965 6563 6794 7323 7020 6658 7283 6834 7295 6875 7776 7873 7717 7603 8156 9177 8970 9977 9656 9566 9457 9940 9977 10061 10082 11240 10132 10302 9420 9107 8155 9305 8548 7089 +3 6 3 13 12 3 0 0 4 12 7 4 3 5 5 3 4 10 14 6 5 7 10 6 20 7 5 6 13 5 4 7 11 6 5 6 8 8 7 4 +6 8 11 16 8 3 7 6 6 7 7 7 9 6 5 6 6 11 8 7 7 7 9 9 11 9 7 9 10 11 13 9 10 9 10 10 11 7 6 5 +7409 8802 8569 8563 7528 7282 6970 6537 6777 7313 7023 6688 7331 6792 7240 6831 7735 8076 7728 7576 8131 9056 8976 9931 9696 9545 9399 9896 9983 10133 10162 11118 10134 10236 9440 9174 8232 9176 8487 6970 +3 4 9 20 5 0 4 3 
3 4 4 4 7 3 2 3 3 9 5 4 4 3 6 5 8 5 3 5 6 8 11 4 6 5 7 7 9 3 2 2 +7 14 11 10 13 13 8 7 6 4 5 8 12 7 14 12 9 16 26 17 13 13 6 9 17 8 10 11 11 12 10 10 7 8 13 13 16 16 13 11 +7402 9134 8636 8663 7672 7643 7000 6574 6761 7119 6975 6778 7454 6814 7417 7158 7771 8574 8199 8165 8260 9311 8905 9888 9888 9463 9419 9978 10014 10262 10163 11065 10059 10113 9536 9422 8435 9608 8607 7227 +4 14 9 7 14 14 5 4 3 1 2 6 12 4 17 13 6 20 54 23 13 12 2 5 19 4 7 8 8 9 6 6 3 4 12 12 20 18 13 10 +13 14 13 7 4 5 4 5 7 10 8 5 3 5 12 11 15 13 12 10 11 10 7 9 13 14 10 10 13 12 10 9 12 11 9 9 9 8 8 9 +7549 9446 8752 8573 7582 7491 6927 6486 6771 7306 7005 6678 7344 6712 7538 7404 7960 8858 8301 8289 8334 9366 8861 9847 9973 9755 9439 9993 10096 10383 10164 10954 10114 10182 9528 9409 8454 9523 8596 7346 +14 14 12 3 1 2 1 2 4 8 5 2 0 2 12 10 18 12 11 8 9 7 3 5 11 13 7 6 11 9 6 5 9 8 5 5 6 4 4 7 +11 9 11 3 2 7 8 4 8 8 3 4 4 8 7 8 5 6 8 13 7 5 7 7 11 13 7 10 11 12 13 8 7 11 7 7 7 8 10 7 +7641 9432 8814 8242 7443 7471 6958 6342 6806 7359 6906 6523 7262 6800 7528 7451 7889 8695 8298 8590 8304 9111 8818 9686 10005 9968 9382 10007 10125 10497 10242 10788 10040 10246 9469 9274 8421 9443 8637 7335 +10 5 9 0 0 4 5 1 6 5 0 1 1 6 4 5 2 2 5 13 3 1 3 3 8 11 3 6 8 9 11 4 3 8 3 3 3 4 7 4 +13 9 9 8 3 4 6 8 6 9 8 2 2 8 12 11 6 11 8 9 8 4 5 5 4 7 10 9 14 11 12 12 12 14 7 7 10 12 9 7 +7782 9419 8824 8239 7333 7268 6937 6452 6789 7470 6938 6254 7131 6883 7646 7679 7845 8849 8295 8627 8301 8810 8725 9412 9857 9800 9403 9959 10230 10543 10293 10877 10096 10491 9411 9147 8466 9613 8651 7324 +14 5 6 5 0 1 3 6 3 7 5 0 0 5 12 10 3 9 5 6 5 1 1 1 1 3 7 5 13 7 9 9 9 12 3 3 7 10 6 4 +19 11 2 9 10 4 7 7 3 6 5 3 3 5 5 5 5 13 9 10 11 9 8 7 6 6 14 14 11 8 10 10 13 11 11 8 8 11 8 7 +8073 9529 8654 8297 7405 7077 6942 6494 6696 7390 6892 6063 7029 6777 7582 7525 7776 9116 8317 8723 8374 8834 8711 9277 9764 9580 9526 10221 10255 10401 10291 10838 10176 10537 9457 9089 8459 9712 8639 7314 +29 8 0 6 8 1 4 4 0 3 2 0 0 2 2 2 2 12 6 7 9 6 4 3 2 2 13 13 8 4 6 6 11 7 8 4 5 8 4 4 +19 11 4 8 5 7 7 9 9 7 6 5 9 5 8 8 5 5 6 6 7 9 5 11 11 7 6 9 9 10 11 8 12 12 9 9 6 9 7 8 +8357 9633 8540 8290 7347 7082 6947 6657 6758 7376 6873 6006 7083 6677 7597 7565 7709 8876 8262 8568 8343 8856 8621 9396 9801 9435 9441 10160 10229 10391 10315 10679 10228 10642 9450 9096 8401 9682 8602 7366 +28 8 1 5 2 4 4 7 7 4 3 2 7 2 5 5 2 1 2 2 3 6 1 8 8 3 2 5 5 6 8 4 9 9 5 6 2 5 3 5 +17 17 10 7 7 9 7 10 9 6 5 14 11 7 7 7 8 9 5 4 5 12 7 13 14 10 9 7 5 12 12 6 9 8 7 9 10 9 8 9 +8582 10099 8582 8222 7342 7210 6952 6871 6819 7302 6829 6505 7187 6706 7586 7541 7721 8896 8183 8299 8262 9061 8584 9630 9914 9483 9435 9980 10101 10504 10364 10406 10202 10495 9393 9103 8446 9654 8591 7476 +22 19 7 3 4 7 4 9 7 3 2 19 10 4 4 4 5 6 2 1 2 10 3 11 13 7 5 3 1 9 9 2 5 4 3 6 7 5 5 7 +8 7 8 6 6 9 13 14 6 13 20 19 20 16 11 12 12 13 12 10 10 14 9 7 10 13 12 7 8 12 13 10 15 21 18 19 26 23 14 13 +8572 9923 8572 8097 7312 7330 7110 7318 6802 7662 7169 7282 7518 7286 7677 7825 7835 9160 8285 8415 8311 9377 8599 9482 9922 9712 9506 9811 10053 10611 10437 10396 10330 11155 9618 9724 8899 10487 8734 7826 +5 3 5 2 3 7 15 17 3 14 36 32 34 22 10 12 12 12 11 7 8 14 6 3 6 11 10 3 4 9 11 6 14 27 22 25 51 34 15 14 +7 5 12 7 4 12 15 10 9 8 13 9 7 10 14 11 7 9 7 5 13 14 10 7 8 9 9 7 7 11 6 8 9 12 12 10 15 12 8 11 +8536 9634 8664 8041 7231 7627 7315 7493 6862 7693 7322 7398 7509 7463 7843 8031 7818 9163 8257 8217 8435 9674 8639 9343 9878 9682 9498 9652 9980 10650 10329 10263 10302 11222 9684 9754 9060 10595 8720 8032 +3 1 11 4 1 
12 20 8 7 5 15 7 4 8 16 9 4 5 3 2 13 13 7 3 4 5 5 3 3 7 2 4 5 8 10 6 16 9 4 9 +10 7 8 6 5 4 5 6 7 6 5 5 6 10 12 7 9 8 13 5 11 11 8 7 9 12 14 10 8 6 5 7 10 11 14 9 8 11 6 6 +8578 9486 8652 7927 7178 7415 7260 7412 6869 7600 7267 7261 7474 7629 7953 7979 7852 9104 8383 8031 8505 9769 8627 9212 9861 9838 9618 9687 9935 10379 10198 10077 10300 11224 9800 9721 9038 10635 8655 7918 +7 3 4 3 2 1 2 3 4 3 2 2 3 8 11 4 6 4 13 2 9 8 4 3 5 9 13 7 4 2 1 3 6 7 13 5 4 7 2 3 +11 6 8 8 12 11 4 7 5 5 7 12 7 6 4 7 8 10 5 4 8 12 7 6 8 8 13 12 13 11 11 12 9 9 8 9 8 11 7 6 +8645 9285 8640 7942 7305 7645 7181 7397 6825 7451 7264 7562 7466 7539 7856 7930 7860 9172 8301 7794 8497 9920 8590 9027 9819 9739 9710 9843 10019 10432 10224 10209 10272 11103 9759 9690 9016 10672 8617 7811 +9 2 4 5 12 10 1 4 2 2 4 12 4 3 1 4 5 7 2 1 5 9 3 2 4 4 11 9 11 7 8 9 5 4 4 5 4 7 3 3 +12 10 5 7 14 7 5 6 5 5 5 13 6 6 6 8 10 12 8 8 7 7 8 5 8 12 10 15 14 9 12 12 10 8 11 11 7 4 7 7 +8735 9342 8552 7895 7480 7616 7129 7321 6782 7311 7210 7906 7432 7455 7813 7945 7919 9358 8298 7817 8463 9754 8580 8792 9778 9891 9723 10174 10126 10359 10275 10333 10271 10928 9796 9784 8969 10277 8580 7772 +11 7 1 4 17 4 2 3 2 2 2 14 3 3 3 5 8 10 5 5 3 3 5 1 4 9 7 15 13 5 9 9 6 4 8 8 3 1 3 4 +1 5 7 7 5 5 6 5 10 9 6 4 6 13 7 10 8 5 7 12 7 7 6 9 10 7 5 9 14 13 8 11 8 7 10 8 9 8 10 9 +8542 9088 8517 7851 7421 7466 7104 7188 6868 7425 7183 7677 7399 7806 7796 8082 7925 9103 8269 8085 8430 9598 8519 8817 9789 9727 9608 10116 10231 10536 10223 10388 10219 10702 9807 9688 8975 10151 8621 7858 +0 1 3 4 2 2 3 2 9 7 3 1 3 14 4 8 5 1 3 11 3 3 2 6 6 3 1 5 13 10 4 7 4 3 6 4 6 4 7 6 +3 6 7 7 5 4 8 7 10 7 5 5 5 7 3 9 6 6 8 11 7 8 6 10 11 10 9 10 12 10 11 10 10 7 8 9 8 8 7 7 +8405 8911 8483 7810 7363 7263 7131 7186 6952 7409 7131 7523 7342 7767 7678 8150 7880 8925 8267 8275 8398 9513 8459 8902 9825 9757 9598 10123 10282 10518 10248 10379 10219 10489 9766 9659 8955 10033 8584 7816 +0 2 3 4 2 1 5 4 9 4 2 2 2 4 0 6 3 2 5 9 3 4 2 7 8 6 5 6 9 6 8 6 6 3 4 5 4 4 3 4 +6 6 4 10 5 4 5 5 11 9 6 8 16 15 11 12 9 12 12 12 10 7 6 12 12 11 12 11 13 9 9 10 6 8 9 9 7 7 7 6 +8348 8744 8373 7955 7307 7072 7080 7062 7059 7517 7106 7563 7567 8222 7767 8398 7913 9126 8367 8515 8443 9372 8401 9105 9886 9847 9665 10191 10357 10439 10222 10370 10117 10351 9752 9632 8910 9861 8548 7715 +2 2 1 8 2 1 2 2 11 7 3 5 22 18 10 11 6 10 11 11 7 3 2 10 9 8 10 8 11 5 5 6 2 4 5 5 3 3 3 3 +1 3 7 8 7 8 3 4 4 5 4 5 12 9 14 8 5 10 9 7 6 8 6 7 11 10 10 7 8 6 7 13 10 8 8 9 7 10 10 7 +8165 8403 8342 7969 7303 7139 6979 6884 6985 7373 7030 7416 7684 8281 7930 8385 7843 9192 8388 8434 8385 9301 8344 8988 9920 9870 9679 10009 10302 10181 10145 10546 10120 10221 9713 9607 8866 9883 8590 7682 +0 0 3 5 4 5 0 1 1 2 1 2 12 6 16 5 2 7 6 3 2 4 2 3 8 6 7 3 4 2 3 10 6 4 4 5 3 6 7 4 +4 6 4 3 5 5 6 6 9 9 7 6 4 5 5 5 7 6 14 10 7 10 9 11 8 16 17 10 8 8 11 12 13 9 9 9 8 8 8 5 +8063 8267 8236 7675 7248 7017 6958 6839 7040 7483 7033 7339 7594 8091 7859 8189 7826 9009 8536 8542 8354 9357 8365 9124 9876 10260 9871 10022 10249 10061 10172 10650 10199 10160 9700 9583 8849 9781 8580 7528 +1 2 1 0 2 2 3 3 7 7 4 3 1 2 2 2 4 2 15 7 3 7 6 8 4 17 19 6 4 4 8 9 11 5 5 5 4 4 5 2 +13 9 8 5 8 4 5 13 10 8 10 5 3 5 6 6 9 7 10 12 9 8 6 9 10 9 9 10 6 5 7 10 8 10 9 8 9 8 8 6 +8194 8323 8234 7521 7271 6841 6912 7227 7119 7525 7113 7205 7481 7912 7816 8066 7860 8898 8578 8766 8375 9287 8309 9129 9885 10197 9854 10035 10146 9764 10097 10625 10148 10164 9687 9499 8858 9685 8570 7444 +13 6 5 2 5 1 2 15 9 5 9 2 0 2 3 2 6 3 7 11 6 4 2 6 6 5 5 6 2 1 3 6 4 6 5 4 6 
4 5 3 +12 16 15 14 8 7 10 6 6 8 7 4 6 5 7 10 6 6 7 11 8 11 12 7 10 6 7 7 9 10 15 11 13 11 10 7 9 8 11 8 +8296 8806 8411 7929 7293 6860 6995 7162 7094 7565 7114 7018 7447 7744 7799 8196 7817 8732 8542 8915 8370 9405 8408 9011 9893 9953 9786 9862 10122 9792 10228 10663 10226 10229 9700 9359 8866 9595 8637 7488 +11 19 17 16 5 4 9 3 3 5 4 1 3 2 4 8 3 2 3 9 5 8 11 3 6 2 3 3 5 6 15 7 11 8 7 3 6 4 9 5 +17 23 16 6 11 11 6 5 7 11 8 6 8 6 5 9 6 9 11 9 9 7 9 11 7 7 7 8 14 7 10 10 8 8 8 10 9 6 9 5 +8523 9690 8610 7821 7392 7124 6973 7039 7095 7786 7140 6965 7465 7648 7732 8257 7775 8761 8609 8933 8391 9270 8428 9146 9824 9785 9720 9761 10227 9634 10228 10637 10175 10106 9662 9411 8874 9387 8651 7345 +22 37 19 3 10 10 3 2 4 10 5 3 5 3 2 6 3 6 9 6 6 3 6 8 3 3 3 4 13 3 6 6 4 4 4 7 6 2 6 2 +14 8 11 8 4 6 5 13 11 7 5 7 9 6 4 5 6 9 11 8 7 8 8 8 9 8 10 11 9 12 9 9 7 10 7 7 11 9 8 8 +8668 9600 8676 7843 7309 7065 6926 7415 7199 7748 7089 6977 7508 7557 7641 8068 7734 8788 8675 8888 8360 9205 8422 9088 9808 9689 9732 9851 10201 9793 10202 10551 10099 10114 9599 9276 8933 9376 8639 7395 +15 4 9 5 1 3 2 14 10 4 2 4 7 3 1 2 3 6 9 4 3 4 5 4 5 4 6 8 5 10 5 5 3 6 3 3 9 5 4 5 +4 6 4 5 5 3 6 12 9 6 6 8 7 6 7 9 11 11 7 9 9 6 7 7 10 12 13 8 12 12 8 9 11 11 8 8 11 11 7 7 +8553 9392 8561 7679 7254 6825 6906 7707 7249 7651 7065 7049 7499 7472 7629 8136 7822 8936 8637 8907 8381 9021 8390 8972 9818 9844 9821 9751 10253 9942 10151 10470 10128 10182 9563 9210 8991 9489 8602 7381 +1 2 1 2 2 0 3 12 7 3 3 5 4 3 4 6 10 9 3 6 6 2 3 3 6 9 11 4 9 9 4 5 8 8 4 4 9 8 3 4 +6 10 5 4 3 5 10 7 6 6 4 6 12 6 4 7 6 8 7 9 5 7 9 11 8 14 12 9 19 10 7 10 10 12 11 12 11 8 8 7 +8492 9442 8475 7464 7149 6722 6989 7674 7221 7560 6990 6994 7618 7392 7540 8077 7780 8891 8600 8925 8299 8909 8410 9109 9777 10113 9882 9718 10482 9959 10076 10456 10130 10308 9605 9394 9047 9411 8591 7368 +2 7 1 1 0 2 9 4 3 3 1 3 12 3 1 4 3 4 3 6 2 3 6 8 4 13 9 5 23 6 3 6 6 9 8 10 9 4 5 4 +6 8 9 10 4 8 12 7 4 6 9 9 7 7 5 7 6 14 7 5 4 6 10 10 10 15 10 8 13 10 10 9 13 9 6 9 11 8 7 5 +8433 9367 8493 7630 7072 6810 7121 7643 7142 7475 7045 7127 7606 7378 7479 8022 7739 9217 8564 8696 8194 8743 8455 9176 9788 10427 9890 9626 10552 9975 10080 10381 10209 10242 9518 9383 9102 9337 8555 7233 +2 4 6 8 1 6 13 4 1 3 7 7 4 4 2 4 3 14 3 1 1 2 7 7 6 14 6 4 10 6 6 5 11 5 2 5 8 4 3 2 +9 9 7 11 10 15 13 8 10 11 9 5 5 4 2 5 6 6 5 5 5 5 5 9 9 9 11 9 8 8 10 10 12 12 9 8 10 10 7 8 +8452 9357 8459 7848 7151 7322 7275 7675 7219 7702 7099 7006 7543 7181 7343 7847 7699 9032 8478 8481 8117 8525 8371 9178 9773 10354 9924 9601 10493 9868 10083 10372 10260 10364 9510 9311 9130 9391 8520 7290 +6 5 3 10 9 19 15 5 8 10 7 2 2 1 0 2 3 2 1 1 2 1 1 5 5 5 8 5 4 4 6 6 9 9 5 4 7 7 3 5 +14 14 11 10 9 12 13 10 8 9 8 8 9 6 10 10 10 7 6 7 10 6 10 8 5 6 8 10 9 16 12 14 9 12 8 9 8 11 7 6 +8598 9655 8529 7991 7202 7619 7425 7828 7243 7792 7126 7077 7584 7118 7415 7990 7762 8920 8419 8402 8169 8382 8417 9118 9656 10101 9880 9639 10461 10258 10138 10609 10233 10479 9477 9305 9106 9503 8486 7221 +15 13 9 8 7 12 14 8 5 6 5 5 6 3 8 8 8 3 2 3 8 2 7 4 1 2 4 7 5 17 9 12 5 9 4 5 4 8 3 3 +9 13 8 10 13 11 16 8 8 7 2 5 8 6 15 11 4 5 7 8 4 7 16 9 10 9 10 8 10 10 15 14 10 10 9 10 8 11 8 6 +8613 9874 8520 8125 7354 7837 7648 7849 7266 7754 6999 6959 7599 7059 7613 8186 7670 8692 8387 8389 8067 8309 8615 9123 9670 10047 9888 9552 10455 10256 10268 10832 10233 10464 9470 9361 9083 9608 8478 7156 +6 11 5 8 14 10 21 5 5 4 0 2 5 3 19 9 1 1 3 5 1 3 19 6 7 5 6 4 6 6 14 12 6 6 5 7 4 8 5 3 +4 8 10 10 6 11 17 10 7 9 6 5 5 10 14 4 7 6 6 9 9 5 7 
8 10 11 5 6 7 9 13 14 12 12 11 9 11 10 9 6 +8500 9773 8562 8251 7323 8042 7891 7992 7263 7841 6977 6848 7537 7249 7780 7940 7657 8539 8331 8438 8095 8117 8578 9067 9684 10120 9768 9347 10372 10193 10343 11042 10284 10573 9514 9352 9137 9645 8496 7095 +1 4 7 8 3 9 24 8 4 6 3 2 2 8 16 1 4 2 2 6 6 2 3 4 7 8 1 2 3 5 11 12 9 9 8 5 8 7 6 3 +8 10 4 8 5 6 4 2 2 7 8 4 7 7 8 6 4 5 7 5 8 10 8 10 9 9 10 9 9 11 9 7 12 11 11 8 8 9 7 7 +8492 9801 8450 8247 7268 7928 7796 7635 7132 7800 7007 6682 7527 7244 7790 7832 7568 8333 8301 8238 8097 8244 8568 9137 9672 10065 9779 9339 10343 10257 10314 10809 10334 10614 9557 9282 9113 9619 8462 7099 +5 6 1 5 2 2 1 0 0 4 5 1 4 4 5 3 1 2 3 2 5 8 5 7 5 5 6 5 5 8 5 3 9 7 8 4 4 5 3 4 +8 11 7 4 6 7 4 6 7 4 7 4 9 7 4 4 4 6 10 9 7 14 8 10 12 9 7 6 8 8 10 13 13 10 10 9 8 8 8 7 +8484 9888 8417 7997 7239 7882 7703 7545 7132 7577 7011 6526 7569 7239 7697 7607 7481 8201 8349 8296 8073 8609 8558 9203 9737 10014 9713 9147 10289 10133 10312 10959 10408 10591 9574 9278 9090 9533 8455 7103 +5 8 3 1 3 4 1 3 4 1 4 1 7 4 1 1 1 2 7 6 4 15 5 7 10 5 3 2 4 4 6 10 11 6 7 5 4 4 5 4 +6 5 5 8 6 3 6 7 7 6 9 10 8 12 10 7 6 19 18 10 6 8 8 11 9 5 9 10 10 10 11 12 8 6 8 12 11 9 7 9 +8425 9601 8334 8008 7211 7593 7664 7522 7132 7491 7066 6748 7584 7541 7760 7580 7447 8876 8600 8412 8024 8583 8548 9326 9723 9720 9700 9212 10287 10139 10335 11038 10352 10324 9539 9458 9144 9513 8422 7229 +2 1 2 5 3 0 3 4 4 3 7 9 5 12 8 4 3 27 25 7 2 5 5 8 5 1 5 7 6 6 8 8 4 2 4 10 8 5 3 7 +11 5 8 11 10 9 6 6 6 16 14 8 7 7 8 5 6 10 6 8 6 8 6 11 8 9 8 9 12 10 9 12 9 9 9 8 12 11 7 6 +8495 9332 8330 8203 7286 7690 7626 7439 7107 8024 7247 6834 7573 7518 7770 7432 7414 8957 8538 8398 7977 8559 8487 9442 9684 9689 9662 9212 10336 10145 10307 11112 10323 10257 9530 9382 9222 9618 8390 7163 +9 1 5 9 8 6 3 3 3 21 17 5 4 4 5 2 3 7 2 5 2 5 2 8 4 5 4 5 9 6 5 8 5 5 5 4 10 8 3 3 +7 8 15 19 12 10 13 12 11 6 7 4 4 5 7 7 7 9 7 10 6 7 7 6 10 9 11 11 8 10 15 9 10 10 13 8 10 10 7 4 +8461 9263 8505 8878 7410 7842 7767 7729 7210 7911 7245 6669 7486 7374 7754 7416 7407 8972 8503 8508 7931 8475 8454 9244 9697 9660 9701 9335 10282 10150 10433 10998 10320 10255 9624 9310 9247 9655 8359 6978 +3 4 17 27 12 8 14 12 10 3 4 1 1 2 4 4 4 6 3 7 2 3 3 2 7 5 8 8 4 6 14 5 6 6 11 4 7 7 3 1 +6 7 7 13 16 12 9 11 6 5 4 4 4 8 9 5 8 4 5 6 7 11 8 10 7 8 11 11 10 12 11 9 11 10 9 9 12 12 8 10 +8403 9137 8471 9144 7634 8108 7803 7941 7183 7743 7166 6514 7401 7423 7790 7278 7426 8679 8418 8366 7911 8642 8447 9303 9633 9571 9740 9450 10280 10278 10453 10891 10343 10254 9613 9304 9322 9812 8354 7173 +2 3 3 12 22 11 6 10 3 2 1 1 1 5 6 2 5 1 1 2 4 9 5 7 3 4 8 8 6 9 7 5 7 6 5 5 10 10 5 9 +7 7 6 8 8 9 7 10 8 8 7 5 7 11 9 10 6 6 9 7 8 6 6 9 8 7 6 6 7 14 9 13 8 12 16 13 12 12 10 9 +8372 9018 8412 9086 7647 8174 7787 8078 7208 7769 7166 6430 7395 7653 7825 7455 7393 8526 8437 8294 7918 8492 8389 9297 9597 9426 9650 9251 10202 10521 10422 11036 10289 10376 9782 9544 9396 9960 8401 7295 +3 3 2 4 5 6 4 8 5 5 4 2 4 10 6 8 3 2 6 3 5 2 2 5 4 3 2 2 3 12 5 10 4 9 17 12 10 9 7 7 +3 3 9 16 11 9 7 8 7 7 5 6 5 8 6 11 9 9 11 7 9 8 11 10 9 8 5 7 8 7 12 11 10 11 13 12 9 12 8 6 +8239 8661 8432 9523 7737 8236 7771 8084 7207 7732 7114 6412 7338 7685 7783 7683 7438 8567 8507 8226 7950 8474 8460 9353 9587 9351 9536 9126 10151 10319 10468 11049 10287 10429 9870 9708 9391 10099 8395 7225 +0 0 6 18 10 6 4 5 4 4 2 3 2 5 3 10 7 6 9 3 6 5 9 7 5 4 1 3 4 3 9 7 6 7 11 10 5 9 5 3 +9 12 6 14 13 10 7 13 9 7 3 9 11 8 4 11 13 10 6 6 8 8 6 11 7 8 9 6 10 10 11 6 9 11 10 11 7 12 9 7 +8263 8878 8374 9811 7876 
8356 7755 8397 7257 7698 7013 6580 7436 7715 7690 7897 7584 8667 8448 8101 7956 8457 8402 9467 9526 9281 9528 8947 10153 10314 10487 10754 10260 10479 9879 9801 9335 10230 8415 7221 +6 10 2 13 14 7 4 13 7 4 0 7 10 5 1 10 14 7 2 2 5 5 2 8 3 4 5 2 6 6 7 2 5 7 6 8 3 9 6 4 +9 5 10 15 8 7 11 15 6 4 7 9 6 6 6 13 15 7 6 6 7 8 11 12 6 9 11 7 11 12 13 7 11 12 12 9 9 8 7 8 +8286 8652 8420 10143 7883 8284 7842 8814 7229 7481 7016 6738 7403 7620 7651 8221 7778 8577 8390 7983 7936 8441 8473 9636 9441 9277 9571 8840 10180 10432 10557 10538 10285 10587 9939 9765 9332 10107 8383 7279 +6 1 7 15 5 3 10 17 3 1 4 7 3 3 3 13 19 3 2 2 4 5 9 10 2 5 8 3 8 9 10 3 8 9 9 5 5 4 3 5 +10 5 14 15 8 6 11 9 6 7 13 11 7 8 6 7 5 6 3 7 10 8 10 12 5 7 8 10 11 9 10 9 11 10 14 13 12 8 7 6 +8334 8440 8567 10456 7890 8155 7927 8838 7201 7462 7173 7009 7397 7654 7613 8157 7711 8431 8257 7934 7993 8426 8517 9795 9333 9150 9536 8923 10207 10359 10549 10458 10309 10566 10048 9977 9405 9992 8352 7210 +8 1 15 14 5 2 10 6 3 4 15 11 4 5 3 3 2 2 0 4 8 5 7 10 1 3 4 7 8 5 6 5 8 6 13 11 10 4 3 3 +12 11 14 8 9 7 9 8 7 6 4 5 6 11 9 4 4 7 6 7 8 10 10 8 6 8 8 14 11 11 10 9 9 10 12 11 8 10 10 10 +8432 8609 8711 10320 7923 8095 7959 8799 7200 7382 7096 6895 7365 7870 7653 7913 7620 8355 8204 7888 7997 8534 8560 9698 9253 9092 9502 9247 10233 10413 10541 10383 10281 10546 10103 10054 9374 10006 8399 7391 +11 9 15 4 6 4 6 4 4 3 1 2 3 10 6 1 1 3 2 4 5 7 7 4 2 4 4 14 8 7 6 5 5 6 9 8 4 6 7 8 +6 3 6 7 8 9 6 5 4 8 10 7 8 11 8 6 8 7 7 7 8 9 7 6 8 12 9 13 11 9 11 11 9 8 10 8 8 6 9 9 +8374 8276 8646 10130 7929 8162 7913 8578 7122 7430 7174 6911 7385 8073 7666 7806 7634 8283 8178 7844 8001 8574 8525 9484 9226 9283 9494 9490 10258 10341 10559 10435 10254 10404 10106 9942 9344 9774 8419 7500 +2 0 2 3 5 6 3 1 1 5 9 4 5 9 5 3 5 3 3 4 5 6 3 2 4 10 5 12 8 5 7 7 5 4 6 4 4 2 6 7 +4 7 11 17 10 7 5 7 7 5 7 6 15 9 5 6 6 7 6 6 7 7 6 8 9 6 8 10 12 10 9 10 10 10 10 8 8 8 8 6 +8267 8209 8711 10566 7986 8102 7843 8493 7123 7291 7173 6864 7584 8141 7602 7706 7596 8216 8127 7742 7980 8489 8465 9406 9225 9094 9461 9534 10308 10334 10525 10423 10253 10394 10109 9837 9315 9679 8413 7418 +1 3 9 18 8 4 2 3 4 2 4 3 19 6 2 3 3 3 2 3 4 3 2 4 5 2 4 7 9 6 5 6 6 6 6 4 4 4 5 3 +8 5 6 7 7 3 3 4 8 3 7 9 14 11 5 5 10 9 10 11 8 6 8 6 6 7 10 10 9 10 11 8 6 9 11 11 10 10 11 7 +8265 8023 8646 10362 7965 7800 7723 8229 7149 7037 7172 7005 7752 8328 7540 7550 7662 8275 8179 7953 7985 8348 8458 9210 9148 8978 9480 9576 10280 10328 10543 10289 10150 10323 10137 9922 9338 9712 8484 7403 +5 2 2 3 4 0 0 1 5 0 4 7 16 9 2 2 8 6 8 10 5 2 5 2 2 3 7 7 5 6 7 4 2 5 8 8 7 7 9 4 +17 14 11 11 7 5 7 5 6 4 4 4 8 10 10 14 10 8 4 9 8 5 12 7 11 7 7 10 11 9 12 11 9 8 7 7 11 8 9 6 +8493 8401 8711 10416 7945 7639 7709 8042 7123 6860 7095 6830 7762 8442 7607 7957 7726 8270 8077 8028 7990 8154 8553 9087 9200 8869 9422 9615 10304 10261 10586 10347 10126 10195 10062 9756 9386 9620 8502 7327 +22 15 9 7 4 2 4 2 3 1 1 1 5 7 8 16 8 5 1 6 5 2 11 3 8 3 3 7 8 5 9 7 5 4 3 3 8 4 6 3 +25 19 6 12 7 5 5 6 7 4 9 7 4 7 11 6 5 5 4 8 8 7 8 8 5 8 6 5 8 9 10 14 10 8 9 9 11 9 9 5 +8920 9064 8646 10528 7925 7487 7644 7928 7124 6694 7147 6850 7670 8365 7698 7848 7660 8081 7977 8037 7995 8094 8543 9033 9098 8828 9340 9345 10251 10198 10577 10586 10128 10074 10040 9723 9432 9595 8519 7194 +47 26 2 9 4 2 2 2 4 1 7 4 1 3 10 3 2 2 1 5 5 4 5 4 1 4 2 1 4 5 6 12 6 4 5 5 8 5 6 2 +9 17 16 8 6 11 13 10 9 4 4 8 7 5 5 3 4 5 4 5 5 10 8 5 8 6 6 5 8 11 9 14 13 9 10 12 9 8 6 4 +8927 9564 8839 10387 7880 7713 7785 8066 7176 6538 7070 6930 7657 8170 7633 7561 
7571 7903 7880 7861 7923 8222 8534 8798 9075 8666 9260 9091 10199 10261 10543 10810 10207 10022 10044 9876 9426 9510 8459 7008 +6 20 19 4 3 10 14 8 7 1 1 5 4 2 2 0 1 2 1 2 2 8 5 1 4 2 2 1 4 8 5 12 11 5 6 9 5 4 2 1 +6 7 5 3 9 15 10 6 5 5 6 5 6 6 8 9 3 7 6 5 12 15 11 8 7 8 9 10 10 11 9 10 11 8 9 7 11 11 9 6 +8857 9420 8746 9948 7913 8171 7846 7950 7124 6452 7046 6821 7619 8048 7646 7660 7458 7858 7836 7696 8031 8650 8602 8761 9027 8637 9258 9159 10200 10321 10509 10775 10233 9912 10023 9713 9471 9615 8477 6956 +2 3 1 0 6 18 8 2 2 2 3 2 3 2 5 6 0 4 3 2 11 17 9 4 3 4 5 7 6 8 5 6 8 4 5 3 8 8 6 3 +8 9 14 9 8 4 3 4 5 5 5 5 10 8 8 6 5 7 6 6 6 8 4 8 11 6 10 11 11 12 10 13 11 10 10 9 9 10 7 5 +8840 9407 8885 9904 7919 7926 7726 7718 7073 6372 6997 6718 7684 8056 7659 7569 7399 7816 7793 7602 7983 8622 8489 8726 9082 8487 9282 9285 10226 10439 10502 10927 10258 9931 10028 9683 9464 9652 8444 6845 +4 5 14 5 5 1 0 1 2 2 2 2 8 5 5 3 2 4 3 3 2 4 1 4 9 2 7 8 8 9 6 10 8 6 6 5 5 7 3 2 +7 14 15 17 20 9 8 5 3 5 9 6 9 6 8 8 5 8 7 7 9 11 9 9 13 10 12 8 9 9 10 16 14 12 7 8 12 7 7 7 +8798 9702 9046 10354 8232 8003 7737 7562 6973 6296 7052 6683 7722 7941 7672 7606 7342 7838 7777 7575 8013 8780 8507 8755 9187 8592 9357 9219 10200 10365 10495 11254 10359 10072 9956 9593 9534 9502 8412 6864 +3 13 16 19 32 6 5 2 0 2 7 3 6 2 5 5 2 5 4 4 6 9 6 6 12 7 10 4 5 5 6 15 12 9 3 4 10 3 3 4 +2 3 8 11 11 10 11 7 9 5 5 11 10 8 7 5 10 14 7 8 10 9 10 8 6 7 8 12 8 15 13 12 15 15 7 10 13 10 6 6 +8629 9304 9024 10408 8307 8137 7825 7538 7028 6225 7003 6957 7784 7956 7659 7456 7414 8227 7761 7612 8068 8806 8550 8721 9111 8506 9327 9403 10149 10664 10565 11316 10483 10389 9886 9631 9628 9546 8355 6820 +0 0 4 7 9 8 10 4 7 2 2 11 8 5 4 2 8 15 4 5 8 6 7 4 2 3 4 10 4 14 10 8 14 14 3 7 11 7 2 3 +9 9 7 5 6 8 7 6 7 6 7 10 8 8 7 3 5 6 3 7 8 6 12 11 6 10 9 11 8 14 8 7 10 11 10 8 10 8 7 8 +8643 9298 8977 10090 8253 8140 7808 7454 7031 6220 7007 7153 7794 7970 7646 7192 7356 8102 7643 7585 8071 8646 8643 8873 9036 8610 9324 9514 10100 10884 10505 11067 10476 10441 9894 9544 9643 9464 8325 6902 +6 5 3 1 2 5 4 3 4 3 4 9 5 5 4 0 2 2 0 4 5 2 11 9 2 7 5 8 4 12 4 3 6 7 6 4 7 4 3 5 +10 5 11 9 13 9 4 12 10 3 8 7 8 9 5 8 11 6 6 9 7 9 10 9 13 6 11 10 13 12 10 11 7 7 7 7 6 8 7 6 +8682 9047 9034 10037 8379 8204 7715 7744 7111 6031 7036 7153 7803 8044 7582 7251 7453 7984 7605 7682 8048 8680 8682 8893 9142 8462 9372 9557 10180 10968 10498 11078 10393 10244 9825 9401 9555 9387 8296 6856 +7 1 9 5 13 6 1 12 9 0 5 4 5 6 2 5 10 2 3 6 4 6 7 6 12 2 8 7 11 8 6 7 3 3 3 3 2 4 3 3 +5 8 14 10 8 6 7 6 6 3 6 6 9 11 8 5 6 6 6 6 11 13 10 10 10 7 11 8 11 12 13 13 11 12 9 7 7 7 7 5 +8593 8995 9166 10049 8374 8080 7701 7648 7086 5853 7013 7092 7838 8237 7597 7123 7420 7873 7568 7589 8128 8957 8720 8973 9169 8384 9419 9475 10207 11047 10568 11212 10414 10366 9809 9267 9495 9253 8267 6751 +1 4 14 6 5 2 4 3 3 0 3 3 6 9 5 2 3 3 3 3 9 12 7 7 7 3 8 4 8 8 10 10 7 9 5 3 3 3 3 2 +6 8 6 4 4 7 6 3 3 4 5 3 6 5 5 8 8 9 5 6 6 8 7 8 9 6 7 8 6 11 10 13 12 11 9 7 7 7 5 5 +8531 8946 9090 9691 8267 8025 7662 7373 6985 5747 6965 6850 7795 8049 7535 7187 7439 7953 7506 7502 8078 8911 8681 8926 9170 8249 9362 9398 10105 11060 10559 11337 10460 10419 9794 9141 9436 9127 8188 6653 +2 4 2 1 1 4 3 0 0 1 2 0 3 2 2 5 5 6 2 3 2 4 3 4 5 2 3 4 2 7 6 10 9 7 5 3 3 3 2 2 +5 14 12 5 8 10 5 7 4 5 13 5 3 4 3 6 15 8 7 7 5 5 6 8 10 8 7 8 10 9 10 17 13 10 12 7 9 7 6 5 +8445 9269 9169 9416 8265 8157 7598 7360 6912 5709 7123 6746 7677 7811 7423 7124 7636 7967 7497 7481 8004 8683 8617 8881 9196 8245 9307 9325 10108 10949 
10551 11701 10531 10408 9856 9022 9430 9009 8136 6561 +1 14 10 1 5 8 2 4 1 2 15 2 0 1 0 3 19 5 4 4 2 1 2 4 7 5 3 4 6 5 6 16 10 6 9 3 5 3 2 2 +4 11 9 9 6 12 9 10 6 9 11 4 7 4 6 8 9 9 7 6 10 5 9 14 8 10 8 9 11 12 13 12 13 11 15 9 10 11 8 6 +8336 9388 9170 9404 8212 8404 7638 7532 6892 5919 7226 6587 7664 7588 7391 7188 7675 8041 7488 7400 8059 8469 8631 9208 9170 8364 9279 9318 10136 11029 10619 11736 10600 10459 9993 9033 9450 9144 8137 6535 +1 8 5 5 2 11 6 8 3 8 10 1 4 1 3 5 6 6 4 3 8 1 6 14 4 7 4 5 8 8 10 8 10 7 15 6 7 8 5 3 +8 10 6 10 8 3 3 4 4 9 11 6 8 3 5 7 7 5 7 4 8 6 8 9 10 11 8 7 9 13 13 10 9 14 11 7 8 7 7 5 +8332 9439 9094 9454 8211 8084 7524 7325 6822 6116 7326 6560 7677 7317 7334 7186 7662 7865 7480 7201 8062 8329 8620 9208 9196 8537 9251 9189 10113 11165 10686 11646 10565 10691 10024 8921 9418 9025 8112 6450 +5 7 2 7 5 0 0 1 1 8 10 3 5 0 2 4 4 2 4 1 5 2 4 5 7 9 4 3 5 10 10 5 5 12 8 3 4 3 4 2 +7 14 13 4 4 4 4 11 10 6 6 4 6 5 5 4 5 4 5 8 8 9 6 8 8 8 11 11 8 13 10 13 11 14 8 8 7 9 9 5 +8302 9732 9199 9132 8108 7844 7438 7561 6907 6117 7296 6412 7638 7185 7278 7000 7598 7638 7421 7260 8065 8382 8558 9147 9170 8516 9301 9313 10065 11293 10674 11745 10582 10909 9978 8877 9361 9036 8139 6370 +3 13 12 1 1 1 1 10 9 3 3 1 3 2 2 1 2 1 2 5 5 6 2 4 4 5 8 8 4 10 6 9 7 12 4 4 3 6 6 2 +14 17 14 6 7 8 6 6 10 8 8 10 12 7 5 6 9 7 4 8 8 9 7 9 8 10 7 8 8 13 12 13 9 11 13 13 12 10 10 9 +8452 10192 9327 8952 8084 7864 7405 7475 6990 6241 7318 6641 7754 7183 7224 6948 7638 7609 7337 7315 8068 8432 8523 9151 9145 8619 9247 9245 10018 11414 10714 11839 10547 10930 10061 9143 9434 9108 8191 6540 +15 19 14 2 4 5 3 3 9 6 5 9 12 4 2 3 6 4 1 5 5 6 3 5 4 7 3 4 4 10 9 9 5 7 11 12 10 7 8 7 +10 9 6 7 8 9 9 10 10 6 8 9 8 8 4 8 9 7 6 6 7 8 10 12 12 9 11 11 15 15 10 12 7 12 13 10 12 14 8 8 +8496 10133 9247 8844 8086 7945 7450 7640 7071 6235 7339 6795 7764 7243 7145 7022 7677 7582 7307 7244 8045 8417 8565 9339 9223 8654 9297 9366 10151 11650 10702 11865 10462 11011 10142 9208 9505 9421 8191 6639 +7 5 2 3 5 6 7 8 9 3 5 7 5 5 1 5 6 4 3 3 4 5 7 10 10 6 8 8 15 13 6 8 3 8 11 7 10 14 5 6 +7 10 11 8 12 11 14 9 9 7 10 5 5 5 11 9 8 5 7 9 7 8 9 8 8 8 7 7 12 10 11 15 7 10 10 9 9 7 8 10 +8462 10139 9297 8804 8190 8144 7621 7734 7124 6290 7411 6694 7697 7115 7247 7153 7689 7434 7303 7362 8023 8403 8581 9270 9197 8626 9243 9234 10204 11565 10716 12074 10379 10964 10144 9208 9497 9285 8191 6855 +3 6 8 4 11 9 16 6 7 4 8 2 2 2 10 7 5 2 4 7 4 5 6 4 4 4 3 3 9 5 7 12 3 6 6 5 5 3 5 9 +13 11 9 8 9 12 6 5 9 7 4 7 5 4 4 7 9 4 5 9 10 10 6 5 9 9 8 8 12 8 12 11 14 14 9 7 7 6 10 8 +8583 10206 9294 8767 8215 8392 7584 7577 7176 6342 7328 6722 7632 6933 7168 7153 7727 7233 7248 7473 8078 8513 8520 9021 9197 8661 9216 9171 10256 11362 10755 12025 10477 11166 10120 9085 9438 9096 8242 6935 +13 8 5 4 6 11 3 2 7 4 1 4 2 1 1 4 6 1 2 7 8 7 2 1 5 6 4 4 9 3 9 6 12 12 5 3 3 2 8 5 +9 9 4 6 5 6 6 5 1 5 7 9 6 5 7 8 7 12 12 8 5 6 11 8 5 8 6 9 9 9 10 12 13 11 11 11 8 7 10 7 +8598 10146 9164 8609 8137 8257 7548 7429 7022 6268 7324 6871 7594 6824 7168 7215 7713 7536 7373 7516 8004 8370 8588 8971 9095 8632 9139 9173 10230 11233 10742 12040 10547 11171 10148 9215 9406 8980 8291 6948 +6 5 1 2 2 2 3 2 0 2 4 7 3 2 4 5 4 12 12 5 2 2 9 4 1 4 2 5 5 4 6 8 10 7 8 8 4 3 8 4 +7 7 7 6 4 13 9 4 4 6 4 5 5 7 8 5 6 8 8 14 8 8 7 9 8 7 7 7 13 10 9 13 9 10 10 7 11 11 7 6 +8562 9967 9114 8461 8036 8560 7589 7229 6948 6260 7243 6765 7532 6844 7193 7089 7673 7575 7393 7925 8008 8359 8552 8985 9072 8544 9089 9052 10306 11173 10703 12116 10513 11115 10150 9092 9452 9117 8262 6899 +3 3 3 
2 1 13 6 1 1 3 1 2 2 4 5 2 3 5 5 16 5 5 3 6 4 3 3 3 11 6 5 9 5 6 6 3 8 8 3 3 +11 11 9 3 3 11 10 3 6 5 5 6 6 6 5 6 7 6 5 6 5 8 8 6 9 11 7 7 8 10 9 13 10 13 9 10 9 9 7 7 +8629 10044 9116 8137 7912 8722 7655 6979 6927 6191 7190 6727 7497 6802 7141 7032 7660 7489 7336 7818 7935 8348 8543 8814 9075 8707 9041 8938 10253 11117 10665 12187 10506 11246 10126 9160 9446 9122 8234 6915 +9 8 6 0 0 9 8 0 3 2 2 3 3 3 2 3 4 3 2 3 2 5 5 2 6 9 3 3 4 6 5 9 6 10 5 7 5 6 3 4 +9 15 13 4 6 7 4 3 4 3 7 9 10 8 6 9 7 6 4 11 7 8 8 12 7 9 7 10 11 11 10 7 10 10 11 11 9 10 8 8 +8643 10362 9220 7894 7867 8628 7566 6744 6856 6003 7189 6876 7565 6885 7116 7163 7647 7408 7255 8024 7915 8338 8534 9022 9027 8737 8994 9016 10278 11125 10654 11885 10499 11185 10154 9286 9440 9189 8232 6991 +6 14 12 1 3 3 1 0 1 0 4 7 8 5 3 7 4 3 1 9 4 5 5 10 3 6 3 7 8 7 6 2 6 6 8 8 5 7 5 5 +5 8 9 8 7 11 6 3 6 7 9 9 11 10 8 8 9 10 5 11 9 6 5 8 10 9 9 8 10 8 11 9 10 13 11 10 9 8 6 6 +8555 10231 9219 7911 7849 8786 7530 6523 6838 6072 7239 7016 7657 7086 7142 7224 7686 7577 7201 8218 7947 8206 8448 8972 9057 8765 8999 8966 10277 10949 10669 11724 10492 11312 10181 9343 9434 9129 8179 6940 +1 4 5 5 4 9 3 0 3 5 7 7 10 9 5 5 6 8 2 9 6 2 1 4 7 6 6 4 6 4 7 4 6 10 8 7 5 4 2 3 +11 6 10 8 6 9 7 6 6 4 7 9 8 7 6 6 7 7 5 5 6 9 6 5 10 11 7 10 10 9 14 16 8 11 10 13 8 11 9 6 +8622 9985 9244 7927 7806 8811 7520 6500 6820 5953 7237 7147 7670 7090 7117 7159 7673 7552 7149 8032 7901 8266 8390 8740 9086 8914 8953 9042 10276 10845 10760 12003 10434 11309 10182 9581 9402 9257 8204 6892 +9 2 7 5 3 6 4 3 3 1 4 7 5 4 3 3 4 4 2 2 3 6 2 1 7 9 3 7 6 5 12 14 4 7 6 12 4 8 6 3 +10 7 6 9 6 8 8 4 6 6 7 10 15 9 5 7 7 8 5 6 7 11 6 5 11 9 9 7 10 11 14 17 11 10 9 12 10 16 13 8 +8662 9815 9166 8004 7764 8773 7536 6355 6803 5964 7235 7332 7861 7217 7067 7159 7660 7590 7098 7918 7882 8445 8333 8522 9140 8932 8959 8929 10275 10870 10849 12327 10454 11244 10157 9743 9422 9684 8331 6969 +7 3 2 6 3 4 5 1 3 3 4 8 18 7 2 4 4 5 2 3 4 9 2 1 8 6 6 3 6 7 12 15 7 6 5 10 7 18 13 5 +4 8 9 6 7 6 6 5 7 8 9 9 10 6 4 7 13 9 3 5 10 7 7 9 9 6 9 9 11 9 11 18 13 14 11 11 9 8 10 10 +8548 9717 9167 7892 7749 8615 7501 6280 6812 6097 7284 7445 7920 7152 6992 7159 7801 7687 6997 7750 7940 8368 8303 8563 9141 8764 8965 8946 10299 10770 10859 12693 10525 11429 10184 9834 9416 9594 8378 7165 +1 4 5 3 4 2 3 2 4 6 7 7 8 3 1 4 14 6 0 2 8 3 3 6 5 2 6 6 8 5 7 17 10 11 8 8 5 4 7 9 +3 6 10 4 4 4 3 3 4 8 5 6 7 5 6 7 11 7 4 5 6 6 7 7 10 9 11 11 9 10 13 12 10 12 14 11 10 8 5 6 +8411 9502 9193 7664 7657 8343 7390 6087 6744 6222 7229 7366 7901 7030 6970 7159 7887 7655 6924 7592 7895 8234 8274 8479 9168 8791 9022 9085 10271 10738 10920 12668 10517 11480 10287 9919 9436 9509 8296 7103 +0 2 7 1 1 1 0 0 1 6 2 3 4 2 3 4 10 4 1 2 3 2 3 3 7 6 9 9 5 6 10 7 6 8 13 8 7 4 2 3 +7 10 7 10 6 4 5 7 5 9 7 7 5 6 8 9 4 3 7 10 10 12 7 8 10 11 10 9 8 10 10 11 11 11 14 11 9 8 10 6 +8379 9546 9142 7818 7619 8088 7333 6151 6703 6401 7227 7354 7831 6976 7000 7282 7792 7380 6930 7750 7953 8477 8246 8461 9194 8939 9052 9092 10219 10708 10903 12583 10535 11467 10388 9999 9430 9429 8344 7045 +3 7 3 8 3 1 2 4 2 7 4 4 2 3 5 7 1 0 4 8 8 11 3 5 7 9 7 6 4 6 6 6 7 7 12 8 5 4 7 3 +11 13 10 13 7 4 4 5 5 4 4 4 6 8 10 9 6 5 5 6 8 10 8 5 5 7 8 8 10 12 11 10 11 10 12 8 10 11 7 6 +8451 9771 9169 8147 7607 7848 7252 6089 6663 6262 7148 7158 7788 7048 7080 7398 7750 7244 6884 7653 7958 8582 8244 8260 9092 8832 9030 9038 10219 10802 10912 12442 10553 11393 10435 9890 9450 9539 8314 6990 +9 11 7 13 4 1 1 2 2 1 1 1 3 5 9 7 3 2 2 3 5 7 5 2 1 3 4 4 6 9 7 5 7 5 9 4 7 8 3 3 
+6 8 11 20 15 6 6 6 4 3 2 5 4 7 6 7 8 9 7 10 10 9 13 9 4 7 6 8 11 8 9 11 12 7 11 10 8 8 8 7 +8393 9676 9221 8886 7800 7745 7224 6092 6598 6070 7020 7035 7695 7055 7056 7384 7761 7362 6891 7808 8014 8620 8370 8317 8967 8732 8957 8987 10245 10645 10869 12371 10596 11139 10455 9910 9418 9458 8310 7000 +2 4 8 30 19 3 3 3 1 0 0 2 1 4 3 4 5 7 4 8 8 6 13 6 1 3 2 4 8 4 5 6 9 3 7 6 4 4 5 4 +12 8 8 8 6 6 3 5 5 3 7 5 6 8 7 9 7 9 5 8 7 6 8 7 6 11 9 9 7 9 9 13 10 12 11 10 9 11 9 8 +8490 9586 9195 8844 7758 7648 7120 6033 6561 5890 7023 6920 7656 7123 7058 7493 7746 7473 6846 7831 7992 8471 8365 8248 8896 8883 8963 9000 10168 10559 10827 12427 10587 11207 10475 9929 9412 9566 8332 7071 +11 4 4 4 3 3 0 2 2 0 4 2 3 5 4 7 4 7 2 5 4 2 5 3 2 9 6 6 3 5 5 9 6 8 7 6 5 8 6 5 +14 10 8 7 7 6 3 3 6 6 7 5 7 13 10 13 8 6 6 8 7 8 10 6 8 8 8 9 8 9 9 10 9 13 13 10 8 10 9 5 +8635 9625 9169 8743 7743 7557 7018 5855 6550 5905 7026 6812 7643 7494 7137 7842 7757 7393 6828 7852 7971 8454 8411 8121 8878 8841 8943 9012 10118 10478 10786 12295 10552 11333 10545 9947 9381 9606 8354 6953 +15 7 4 3 4 3 0 0 3 3 4 2 4 14 9 14 5 3 3 5 4 5 7 2 4 4 4 6 4 5 5 5 5 10 10 6 4 7 6 2 +9 8 14 9 5 4 10 7 9 7 6 8 11 12 12 11 7 6 4 10 10 9 9 10 10 6 6 9 10 8 10 11 10 11 12 10 7 11 14 9 +8649 9539 9297 8771 7677 7349 7098 5933 6616 5980 7003 6894 7733 7781 7265 8047 7742 7318 6759 7995 8027 8499 8431 8248 8912 8679 8873 9024 10121 10340 10772 12233 10544 11328 10588 9964 9325 9705 8503 7088 +6 4 14 6 2 1 9 5 7 5 3 5 10 12 12 9 4 3 1 8 8 6 6 8 7 2 2 6 6 4 6 6 6 7 9 6 3 8 15 7 +7 11 5 4 10 12 13 5 4 5 9 8 9 12 8 6 8 10 11 12 8 8 7 8 8 7 9 8 9 10 10 10 8 13 11 8 7 6 8 5 +8612 9642 9192 8490 7740 7645 7253 5884 6553 5928 7058 6971 7770 8051 7288 7932 7753 7493 6871 8252 8031 8480 8399 8244 8894 8588 8881 8974 10098 10334 10758 12113 10485 11447 10604 9857 9271 9491 8495 6969 +3 8 1 1 8 12 15 2 1 2 7 5 6 11 5 2 5 8 11 11 5 5 3 5 4 3 6 4 5 6 6 5 4 10 7 4 3 2 5 2 +39 129 294 436 540 548 441 254 120 109 150 195 149 82 46 98 75 131 208 175 90 42 19 24 19 20 29 23 73 140 65 48 52 36 24 17 14 15 13 10 +9393 16988 16477 34767 21350 40854 18345 21136 9456 12269 10716 18533 11384 12605 8281 13476 9476 15091 12016 18508 10131 10551 8675 9223 9157 9301 9400 9848 11711 18315 12151 14335 11552 12971 10952 10310 9397 9843 8615 7165 +103 342 878 790 1417 892 1266 637 450 347 542 520 520 240 144 287 257 376 729 461 309 107 27 42 26 29 60 36 218 358 182 102 138 67 35 19 14 15 13 9 +66 417 1181 1450 1694 1731 1128 764 424 380 414 338 207 164 72 221 143 291 416 246 172 128 44 146 87 79 184 428 359 579 262 515 428 195 134 100 126 67 60 49 +10845 41588 46254 121765 64119 144751 46721 66806 20058 34879 21031 38186 16391 21924 9914 26245 12894 32063 22350 32511 14274 17781 9583 17639 11152 13596 13868 35552 20595 52788 18545 45115 22204 24173 14103 15835 12383 13368 9933 9745 +201 653 1800 945 1982 990 1701 833 1147 679 1083 556 601 378 238 461 461 537 1043 440 532 329 123 386 280 217 584 762 944 753 721 766 1080 427 404 263 408 177 187 143 +198 195 409 446 1050 1023 408 264 224 263 370 348 209 111 68 192 231 736 702 229 229 239 191 265 161 128 181 629 591 549 354 331 266 216 231 101 199 111 81 88 +15635 51073 55554 141860 89358 198916 55984 79017 25283 48944 29964 57275 21324 27428 11404 36466 18477 75357 39736 44629 19771 31397 14226 32861 14989 20644 18148 72063 35188 83350 27131 62744 28449 35993 19655 21090 17160 19385 11755 14566 +589 219 488 212 906 399 484 207 482 332 736 394 510 191 202 300 630 724 1187 308 596 436 599 475 479 295 485 633 1085 476 757 342 532 349 
604 215 559 260 248 237 +202 213 228 370 573 418 413 327 376 521 230 104 82 59 38 73 144 292 258 176 137 149 207 184 76 82 98 259 261 241 221 177 116 92 87 58 123 60 74 90 +20408 61094 59995 156080 101774 212662 65143 94366 34263 78016 35095 60228 22887 29407 12090 38763 21696 88775 45339 52764 22779 38667 19162 42193 16557 24443 20200 83651 40981 93155 32103 69853 30704 39485 21388 23388 19875 21908 13353 19221 +506 202 225 147 405 113 429 223 680 478 384 67 150 65 80 68 326 207 350 184 295 206 544 249 179 141 214 189 390 150 392 137 182 100 174 85 289 97 202 200 +207 104 108 237 322 444 265 193 187 322 331 200 116 86 56 191 172 192 211 185 128 71 116 179 48 52 54 146 134 89 217 91 63 57 65 49 69 35 51 48 +25189 63818 61258 161276 107464 227181 70290 100561 38187 93118 42680 68901 25280 32926 13219 48171 25551 95244 49600 60964 25482 40709 21648 50658 17370 26171 21076 87602 43383 93033 36848 71252 31547 40617 22515 24995 21142 22743 14323 21016 +442 62 70 67 186 112 235 95 279 222 494 165 219 110 138 227 352 103 250 167 246 60 251 197 85 59 85 73 158 24 344 40 67 38 108 56 126 33 112 69 +218 206 104 256 802 726 320 213 223 240 348 190 69 58 64 176 203 157 151 59 58 97 74 91 24 48 50 69 72 81 123 76 75 31 37 28 121 48 28 29 +30132 72645 62387 167327 125282 258154 76715 107613 42934 102276 50510 76440 26412 34513 14525 56093 30102 99175 52221 60931 26328 44225 22999 53209 17549 27549 21827 86585 44140 92427 39072 71646 32676 40084 22898 25215 23707 24327 14681 21536 +405 163 64 73 481 195 273 101 310 131 446 137 99 51 155 172 373 67 151 21 73 94 129 63 21 47 71 16 54 20 157 28 89 11 36 18 245 56 35 24 +2297 1309 1882 2919 2259 2465 1965 1158 515 684 395 314 190 176 93 180 180 91 72 82 69 82 146 198 61 116 81 18 24 65 178 108 112 20 18 24 93 28 14 16 +88096 148708 108938 336623 179900 394108 125030 172301 55026 138162 59346 91145 30609 43255 16539 63786 33951 98815 52757 62313 27434 46609 26156 62181 18670 33022 23352 82495 43651 90875 42646 73982 34723 38907 22786 25176 25492 24587 14672 21226 +2087 715 1381 765 1067 533 1274 540 637 368 441 221 340 229 233 154 294 22 40 39 94 64 283 180 118 171 145 1 6 13 236 53 155 5 8 13 161 19 8 7 +3493 5719 4555 9538 9098 8519 6470 2313 1555 1787 1013 803 598 479 719 1601 822 414 211 146 144 185 233 281 114 169 105 14 14 88 262 144 110 18 15 22 39 18 23 18 +175186 491146 222655 902417 407976 893848 287298 304068 93402 239661 83759 135010 45131 70088 34505 158320 54115 118321 56833 67544 30430 55178 31459 75714 21118 41423 25453 78405 42919 90829 48278 78390 36668 37678 22600 25017 25852 24217 14893 21058 +1730 1052 1782 990 2063 886 2057 658 1316 612 928 453 899 478 1352 839 1072 231 215 106 244 187 423 235 251 227 191 0 2 25 335 85 143 4 6 11 34 8 23 9 +4073 7828 6422 13612 9976 12341 7205 4078 2368 1915 1763 1585 730 1041 1021 3572 1898 786 487 396 461 508 366 422 261 175 32 19 20 71 198 136 124 30 24 22 25 26 33 28 +274928 942611 381259 1684561 652803 1598419 464303 536366 151602 342934 126734 224288 62665 129839 59742 368275 101281 159511 67863 87820 41454 83077 40029 97097 27262 49689 25635 74868 42358 89741 52134 82042 38922 37260 22649 24867 25845 24361 15364 21514 +1321 765 1546 776 1470 743 1417 681 1322 470 1124 574 832 626 1236 863 1497 376 501 307 731 437 584 297 539 196 23 1 4 16 218 71 160 12 15 11 14 16 47 22 +4053 8412 7125 15643 12496 19178 11695 4445 2446 2458 2390 2192 997 1502 2138 4071 3084 1126 878 815 1078 1007 501 380 415 340 60 19 21 39 111 123 105 28 24 28 29 29 35 34 +371669 1402868 553874 2544558 955935 2680766 751665 
777275 210343 473372 184664 345502 86586 214328 112903 596291 177587 219119 88612 132622 67975 139960 51836 114617 37189 67596 26528 71543 41837 86752 53669 84676 40634 36744 22697 25095 25941 24681 15875 22312 +979 564 1205 604 1244 713 1497 508 984 432 1102 542 884 567 1524 616 1499 399 753 467 1185 566 654 214 712 331 77 1 5 5 90 55 119 10 15 18 18 20 51 32 +4635 9178 7798 15895 13673 23909 16333 6078 3817 3085 2435 3130 1534 2443 3441 4314 3382 1511 1165 1009 1453 1598 992 714 575 440 132 32 17 74 128 122 88 56 49 52 55 59 49 50 +480873 1882572 739383 3368439 1281587 3988835 1150413 1104057 302664 634505 242298 517072 123637 351560 198045 825556 259606 298804 116180 186655 103420 229739 75899 151606 50959 90572 29240 69216 41227 86093 55601 87091 41869 37979 23382 26784 26699 26825 16731 24045 +858 442 996 444 1054 583 1407 502 1132 423 849 523 990 601 1502 458 1138 414 779 423 1108 564 992 354 772 337 226 5 3 19 108 52 86 41 62 57 65 74 91 62 +5070 9775 7647 14141 12876 25889 17379 7771 4867 3346 2665 3811 1843 3017 4752 4927 2911 1566 1212 999 1443 1524 939 723 600 416 142 24 17 62 59 53 45 45 51 53 58 53 42 55 +598470 2370173 916401 4035128 1578737 5340070 1565945 1515247 419521 802006 304373 720187 167662 515824 314574 1078727 327537 377087 144261 236832 137724 309585 98007 186929 65024 110695 32139 66537 40632 84736 55721 85121 41974 38465 24101 28433 27515 28471 17386 25981 +781 382 768 315 787 479 1094 472 1042 354 767 468 922 503 1374 403 784 333 676 318 851 404 732 284 653 250 229 3 3 14 24 10 23 26 64 55 69 55 66 67 +5083 10030 7572 13293 12502 24381 14679 9288 4585 3297 2842 4191 1673 3172 4413 4907 3248 1533 1398 1263 1332 1978 1287 895 721 449 157 25 22 46 76 86 71 39 49 37 43 55 60 66 +713464 2844186 1087083 4609718 1858908 6517586 1902085 1994967 526252 956447 369423 934462 206242 679755 419528 1315479 402387 448646 176396 300218 168335 412534 128459 230700 81831 131639 35350 64080 40180 82478 56272 85297 42741 38553 24751 29000 27927 30141 18485 28477 +652 320 651 257 634 352 738 423 786 280 672 384 667 405 939 326 706 261 648 334 646 393 795 286 656 230 240 3 5 8 40 26 55 19 57 26 37 54 116 84 +5107 10504 7656 13916 12099 20628 11630 8685 4458 3121 3081 3981 1504 3269 3989 4953 3352 1506 1541 1548 1646 2706 1551 1301 985 541 192 18 20 106 124 137 64 25 23 31 29 24 27 26 +826201 3318881 1255652 5188110 2121784 7393879 2151894 2408858 627072 1090808 438959 1122979 239539 839810 511023 1540853 478027 514253 211384 377310 206208 554032 164899 296788 104967 156978 39375 61341 39688 84041 58037 88596 43310 37775 24721 29164 27971 29807 18713 28365 +554 277 573 238 541 248 512 326 647 230 604 302 505 324 691 272 606 219 593 328 655 417 778 350 719 243 280 1 4 41 97 63 44 8 12 18 16 10 25 13 +4788 10974 8349 14952 11467 16599 9605 7908 4005 3071 3284 3935 1815 3182 5020 5409 3245 1642 1660 1815 2317 2570 2004 1871 1349 618 205 23 28 72 75 117 98 37 45 54 58 61 64 58 +927969 3793971 1437727 5795449 2361942 7970065 2343703 2750180 713795 1214036 511948 1297359 279955 984917 626589 1780721 549044 584279 248540 466181 260288 678685 212009 393931 136830 185528 43632 59073 39413 83422 58505 90468 44733 37781 25254 30731 28755 31766 19881 30226 +451 246 545 228 459 171 379 248 500 199 557 253 544 260 738 251 518 215 545 307 757 316 788 390 795 236 275 3 9 19 36 44 95 18 47 51 65 62 119 60 +4768 11315 8993 14915 11848 13912 9248 7755 4364 3331 3556 4411 2640 3211 6849 9164 2971 2236 1899 1767 2290 2596 1906 2579 1317 504 230 29 32 90 103 176 87 38 50 58 53 54 57 40 +1026685 4261507 
1631720 6364076 2605844 8346599 2521598 3061623 807531 1345845 590068 1490521 340451 1123100 786024 2236895 611284 686597 290878 546771 312328 797457 255438 528743 167080 205361 48422 57310 39247 83946 59677 95853 45840 37848 25901 32450 29391 33177 20841 30870 +406 228 511 198 430 126 333 210 477 195 535 246 677 231 808 374 421 263 552 253 633 262 621 412 643 157 285 5 12 30 67 88 74 19 56 55 52 46 94 28 +4784 11258 9458 14055 11505 12083 8842 7446 4615 3621 3325 4073 2794 2808 7498 11179 3019 2571 1902 2113 3536 3671 2787 3408 1110 402 209 33 23 130 128 135 88 52 45 51 51 49 55 60 +1123346 4697490 1832756 6845751 2834889 8588172 2684674 3335396 905343 1487562 660333 1651328 403373 1228233 958069 2789496 673197 803358 332236 643783 394920 975148 320304 706398 191283 217737 52555 55898 38855 86896 61459 98396 46945 38771 26404 33636 29960 34196 21726 32704 +375 202 471 165 376 98 294 177 444 190 441 192 597 174 719 372 387 256 483 265 791 313 765 421 459 102 232 7 6 59 95 50 73 34 44 40 47 36 85 58 +4806 10471 8935 11923 10391 12478 9741 7219 4156 3051 3018 2913 2397 2485 4389 10354 4171 2992 2293 3059 3839 4111 3427 3731 908 393 213 47 29 127 176 186 107 45 44 48 51 55 60 46 +1218156 5058964 2015405 7167541 3029740 8839519 2866659 3578797 988980 1585757 720996 1731219 454576 1307214 1046346 3258256 763012 938979 382557 793094 483195 1169211 399910 893239 209718 228818 56687 55431 38626 89485 64423 103919 48508 39209 26869 34566 30515 35523 22717 33567 +347 168 400 122 308 100 306 152 358 137 356 110 438 134 363 279 484 254 509 322 697 301 754 353 326 91 219 15 10 53 147 85 98 25 41 34 46 44 93 33 +4789 9415 7469 9703 8275 12865 8568 5913 2944 1953 2347 2110 1701 1500 3367 6624 4198 3292 2421 4031 4146 2726 2216 2121 584 278 196 43 19 80 113 145 69 23 29 27 22 27 33 30 +1310165 5333872 2156019 7333634 3165637 9099562 3014117 3727357 1039547 1610602 762992 1756983 486710 1320940 1106294 3469730 851276 1084894 434893 993164 577114 1266539 446573 969955 219411 232169 60282 54747 38147 89031 65703 106592 49060 38269 26939 34150 30315 35050 22993 33396 +319 133 306 82 218 99 245 106 223 62 244 60 271 55 249 141 428 247 466 344 648 160 408 155 176 45 183 13 4 21 70 50 41 6 17 11 8 10 28 14 +4848 9077 5713 8528 5346 8823 6040 2698 1492 1134 1249 1390 882 864 2042 3299 2665 1886 2276 1967 1926 1781 1611 997 332 192 242 93 11 57 63 44 24 15 15 17 14 13 16 17 +1401385 5571520 2248236 7417572 3223269 9095673 3093271 3669483 1051735 1583639 775872 1736966 497106 1294768 1130875 3464236 898149 1135674 482216 1054424 611940 1299969 476606 973013 222420 230035 64963 57176 37475 87191 65673 102899 48448 36894 26649 33145 29916 33745 22828 32436 +299 119 209 65 114 49 148 24 80 21 97 27 106 19 122 40 232 107 387 126 250 79 259 41 70 22 225 58 1 11 21 4 5 3 4 4 3 2 6 4 +4532 10204 7294 6298 4803 6332 4239 2393 1337 1120 560 872 547 898 1135 1019 1113 808 822 576 1297 1248 1133 671 243 154 202 76 8 39 57 21 15 16 15 17 17 14 15 14 +1482250 5864150 2378565 7359469 3265582 8938977 3124411 3596342 1059657 1557434 770818 1686325 498679 1272255 1131657 3318995 904179 1117177 491190 1026549 629818 1298648 493670 955859 223079 225694 68504 58414 36743 84355 65490 98015 47622 35663 26367 32200 29603 32580 22641 31350 +256 132 267 35 95 26 82 19 64 22 20 11 43 21 41 4 58 21 96 12 142 39 156 19 37 14 168 37 0 5 17 1 2 3 4 4 5 3 6 3 +4577 10787 7694 6154 4787 5455 3748 2768 1319 948 834 422 320 628 933 1423 983 355 269 381 896 914 953 757 187 135 153 80 10 18 29 15 15 14 15 17 13 16 21 15 +1562247 6175041 2515865 
7296005 3306429 8737801 3142223 3550629 1066921 1522234 772894 1611076 494410 1234505 1127256 3207288 906735 1071959 485804 988366 636999 1276886 505707 945017 222290 220446 70704 59824 36081 80399 64596 93055 46816 34383 26092 31312 29196 31608 22612 30390 +242 130 268 34 92 20 65 27 62 16 45 2 15 10 28 8 45 4 10 5 77 21 117 25 22 11 108 40 1 1 4 0 2 2 5 4 3 4 12 4 +4644 10487 7196 5773 4415 3773 2622 1938 917 731 1173 287 167 363 907 1183 1393 441 402 803 884 936 787 518 159 69 99 71 9 12 22 13 15 14 15 16 17 18 16 17 +1641960 6448848 2637008 7212941 3336747 8445357 3130807 3456665 1063727 1475814 783584 1532047 486336 1182739 1122301 3087539 919708 1034738 483952 978400 643694 1257781 513200 920142 220805 211458 71469 60596 35410 76312 63545 88270 46030 33180 25824 30416 28901 30817 22456 29611 +230 117 232 30 78 10 32 14 30 10 86 1 4 3 27 6 87 7 24 26 73 23 84 12 16 3 47 31 1 0 2 0 2 3 5 4 5 5 7 5 +5223 10823 6912 6080 4127 1741 1269 1344 1089 910 932 214 190 170 1568 1885 651 470 372 480 543 750 808 566 127 66 66 34 9 12 15 16 14 14 18 18 12 14 13 17 +1734484 6726870 2747867 7153722 3358946 8045618 3085090 3331845 1065010 1443176 787847 1453275 479052 1122221 1134366 3018103 913390 1001531 481380 949188 641505 1228395 521043 899709 218539 202825 71372 59049 34756 72470 62342 83957 45238 32049 25639 29696 28486 29828 22227 28878 +249 115 209 35 68 2 7 7 42 17 54 0 5 0 78 17 19 8 21 10 28 15 86 15 10 3 21 7 1 0 1 0 1 3 7 5 2 3 4 5 +5769 11453 6707 6447 3888 697 1030 746 1178 768 499 314 371 434 1340 1981 607 507 618 594 449 473 481 254 80 59 42 22 15 10 11 16 16 15 16 18 16 15 17 16 +1838655 7026917 2850718 7120603 3374482 7605722 3034405 3177774 1068536 1403772 780935 1385373 476576 1081554 1140302 2958731 906105 972590 485160 928732 636968 1183754 520331 861333 215128 194280 70663 56857 34271 68736 61066 79902 44517 31047 25407 29020 28183 28959 22106 28128 +261 117 191 39 60 0 5 2 49 12 16 2 21 6 57 20 17 10 58 15 19 6 31 3 4 2 8 3 3 0 0 1 2 3 5 6 5 4 8 5 +6518 12603 6832 6314 3537 2848 1978 1107 650 433 390 339 317 247 843 901 1116 590 312 277 252 454 484 286 52 24 12 7 13 14 15 17 15 10 16 15 20 18 16 15 +1959372 7379615 2954197 7081300 3380658 7324371 3009218 3055126 1058477 1346151 771409 1323081 472782 1031838 1133385 2836569 912013 950485 481024 890028 627508 1140624 519714 827226 211086 184098 69205 53875 33747 65472 59925 76152 43789 29798 25181 28200 27990 28327 21963 27361 +282 126 187 39 49 7 19 6 15 4 10 2 16 2 23 4 58 15 15 3 6 6 31 4 1 0 0 0 2 1 1 1 2 1 5 4 8 6 7 4 +7384 13866 7060 4989 3267 4234 2206 1335 729 584 216 204 299 261 956 1044 710 441 154 157 392 462 355 295 69 28 10 10 8 9 8 14 13 9 16 14 15 19 17 14 +2099212 7788748 3060921 6962950 3379778 7145052 2990488 2953844 1050688 1301264 757673 1256232 468622 985965 1129529 2730522 907395 920552 472952 846274 621863 1100573 515814 795718 207580 174772 67733 51257 33109 62096 58633 72443 43028 28563 24961 27368 27674 27794 21849 26579 +309 135 185 25 42 17 25 9 19 8 3 1 14 2 29 6 23 8 3 1 15 7 17 5 3 0 0 0 1 0 0 0 1 1 6 4 4 7 8 4 +8092 13535 5835 2137 2090 2148 1109 927 793 427 191 139 199 121 396 491 275 186 176 157 350 296 287 232 65 26 8 8 6 15 12 11 7 8 12 14 15 15 11 10 +2253660 8152998 3133667 6676481 3348832 6848334 2944184 2833573 1044730 1249425 743641 1189400 462010 934243 1111454 2596863 891773 876748 465644 805145 615286 1052726 510273 762230 204059 165883 66246 48673 32435 59291 57476 68772 42132 27340 24644 26586 27366 27047 21584 25598 +321 124 138 5 17 4 6 4 23 4 2 0 6 0 5 1 3 1 5 1 12 3 11 3 3 0 0 0 
+[machine-generated numeric test data: added rows of 40 space-separated integer values]
10 10 5 7 9 7 4 +4 5 4 6 8 5 10 6 2 4 4 3 5 5 8 6 5 6 7 10 5 9 8 9 7 8 8 16 12 9 8 7 5 8 11 11 9 8 8 10 +5184 5944 4412 5017 5084 5315 5354 5598 3788 3940 3334 4089 4269 4720 4041 5347 3819 5616 4309 5826 4390 6121 4578 6845 5243 6760 5300 7630 5804 7500 5595 6824 5432 7428 5734 7191 5568 6589 4871 5195 +1 2 2 4 7 2 11 3 0 2 2 1 3 3 8 4 3 3 6 10 3 8 7 7 5 6 7 22 15 7 6 4 2 5 12 10 8 6 7 11 +5 7 8 6 4 2 3 4 4 6 7 6 3 5 6 4 7 5 6 11 9 9 5 9 6 9 7 9 9 9 10 13 9 12 9 7 6 7 7 8 +5182 6017 4506 5084 5059 5118 5297 5507 3795 4072 3429 4212 4239 4743 4093 5271 3902 5586 4354 6152 4510 6306 4591 6987 5265 6907 5346 7725 5889 7602 5710 7213 5526 7719 5820 7189 5582 6623 4928 5374 +2 5 7 4 1 0 1 1 2 4 7 4 1 3 4 1 6 2 4 12 10 8 3 7 4 7 5 6 8 6 10 15 8 12 8 4 3 4 5 7 +3 4 5 5 3 6 6 5 9 9 8 11 3 2 3 5 6 9 7 5 10 11 6 6 11 8 7 9 9 8 10 10 5 10 8 8 7 6 7 5 +5129 5901 4521 5086 5009 5179 5318 5483 3930 4380 3547 4635 4209 4581 4067 5261 3957 5803 4424 6090 4653 6603 4629 6936 5414 6984 5391 7814 5972 7637 5823 7394 5515 7870 5879 7249 5621 6594 4983 5358 +1 1 3 2 1 4 4 2 10 10 9 14 1 0 1 2 4 8 6 2 12 11 4 3 13 5 5 6 8 5 10 8 2 8 6 5 5 3 5 2 +2 2 3 7 9 8 5 3 4 4 5 3 3 5 6 6 3 8 7 6 6 7 7 9 9 8 7 10 8 10 11 10 8 7 8 11 11 6 6 5 +5052 5669 4484 5210 5114 5359 5313 5338 3934 4362 3586 4541 4180 4613 4118 5313 3934 5946 4492 6093 4690 6636 4692 7072 5508 7056 5435 7959 6027 7793 5958 7564 5581 7827 5936 7489 5761 6567 5011 5343 +0 0 1 5 9 7 2 1 2 2 3 1 1 3 4 4 1 6 6 3 4 4 5 7 8 5 5 8 6 8 12 8 6 4 6 10 12 3 4 2 +5 5 8 5 7 7 6 5 3 3 5 7 7 6 8 5 4 4 7 7 5 6 6 8 8 6 8 10 9 11 9 10 8 10 10 9 12 7 6 5 +5053 5636 4576 5204 5165 5467 5333 5324 3912 4284 3624 4698 4254 4704 4219 5301 3938 5835 4558 6157 4700 6606 4728 7139 5575 7001 5503 8095 6106 8001 6039 7724 5646 7971 6043 7592 5923 6603 5039 5329 +2 2 7 2 5 5 4 2 1 1 3 5 6 4 8 2 2 1 6 4 3 3 4 5 6 3 7 8 8 10 8 8 6 8 10 6 15 4 4 2 +6 9 6 3 10 12 13 7 4 4 3 4 7 5 4 3 4 7 9 8 9 6 6 8 10 10 12 10 9 7 8 9 9 8 7 6 7 7 7 7 +5080 5850 4615 5076 5291 5876 5532 5434 3916 4272 3610 4661 4326 4728 4215 5167 3941 5914 4674 6279 4812 6578 4763 7202 5691 7195 5672 8223 6183 7951 6092 7813 5735 7984 6071 7505 5954 6636 5092 5439 +4 8 4 1 11 15 18 5 2 2 1 1 6 3 2 1 2 5 9 6 9 3 4 5 10 9 15 8 8 4 6 6 8 5 5 3 5 4 5 5 +4 4 6 6 6 7 6 6 8 4 5 8 3 9 7 9 6 7 8 9 11 9 8 10 8 7 7 8 9 8 9 8 9 6 7 9 7 9 8 6 +5055 5744 4653 5140 5312 5953 5547 5476 4022 4261 3647 4872 4294 4997 4288 5409 3995 5989 4761 6455 4973 6736 4848 7384 5753 7193 5709 8221 6258 7965 6169 7835 5821 7873 6098 7607 5984 6790 5169 5481 +1 1 4 4 4 5 3 3 8 2 3 7 1 9 6 8 4 5 7 7 14 7 7 8 6 4 5 5 8 5 8 5 8 3 5 6 5 7 7 3 +6 4 4 5 3 3 7 6 8 9 9 6 4 7 4 4 6 6 8 10 6 6 9 10 9 6 8 6 6 9 6 9 10 7 7 8 6 9 8 6 +5082 5645 4639 5138 5256 5780 5587 5516 4126 4558 3786 4948 4289 5127 4283 5330 4048 5998 4846 6682 5002 6700 4957 7555 5839 7130 5770 8096 6255 8040 6168 7917 5931 7830 6124 7642 5987 6935 5244 5520 +4 1 1 2 1 0 5 3 8 9 11 4 2 5 2 1 4 3 7 9 4 3 9 8 8 3 6 2 3 6 3 6 10 4 4 5 3 7 7 3 +8 8 12 10 5 2 10 7 6 7 5 4 4 5 3 5 4 9 8 9 7 9 8 8 8 10 7 8 11 16 12 10 14 12 8 11 9 8 8 8 +5159 5797 4829 5444 5252 5556 5703 5615 4176 4714 3819 4896 4284 5126 4252 5317 4049 6191 4929 6834 5056 6850 5037 7593 5897 7316 5804 8101 6380 8540 6320 8056 6140 8097 6175 7859 6067 7010 5317 5680 +7 6 17 11 2 0 10 5 4 5 3 1 2 2 1 2 2 8 7 7 5 7 7 5 6 8 5 5 11 20 14 8 19 11 6 10 8 5 7 6 +2 8 9 6 6 6 6 5 8 7 8 6 5 4 4 6 5 7 6 9 10 6 8 9 8 7 7 9 15 20 14 8 11 13 13 14 8 9 6 7 +5081 5940 4938 5485 5274 5591 5714 5585 4276 4861 3928 4970 4304 5064 4248 5366 4075 
6249 4959 6976 5185 6807 5115 7690 5954 7307 5838 8167 6604 9256 6520 8064 6267 8409 6353 8247 6120 7142 5337 5769 +0 6 9 3 4 3 3 2 8 5 8 4 3 1 2 4 3 4 4 7 11 3 7 6 6 4 5 6 21 29 19 5 12 13 16 15 6 7 4 5 +7 8 5 5 5 4 3 5 7 12 9 5 6 6 7 7 6 4 7 6 5 5 5 12 13 5 8 12 10 9 8 7 10 8 9 10 11 8 8 6 +5133 6075 4942 5463 5270 5501 5648 5557 4348 5306 4060 4978 4349 5128 4320 5474 4126 6119 5014 6926 5183 6705 5115 7965 6137 7175 5896 8414 6694 9253 6561 8010 6366 8395 6424 8366 6248 7204 5408 5791 +5 6 2 2 2 1 0 2 6 16 10 2 4 4 6 5 4 1 5 3 2 2 2 11 17 2 6 11 9 5 6 4 9 5 7 7 12 5 7 3 +10 17 15 10 9 5 10 8 8 9 5 4 5 10 8 6 10 5 9 16 8 9 10 11 7 9 11 13 12 10 10 7 9 7 7 8 10 8 7 6 +5260 6754 5202 5749 5368 5478 5762 5715 4443 5540 4086 4925 4368 5434 4416 5514 4278 6059 5118 7493 5258 6855 5242 8162 6162 7297 6030 8707 6833 9312 6652 7959 6437 8321 6442 8355 6347 7263 5451 5812 +11 27 25 10 9 2 10 6 8 8 3 1 3 11 8 3 12 2 9 22 7 7 11 9 4 7 12 13 13 7 9 4 7 3 4 5 9 5 5 3 +11 14 12 7 9 10 10 9 9 5 7 7 12 10 10 6 4 5 9 7 7 7 9 8 7 8 8 10 14 11 11 9 7 12 10 9 8 8 8 10 +5409 7208 5378 5834 5464 5763 5873 5925 4562 5514 4162 5059 4565 5722 4561 5551 4273 6002 5220 7473 5305 6873 5341 8163 6187 7350 6083 8798 7020 9429 6767 8034 6455 8559 6536 8406 6393 7318 5519 6077 +13 17 16 5 8 10 10 8 9 2 6 5 17 10 12 3 2 2 9 4 5 4 9 5 4 5 6 7 17 8 11 6 4 11 9 6 6 5 7 10 +8 12 16 17 9 9 5 8 10 9 6 4 9 7 9 10 5 7 8 7 6 8 9 12 13 9 10 11 11 8 8 11 10 10 10 8 7 8 9 8 +5478 7512 5652 6528 5557 5970 5854 6061 4703 5736 4211 5001 4681 5808 4677 5832 4294 6071 5294 7454 5325 6952 5437 8410 6364 7461 6186 8945 7125 9354 6802 8227 6549 8659 6628 8393 6412 7370 5611 6203 +7 12 27 28 8 8 2 6 12 8 4 1 9 5 9 10 3 5 7 4 4 5 8 11 16 7 10 9 10 4 6 9 9 7 9 5 4 5 8 6 +11 12 7 8 9 2 2 7 8 8 9 6 13 11 5 7 9 9 6 8 6 5 10 16 11 9 9 13 13 7 10 11 9 10 10 7 6 6 5 6 +5622 7798 5689 6627 5648 5734 5758 6127 4790 5883 4335 5069 4896 6135 4688 5912 4416 6259 5315 7498 5345 6842 5556 8888 6486 7566 6261 9207 7279 9222 6887 8409 6615 8753 6718 8319 6405 7296 5598 6199 +13 12 5 6 8 0 0 4 7 6 10 4 19 12 3 5 10 8 4 5 4 2 10 19 11 7 8 12 15 3 9 9 7 7 9 3 3 3 2 3 +6 10 10 11 6 6 5 6 7 8 11 3 5 7 3 6 7 8 8 9 8 8 10 9 7 11 9 9 10 10 11 7 10 11 10 7 7 10 9 7 +5635 7944 5802 6905 5660 5758 5742 6128 4849 6021 4507 4949 4901 6196 4647 5925 4484 6374 5386 7601 5416 6922 5672 8907 6503 7787 6334 9207 7352 9283 6996 8334 6705 8903 6805 8249 6424 7472 5688 6257 +3 8 10 11 3 3 2 3 5 6 15 1 2 4 1 3 6 6 7 6 7 5 10 6 4 10 8 5 8 7 11 3 9 9 9 3 4 8 8 4 +6 11 6 8 6 5 4 9 10 5 5 7 5 5 6 6 9 8 8 10 9 7 7 6 8 8 7 8 6 7 7 9 8 7 9 10 10 10 8 7 +5647 8143 5810 6982 5672 5719 5700 6313 4983 5966 4522 5082 4906 6131 4684 5938 4602 6483 5456 7759 5510 6936 5709 8741 6545 7811 6354 9146 7321 9156 7000 8386 6742 8798 6865 8368 6519 7638 5750 6311 +3 9 3 5 3 2 1 8 11 2 3 5 2 2 4 3 9 6 7 8 8 4 5 2 6 5 4 4 3 3 4 6 6 3 7 7 9 8 6 4 +7 16 18 12 7 11 13 9 6 3 4 5 2 8 6 4 10 11 11 9 11 8 5 11 9 8 8 10 10 9 7 10 11 9 9 11 11 9 7 5 +5684 8637 6125 7300 5709 6051 5890 6487 5011 5792 4511 5084 4834 6254 4720 5827 4742 6769 5600 7846 5653 7011 5694 8892 6611 7833 6399 9211 7393 9159 7004 8497 6854 8823 6923 8541 6637 7732 5785 6239 +5 19 32 12 5 12 17 7 4 0 1 2 0 6 4 1 12 11 13 6 13 5 2 9 7 5 6 7 8 5 4 7 11 6 7 9 11 6 5 2 +7 5 11 13 8 6 8 5 5 5 7 10 6 8 11 10 10 10 10 7 9 11 7 7 8 5 7 8 13 10 8 8 9 9 9 7 9 7 5 4 +5721 8425 6253 7660 5770 6056 5947 6404 5013 5751 4577 5393 4866 6370 4883 6091 4879 6977 5715 7805 5741 7266 5730 8788 6650 7670 6418 9149 7540 9223 7033 8478 6912 8846 6980 8458 6701 
7698 5768 6110 +5 1 12 14 6 3 6 2 2 2 6 11 4 6 14 10 11 9 10 4 8 10 5 3 6 2 4 4 14 7 5 5 7 6 7 3 7 4 2 1 +6 10 6 4 4 3 5 7 9 6 6 12 8 3 6 8 5 6 6 5 9 11 10 9 6 4 11 9 14 11 10 9 13 10 9 8 9 8 8 5 +5731 8533 6250 7446 5728 5876 5926 6449 5117 5774 4616 5806 4949 6172 4914 6217 4885 6927 5725 7643 5827 7505 5842 8813 6637 7455 6538 9153 7709 9345 7113 8522 7071 8929 7035 8442 6763 7727 5828 6050 +3 7 3 1 1 0 2 4 9 3 4 15 7 0 4 6 2 3 3 2 8 10 10 6 3 1 11 5 16 8 9 6 15 7 7 5 7 5 6 2 +2 6 8 5 4 4 5 3 5 3 6 10 8 4 6 5 6 7 8 7 9 11 13 9 11 11 13 14 10 6 8 7 9 8 12 11 13 11 7 5 +5639 8389 6298 7306 5687 5769 5905 6246 5117 5611 4654 6072 5029 6047 4944 6151 4916 6941 5786 7614 5911 7730 6028 8837 6752 7683 6707 9463 7772 9152 7139 8440 7124 8884 7166 8611 6926 7939 5861 5994 +0 2 6 2 1 1 2 0 2 0 4 10 7 1 4 2 4 4 6 4 8 10 17 6 11 10 16 14 8 2 5 3 7 4 13 9 15 10 5 2 +6 5 5 7 6 7 5 2 6 7 9 5 6 6 5 6 7 6 7 5 6 10 13 10 11 9 10 16 11 12 12 11 8 8 9 10 10 10 9 6 +5651 8192 6268 7297 5698 5852 5885 5994 5142 5704 4767 6014 5056 6052 4948 6150 4972 6893 5820 7464 5916 7880 6209 8921 6864 7774 6795 9878 7859 9340 7267 8609 7150 8842 7217 8708 7008 8077 5944 6002 +3 2 2 4 3 5 2 0 4 5 9 2 4 3 2 3 5 3 5 2 3 8 17 7 11 6 9 17 10 10 12 9 5 4 7 7 9 8 8 3 +3 1 4 6 5 5 8 9 8 4 8 5 5 7 8 3 5 6 5 5 15 12 7 11 9 7 9 10 10 7 8 7 8 8 11 10 9 8 6 8 +5586 7761 6213 7227 5683 5808 5942 6187 5218 5607 4852 5960 5057 6118 5028 5965 4975 6848 5802 7323 6151 8144 6232 9061 6922 7737 6855 9899 7918 9209 7290 8522 7176 8803 7318 8799 7063 8083 5948 6133 +0 0 1 3 2 2 6 8 7 1 7 2 2 4 7 0 2 3 2 2 22 11 4 9 7 4 7 6 8 3 5 3 5 4 10 7 7 5 3 6 +4 5 5 9 8 13 10 9 8 4 8 11 5 4 7 8 9 8 7 9 9 12 12 7 6 7 8 11 10 7 12 15 11 8 7 9 9 9 11 7 +5548 7602 6185 7346 5745 6258 6049 6368 5292 5516 4935 6278 5058 5996 5081 6098 5080 6928 5836 7436 6227 8392 6383 8947 6902 7702 6888 9980 7975 9086 7414 8932 7278 8766 7314 8824 7116 8150 6080 6195 +1 2 2 7 6 16 10 8 7 1 7 12 2 1 5 6 9 5 5 7 8 11 14 3 3 4 5 8 8 3 12 16 10 4 4 6 7 6 12 4 +7 16 13 11 6 9 11 9 12 9 9 9 5 4 5 4 7 9 9 8 5 9 7 7 11 6 5 9 9 9 10 11 11 6 8 9 10 9 8 6 +5588 8128 6362 7581 5754 6435 6179 6538 5466 5737 5041 6454 5059 5882 5081 5977 5132 7065 5920 7481 6199 8441 6402 8840 7010 7608 6843 9934 8005 9093 7484 9071 7377 8608 7335 8847 7193 8213 6132 6191 +5 20 16 10 3 7 12 7 15 8 9 7 2 1 2 1 5 7 8 5 2 6 4 3 11 3 2 5 6 6 8 9 10 2 5 6 9 6 6 3 +14 15 11 7 2 6 10 9 4 5 8 9 8 9 4 9 11 9 4 7 7 8 7 9 9 14 14 11 15 8 10 9 11 8 9 8 10 8 9 6 +5806 8561 6484 7556 5661 6417 6280 6698 5431 5699 5119 6619 5137 6082 5056 6171 5285 7194 5874 7462 6223 8426 6421 8862 7065 8011 7030 10013 8188 9038 7552 9079 7474 8583 7381 8807 7269 8211 6208 6188 +20 17 11 4 0 3 9 7 1 2 7 7 7 8 1 8 13 7 1 4 4 5 4 6 7 16 17 8 18 4 8 6 10 5 7 4 8 5 8 3 +3 5 4 5 3 2 4 5 9 10 10 8 10 12 4 8 10 6 3 8 9 10 6 6 9 12 8 8 13 10 6 7 11 11 10 5 7 9 11 8 +5737 8354 6424 7409 5596 6154 6225 6603 5525 5971 5246 6713 5264 6454 5032 6292 5408 7131 5804 7505 6297 8534 6414 8698 7118 8267 7059 9903 8315 9110 7516 8964 7568 8743 7452 8585 7266 8271 6334 6308 +0 1 1 2 0 0 1 2 8 10 11 6 11 14 1 6 11 3 0 5 8 7 3 2 7 11 5 4 13 7 3 3 10 9 8 1 4 6 12 6 +10 13 8 6 9 5 7 5 4 6 6 8 8 7 6 6 8 6 11 7 10 9 16 12 12 11 9 8 11 9 11 8 11 12 8 5 9 11 11 8 +5849 8651 6468 7333 5686 6091 6248 6514 5489 5981 5268 6801 5337 6496 5059 6283 5477 7071 5940 7484 6395 8574 6662 8913 7247 8446 7112 9800 8388 9116 7609 8917 7660 8955 7470 8377 7314 8450 6457 6421 +10 13 6 3 8 2 4 2 1 3 4 6 7 4 4 3 7 3 12 4 9 6 24 10 12 9 7 4 9 6 10 4 10 10 5 1 7 9 11 6 +7 12 6 4 7 
5 10 8 9 8 6 4 4 5 9 9 10 6 13 7 5 8 11 13 7 11 10 12 11 13 11 10 10 8 8 10 11 10 10 6 +5881 8869 6459 7138 5722 6032 6347 6614 5582 6113 5289 6638 5306 6413 5162 6458 5595 7015 6124 7465 6363 8551 6776 9176 7245 8615 7190 9949 8459 9367 7700 8996 7724 8909 7488 8488 7412 8557 6551 6404 +5 10 3 1 5 2 9 6 8 6 4 1 1 2 9 7 10 3 17 4 2 5 11 12 4 9 9 9 9 12 10 7 8 4 5 7 10 7 9 3 +8 12 11 6 4 7 7 3 7 7 8 7 6 4 7 5 8 5 7 7 8 8 6 9 9 8 13 11 9 11 12 14 13 9 9 9 10 11 10 6 +5938 9074 6578 7078 5681 6100 6367 6401 5621 6176 5361 6669 5326 6273 5212 6377 5659 6901 6150 7447 6408 8529 6760 9178 7294 8589 7342 10027 8477 9480 7814 9316 7863 8927 7531 8531 7482 8719 6643 6388 +6 10 11 3 1 4 4 0 5 4 7 4 4 1 5 2 6 2 4 4 6 5 3 5 7 5 14 8 6 8 12 14 14 6 7 6 8 9 9 3 +8 11 7 10 11 8 9 7 6 6 6 6 4 7 9 7 8 5 5 7 6 6 7 6 8 9 13 14 13 12 9 11 10 5 9 8 6 8 9 7 +5994 9205 6592 7267 5820 6225 6438 6447 5634 6174 5380 6637 5295 6326 5311 6424 5722 6794 6124 7430 6401 8385 6770 8995 7316 8626 7491 10285 8597 9648 7848 9432 7922 8698 7573 8510 7448 8687 6707 6434 +6 8 4 8 12 6 7 4 3 3 4 3 1 4 9 4 6 2 2 4 3 2 4 2 5 6 14 13 13 10 6 8 8 1 7 5 3 4 7 4 +4 8 5 9 7 3 4 6 7 4 7 7 7 8 4 6 6 5 6 9 11 5 5 7 7 7 9 12 10 7 11 12 11 12 14 12 10 8 9 7 +5946 9144 6555 7383 5853 6035 6379 6428 5672 6049 5424 6668 5341 6437 5280 6407 5732 6693 6124 7537 6522 8189 6728 8885 7312 8538 7534 10405 8638 9499 7933 9603 8005 8913 7741 8736 7517 8657 6769 6478 +1 4 2 7 5 0 1 3 5 1 5 4 5 6 1 3 3 2 3 7 11 2 2 3 4 3 7 9 7 3 10 10 10 10 16 11 8 4 7 4 +4 7 7 13 9 8 7 7 8 7 9 11 7 6 3 5 6 6 6 6 9 5 6 10 8 9 7 9 12 8 8 9 10 10 10 11 10 9 9 8 +5899 9025 6570 7738 5936 6164 6398 6472 5734 6116 5518 6943 5386 6419 5224 6329 5742 6660 6124 7453 6589 8004 6713 8966 7333 8578 7524 10333 8729 9420 7939 9579 8060 8992 7803 8887 7584 8690 6830 6580 +1 3 4 14 8 6 4 4 6 4 8 11 5 3 1 2 3 3 3 3 7 2 3 7 5 6 4 5 11 4 5 5 8 7 8 9 8 6 7 6 +9 8 7 18 11 8 6 7 4 5 12 11 11 6 8 6 9 10 8 8 8 9 9 10 11 9 13 7 9 8 8 13 14 9 8 8 10 8 9 7 +5981 8975 6584 8379 6069 6285 6391 6513 5693 6056 5687 7202 5532 6402 5298 6317 5828 6874 6175 7497 6629 8076 6775 9042 7431 8616 7668 10143 8741 9346 7945 9802 8216 9005 7812 8845 7650 8660 6889 6615 +8 4 4 25 12 6 3 4 1 2 15 10 13 3 7 3 8 9 6 5 6 6 7 7 10 6 14 3 6 4 5 11 15 6 5 4 8 4 7 4 +6 5 4 9 10 8 10 8 4 4 8 12 8 4 8 7 5 7 9 6 7 8 9 11 11 12 11 7 7 7 8 11 11 11 11 10 6 8 7 6 +5985 8743 6521 8429 6173 6399 6487 6613 5653 5938 5749 7507 5598 6263 5370 6368 5810 6891 6250 7415 6642 8082 6835 9175 7526 8836 7757 9964 8701 9215 7951 9889 8292 9140 7898 8928 7612 8631 6895 6586 +3 1 1 6 10 6 9 6 1 1 6 12 6 1 7 4 2 4 8 3 4 5 7 8 10 10 10 3 3 3 5 8 9 8 10 7 3 4 4 3 +8 5 7 12 12 11 7 5 5 5 7 13 12 8 8 6 12 8 5 4 8 10 12 18 11 10 9 9 7 8 10 8 9 7 7 10 8 8 9 6 +6040 8525 6537 8660 6325 6690 6504 6523 5639 5888 5784 7855 5765 6378 5440 6354 5971 6969 6221 7215 6680 8211 6971 9730 7619 8920 7793 9919 8662 9153 8008 9787 8315 9021 7879 9006 7626 8604 6952 6559 +6 1 4 11 14 11 4 2 2 2 5 14 15 6 7 3 14 5 2 1 6 8 13 22 10 7 6 5 3 4 8 4 6 3 4 7 5 4 7 3 +5 8 8 12 5 7 4 7 10 7 11 6 10 6 5 5 6 8 7 9 7 13 12 11 6 7 9 8 8 11 12 8 8 9 9 7 9 9 8 6 +6017 8505 6578 8877 6294 6718 6443 6561 5753 5964 5920 7752 5876 6363 5432 6279 5975 7042 6244 7335 6692 8517 7103 9822 7582 8814 7828 9815 8650 9279 8114 9691 8311 9032 7912 8895 7665 8640 6982 6534 +2 5 6 10 2 4 1 4 10 5 12 3 10 3 2 2 3 5 4 7 4 13 13 8 3 3 6 4 4 8 11 4 5 6 6 3 6 6 5 3 +4 8 9 9 6 9 5 3 5 9 7 6 8 6 4 3 6 7 6 7 6 9 9 14 20 10 8 7 7 10 11 6 7 7 9 9 11 9 10 11 +5969 8486 6643 8897 6290 6867 6409 6351 
5737 6159 5951 7655 5933 6349 5398 6086 5979 7049 6241 7324 6678 8558 7155 10092 7903 8899 7837 9656 8613 9336 8192 9478 8282 8920 7944 8914 7754 8674 7063 6817 +1 5 7 6 3 7 2 0 2 8 5 3 6 3 1 0 3 4 3 4 3 6 7 13 33 7 5 3 3 7 9 2 3 3 6 6 10 6 9 11 +8 9 8 6 5 4 11 11 5 4 8 10 7 3 3 5 5 7 5 6 9 16 7 8 10 13 12 9 7 10 9 7 10 11 9 8 8 7 9 7 +6024 8529 6681 8731 6260 6700 6530 6645 5721 6035 6006 7810 5963 6152 5339 6028 5957 7056 6213 7253 6741 9027 7155 9978 7961 9163 7948 9629 8576 9390 8217 9339 8330 9060 7975 8870 7764 8583 7116 6838 +6 6 6 2 2 1 11 11 2 1 6 8 5 0 1 2 2 4 2 3 7 19 4 4 8 12 11 5 3 7 6 3 8 9 6 4 5 3 7 4 +10 9 11 9 7 4 6 8 3 4 7 4 7 9 6 5 6 8 7 6 6 6 13 12 9 11 16 16 17 13 9 9 12 13 14 14 11 9 8 6 +6129 8570 6795 8760 6282 6543 6520 6737 5654 5918 6035 7587 5993 6335 5359 5973 5961 7124 6236 7186 6726 8854 7308 10116 7992 9289 8158 10034 8796 9625 8241 9331 8428 9315 8133 9197 7851 8620 7142 6796 +10 6 11 6 4 1 3 6 0 1 5 1 5 8 4 2 3 5 4 3 3 2 15 9 6 8 20 17 22 11 6 5 11 12 16 14 10 6 5 3 +13 11 13 6 6 8 8 6 5 7 10 5 6 6 7 10 4 6 8 8 8 16 14 11 12 8 7 13 11 9 9 10 11 11 11 9 10 8 7 8 +6308 8731 6957 8603 6278 6641 6561 6701 5640 5992 6139 7438 5996 6323 5404 6229 5914 7065 6284 7246 6762 9305 7483 10184 8099 9223 8133 10230 8857 9600 8265 9385 8498 9431 8211 9198 7910 8594 7142 6879 +16 9 15 2 3 6 6 3 2 5 10 2 3 3 5 10 1 3 6 5 6 18 17 8 11 4 4 11 9 5 6 7 9 8 9 5 8 4 4 5 +13 19 19 11 11 5 7 5 9 11 9 7 7 6 5 7 6 9 6 6 7 15 10 11 12 12 10 12 13 9 10 10 8 10 8 8 11 9 8 9 +6482 9374 7269 8762 6402 6549 6576 6606 5729 6308 6215 7421 6025 6312 5396 6285 5919 7194 6280 7179 6772 9668 7551 10248 8203 9406 8185 10353 8968 9576 8314 9436 8490 9479 8210 9137 7993 8631 7168 7019 +16 26 32 9 11 2 4 2 8 12 8 4 5 3 2 4 3 7 3 3 4 15 8 8 11 10 8 9 12 5 8 7 5 7 5 4 10 6 5 7 +4 4 6 8 7 4 9 8 7 6 5 6 8 9 6 6 8 7 7 7 11 5 5 18 16 10 11 15 15 7 9 10 11 13 11 7 6 6 6 6 +6422 9057 7240 8727 6421 6401 6641 6701 5764 6298 6187 7344 6079 6486 5414 6276 5975 7192 6302 7178 6884 9395 7490 10739 8407 9456 8261 10653 9127 9431 8336 9484 8559 9708 8286 9018 7946 8481 7142 6966 +1 1 3 4 4 1 7 6 5 3 2 3 6 7 3 3 6 4 4 4 11 1 2 20 20 7 9 14 16 3 6 7 9 11 9 3 2 2 3 3 +2 5 6 11 9 5 9 9 7 7 4 9 9 7 5 6 7 8 13 13 11 12 13 14 9 10 10 7 8 8 11 10 13 8 9 10 12 11 6 4 +6312 8820 7212 8879 6490 6324 6705 6851 5799 6350 6134 7456 6157 6526 5406 6268 6004 7251 6476 7546 6993 9568 7635 10954 8427 9503 8310 10443 9103 9356 8409 9529 8677 9617 8309 9091 8054 8647 7117 6793 +0 1 3 9 7 2 7 7 5 4 1 7 8 4 2 3 5 5 16 14 11 10 14 12 6 7 8 3 4 4 9 7 13 4 6 7 11 9 3 1 +10 6 7 9 9 8 10 6 7 12 9 10 14 7 7 6 5 9 13 14 13 13 14 12 8 8 10 9 9 9 10 7 12 13 13 13 9 9 6 5 +6410 8659 7210 8899 6558 6436 6793 6808 5833 6706 6210 7623 6361 6564 5449 6260 5981 7368 6646 7953 7150 9792 7802 11034 8421 9424 8358 10369 9105 9347 8454 9387 8767 9838 8433 9344 8083 8681 7092 6692 +9 2 4 6 7 6 9 3 5 13 8 8 19 4 5 3 2 7 16 16 15 11 16 8 5 4 7 5 6 5 7 3 11 11 13 12 6 6 3 2 +8 6 6 15 15 7 8 5 4 11 9 12 14 6 6 6 5 6 8 9 14 10 11 11 13 9 8 8 8 11 8 6 9 9 10 12 9 9 9 7 +6454 8508 7183 9286 6777 6479 6827 6706 5789 6979 6285 7902 6560 6538 5466 6253 5959 7294 6684 8028 7329 9818 7888 11047 8543 9411 8353 10238 9082 9462 8447 9192 8778 9800 8478 9520 8111 8713 7145 6720 +6 2 3 16 21 4 6 2 1 11 8 12 18 3 3 3 2 3 6 6 17 6 10 7 13 5 5 4 4 8 5 2 6 5 7 10 6 6 7 4 +6 5 5 9 5 8 6 9 7 10 13 10 14 8 6 5 6 11 8 10 9 10 10 12 9 11 9 9 10 8 10 8 11 11 9 5 7 9 10 10 +6446 8304 7131 9281 6735 6581 6809 6856 5823 7174 6460 8042 6754 6637 5482 6185 5963 7532 6721 8160 7376 9843 
7946 11121 8559 9522 8374 10176 9110 9385 8491 9132 8840 9887 8496 9256 8087 8743 7222 6931 +3 2 2 5 2 6 3 7 5 9 16 8 18 6 3 2 3 10 6 8 7 6 8 8 6 8 6 5 7 4 7 4 9 8 6 1 4 6 8 9 +3 5 5 7 7 6 5 7 9 8 11 8 7 9 9 4 5 6 7 12 5 9 9 11 18 9 9 9 8 13 13 14 12 10 7 8 10 9 7 9 +6361 8112 7080 9154 6745 6554 6766 6874 5907 7235 6579 8050 6764 6791 5575 6059 5941 7448 6732 8407 7319 9805 7977 11129 8805 9503 8395 10118 9087 9620 8611 9444 8926 9908 8462 9192 8140 8771 7220 7068 +0 2 2 3 4 3 2 4 8 5 11 5 4 7 8 1 2 3 4 11 2 5 6 7 24 5 6 5 4 11 13 14 10 6 3 4 8 6 4 7 +3 7 9 9 13 9 9 6 7 4 7 9 5 9 8 7 9 8 9 11 5 6 10 8 14 10 13 13 9 11 12 9 9 9 9 8 9 7 8 6 +6278 8055 7133 9157 6908 6713 6827 6830 5938 7046 6593 8119 6722 6936 5640 6125 6022 7492 6794 8578 7264 9585 8033 10952 8943 9547 8517 10309 9090 9718 8702 9430 8933 9866 8480 9132 8166 8674 7244 7012 +0 4 7 5 15 7 7 3 5 1 4 6 2 7 6 4 8 5 7 9 2 2 8 3 14 7 13 11 6 8 11 5 6 5 6 4 6 3 5 3 +3 7 4 9 11 10 5 3 5 5 4 6 5 4 5 9 9 6 5 4 6 6 7 7 12 11 13 14 12 10 11 10 7 7 11 12 9 8 10 7 +6197 8001 7057 9160 7016 6924 6784 6604 5917 6930 6530 8000 6682 6765 5627 6310 6101 7411 6752 8309 7236 9378 8011 10724 9026 9650 8636 10550 9169 9749 8765 9478 8888 9704 8549 9321 8192 8645 7318 7021 +0 4 1 5 11 9 2 0 2 2 1 2 2 1 2 8 8 3 2 1 3 2 4 3 10 8 13 12 10 6 9 7 3 3 9 10 6 4 8 4 +10 10 10 7 8 5 9 8 7 5 6 5 6 5 3 5 10 4 6 10 11 8 8 10 14 11 10 9 10 14 10 7 6 10 11 11 7 9 8 10 +6297 8135 7136 9040 7045 6815 6844 6699 5948 6821 6520 7827 6668 6666 5563 6238 6204 7212 6736 8424 7336 9306 8015 10694 9158 9746 8676 10469 9195 10024 8801 9339 8819 9736 8616 9437 8166 8679 7339 7214 +9 8 9 3 5 2 7 6 5 2 3 2 3 2 0 2 10 1 3 7 10 4 5 6 14 8 7 5 7 13 7 3 2 6 9 8 3 6 5 8 +5 9 10 8 5 5 10 10 6 6 4 5 5 6 5 5 7 7 9 9 10 8 7 10 6 9 11 11 11 11 10 9 7 9 11 12 8 10 11 11 +6267 8199 7213 8989 6996 6713 6928 6911 5952 6780 6459 7664 6629 6634 5551 6170 6228 7209 6797 8471 7408 9239 7993 10666 9082 9714 8740 10516 9246 10098 8836 9331 8777 9704 8682 9608 8166 8772 7436 7456 +2 6 8 4 2 2 9 9 3 3 1 2 2 3 2 2 4 4 7 6 8 4 4 6 2 5 9 7 8 8 7 5 3 5 9 10 5 7 10 10 +4 6 10 8 6 6 7 7 9 4 4 6 8 7 5 8 9 7 12 11 7 9 13 12 8 8 11 10 8 8 9 13 15 11 10 13 9 11 9 8 +6212 8075 7288 8941 6974 6678 6933 6926 6033 6618 6400 7572 6668 6666 5540 6291 6302 7206 6934 8638 7402 9237 8125 10763 9059 9622 8803 10499 9219 9983 8845 9569 8941 9797 8720 9830 8192 8921 7480 7500 +1 2 8 4 3 3 4 4 8 1 1 3 6 4 2 6 8 4 13 9 4 5 13 9 4 4 9 6 4 4 6 12 16 8 7 11 6 9 7 5 +6 2 5 4 8 9 6 5 7 6 10 13 14 11 6 6 8 9 12 12 10 9 12 14 6 11 19 9 7 8 11 11 11 14 10 11 10 9 12 9 +6210 7713 7233 8650 7004 6830 6913 6817 6061 6589 6495 7916 6859 6941 5555 6282 6349 7326 7067 8856 7472 9235 8228 10977 8986 9720 9068 10422 9167 9875 8905 9670 8998 10069 8757 9916 8243 8938 7600 7602 +3 0 2 1 5 7 3 2 5 3 9 14 18 11 3 3 6 7 13 10 8 5 11 12 2 8 26 5 3 4 9 8 9 13 7 8 8 6 12 6 +4 4 3 5 7 12 13 7 3 8 9 8 11 7 6 6 4 5 7 8 7 6 5 8 8 12 15 10 5 11 12 11 8 8 8 12 10 8 7 7 +6157 7495 7129 8438 7008 7157 7072 6838 5986 6685 6562 7932 6968 6954 5569 6273 6292 7193 7069 8816 7464 9049 8150 10809 8966 9874 9225 10411 9065 9958 8989 9765 8977 9956 8742 10058 8292 8893 7589 7575 +1 1 0 1 4 13 15 4 0 6 7 5 11 4 3 3 1 2 4 4 4 2 2 4 4 9 16 6 1 8 10 8 4 4 4 9 8 4 4 4 +5 5 9 6 4 12 12 5 5 5 5 6 8 6 5 4 8 6 5 5 6 8 7 9 7 7 10 10 9 11 12 11 11 9 10 10 9 7 8 6 +6131 7352 7181 8300 6935 7464 7202 6734 5964 6591 6526 7824 6998 6905 5557 6142 6339 7130 7020 8594 7431 8997 8125 10713 8921 9711 9250 10400 9068 10036 9071 9854 9034 9911 8779 10068 8315 8789 7604 7489 +2 2 7 
2 1 12 12 2 2 2 2 3 5 3 2 1 6 3 2 1 3 4 4 5 3 3 7 6 6 8 10 8 9 5 7 6 6 3 5 3 +8 6 15 14 5 6 8 5 4 6 10 8 9 12 9 4 7 9 9 11 11 10 11 10 8 7 9 11 8 8 10 9 9 9 10 11 11 9 7 5 +6182 7279 7385 8662 6889 7384 7226 6637 5917 6564 6618 7846 7053 7227 5648 6019 6359 7255 7074 8754 7526 9071 8203 10684 8902 9558 9249 10451 9046 9925 9100 9815 9038 9869 8815 10139 8388 8814 7593 7346 +6 3 19 15 2 3 5 2 1 3 9 5 7 12 8 1 4 7 7 9 10 7 9 6 4 3 5 7 4 4 7 5 6 5 7 8 9 6 4 2 +12 10 5 4 7 10 6 3 3 8 8 6 6 11 7 6 9 6 9 9 7 7 9 7 9 8 10 8 9 8 8 8 10 7 12 9 9 8 9 8 +6334 7456 7328 8388 6895 7555 7198 6423 5845 6661 6657 7743 7030 7469 5685 6026 6430 7188 7127 8781 7517 8956 8228 10473 8909 9476 9273 10315 9050 9821 9077 9717 9068 9706 8901 10083 8408 8776 7633 7396 +14 8 2 1 4 8 3 0 0 6 6 3 3 10 5 3 7 3 7 6 4 3 6 3 6 4 7 4 6 4 4 4 7 3 10 5 6 4 6 5 +5 6 9 7 7 6 5 5 2 5 9 6 6 4 6 8 8 6 5 5 9 10 12 11 11 16 10 10 11 3 5 6 10 6 10 10 12 10 9 7 +6303 7377 7375 8314 6901 7470 7146 6344 5750 6568 6720 7647 7007 7266 5696 6155 6473 7125 7076 8561 7559 9033 8329 10520 8967 9890 9297 10310 9105 9416 8978 9502 9097 9492 8934 10092 8504 8863 7672 7382 +2 3 7 3 4 3 2 2 0 2 7 3 3 1 3 6 6 3 2 1 7 7 11 7 9 17 7 6 8 0 1 2 7 2 7 6 11 7 6 4 +13 7 8 12 14 6 5 7 3 4 6 8 10 9 6 5 6 6 9 12 14 8 10 9 6 10 8 11 11 7 8 10 10 9 9 7 12 9 6 6 +6477 7364 7395 8552 7086 7390 7095 6393 5683 6419 6705 7679 7087 7382 5707 6092 6464 7066 7129 8784 7728 8982 8376 10441 8896 9910 9269 10367 9158 9281 8958 9546 9125 9475 8941 9916 8598 8884 7633 7307 +16 4 5 11 17 3 2 4 0 1 3 5 9 7 3 2 3 3 7 11 16 4 7 5 2 6 4 7 8 3 4 7 7 5 6 3 11 6 3 3 +17 11 6 15 17 5 6 8 3 4 7 8 12 7 8 7 7 8 13 9 9 7 7 13 12 10 8 8 10 9 8 9 9 6 7 9 12 7 7 4 +6749 7597 7363 8960 7343 7253 7071 6500 5617 6279 6716 7709 7216 7369 5769 6156 6481 7133 7283 8809 7765 8873 8345 10613 8980 9929 9242 10236 9185 9277 8938 9526 9127 9275 8896 9874 8690 8781 7621 7114 +27 10 3 16 25 2 3 6 0 1 4 5 12 4 6 4 4 5 15 6 6 3 3 10 10 6 4 4 7 5 4 5 6 2 3 5 11 3 4 1 +5 3 5 10 9 12 7 5 7 7 12 8 10 11 7 6 6 6 7 8 7 10 12 6 14 13 7 7 17 10 8 11 14 11 12 11 9 9 9 8 +6708 7325 7307 9036 7389 7555 7073 6417 5655 6332 6855 7737 7291 7602 5803 6155 6472 7073 7280 8771 7750 8955 8443 10344 9113 10131 9190 10051 9390 9334 8919 9630 9257 9394 8980 9957 8703 8807 7660 7178 +2 0 2 7 7 12 4 2 5 4 13 5 8 10 5 3 3 3 4 4 4 7 11 2 14 11 3 3 20 7 4 8 14 8 10 8 6 6 6 5 +6 8 7 5 4 6 6 9 6 6 5 3 6 8 5 5 6 5 8 10 8 12 10 7 12 10 8 15 13 9 10 14 10 12 12 12 6 7 7 8 +6693 7377 7303 8801 7306 7470 7049 6584 5667 6320 6811 7457 7262 7637 5785 6092 6463 6955 7302 8859 7761 9154 8487 10153 9192 10137 9165 10369 9487 9326 8951 9912 9281 9567 9062 10096 8639 8708 7647 7238 +3 5 4 1 1 3 3 7 3 3 2 0 3 5 2 2 3 2 5 7 5 10 7 3 10 6 4 14 12 5 7 13 7 10 10 9 2 3 4 5 +7 10 12 9 6 4 7 6 3 5 7 5 6 4 7 9 7 10 6 10 9 7 9 9 9 11 12 9 12 8 9 11 8 7 9 15 12 12 9 8 +6704 7548 7427 8825 7276 7267 7051 6557 5602 6248 6819 7316 7234 7424 5819 6279 6480 7152 7273 8941 7797 9034 8505 10096 9192 10204 9242 10299 9556 9257 8957 9993 9253 9423 9065 10411 8730 8922 7686 7295 +4 8 12 6 3 1 4 3 0 2 4 2 3 1 5 8 4 9 3 7 6 3 6 5 5 8 10 5 10 4 6 8 4 3 6 14 11 10 6 5 +4 9 9 9 3 7 9 4 3 5 5 6 3 3 5 11 10 8 6 6 10 8 9 11 14 13 13 11 10 10 11 9 9 11 11 10 11 12 9 7 +6638 7648 7471 8848 7171 7261 7105 6409 5538 6180 6776 7245 7130 7162 5801 6578 6573 7214 7244 8773 7857 8983 8522 10166 9320 10390 9343 10356 9573 9315 9014 9946 9252 9533 9119 10400 8793 9123 7724 7287 +1 6 7 6 0 4 7 1 0 2 2 3 0 0 2 11 9 5 3 2 8 4 6 8 14 11 12 7 7 7 9 5 5 8 8 6 9 10 6 4 +9 6 12 8 5 7 3 
2 6 5 4 5 5 4 4 4 7 5 11 7 6 7 6 11 11 12 8 12 12 8 10 10 8 9 12 9 11 7 7 7 +6702 7557 7591 8808 7119 7255 7004 6147 5553 6116 6709 7117 7079 6978 5758 6429 6587 7088 7344 8676 7814 8874 8462 10231 9368 10503 9314 10471 9640 9247 9044 9963 9225 9513 9198 10328 8854 9005 7710 7279 +7 3 12 4 2 4 0 0 3 2 1 2 2 1 1 1 4 2 10 3 3 3 2 8 8 9 4 9 10 4 7 6 4 5 10 5 9 3 4 4 +8 4 7 7 10 5 3 4 6 5 5 4 5 5 5 6 3 4 6 7 6 6 9 11 10 9 11 11 12 15 12 7 6 6 8 7 7 9 10 5 +6739 7349 7580 8709 7196 7126 6905 6023 5567 6056 6669 6935 7030 6866 5742 6411 6499 6908 7314 8585 7772 8710 8480 10292 9389 10425 9362 10518 9706 9613 9124 9795 9148 9310 9172 10138 8811 9017 7773 7149 +6 1 4 3 9 2 0 1 3 2 2 1 2 2 2 3 0 1 3 3 3 2 6 8 7 5 8 7 10 15 10 3 2 2 4 3 3 6 8 2 +6931 5720 3892 2415 2299 2927 3305 2951 2135 1496 1641 1954 2221 2615 2662 1914 785 1057 1266 1415 1742 1674 2443 3645 3992 4795 3782 4309 3996 2034 3779 2724 4320 4458 3031 2528 967 1158 1681 748 +183744 358330 106880 156558 65784 186526 91216 186963 60003 97603 48450 126567 63628 167113 73646 123617 26403 71432 39493 95003 52107 111033 70717 233614 111200 304392 105805 274621 111611 134000 105496 176563 119349 282639 86422 164843 33309 79620 50549 52675 +3335 1463 2978 1311 2666 1350 2934 1358 2645 1210 2427 1255 2633 1340 2839 1251 1794 1116 2201 1172 2422 1199 2686 1348 2944 1430 2921 1402 2937 1253 2927 1324 2993 1417 2824 1311 1887 1120 2398 993 +66898 53522 34543 22039 25397 32620 34444 27284 15530 9017 13471 11629 7029 17903 25703 22901 15725 18009 20080 18562 14129 7196 13375 23940 17926 33994 26964 33219 25119 16945 37213 24840 28288 40446 27915 17558 18093 25207 21145 6251 +1889237 3625088 987217 1501185 713352 2179426 969413 1852006 455490 645728 391592 833429 241718 1257002 728840 1523180 427714 1173574 551802 1229705 411978 546475 410850 1690411 566657 2374635 792430 2299036 750929 1167018 1054119 1692077 839481 2750582 797841 1233672 494979 1623498 589806 433560 +3627 1517 3452 1474 3513 1536 3493 1468 3185 1336 3224 1330 2577 1426 3481 1507 3441 1527 3524 1513 3206 1230 3041 1408 3069 1479 3345 1493 3296 1441 3533 1464 3306 1535 3439 1423 3447 1554 3508 1309 +20957 18357 20655 19266 18286 19740 18739 17800 17687 16882 15993 14626 13374 11850 10275 8440 6261 4609 3283 3451 4766 6294 7623 8407 8231 7807 6703 5167 3315 4957 7775 8022 7113 6927 4674 3626 5575 6016 3830 3564 +2377788 4535399 1490566 2594772 1162980 3261442 1424228 2834477 896243 1644173 790637 1682010 577556 1909619 973300 1950325 577083 1386327 621948 1367946 523524 900375 595456 2105495 762916 2711805 943993 2478547 816922 1401545 1226553 2083408 1000350 3011131 897403 1382427 625133 1895700 672986 626510 +887 384 1388 745 1569 591 1317 618 1905 1007 1962 842 2234 581 994 387 1012 284 464 200 823 633 1210 358 1019 248 646 161 342 307 598 343 651 186 456 211 828 266 508 505 +35404 41195 37619 23532 19140 20217 15779 25870 24513 12959 6096 6048 10300 11425 10757 12098 11816 18133 18179 26452 29965 26074 12727 8414 8277 11690 19728 23472 18297 19095 25227 23057 20041 12497 14049 9518 11296 20852 22749 10208 +3223445 6794203 2414992 3884839 1623214 4307845 1792025 4253803 1500483 2341695 926729 1952667 826432 2496969 1223978 2576581 864723 2417196 1071122 2911017 1276435 2448276 905925 2496105 955451 3267307 1424724 3771901 1264245 2490604 1840799 3374973 1487675 3598254 1234128 1884246 898281 3063057 1237707 1216074 +1120 609 1625 590 1164 454 848 605 1647 533 592 259 1183 432 852 446 1300 750 1671 924 2394 1088 1337 301 802 321 1385 606 1450 770 1358 673 1345 308 1122 461 1190 674 
1857 810 +15341 15822 16828 10712 8957 9343 8793 11814 9906 3609 2809 2347 3702 8026 9710 7959 6181 6354 3667 8072 8289 8690 3761 2465 4935 5638 9678 9428 5394 5942 8691 7265 4005 3363 4950 3367 4370 6423 6019 3467 +3535131 7358631 2784871 4309876 1811656 4623395 1972061 4724407 1716247 2422926 975399 1979705 900433 2840254 1441635 2910973 1001138 2662544 1138120 3232286 1456458 2835277 979450 2497788 1057750 3417661 1636551 4124828 1370569 2706235 2017010 3618826 1552915 3588981 1329854 1978055 987564 3273894 1360670 1356116 +404 175 593 210 449 161 401 215 535 97 224 59 346 243 642 233 557 195 269 205 532 269 321 44 413 113 552 184 348 175 387 152 205 39 326 113 380 147 400 204 +501 452 124 121 67 0 0 0 0 0 0 0 0 0 0 0 0 0 47 43 55 31 0 0 0 0 0 0 30 43 36 0 0 0 0 0 0 0 0 29 +3459688 6944900 2718520 4058727 1768143 4346002 1922831 4440954 1673403 2277556 951049 1860927 877955 2669845 1405646 2736321 976146 2502797 1110910 3040998 1421505 2667071 954999 2347926 1031344 3212609 1595697 3877348 1337121 2546509 1967578 3401705 1514148 3373650 1296656 1859376 962911 3077468 1326703 1276534 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +12779 9069 4351 1376 1508 3094 4676 5851 6577 7097 8433 9916 9986 8428 7244 6313 4012 4736 6954 7062 5025 4228 2313 2883 7098 12271 13578 13192 15154 7543 3421 4059 6502 3276 2842 4622 5259 8073 5627 4359 +3699985 7085399 2761878 3899751 1762552 4275340 1994360 4533978 1799753 2576930 1142876 2358490 1111305 3027456 1555731 2960003 1054334 2643603 1260939 3292417 1514470 2766811 990285 2384180 1187040 3773759 1902950 4455200 1691116 2857147 2005909 3446985 1642557 3372508 1336935 2031782 1073306 3388812 1437424 1467751 +307 79 108 5 30 24 181 78 314 235 699 391 862 238 422 167 323 131 513 167 283 102 170 66 558 286 678 264 868 223 114 62 348 42 158 175 438 192 346 246 +53514 49702 32754 16454 15534 26299 34213 27312 26561 34468 39867 37225 44390 34148 23475 43895 30229 20166 22714 24660 12211 15254 10776 23646 33790 37742 39174 41384 54465 26440 21219 21585 39191 33155 27429 24641 25928 39080 24374 34242 +4975572 9713858 3530206 4676668 2115640 5634574 2819143 5939931 2433790 4539948 2133446 4503997 2218282 4943783 2116974 5479209 1800743 3723941 1810088 4609928 1788807 3537976 1241025 3693886 2021164 5866116 2856831 6730428 3041161 4310132 2498245 4566302 2603373 5207126 2004714 3423761 1709297 5586469 2024601 3483432 +1137 530 935 329 717 462 1255 456 1116 777 1921 853 2081 711 1116 836 1677 516 1239 532 645 401 843 622 1693 663 1427 623 1877 613 858 462 1572 648 1369 712 1512 727 1197 997 +9954 8258 14481 20659 29877 38659 38879 53106 48349 42253 45034 39350 37748 44899 58865 45202 33669 23671 22148 16277 13497 21257 24451 23590 19466 30145 38921 27696 24279 39977 18058 28544 39873 35268 33924 30202 41803 51945 42987 32611 +5105814 9638401 3812250 5665316 2826557 7671625 3742612 8846249 3608955 6863480 3231369 6651333 3127839 7405650 3568863 7927567 2616454 4954800 2331060 5333361 2089169 4631683 1835073 4921572 2468308 7366196 3780432 8028191 3585875 6507621 2897487 6046007 3557637 7061488 2821851 5073879 2735216 8442665 3072915 5277972 +154 39 346 344 1088 502 1056 636 1376 627 1442 598 1244 615 1703 576 1333 473 963 277 621 449 1320 471 791 392 1047 328 665 619 614 465 1143 495 1244 606 1599 646 1457 628 +15683 16073 18191 20099 19521 23953 28171 20223 15031 18477 59911 76120 25050 19055 32800 66053 37414 35900 18977 19638 26110 29405 24572 19437 18968 19527 25937 20777 26568 26199 17853 24204 26181 37098 32292 25199 
32369 33100 39342 39250 +5379252 10047605 4182090 6560243 3255002 8682958 4369304 9557946 3903093 7586868 4682178 10928891 3690098 8132020 4318222 11510063 3507533 6863130 2757968 6219881 2704453 6160363 2417385 5820450 2891560 8123932 4349074 8823006 4175503 7726780 3281523 7170294 4138078 8917024 3576872 6317622 3494368 9969706 4001884 7372728 +265 124 411 279 584 251 651 182 350 206 1369 753 663 201 776 613 1086 523 686 290 987 474 1039 310 649 207 594 209 630 319 524 314 626 416 905 385 931 323 1002 530 +23018 14886 9611 24319 24643 21629 18611 33737 50387 38825 37019 37840 50142 34998 35919 41408 43271 13532 19859 13273 21215 22153 21796 18485 16441 20793 27088 29963 38325 20067 20542 22967 27747 30180 26991 29020 34869 26540 35295 30882 +5833365 10359331 4323372 7660742 3803682 9490833 4735975 11057209 5093676 9516985 5511593 12597979 4879735 9794308 5128604 13363491 4526088 7282731 3196765 6662163 3179249 7151781 2914199 6606909 3239649 8913985 4932943 10134498 5050951 8496058 3724709 8151128 4744060 10236208 4177538 7721496 4298475 11002099 4804211 8827695 +379 103 180 292 633 200 376 298 1043 409 695 287 1086 354 725 299 992 144 609 159 655 282 750 247 485 205 551 279 787 205 527 255 586 282 641 361 829 221 754 343 +19556 20950 16196 14814 23915 29211 41435 81091 62551 35769 40093 44854 42565 28869 53941 34067 21921 17647 29130 27084 25373 21594 30099 27359 23132 21204 21178 30702 30727 20651 21618 39359 31129 25600 34983 30562 29888 29271 32340 35404 +6187644 11024912 4629456 8111251 4320056 10716057 5676931 15375831 6565480 11143547 6398882 14597848 5845988 10980312 6379443 14654705 4973457 7929972 3861598 7926422 3748481 8049372 3610856 7891379 3750088 9681888 5351162 11412706 5710321 9255061 4184337 10080198 5421367 11194860 4967505 9135877 4955182 12140336 5510972 10473188 +289 161 327 142 549 256 755 549 1004 311 640 297 756 246 884 202 428 185 746 319 666 241 832 323 602 194 380 250 543 198 500 397 581 204 730 323 614 221 599 333 +30064 18709 13604 27581 44628 53834 63664 53703 62368 58880 32460 33816 45570 39334 37642 26663 9319 10893 22046 27002 22965 23377 32083 32120 28163 17989 17928 21179 25175 26790 24588 28416 31314 23465 32032 36778 29887 33577 32778 35010 +6801690 11512878 4861641 9319103 5353016 13380546 7162626 17752696 7995865 14092401 7068903 15799583 6864935 12738102 7182414 15413566 5087519 8123431 4328750 9109791 4241949 9002653 4340838 9391288 4376389 10206197 5675863 12029156 6211308 10345690 4708412 11221218 6086495 11964826 5662317 10847295 5595470 13474832 6211287 11995749 +427 127 248 279 873 409 930 303 819 424 451 183 681 300 524 137 140 87 495 278 530 238 750 335 644 142 290 142 390 243 518 234 517 165 569 331 534 222 528 277 +33417 27237 11395 15532 32540 40289 44583 61135 40293 70004 45489 50900 45848 28303 33059 34562 12772 16152 31425 28643 26721 20961 32470 28819 23086 24407 19870 16708 22872 26997 24655 25123 34578 24993 34354 30141 26214 34700 33982 33059 +7486118 12495506 5031562 9714226 6051190 15053001 8123475 20443559 8826251 17547762 8055251 17978815 7865552 13712712 7848187 16612192 5287001 8628383 5023991 10322980 4819111 9750307 5062490 10598400 4857275 11093355 6042101 12333933 6640918 11383602 5221117 12091466 6818455 12782473 6399140 12048271 6125883 14798256 6924897 13307095 +433 188 189 121 539 250 556 309 467 414 580 271 596 177 412 180 207 152 641 264 555 186 657 252 465 192 304 95 321 212 465 181 501 161 539 230 415 207 483 226 +20884 21576 13962 16573 27759 30749 49270 70358 53164 55367 46024 52861 49322 22794 31452 28473 
9262 12314 27976 29787 24597 21837 31841 25963 20414 23886 21686 20256 22030 28440 27887 37052 31348 23573 30310 30241 25324 31264 32303 35937 +7833086 13071381 5262860 10149599 6609721 16038999 9180149 23539614 9964922 19896549 9030653 20147777 8929994 14290389 8456261 17364810 5391779 8867242 5613711 11533665 5327570 10506923 5750048 11557623 5257853 11895276 6445618 12838405 7038280 12447896 5803641 13642389 7449576 13463822 7014195 13183335 6620305 15831178 7577773 14716581 +236 130 235 126 403 161 561 296 559 273 531 258 574 119 366 130 130 98 496 241 458 181 557 202 374 169 311 118 285 199 473 252 410 137 416 201 362 164 417 216 +22212 17975 14214 25041 36217 49348 68027 64223 66689 49582 32062 37144 41595 34141 41605 25208 8529 9638 18203 29273 23196 21235 29267 33487 26340 19899 19054 22696 25706 26395 27801 28592 31205 27825 31197 31189 26166 32833 33394 41646 +8205339 13391469 5494826 11079104 7370517 18108515 10689921 26072995 11420901 21748998 9624802 21220994 9770343 15530538 9308691 17871680 5475204 8927363 5938887 12640133 5787523 11181158 6354645 12921551 5799915 12404133 6771781 13462517 7519690 13322695 6369425 14580499 8061287 14365523 7636570 14308541 7123908 16898523 8242240 16392248 +245 93 227 198 484 261 683 233 619 214 327 153 433 194 454 108 110 60 282 207 389 158 450 238 444 125 252 129 318 167 424 166 377 160 392 187 344 168 400 230 +46285 26079 18373 33617 30887 25048 36101 54169 44177 66742 36770 32993 38396 25468 42169 51104 30808 18964 30371 30406 24780 17278 32759 31950 19938 24829 23478 17936 24772 36004 25448 23407 32592 25212 32772 30391 24446 33771 32741 35025 +9183666 14190242 5827316 12479729 7976072 18560934 11345896 27836686 12265071 24544572 10324467 21974793 10507939 16163433 10154259 19939124 6126053 9556843 6566991 13749824 6276485 11571832 7033413 14109217 6164794 13185346 7202890 13756741 7965207 14735361 6860937 15143772 8693183 15052588 8283669 15317210 7570972 17959458 8873427 17560601 +523 146 290 247 373 101 306 174 356 278 357 121 367 122 419 247 500 167 455 195 377 113 455 201 301 154 302 89 286 217 352 113 365 132 384 166 300 158 359 175 +13354 19024 20834 18367 29814 23412 31413 52551 72326 32470 21971 41597 39298 34072 31256 43658 25928 16160 19318 17019 23740 21588 20621 19649 11806 16996 26061 22181 22352 21916 23961 17423 20790 35051 24773 28553 27854 32342 46795 24414 +9295772 14507649 6214415 12859398 8539082 18885698 11865658 29395154 13807726 25066833 10628367 23211974 11250180 17286964 10699755 21425063 6635910 9976285 6896873 13970472 6726656 12203863 7384959 14469883 6312690 13438449 7689265 14294115 8337741 15197739 7302167 15305608 9007616 16302915 8710140 16152439 8093992 18868945 9848113 18006942 +102 89 312 101 336 84 242 154 536 94 177 159 348 168 279 191 372 125 247 77 330 141 248 94 146 81 317 114 241 101 304 67 205 192 262 145 323 141 495 96 +23329 23204 19309 17932 36198 67150 64529 38617 32607 43495 35244 58940 43147 40324 40898 45705 31398 15572 17167 11863 18384 24626 34451 24774 37677 19041 14521 21161 37803 23466 15566 26664 29162 32133 29955 29431 24443 27824 31139 32670 +9660065 15062821 6552868 13189562 9251229 21878130 13218973 30004048 14296554 26235108 11263971 25440438 12072282 18727194 11478108 22947612 7272866 10334437 7163535 13861110 7028676 12984620 8081259 15123777 7118222 13802006 7868508 14736581 9095941 15727604 7517786 16025478 9528209 17298951 9258430 16991499 8516762 19446291 10398261 18933732 +220 117 265 93 395 313 503 96 198 137 305 223 357 201 355 181 418 114 204 37 227 158 415 123 524 
94 145 101 415 107 167 131 291 155 316 144 264 112 285 143 +11604 18183 11218 8539 18651 89226 81712 29436 36299 39261 27881 28840 41892 55268 47265 30605 13255 14437 19781 18125 15204 20699 18604 32185 47339 11951 11639 18567 22206 25482 17450 29059 25324 24432 45106 34973 26630 23840 27257 24044 +9715544 15276206 6676046 12922835 9497053 26047317 14977745 30012352 14867556 27073163 11695492 25685931 12841781 20999135 12399787 23451105 7430141 10601369 7490360 14143031 7241868 13477269 8355088 16193753 8150630 13708158 7969605 14993132 9436516 16349536 7776182 16849300 9937697 17762097 10180330 18120704 8984884 19744231 10835442 19274957 +74 73 124 23 163 349 574 58 219 118 215 76 314 259 386 100 136 94 230 86 169 112 190 171 594 40 100 81 212 122 187 141 232 103 453 166 280 81 228 88 +20215 29650 30102 13420 27258 70133 74209 38214 30164 34718 24174 28412 43888 44532 56282 47653 34973 28436 23347 18315 19499 25507 25674 39225 46934 25446 16840 19285 20897 28103 18941 28012 25685 22110 40114 30071 25041 25287 27096 22685 +9989757 16181292 7278871 12971987 9956757 28793337 16500817 30559456 15267478 27581826 12021480 25890400 13643093 22475172 13528955 24971777 8138656 11712349 7900182 14419711 7559529 14235752 8802808 17632053 9146913 14449040 8201129 15278402 9735128 17095181 8066241 17559370 10346191 18054797 10951608 18880993 9400701 20113195 11257594 19512216 +172 153 398 57 255 227 461 85 164 88 172 67 311 175 416 168 422 220 266 83 226 141 274 204 531 141 170 81 185 134 204 127 230 81 366 126 247 90 220 79 +22519 23349 20057 15027 7663 8682 7926 4822 7888 5306 5568 11610 16116 14676 25144 22859 19349 19229 13976 15890 19375 17703 18240 24718 19098 14180 10077 6755 6871 7386 5470 11665 13476 10395 9104 6200 7097 7933 11388 12646 +10316020 16644958 7609872 13116920 9904088 27599207 16291509 29022214 15087987 26252971 11863715 25050328 13714480 22028373 13833970 24877931 8430096 12191018 8060228 14530805 7866091 14469269 9049320 18092784 9406767 14453316 8253994 14776745 9667746 16523289 8004707 17222519 10432395 17610196 10910939 18129092 9347444 19393836 11267672 19118469 +194 101 229 68 31 5 13 1 14 2 11 12 73 26 144 48 200 119 133 64 215 78 172 98 173 50 76 10 27 10 23 24 87 19 37 6 30 9 55 25 +1449 703 1206 1444 628 1549 1035 1398 1298 1405 1254 1670 1256 1657 1634 1530 1668 1789 1761 1817 1857 1931 1961 2096 2129 2104 2256 2299 2344 2425 2561 2549 2622 2730 2793 2900 2938 3081 3137 3234 +10095537 15689491 7450732 12418652 9672901 26038488 15911275 27366841 14744519 24764176 11599611 23649970 13404226 20808526 13530396 23479315 8262290 11569498 7904033 13770624 7717196 13719783 8873546 17136034 9226364 13715416 8105615 14031421 9486324 15680917 7870348 16345814 10238992 16721351 10709961 17219559 9189202 18419542 11066582 18170096 +1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 2 0 2 1 2 0 2 1 3 1 3 1 5 1 3 1 3 1 5 1 4 1 +1450 669 1179 1404 628 1541 1001 1390 1272 1428 1263 1650 1258 1662 1619 1515 1636 1769 1778 1828 1888 1935 1969 2102 2120 2124 2238 2291 2347 2428 2539 2582 2641 2716 2801 2902 2953 3063 3109 3221 +9880583 14789261 7294874 11759821 9447485 24570917 15539663 25810295 14408961 23366118 11342330 22332401 13101768 19662174 13234017 22163691 8097855 10984039 7752171 13056727 7572810 13015511 8702364 16237055 9050235 13023017 7960480 13330323 9309508 14889270 7738780 15523736 10050902 15884975 10514204 16364719 9035294 17502597 10869796 17277824 +1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 2 1 3 1 2 0 2 1 3 1 3 1 5 1 3 1 3 1 5 1 4 1 +1503 807 1164 1442 596 1575 1014 1398 1274 1431 1249 
1648 1242 1659 1625 1548 1684 1776 1812 1848 1889 1921 1954 2070 2112 2121 2239 2276 2325 2431 2515 2560 2645 2748 2808 2923 2958 3077 3135 3243 +9672350 13951521 7142524 11142853 9226879 23193486 15177660 24347629 14081831 22052125 11091114 21093760 12806452 18584416 12945190 20929028 7938752 10434136 7604969 12386891 7432054 12352633 8535072 15390046 8878298 12371976 7818993 12670367 9136544 14145304 7609883 14749629 9867610 15100745 10323513 15562457 8885356 16641527 10678587 16440438 +1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 2 1 2 1 3 1 2 0 2 1 4 1 3 1 5 1 3 1 4 1 5 1 4 2 +1384 690 1153 1469 647 1547 1012 1372 1244 1413 1264 1648 1251 1664 1623 1551 1658 1780 1749 1840 1870 1916 1972 2104 2121 2106 2270 2319 2347 2411 2523 2568 2624 2730 2784 2903 2944 3076 3134 3243 +9466273 13156855 6993696 10564560 9013083 21896977 14824643 22971122 13762100 20815862 10846553 19929434 12518738 17571628 12663522 19768626 7782956 9917472 7459832 11756752 7294326 11729219 8372417 14595945 8710883 11759075 7681831 12052649 8968460 13444746 7484408 14022458 9688357 14362461 10136969 14807100 8738803 15832057 10492126 15653293 +1 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 2 1 2 1 3 1 2 1 3 1 4 2 3 1 5 1 3 1 3 2 5 1 4 2 \ No newline at end of file diff --git a/python/tflite_micro/signal/ops/testdata/stacker_test1.txt b/python/tflite_micro/signal/ops/testdata/stacker_test1.txt new file mode 100644 index 00000000000..f73e760ec9a --- /dev/null +++ b/python/tflite_micro/signal/ops/testdata/stacker_test1.txt @@ -0,0 +1,37 @@ +32 1 1 2 +12506 12432 11820 11836 11582 12103 12861 12155 9956 9487 10192 9487 9554 9418 9618 9586 10818 11671 11127 9796 10078 9554 9147 9930 8872 9345 9229 9521 10170 10743 12174 13137 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +13525 13251 14033 13802 12731 11935 11535 11477 10318 10338 10235 10078 10544 8921 8769 9930 12320 12474 10695 9649 10454 10214 9586 9487 9229 9105 9147 9851 9586 9709 12049 12489 +12506 12432 11820 11836 11582 12103 12861 12155 9956 9487 10192 9487 9554 9418 9618 9586 10818 11671 11127 9796 10078 9554 9147 9930 8872 9345 9229 9521 10170 10743 12174 13137 12506 12432 11820 11836 11582 12103 12861 12155 9956 9487 10192 9487 9554 9418 9618 9586 10818 11671 11127 9796 10078 9554 9147 9930 8872 9345 9229 9521 10170 10743 12174 13137 13525 13251 14033 13802 12731 11935 11535 11477 10318 10338 10235 10078 10544 8921 8769 9930 12320 12474 10695 9649 10454 10214 9586 9487 9229 9105 9147 9851 9586 9709 12049 12489 +1 +16255 16129 15423 15542 15067 13721 12827 12103 10277 11234 11000 9521 9269 9061 9618 9487 9981 10170 10743 9382 9453 9554 9586 8969 8921 9418 9345 9824 9016 8540 7660 8120 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +14978 14718 15996 16471 14834 13198 11437 11714 10727 10170 9709 10277 9487 7763 8412 10192 11234 10833 9680 10031 10078 9709 9147 9739 9521 9709 9189 9956 10192 10214 11437 11722 +13525 13251 14033 13802 12731 11935 11535 11477 10318 10338 10235 10078 10544 8921 8769 9930 12320 12474 10695 9649 10454 10214 9586 9487 9229 9105 9147 9851 9586 9709 12049 12489 16255 16129 15423 15542 15067 13721 12827 12103 10277 11234 11000 9521 9269 9061 9618 9487 9981 10170 10743 9382 9453 9554 9586 8969 8921 9418 9345 9824 9016 8540 7660 8120 14978 14718 15996 16471 14834 13198 
11437 11714 10727 10170 9709 10277 9487 7763 8412 10192 11234 10833 9680 10031 10078 9709 9147 9739 9521 9709 9189 9956 10192 10214 11437 11722 +1 +7431 15310 15976 15751 14425 12331 11078 10818 11103 10743 11312 10435 10663 8344 7763 10078 10192 8769 10125 9554 9345 9189 9649 10192 9382 9418 9796 10277 10256 10630 12667 13311 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +13645 13949 14250 13755 12722 12312 12168 11671 11234 9739 9487 10397 10214 8715 10804 13327 14423 13858 12485 11301 9269 9586 9061 8659 9618 8821 9824 9487 10055 9930 8769 4436 +14978 14718 15996 16471 14834 13198 11437 11714 10727 10170 9709 10277 9487 7763 8412 10192 11234 10833 9680 10031 10078 9709 9147 9739 9521 9709 9189 9956 10192 10214 11437 11722 7431 15310 15976 15751 14425 12331 11078 10818 11103 10743 11312 10435 10663 8344 7763 10078 10192 8769 10125 9554 9345 9189 9649 10192 9382 9418 9796 10277 10256 10630 12667 13311 13645 13949 14250 13755 12722 12312 12168 11671 11234 9739 9487 10397 10214 8715 10804 13327 14423 13858 12485 11301 9269 9586 9061 8659 9618 8821 9824 9487 10055 9930 8769 4436 +1 +13961 12810 12109 12877 13970 13178 11407 11544 10416 10562 9453 10527 9189 9061 9930 13769 14814 13232 11636 8921 9586 8715 8344 9768 9956 9904 10214 8969 9586 10078 7550 4436 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +13370 12897 14321 14367 13313 12447 12224 12035 11222 10727 9147 9105 9487 8477 10147 9269 11052 11365 8821 9229 10031 9521 9824 9016 8769 9147 9061 8715 9680 9680 7011 3327 +13645 13949 14250 13755 12722 12312 12168 11671 11234 9739 9487 10397 10214 8715 10804 13327 14423 13858 12485 11301 9269 9586 9061 8659 9618 8821 9824 9487 10055 9930 8769 4436 13961 12810 12109 12877 13970 13178 11407 11544 10416 10562 9453 10527 9189 9061 9930 13769 14814 13232 11636 8921 9586 8715 8344 9768 9956 9904 10214 8969 9586 10078 7550 4436 13370 12897 14321 14367 13313 12447 12224 12035 11222 10727 9147 9105 9487 8477 10147 9269 11052 11365 8821 9229 10031 9521 9824 9016 8769 9147 9061 8715 9680 9680 7011 3327 +1 +12980 14116 15453 15029 10509 11164 12277 12485 11301 10102 11164 9956 9796 9904 9956 10544 10960 9680 10055 10630 8921 6441 7660 8821 9105 9229 8198 8477 7952 7763 3327 3327 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +13123 14211 14590 11722 12620 12136 12600 12536 11187 10679 11211 9147 8412 7952 8921 5085 5902 9709 9382 8715 8412 7431 8273 8344 9768 9418 8540 9105 8412 7164 3327 3327 +13370 12897 14321 14367 13313 12447 12224 12035 11222 10727 9147 9105 9487 8477 10147 9269 11052 11365 8821 9229 10031 9521 9824 9016 8769 9147 9061 8715 9680 9680 7011 3327 12980 14116 15453 15029 10509 11164 12277 12485 11301 10102 11164 9956 9796 9904 9956 10544 10960 9680 10055 10630 8921 6441 7660 8821 9105 9229 8198 8477 7952 7763 3327 3327 13123 14211 14590 11722 12620 12136 12600 12536 11187 10679 11211 9147 8412 7952 8921 5085 5902 9709 9382 8715 8412 7431 8273 8344 9768 9418 8540 9105 8412 7164 3327 3327 +1 +13508 13882 14759 14505 13590 12676 12116 12754 11090 8477 10214 9105 8038 8659 8921 4436 6194 9418 9418 9418 8120 8198 9851 9229 9586 8659 8969 8659 8659 
8344 6654 3327 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 +11772 10491 13678 14922 13327 13225 12700 12414 11014 11199 9709 8969 8477 8412 9307 5085 4436 5085 9521 9824 8120 8821 8600 9796 8344 7431 7860 6654 9382 8120 3327 3327 +13123 14211 14590 11722 12620 12136 12600 12536 11187 10679 11211 9147 8412 7952 8921 5085 5902 9709 9382 8715 8412 7431 8273 8344 9768 9418 8540 9105 8412 7164 3327 3327 13508 13882 14759 14505 13590 12676 12116 12754 11090 8477 10214 9105 8038 8659 8921 4436 6194 9418 9418 9418 8120 8198 9851 9229 9586 8659 8969 8659 8659 8344 6654 3327 11772 10491 13678 14922 13327 13225 12700 12414 11014 11199 9709 8969 8477 8412 9307 5085 4436 5085 9521 9824 8120 8821 8600 9796 8344 7431 7860 6654 9382 8120 3327 3327 +1 \ No newline at end of file diff --git a/python/tflite_micro/signal/tflm_signal.bzl b/python/tflite_micro/signal/tflm_signal.bzl index ff86a7b0f49..1635b48b6ea 100644 --- a/python/tflite_micro/signal/tflm_signal.bzl +++ b/python/tflite_micro/signal/tflm_signal.bzl @@ -1,5 +1,7 @@ """Build rule for wrapping a custom TF OP from .cc to python.""" +load("@rules_python//python:defs.bzl", "py_library") + # TODO(b/286890280): refactor to be more generic build target for any custom OP def py_tflm_signal_library( name, @@ -23,7 +25,7 @@ def py_tflm_signal_library( srcs: Python source files for the Python library. deps: Dependencies for the Python library. visibility: Visibility for the Python library. - cc_op_defs: A list of c++ src files containing REGISTER_OP definitions. + cc_op_defs: A list of c++ libraries containing REGISTER_OP definitions. cc_op_kernels: A list of c++ targets containing kernels that are used by the Python library. """ @@ -37,12 +39,12 @@ def py_tflm_signal_library( library_name = name + "_cc" native.cc_library( name = library_name, - srcs = cc_op_defs, copts = select({ "//conditions:default": ["-pthread"], }), alwayslink = 1, deps = + cc_op_defs + cc_op_kernels + ["@tensorflow_cc_deps//:cc_library"] + select({"//conditions:default": []}), @@ -61,7 +63,7 @@ def py_tflm_signal_library( ] + select({"//conditions:default": []}), ) - native.py_library( + py_library( name = name, srcs = srcs, srcs_version = "PY2AND3", diff --git a/python/tflite_micro/signal/utils/BUILD b/python/tflite_micro/signal/utils/BUILD index cc05bab6e7f..58b7dcfd4b2 100644 --- a/python/tflite_micro/signal/utils/BUILD +++ b/python/tflite_micro/signal/utils/BUILD @@ -1,5 +1,7 @@ # Signal python utilities. 
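The py_tflm_signal_library refactor shown above changes cc_op_defs from a list of C++ source files to a list of C++ library targets: the macro now folds them into the generated cc_library's deps instead of its srcs. A minimal caller-side sketch of the new convention follows; every target name in it is illustrative, not taken from this diff, and a plausible (unstated) motivation is that library targets can carry their own deps and copts, which raw .cc srcs cannot.

load("//python/tflite_micro/signal:tflm_signal.bzl", "py_tflm_signal_library")

cc_library(
    name = "my_op_def",  # hypothetical target holding REGISTER_OP definitions
    srcs = ["my_op_def.cc"],
)

py_tflm_signal_library(
    name = "my_op",
    srcs = ["my_op.py"],
    cc_op_defs = [":my_op_def"],  # now library targets, no longer .cc src files
    cc_op_kernels = ["//signal:my_op_kernel"],  # hypothetical kernel target
)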
+load("@rules_python//python:defs.bzl", "py_library", "py_test") load("@tflm_pip_deps//:requirements.bzl", "requirement") +load("@pybind11_bazel//:build_defs.bzl", "pybind_extension") package( default_visibility = [ @@ -8,12 +10,66 @@ package( licenses = ["notice"], ) +py_test( + name = "freq_to_mel_test", + srcs = ["freq_to_mel_test.py"], + data = [ + ":freq_to_mel_wrapper.so", + ], + python_version = "PY3", + srcs_version = "PY3", +) + +py_library( + name = "freq_to_mel", + data = [ + ":freq_to_mel_wrapper.so", + ], +) + +pybind_extension( + name = "freq_to_mel_wrapper", # :freq_to_mel_wrapper.so + srcs = [ + "freq_to_mel_wrapper.cc", + ], +) + py_library( name = "util", srcs = ["util.py"], + visibility = ["//visibility:public"], deps = [ + requirement("tensorflow"), "//python/tflite_micro:runtime", - "//python/tflite_micro/signal:ops", - requirement("tensorflow-cpu"), ], ) + +pybind_extension( + name = "wide_dynamic_func_lut_wrapper", # :wide_dynamic_func_lut_wrapper.so + srcs = [ + "wide_dynamic_func_lut_wrapper.cc", + ], +) + +py_library( + name = "wide_dynamic_func_lut", + data = [ + ":wide_dynamic_func_lut_wrapper.so", + ], +) + +py_test( + name = "wide_dynamic_func_lut_test", + srcs = ["wide_dynamic_func_lut_test.py"], + data = [ + ":wide_dynamic_func_lut_wrapper.so", + ], + python_version = "PY3", + srcs_version = "PY3", + tags = [ + "noasan", + "nomsan", + "noubsan", + ], + visibility = ["//visibility:public"], +) diff --git a/python/tflite_micro/signal/utils/freq_to_mel_test.py b/python/tflite_micro/signal/utils/freq_to_mel_test.py new file mode 100644 index 00000000000..5d81ffb3ead --- /dev/null +++ b/python/tflite_micro/signal/utils/freq_to_mel_test.py @@ -0,0 +1,155 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for audio_frontend.python.utils.freq_to_mel.""" +import unittest +from tflite_micro.python.tflite_micro.signal.utils import freq_to_mel_wrapper + +# This table was generated by C the implementation of Freq2Mel() in Speech +# Micro. The purpose of this test is to catch changes in clang/GCC's C +# implementation and update our unit tests accordingly. +# The table consists of mapping of frequencies in the range 125 Hz - 3850 Hz, +# which are typically used in Mel filter banks. 
+truth_table = [[125.00000000000000000000, 185.16954040527343750000], + [156.25000000000000000000, 227.07028198242187500000], + [187.50000000000000000000, 267.46884155273437500000], + [218.75000000000000000000, 306.46929931640625000000], + [250.00000000000000000000, 344.16513061523437500000], + [281.25000000000000000000, 380.64077758789062500000], + [312.50000000000000000000, 415.97283935546875000000], + [343.75000000000000000000, 450.23080444335937500000], + [375.00000000000000000000, 483.47805786132812500000], + [406.25000000000000000000, 515.77252197265625000000], + [437.50000000000000000000, 547.16729736328125000000], + [468.75000000000000000000, 577.71118164062500000000], + [500.00000000000000000000, 607.44909667968750000000], + [531.25000000000000000000, 636.42242431640625000000], + [562.50000000000000000000, 664.66949462890625000000], + [593.75000000000000000000, 692.22583007812500000000], + [625.00000000000000000000, 719.12451171875000000000], + [656.25000000000000000000, 745.39605712890625000000], + [687.50000000000000000000, 771.06921386718750000000], + [718.75000000000000000000, 796.17047119140625000000], + [750.00000000000000000000, 820.72473144531250000000], + [781.25000000000000000000, 844.75561523437500000000], + [812.50000000000000000000, 868.28460693359375000000], + [843.75000000000000000000, 891.33245849609375000000], + [875.00000000000000000000, 913.91833496093750000000], + [906.25000000000000000000, 936.06048583984375000000], + [937.50000000000000000000, 957.77606201171875000000], + [968.75000000000000000000, 979.08093261718750000000], + [1000.00000000000000000000, 999.99072265625000000000], + [1031.25000000000000000000, 1020.51953125000000000000], + [1062.50000000000000000000, 1040.68103027343750000000], + [1093.75000000000000000000, 1060.48828125000000000000], + [1125.00000000000000000000, 1079.95336914062500000000], + [1156.25000000000000000000, 1099.08789062500000000000], + [1187.50000000000000000000, 1117.90295410156250000000], + [1218.75000000000000000000, 1136.40930175781250000000], + [1250.00000000000000000000, 1154.61633300781250000000], + [1281.25000000000000000000, 1172.53405761718750000000], + [1312.50000000000000000000, 1190.17138671875000000000], + [1343.75000000000000000000, 1207.53686523437500000000], + [1375.00000000000000000000, 1224.63879394531250000000], + [1406.25000000000000000000, 1241.48522949218750000000], + [1437.50000000000000000000, 1258.08349609375000000000], + [1468.75000000000000000000, 1274.44091796875000000000], + [1500.00000000000000000000, 1290.56408691406250000000], + [1531.25000000000000000000, 1306.45996093750000000000], + [1562.50000000000000000000, 1322.13476562500000000000], + [1593.75000000000000000000, 1337.59448242187500000000], + [1625.00000000000000000000, 1352.84509277343750000000], + [1656.25000000000000000000, 1367.89208984375000000000], + [1687.50000000000000000000, 1382.74084472656250000000], + [1718.75000000000000000000, 1397.39648437500000000000], + [1750.00000000000000000000, 1411.86389160156250000000], + [1781.25000000000000000000, 1426.14807128906250000000], + [1812.50000000000000000000, 1440.25317382812500000000], + [1843.75000000000000000000, 1454.18420410156250000000], + [1875.00000000000000000000, 1467.94506835937500000000], + [1906.25000000000000000000, 1481.53991699218750000000], + [1937.50000000000000000000, 1494.97265625000000000000], + [1968.75000000000000000000, 1508.24731445312500000000], + [2000.00000000000000000000, 1521.36743164062500000000], + [2031.25000000000000000000, 1534.33654785156250000000], + 
[2062.50000000000000000000, 1547.15795898437500000000], + [2093.75000000000000000000, 1559.83532714843750000000], + [2125.00000000000000000000, 1572.37158203125000000000], + [2156.25000000000000000000, 1584.77001953125000000000], + [2187.50000000000000000000, 1597.03332519531250000000], + [2218.75000000000000000000, 1609.16491699218750000000], + [2250.00000000000000000000, 1621.16711425781250000000], + [2281.25000000000000000000, 1633.04284667968750000000], + [2312.50000000000000000000, 1644.79479980468750000000], + [2343.75000000000000000000, 1656.42553710937500000000], + [2375.00000000000000000000, 1667.93725585937500000000], + [2406.25000000000000000000, 1679.33276367187500000000], + [2437.50000000000000000000, 1690.61401367187500000000], + [2468.75000000000000000000, 1701.78369140625000000000], + [2500.00000000000000000000, 1712.84350585937500000000], + [2531.25000000000000000000, 1723.79614257812500000000], + [2562.50000000000000000000, 1734.64318847656250000000], + [2593.75000000000000000000, 1745.38671875000000000000], + [2625.00000000000000000000, 1756.02893066406250000000], + [2656.25000000000000000000, 1766.57165527343750000000], + [2687.50000000000000000000, 1777.01647949218750000000], + [2718.75000000000000000000, 1787.36547851562500000000], + [2750.00000000000000000000, 1797.62036132812500000000], + [2781.25000000000000000000, 1807.78283691406250000000], + [2781.25000000000000000000, 1807.78283691406250000000], + [2812.50000000000000000000, 1817.85437011718750000000], + [2843.75000000000000000000, 1827.83666992187500000000], + [2875.00000000000000000000, 1837.73144531250000000000], + [2906.25000000000000000000, 1847.54003906250000000000], + [2937.50000000000000000000, 1857.26403808593750000000], + [2968.75000000000000000000, 1866.90466308593750000000], + [3000.00000000000000000000, 1876.46374511718750000000], + [3031.25000000000000000000, 1885.94238281250000000000], + [3062.50000000000000000000, 1895.34204101562500000000], + [3093.75000000000000000000, 1904.66369628906250000000], + [3125.00000000000000000000, 1913.90905761718750000000], + [3156.25000000000000000000, 1923.07922363281250000000], + [3187.50000000000000000000, 1932.17517089843750000000], + [3218.75000000000000000000, 1941.19848632812500000000], + [3250.00000000000000000000, 1950.15014648437500000000], + [3281.25000000000000000000, 1959.03112792968750000000], + [3312.50000000000000000000, 1967.84277343750000000000], + [3343.75000000000000000000, 1976.58605957031250000000], + [3375.00000000000000000000, 1985.26196289062500000000], + [3406.25000000000000000000, 1993.87158203125000000000], + [3437.50000000000000000000, 2002.41601562500000000000], + [3468.75000000000000000000, 2010.89611816406250000000], + [3500.00000000000000000000, 2019.31298828125000000000], + [3531.25000000000000000000, 2027.66723632812500000000], + [3562.50000000000000000000, 2035.96020507812500000000], + [3593.75000000000000000000, 2044.19250488281250000000], + [3625.00000000000000000000, 2052.36523437500000000000], + [3656.25000000000000000000, 2060.47900390625000000000], + [3687.50000000000000000000, 2068.53466796875000000000], + [3718.75000000000000000000, 2076.53344726562500000000], + [3750.00000000000000000000, 2084.47558593750000000000], + [3781.25000000000000000000, 2092.36230468750000000000], + [3812.50000000000000000000, 2100.19409179687500000000]] + + +class Freq2MelTest(unittest.TestCase): + + def testFreq2Mel(self): + for entry in truth_table: + mel = freq_to_mel_wrapper.freq_to_mel(entry[0]) + self.assertLess(abs(mel - 
float(entry[1])), 0.00025)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/python/tflite_micro/signal/utils/freq_to_mel_wrapper.cc b/python/tflite_micro/signal/utils/freq_to_mel_wrapper.cc
new file mode 100644
index 00000000000..6a78508d488
--- /dev/null
+++ b/python/tflite_micro/signal/utils/freq_to_mel_wrapper.cc
@@ -0,0 +1,42 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <math.h>
+#include <pybind11/pybind11.h>
+#include <pybind11/pytypes.h>
+
+namespace py = pybind11;
+
+namespace tflite {
+
+// Convert a `freq` in Hz to its value on the Mel scale.
+// See: https://en.wikipedia.org/wiki/Mel_scale
+// This function is only intended to be used wrapped as the Python freq_to_mel.
+// Why can't we just implement it in Python/numpy?
+// The original "Speech Micro" code is written in C and uses 32-bit 'float'
+// C types. Python's builtin floating point type is 64-bit wide, which results
+// in small differences in the output of the Python and C log() functions.
+// A Python wrapper is used in order to establish bit exactness with
+// "Speech Micro", while recognizing the slight loss in precision.
+float FreqToMel(float freq) { return 1127.0f * log1pf(freq / 700.0f); }
+
+}  // namespace tflite
+
+PYBIND11_MODULE(freq_to_mel_wrapper, m) {
+  m.doc() = "freq_to_mel_wrapper";
+  m.def("freq_to_mel", &tflite::FreqToMel,
+        "Convert a `freq` in Hz to its value on the Mel scale.",
+        py::arg("freq"));
+}
diff --git a/python/tflite_micro/signal/utils/wide_dynamic_func_lut_test.py b/python/tflite_micro/signal/utils/wide_dynamic_func_lut_test.py
new file mode 100644
index 00000000000..5cbcdd5c334
--- /dev/null
+++ b/python/tflite_micro/signal/utils/wide_dynamic_func_lut_test.py
@@ -0,0 +1,157 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Tests for signal.python.utils.wide_dynamic_func_lut"""
+import unittest
+from tflite_micro.python.tflite_micro.signal.utils import wide_dynamic_func_lut_wrapper
+
+
+class WideDynamicFuncLutTest(unittest.TestCase):
+
+  def testWideDynamicFuncLut(self):
+    self.maxDiff = None
+    expected_lut = [
+        32636,
+        32633,
+        32630,
+        -6,
+        0,
+        0,
+        32624,
+        -12,
+        0,
+        0,
+        32612,
+        -23,
+        -2,
+        0,
+        32587,
+        -48,
+        0,
+        0,
+        32539,
+        -96,
+        0,
+        0,
+        32443,
+        -190,
+        0,
+        0,
+        32253,
+        -378,
+        4,
+        0,
+        31879,
+        -739,
+        18,
+        0,
+        31158,
+        -1409,
+        62,
+        0,
+        29811,
+        -2567,
+        202,
+        0,
+        27446,
+        -4301,
+        562,
+        0,
+        23707,
+        -6265,
+        1230,
+        0,
+        18672,
+        -7458,
+        1952,
+        0,
+        13166,
+        -7030,
+        2212,
+        0,
+        8348,
+        -5342,
+        1868,
+        0,
+        4874,
+        -3459,
+        1282,
+        0,
+        2697,
+        -2025,
+        774,
+        0,
+        1446,
+        -1120,
+        436,
+        0,
+        762,
+        -596,
+        232,
+        0,
+        398,
+        -313,
+        122,
+        0,
+        207,
+        -164,
+        64,
+        0,
+        107,
+        -85,
+        34,
+        0,
+        56,
+        -45,
+        18,
+        0,
+        29,
+        -22,
+        8,
+        0,
+        15,
+        -13,
+        6,
+        0,
+        8,
+        -8,
+        4,
+        0,
+        4,
+        -2,
+        0,
+        0,
+        2,
+        -3,
+        2,
+        0,
+        1,
+        0,
+        0,
+        0,
+        1,
+        -3,
+        2,
+        0,
+        0,
+        0,
+        0,
+    ]
+    lut = wide_dynamic_func_lut_wrapper.wide_dynamic_func_lut(
+        0.95, 80.0, 7, 21)
+    self.assertEqual(lut, expected_lut)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/python/tflite_micro/signal/utils/wide_dynamic_func_lut_wrapper.cc b/python/tflite_micro/signal/utils/wide_dynamic_func_lut_wrapper.cc
new file mode 100644
index 00000000000..4ecf1618466
--- /dev/null
+++ b/python/tflite_micro/signal/utils/wide_dynamic_func_lut_wrapper.cc
@@ -0,0 +1,96 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <math.h>
+#include <stdint.h>
+
+#include <limits>
+#include <pybind11/pybind11.h>
+#include <pybind11/pytypes.h>
+
+namespace py = pybind11;
+
+constexpr size_t kWideDynamicFunctionBits = 32;
+constexpr size_t kWideDynamicFunctionLUTSize =
+    (4 * kWideDynamicFunctionBits - 3);
+
+int16_t PcanGainLookupFunction(const float strength, const float offset,
+                               const int gain_bits, int32_t input_bits,
+                               uint32_t x) {
+  const float x_as_float =
+      static_cast<float>(x) / (static_cast<uint32_t>(1) << input_bits);
+  const float gain_as_float = (static_cast<uint32_t>(1) << gain_bits) *
+                              powf(x_as_float + offset, -strength);
+
+  if (gain_as_float > std::numeric_limits<int16_t>::max()) {
+    return std::numeric_limits<int16_t>::max();
+  }
+  return static_cast<int16_t>(gain_as_float + 0.5f);
+}
+
+py::list WideDynamicFuncLut(float strength, float offset, int input_bits,
+                            int gain_bits) {
+  // Avoid accessing outside of the buffer below gain_lut[4 * interval + 3].
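+  // With the "gain_lut -= 6" offset applied below, the loop writes indices
+  // 4 * interval .. 4 * interval + 3 for interval = 2..32, i.e. storage
+  // indices 2 .. 4 * 32 + 3 - 6 = 125, which is why the storage array needs
+  // kWideDynamicFunctionLUTSize + 1 (= 126) elements.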
+  int16_t gain_lut_storage[kWideDynamicFunctionLUTSize + 1];
+  int16_t* gain_lut = gain_lut_storage;
+
+  gain_lut[0] =
+      PcanGainLookupFunction(strength, offset, gain_bits, input_bits, 0);
+  gain_lut[1] =
+      PcanGainLookupFunction(strength, offset, gain_bits, input_bits, 1);
+  // This puts the pointer outside of the buffer making the calculation in the
+  // loop below a lot simpler.
+  gain_lut -= 6;
+
+  for (size_t interval = 2; interval <= kWideDynamicFunctionBits; ++interval) {
+    const uint32_t x0 = static_cast<uint32_t>(1) << (interval - 1);
+    const uint32_t x1 = x0 + (x0 >> 1);
+    const uint32_t x2 =
+        (interval == kWideDynamicFunctionBits) ? x0 + (x0 - 1) : 2 * x0;
+
+    const int16_t y0 =
+        PcanGainLookupFunction(strength, offset, gain_bits, input_bits, x0);
+    const int16_t y1 =
+        PcanGainLookupFunction(strength, offset, gain_bits, input_bits, x1);
+    const int16_t y2 =
+        PcanGainLookupFunction(strength, offset, gain_bits, input_bits, x2);
+
+    const int32_t diff1 = static_cast<int32_t>(y1 - y0);
+    const int32_t diff2 = static_cast<int32_t>(y2 - y0);
+    const int32_t a1 = 4 * diff1 - diff2;
+    const int32_t a2 = diff2 - a1;
+
+    gain_lut[4 * interval] = y0;
+    gain_lut[4 * interval + 1] = static_cast<int16_t>(a1);
+    gain_lut[4 * interval + 2] = static_cast<int16_t>(a2);
+    gain_lut[4 * interval + 3] = 0;
+  }
+  // Bring the pointer back to the start of the buffer now that the LUT
+  // calculation is done.
+  gain_lut += 6;
+
+  py::list lut_list = py::list();
+  for (size_t i = 0; i < kWideDynamicFunctionLUTSize; i++) {
+    lut_list.append(gain_lut[i]);
+  }
+
+  return lut_list;
+}
+
+PYBIND11_MODULE(wide_dynamic_func_lut_wrapper, m) {
+  m.doc() = "wide_dynamic_func_lut";
+  m.def("wide_dynamic_func_lut", &WideDynamicFuncLut, py::arg("strength"),
+        py::arg("offset"), py::arg("input_bits"), py::arg("gain_bits"));
+}
diff --git a/python/tflite_micro/sine_float.tflite b/python/tflite_micro/sine_float.tflite
new file mode 100644
index 00000000000..f741b3a7b6b
Binary files /dev/null and b/python/tflite_micro/sine_float.tflite differ
diff --git a/python/tflite_micro/whl_test.sh b/python/tflite_micro/whl_test.sh
new file mode 100755
index 00000000000..f3e829314ec
--- /dev/null
+++ b/python/tflite_micro/whl_test.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/sh
+
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Install the given tflite-micro .whl in a fresh virtual environment and run
+# its embedded, post-installation checks.
+
+set -e
+
+WHL="${1}"
+
+# Create venv for this test.
+python3 -m venv pyenv
+. pyenv/bin/activate
+
+# Disable pip's cache for two reasons: 1) the default location in
+# $XDG_CACHE_HOME causes errors when pip is run from a bazel sandbox, and 2) it
+# makes no sense to relocate the cache within the sandbox since files generated
+# in the sandbox are deleted after the run.
+export PIP_NO_CACHE_DIR=true
+
+# Test package installation.
+pip install "${WHL}"
+pip show --files tflite-micro
+
+# Run the package's post-installation checks.
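+# The heredoc below runs under the venv's python3 (activated above); with
+# set -e in effect, a nonzero exit status from it fails the whole test.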
+python3 << HEREDOC +import sys, tflite_micro +print(tflite_micro.__version__) +sys.exit(0 if tflite_micro.postinstall_check.passed() else 1) +HEREDOC diff --git a/signal/micro/kernels/BUILD b/signal/micro/kernels/BUILD index 7f348dc9936..b7ac658246e 100644 --- a/signal/micro/kernels/BUILD +++ b/signal/micro/kernels/BUILD @@ -3,17 +3,32 @@ load( "micro_copts", ) -package( - licenses = ["notice"], -) +package(licenses = ["notice"]) cc_library( name = "register_signal_ops", srcs = [ + "delay.cc", + "energy.cc", + "fft_auto_scale_common.cc", + "fft_auto_scale_kernel.cc", + "filter_bank.cc", + "filter_bank_log.cc", + "filter_bank_spectral_subtraction.cc", + "filter_bank_square_root.cc", + "filter_bank_square_root_common.cc", + "framer.cc", + "irfft.cc", + "overlap_add.cc", + "pcan.cc", "rfft.cc", + "stacker.cc", "window.cc", ], hdrs = [ + "fft_auto_scale_kernel.h", + "filter_bank_square_root.h", + "irfft.h", "rfft.h", ], copts = micro_copts(), @@ -21,9 +36,20 @@ cc_library( "//tensorflow/lite/micro", ], deps = [ + "//signal/src:circular_buffer", + "//signal/src:energy", + "//signal/src:fft_auto_scale", + "//signal/src:filter_bank", + "//signal/src:filter_bank_log", + "//signal/src:filter_bank_spectral_subtraction", + "//signal/src:filter_bank_square_root", + "//signal/src:irfft", + "//signal/src:overlap_add", + "//signal/src:pcan_argc_fixed", "//signal/src:rfft", "//signal/src:window", "//tensorflow/lite:type_to_tflitetype", + "//tensorflow/lite/c:common", "//tensorflow/lite/kernels:kernel_util", "//tensorflow/lite/kernels/internal:tensor", "//tensorflow/lite/micro:flatbuffer_utils", @@ -87,3 +113,246 @@ cc_test( "//tensorflow/lite/micro/testing:micro_test", ], ) + +cc_library( + name = "framer_flexbuffers_generated_data", + srcs = [ + "framer_flexbuffers_generated_data.cc", + ], + hdrs = [ + "framer_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = "framer_test", + srcs = [ + "framer_test.cc", + ], + deps = [ + ":framer_flexbuffers_generated_data", + ":register_signal_ops", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "overlap_add_flexbuffers_generated_data", + srcs = [ + "overlap_add_flexbuffers_generated_data.cc", + ], + hdrs = [ + "overlap_add_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = "overlap_add_test", + srcs = [ + "overlap_add_test.cc", + ], + deps = [ + ":overlap_add_flexbuffers_generated_data", + ":register_signal_ops", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "delay_flexbuffers_generated_data", + srcs = [ + "delay_flexbuffers_generated_data.cc", + ], + hdrs = [ + "delay_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = "delay_test", + srcs = [ + "delay_test.cc", + ], + deps = [ + ":delay_flexbuffers_generated_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "stacker_flexbuffers_generated_data", + srcs = [ + "stacker_flexbuffers_generated_data.cc", + ], + hdrs = [ + "stacker_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = 
"stacker_test", + srcs = [ + "stacker_test.cc", + ], + deps = [ + ":register_signal_ops", + ":stacker_flexbuffers_generated_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "energy_flexbuffers_generated_data", + srcs = [ + "energy_flexbuffers_generated_data.cc", + ], + hdrs = [ + "energy_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = "energy_test", + srcs = [ + "energy_test.cc", + ], + deps = [ + ":energy_flexbuffers_generated_data", + ":register_signal_ops", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "filter_bank_flexbuffers_generated_data", + srcs = [ + "filter_bank_flexbuffers_generated_data.cc", + ], + hdrs = [ + "filter_bank_flexbuffers_generated_data.h", + ], +) + +cc_library( + name = "filter_bank_log_flexbuffers_generated_data", + srcs = [ + "filter_bank_log_flexbuffers_generated_data.cc", + ], + hdrs = [ + "filter_bank_log_flexbuffers_generated_data.h", + ], +) + +cc_library( + name = "filter_bank_spectral_subtraction_flexbuffers_generated_data", + srcs = [ + "filter_bank_spectral_subtraction_flexbuffers_generated_data.cc", + ], + hdrs = [ + "filter_bank_spectral_subtraction_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = "filter_bank_test", + srcs = [ + "filter_bank_test.cc", + ], + deps = [ + ":filter_bank_flexbuffers_generated_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "filter_bank_log_test", + srcs = [ + "filter_bank_log_test.cc", + ], + deps = [ + ":filter_bank_log_flexbuffers_generated_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "filter_bank_spectral_subtraction_test", + srcs = [ + "filter_bank_spectral_subtraction_test.cc", + ], + deps = [ + ":filter_bank_spectral_subtraction_flexbuffers_generated_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_test( + name = "filter_bank_square_root_test", + srcs = [ + "filter_bank_square_root_test.cc", + ], + deps = [ + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + +cc_library( + name = "pcan_flexbuffers_generated_data", + srcs = [ + "pcan_flexbuffers_generated_data.cc", + ], + hdrs = [ + "pcan_flexbuffers_generated_data.h", + ], +) + +cc_test( + name = "pcan_test", + srcs = [ + "pcan_test.cc", + ], + deps = [ + ":pcan_flexbuffers_generated_data", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/kernels:kernel_runner", + 
"//tensorflow/lite/micro/testing:micro_test", + ], +) diff --git a/signal/micro/kernels/delay.cc b/signal/micro/kernels/delay.cc new file mode 100644 index 00000000000..33ef35eb28b --- /dev/null +++ b/signal/micro/kernels/delay.cc @@ -0,0 +1,154 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "signal/src/circular_buffer.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +// Indices into the init flexbuffer's vector. +// The parameter's name is in the comment that follows. +// Elements in the vectors are ordered alphabetically by parameter name. +constexpr int kDelayLengthIndex = 0; // 'delay_length' + +struct TFLMSignalFrontendDelayParams { + int32_t frame_size; + int32_t delay_length; + int32_t outer_dims; + + int8_t** state_buffers; + tflm_signal::CircularBuffer** circular_buffers; +}; + +void* DelayInit(TfLiteContext* context, const char* buffer, size_t length) { + auto* params = static_cast( + context->AllocatePersistentBuffer(context, + sizeof(TFLMSignalFrontendDelayParams))); + + if (params == nullptr) { + return nullptr; + } + + FlexbufferWrapper fbw(reinterpret_cast(buffer), length); + params->delay_length = fbw.ElementAsInt32(kDelayLengthIndex); + return params; +} + +TfLiteStatus DelayPrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt16); + + auto* params = + reinterpret_cast(node->user_data); + + TF_LITE_ENSURE(context, params != nullptr); + + RuntimeShape input_shape = GetTensorShape(input); + int innermost_dim = input_shape.Dims(input_shape.DimensionsCount() - 1); + params->outer_dims = input_shape.FlatSize() / innermost_dim; + params->frame_size = innermost_dim; + + params->state_buffers = + static_cast(context->AllocatePersistentBuffer( + context, params->outer_dims * sizeof(int8_t*))); + params->circular_buffers = static_cast( + context->AllocatePersistentBuffer( + context, params->outer_dims * sizeof(tflm_signal::CircularBuffer*))); + + for (int i = 0; i < 
params->outer_dims; i++) { + size_t capacity = params->frame_size + params->delay_length; + + size_t state_size = tflm_signal::CircularBufferGetNeededMemory(capacity); + params->state_buffers[i] = + static_cast(context->AllocatePersistentBuffer( + context, state_size * sizeof(int8_t))); + params->circular_buffers[i] = tflm_signal::CircularBufferInit( + capacity, params->state_buffers[i], state_size); + tflm_signal::CircularBufferWriteZeros(params->circular_buffers[i], + params->delay_length); + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +TfLiteStatus DelayEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->user_data); + const TfLiteEvalTensor* input = + micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = micro::GetEvalOutput(context, node, kOutputTensor); + + const int16_t* input_data = micro::GetTensorData(input); + int16_t* output_data = micro::GetTensorData(output); + + for (int dim_index = 0, sample_index = 0; dim_index < params->outer_dims; + dim_index++, sample_index += params->frame_size) { + tflm_signal::CircularBufferWrite(params->circular_buffers[dim_index], + &input_data[sample_index], + params->frame_size); + tflm_signal::CircularBufferGet(params->circular_buffers[dim_index], + params->frame_size, + &output_data[sample_index]); + tflm_signal::CircularBufferDiscard(params->circular_buffers[dim_index], + params->frame_size); + } + return kTfLiteOk; +} + +void DelayReset(TfLiteContext* context, void* buffer) { + auto* params = static_cast(buffer); + for (int i = 0; i < params->outer_dims; ++i) { + tflm_signal::CircularBufferReset(params->circular_buffers[i]); + tflm_signal::CircularBufferWriteZeros(params->circular_buffers[i], + params->delay_length); + } +} + +} // namespace + +namespace tflm_signal { +TFLMRegistration* Register_DELAY() { + static TFLMRegistration r = micro::RegisterOp(DelayInit, DelayPrepare, + DelayEval, nullptr, DelayReset); + return &r; +} +} // namespace tflm_signal + +} // namespace tflite diff --git a/signal/micro/kernels/delay_flexbuffers_generated_data.cc b/signal/micro/kernels/delay_flexbuffers_generated_data.cc new file mode 100644 index 00000000000..756b7ac1738 --- /dev/null +++ b/signal/micro/kernels/delay_flexbuffers_generated_data.cc @@ -0,0 +1,29 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file is generated. 
See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/delay_flexbuffers_generated_data.h" + +const int g_gen_data_size_3_delay = 23; +const unsigned char g_gen_data_3_delay[] = { + 0x64, 0x65, 0x6c, 0x61, 0x79, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x00, 0x01, 0x0e, 0x01, 0x01, 0x01, 0x03, 0x04, 0x02, 0x24, 0x01, +}; +const int g_gen_data_size_5_delay = 23; +const unsigned char g_gen_data_5_delay[] = { + 0x64, 0x65, 0x6c, 0x61, 0x79, 0x5f, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, + 0x00, 0x01, 0x0e, 0x01, 0x01, 0x01, 0x05, 0x04, 0x02, 0x24, 0x01, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc b/signal/micro/kernels/delay_flexbuffers_generated_data.h similarity index 57% rename from tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc rename to signal/micro/kernels/delay_flexbuffers_generated_data.h index 96a7c9ac288..c79273ea1a0 100644 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc +++ b/signal/micro/kernels/delay_flexbuffers_generated_data.h @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -13,11 +13,13 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -// See the header for documentation on the meaning of this data. +#ifndef SIGNAL_MICRO_KERNELS_DELAY_FLEXBUFFERS_GENERATED_DATA_H_ +#define SIGNAL_MICRO_KERNELS_DELAY_FLEXBUFFERS_GENERATED_DATA_H_ -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h" +extern const int g_gen_data_size_3_delay; +extern const unsigned char g_gen_data_3_delay[]; -const uint8_t g_yes_power_spectrum_data[g_yes_power_spectrum_data_size] = { - 8, 89, 8, 0, 0, 0, 0, 0, 0, 0, 0, 4, 13, 1, 6, 23, 20, 6, 4, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -}; +extern const int g_gen_data_size_5_delay; +extern const unsigned char g_gen_data_5_delay[]; + +#endif // SIGNAL_MICRO_KERNELS_DELAY_FLEXBUFFERS_GENERATED_DATA_H_ diff --git a/signal/micro/kernels/delay_test.cc b/signal/micro/kernels/delay_test.cc new file mode 100644 index 00000000000..e6fdeb91231 --- /dev/null +++ b/signal/micro/kernels/delay_test.cc @@ -0,0 +1,341 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include <stdint.h>
+#include <string.h>
+
+#include "signal/micro/kernels/delay_flexbuffers_generated_data.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputsSize = 1;
+constexpr int kOutputsSize = 1;
+constexpr int kTensorsSize = kInputsSize + kOutputsSize;
+
+class DelayKernelRunner {
+ public:
+  DelayKernelRunner(int* input_dims_data, int16_t* input_data,
+                    int* output_dims_data, int16_t* output_data)
+      : tensors_{testing::CreateTensor(
+                     input_data, testing::IntArrayFromInts(input_dims_data)),
+                 testing::CreateTensor(
+                     output_data, testing::IntArrayFromInts(output_dims_data))},
+        inputs_array_{testing::IntArrayFromInts(inputs_array_data_)},
+        outputs_array_{testing::IntArrayFromInts(outputs_array_data_)},
+        kernel_runner_{*registration_, tensors_,       kTensorsSize,
+                       inputs_array_,  outputs_array_, nullptr} {}
+
+  micro::KernelRunner& kernel_runner() { return kernel_runner_; }
+
+ private:
+  int inputs_array_data_[kInputsSize + 1] = {kInputsSize, 0};
+  int outputs_array_data_[kOutputsSize + 1] = {kOutputsSize, 1};
+  TfLiteTensor tensors_[kTensorsSize] = {};
+  TfLiteIntArray* inputs_array_ = nullptr;
+  TfLiteIntArray* outputs_array_ = nullptr;
+
+  TFLMRegistration* registration_ = tflm_signal::Register_DELAY();
+  micro::KernelRunner kernel_runner_;
+};
+
+void TestDelayInvoke(const int16_t* input_data, int16_t* output_data,
+                     const int16_t* golden, int input_size, int input_num,
+                     micro::KernelRunner* runner, int16_t* input_buffer) {
+  for (int i = 0; i < input_num; i++) {
+    memcpy(input_buffer, &input_data[i * input_size],
+           sizeof(input_data[0]) * input_size);
+    TF_LITE_MICRO_EXPECT_EQ(runner->Invoke(), kTfLiteOk);
+    for (int j = 0; j < input_size; ++j) {
+      TF_LITE_MICRO_EXPECT_EQ(golden[i * input_size + j], output_data[j]);
+    }
+  }
+}
+
+void TestDelay(int* input_dims_data, const int16_t* input_data,
+               int* output_dims_data, int16_t* output_data,
+               const int16_t* golden, int input_size, int input_num,
+               const unsigned char* flexbuffers_data,
+               const unsigned int flexbuffers_data_size,
+               int16_t* input_buffer) {
+  DelayKernelRunner delay_runner(input_dims_data, input_buffer,
+                                 output_dims_data, output_data);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_MICRO_EXPECT_EQ(delay_runner.kernel_runner().InitAndPrepare(
+                              reinterpret_cast<const char*>(flexbuffers_data),
+                              flexbuffers_data_size),
+                          kTfLiteOk);
+  TestDelayInvoke(input_data, output_data, golden, input_size, input_num,
+                  &delay_runner.kernel_runner(), input_buffer);
+}
+// TestDelayReset() runs a test with the given inputs twice, with a Reset() in
+// between, mainly to exercise the Delay op's Reset functionality. If you just
+// want to make sure the Delay op's output matches a set of golden values for
+// an input, use TestDelay() instead.
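+// After Reset(), DelayReset() re-seeds each circular buffer with delay_length
+// zeros (see delay.cc above), so the second pass must reproduce exactly the
+// same golden output as the first.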
+void TestDelayReset(int* input_dims_data, const int16_t* input_data,
+                    int* output_dims_data, int16_t* output_data,
+                    const int16_t* golden, int input_size, int input_num,
+                    const unsigned char* flexbuffers_data,
+                    const unsigned int flexbuffers_data_size,
+                    int16_t* input_buffer) {
+  DelayKernelRunner delay_runner(input_dims_data, input_buffer,
+                                 output_dims_data, output_data);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_MICRO_EXPECT_EQ(delay_runner.kernel_runner().InitAndPrepare(
+                              reinterpret_cast<const char*>(flexbuffers_data),
+                              flexbuffers_data_size),
+                          kTfLiteOk);
+  TestDelayInvoke(input_data, output_data, golden, input_size, input_num,
+                  &delay_runner.kernel_runner(), input_buffer);
+  delay_runner.kernel_runner().Reset();
+  TestDelayInvoke(input_data, output_data, golden, input_size, input_num,
+                  &delay_runner.kernel_runner(), input_buffer);
+}
+
+}  // namespace
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(DelayTestSingleDimDelayLessThanFrameSize) {
+  const int kInputSize = 8;
+  const int kInputNum = 2;
+  int input_shape[] = {1, kInputSize};
+  int output_shape[] = {1, kInputSize};
+  // The buffer that gets passed to the model.
+  int16_t input_buffer[kInputSize];
+  // The input data. Gets copied to input_buffer kInputNum times.
+  const int16_t input[kInputNum * kInputSize] = {
+      0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8,
+      0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+  };
+  int16_t output[kInputNum * kInputSize] = {0};
+  const int16_t golden[kInputNum * kInputSize] = {0x0, 0x0, 0x0, 0x1, 0x2, 0x3,
+                                                  0x4, 0x5, 0x6, 0x7, 0x8, 0x0,
+                                                  0x0, 0x0, 0x0, 0x0};
+  tflite::TestDelay(input_shape, input, output_shape, output, golden,
+                    kInputSize, kInputNum, g_gen_data_3_delay,
+                    g_gen_data_size_3_delay, input_buffer);
+}
+
+TF_LITE_MICRO_TEST(DelayTestSingleDimDelayGreaterThanFrameSize) {
+  const int kInputSize = 3;
+  const int kInputNum = 3;
+  int input_shape[] = {1, kInputSize};
+  int output_shape[] = {1, kInputSize};
+  // The buffer that gets passed to the model.
+  int16_t input_buffer[kInputSize];
+  // The input data. Gets copied to input_buffer kInputNum times.
+  const int16_t input[kInputNum * kInputSize] = {
+      0x1, 0x2, 0x3, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0,
+  };
+  int16_t output[kInputNum * kInputSize] = {0};
+  const int16_t golden[kInputNum * kInputSize] = {
+      0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x2, 0x3, 0x4,
+  };
+  tflite::TestDelay(input_shape, input, output_shape, output, golden,
+                    kInputSize, kInputNum, g_gen_data_5_delay,
+                    g_gen_data_size_5_delay, input_buffer);
+}
+
+TF_LITE_MICRO_TEST(DelayTestMultiDimDelayLessThanFrameSize) {
+  const int kInputSize = 16;
+  const int kInputNum = 2;
+  int input_shape[] = {2, 4, 4};
+  int output_shape[] = {2, 4, 4};
+  // The buffer that gets passed to the model.
+  int16_t input_buffer[kInputSize];
+  // The op will be invoked 2 times (Input X, X=0,1)
+  // For each invocation, the input's shape is (4, 4) but flattened for clarity
+  // On each invocation, the input data is copied to input_buffer first.
+ const int16_t input[kInputNum * kInputSize] = { + 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, + 0xC, 0xD, 0xE, 0xF, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + }; + int16_t output[kInputNum * kInputSize] = {0}; + // For each invocation, we expect the following output (Output X, X=0,1) + // Each time, the output's shape is (4, 4) but flattened for clarity + const int16_t golden[kInputNum * kInputSize] = { + // Output 0 + 0x0, + 0x0, + 0x0, + 0x1, + 0x0, + 0x0, + 0x0, + 0x5, + 0x0, + 0x0, + 0x0, + 0x9, + 0x0, + 0x0, + 0x0, + 0xD, + // Output 1 + 0x2, + 0x3, + 0x4, + 0x0, + 0x6, + 0x7, + 0x8, + 0x0, + 0xA, + 0xB, + 0xC, + 0x0, + 0xE, + 0xF, + 0x0, + 0x0, + }; + tflite::TestDelay(input_shape, input, output_shape, output, golden, + kInputSize, kInputNum, g_gen_data_3_delay, + g_gen_data_size_3_delay, input_buffer); +} + +TF_LITE_MICRO_TEST(DelayTestMultiDimDelayGreaterThanFrameSize) { + const int kInputSize = 16; + const int kInputNum = 3; + int input_shape[] = {2, 4, 4}; + int output_shape[] = {2, 4, 4}; + // The buffer that gets passed to the model. + int16_t input_buffer[kInputSize]; + // The op will be invoked 3 times (Input X, X=0,1,2) + // For each invocation, the input's shape is (4, 4) but flattened for clarity + // On each invocation, the input data is copied to input_buffer first. + const int16_t input[kInputNum * kInputSize] = { + 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xA, 0xB, 0xC, + 0xD, 0xE, 0xF, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + }; + int16_t output[kInputNum * kInputSize] = {0}; + // For each invocation, we expect the following output (Output X, X=0,1,2) + // Each time, the output's shape is (4, 4) but flattened for clarity + const int16_t golden[kInputNum * kInputSize] = { + // Output 0 + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + // Output 1 + 0x0, + 0x1, + 0x2, + 0x3, + 0x0, + 0x5, + 0x6, + 0x7, + 0x0, + 0x9, + 0xA, + 0xB, + 0x0, + 0xD, + 0xE, + 0xF, + // Output 2 + 0x4, + 0x0, + 0x0, + 0x0, + 0x8, + 0x0, + 0x0, + 0x0, + 0xC, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + 0x0, + }; + tflite::TestDelay(input_shape, input, output_shape, output, golden, + kInputSize, kInputNum, g_gen_data_5_delay, + g_gen_data_size_5_delay, input_buffer); +} + +TF_LITE_MICRO_TEST(DelayTestResetSingleDimDelayLessThanFrameSize) { + const int kInputSize = 8; + const int kInputNum = 2; + int input_shape[] = {1, kInputSize}; + int output_shape[] = {1, kInputSize}; + // The buffer that gets passed to the model. + int16_t input_buffer[kInputSize]; + // The input data. Gets copied to input_buffer kInputNum times. 
+  const int16_t input[kInputNum * kInputSize] = {
+      0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8,
+      0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,
+  };
+  int16_t output[kInputNum * kInputSize] = {0};
+  const int16_t golden[kInputNum * kInputSize] = {0x0, 0x0, 0x0, 0x1, 0x2, 0x3,
+                                                  0x4, 0x5, 0x6, 0x7, 0x8, 0x0,
+                                                  0x0, 0x0, 0x0, 0x0};
+  tflite::TestDelayReset(input_shape, input, output_shape, output, golden,
+                         kInputSize, kInputNum, g_gen_data_3_delay,
+                         g_gen_data_size_3_delay, input_buffer);
+}
+
+TF_LITE_MICRO_TEST(DelayTestResetSingleResetDimDelayGreaterThanFrameSize) {
+  const int kInputSize = 3;
+  const int kInputNum = 3;
+  int input_shape[] = {1, kInputSize};
+  int output_shape[] = {1, kInputSize};
+  // The buffer that gets passed to the model.
+  int16_t input_buffer[kInputSize];
+  // The input data. Gets copied to input_buffer kInputNum times.
+  const int16_t input[kInputNum * kInputSize] = {
+      0x1, 0x2, 0x3, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0,
+  };
+  int16_t output[kInputNum * kInputSize] = {0};
+  const int16_t golden[kInputNum * kInputSize] = {
+      0x0, 0x0, 0x0, 0x0, 0x0, 0x1, 0x2, 0x3, 0x4,
+  };
+  tflite::TestDelayReset(input_shape, input, output_shape, output, golden,
+                         kInputSize, kInputNum, g_gen_data_5_delay,
+                         g_gen_data_size_5_delay, input_buffer);
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/signal/micro/kernels/energy.cc b/signal/micro/kernels/energy.cc
new file mode 100644
index 00000000000..6a86366d42f
--- /dev/null
+++ b/signal/micro/kernels/energy.cc
@@ -0,0 +1,113 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/energy.h"
+
+#include <math.h>
+#include <stddef.h>
+#include <stdint.h>
+
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/flatbuffer_utils.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_context.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kOutputTensor = 0;
+
+// Indices into the init flexbuffer's vector.
+// The parameter's name is in the comment that follows.
+// Elements in the vectors are ordered alphabetically by parameter name.
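+// For this op the alphabetical order is 'end_index' before 'start_index',
+// hence kEndIndexIndex = 0 and kStartIndexIndex = 1 below.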
+constexpr int kEndIndexIndex = 0;    // 'end_index'
+constexpr int kStartIndexIndex = 1;  // 'start_index'
+
+struct TFLMSignalEnergyParams {
+  int32_t end_index;
+  int32_t start_index;
+};
+
+void* EnergyInit(TfLiteContext* context, const char* buffer, size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+
+  auto* data =
+      static_cast<TFLMSignalEnergyParams*>(context->AllocatePersistentBuffer(
+          context, sizeof(TFLMSignalEnergyParams)));
+
+  if (data == nullptr) {
+    return nullptr;
+  }
+
+  tflite::FlexbufferWrapper fbw(reinterpret_cast<const uint8_t*>(buffer),
+                                length);
+  data->end_index = fbw.ElementAsInt32(kEndIndexIndex);
+  data->start_index = fbw.ElementAsInt32(kStartIndexIndex);
+  return data;
+}
+
+TfLiteStatus EnergyPrepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
+
+  MicroContext* micro_context = GetMicroContext(context);
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteUInt32);
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  return kTfLiteOk;
+}
+
+TfLiteStatus EnergyEval(TfLiteContext* context, TfLiteNode* node) {
+  auto* params = reinterpret_cast<TFLMSignalEnergyParams*>(node->user_data);
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+
+  const Complex<int16_t>* input_data =
+      tflite::micro::GetTensorData<Complex<int16_t>>(input);
+  uint32_t* output_data = tflite::micro::GetTensorData<uint32_t>(output);
+
+  tflm_signal::SpectrumToEnergy(input_data, params->start_index,
+                                params->end_index, output_data);
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+namespace tflm_signal {
+TFLMRegistration* Register_ENERGY() {
+  static TFLMRegistration r =
+      tflite::micro::RegisterOp(EnergyInit, EnergyPrepare, EnergyEval);
+  return &r;
+}
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/energy_flexbuffers_generated_data.cc b/signal/micro/kernels/energy_flexbuffers_generated_data.cc
new file mode 100644
index 00000000000..e25a5771a3e
--- /dev/null
+++ b/signal/micro/kernels/energy_flexbuffers_generated_data.cc
@@ -0,0 +1,37 @@
+/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// This file is generated.
See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/energy_flexbuffers_generated_data.h" + +const int g_gen_data_size_start_index_2_end_index_4 = 35; +const unsigned char g_gen_data_start_index_2_end_index_4[] = { + 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x00, + 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x00, 0x02, 0x0b, + 0x18, 0x02, 0x01, 0x02, 0x04, 0x02, 0x04, 0x04, 0x04, 0x24, 0x01, +}; +const int g_gen_data_size_start_index_0_end_index_4 = 35; +const unsigned char g_gen_data_start_index_0_end_index_4[] = { + 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x00, + 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x00, 0x02, 0x0b, + 0x18, 0x02, 0x01, 0x02, 0x04, 0x00, 0x04, 0x04, 0x04, 0x24, 0x01, +}; +const int g_gen_data_size_start_index_4_end_index_8 = 35; +const unsigned char g_gen_data_start_index_4_end_index_8[] = { + 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x00, + 0x65, 0x6e, 0x64, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x00, 0x02, 0x0b, + 0x18, 0x02, 0x01, 0x02, 0x08, 0x04, 0x04, 0x04, 0x04, 0x24, 0x01, +}; diff --git a/signal/micro/kernels/energy_flexbuffers_generated_data.h b/signal/micro/kernels/energy_flexbuffers_generated_data.h new file mode 100644 index 00000000000..f2840f66261 --- /dev/null +++ b/signal/micro/kernels/energy_flexbuffers_generated_data.h @@ -0,0 +1,28 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_ENERGY_FLEXBUFFERS_DATA_H_ +#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_ENERGY_FLEXBUFFERS_DATA_H_ + +extern const int g_gen_data_size_start_index_2_end_index_4; +extern const unsigned char g_gen_data_start_index_2_end_index_4[]; + +extern const int g_gen_data_size_start_index_0_end_index_4; +extern const unsigned char g_gen_data_start_index_0_end_index_4[]; + +extern const int g_gen_data_size_start_index_4_end_index_8; +extern const unsigned char g_gen_data_start_index_4_end_index_8[]; + +#endif // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_ENERGY_FLEXBUFFERS_DATA_H_ diff --git a/signal/micro/kernels/energy_test.cc b/signal/micro/kernels/energy_test.cc new file mode 100644 index 00000000000..64f0619cb4f --- /dev/null +++ b/signal/micro/kernels/energy_test.cc @@ -0,0 +1,123 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "signal/micro/kernels/energy_flexbuffers_generated_data.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +TfLiteStatus TestEnergy(int* input_dims_data, const int16_t* input_data, + int* output_dims_data, const uint32_t* golden, + const unsigned char* flexbuffers_data, + const unsigned int flexbuffers_data_size, + uint32_t* output_data) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + const int output_len = ElementCount(*output_dims); + constexpr int kInputsSize = 1; + constexpr int kOutputsSize = 1; + constexpr int kTensorsSize = kInputsSize + kOutputsSize; + TfLiteTensor tensors[kTensorsSize] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 1}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const TFLMRegistration* registration = tflm_signal::Register_ENERGY(); + micro::KernelRunner runner(*registration, tensors, kTensorsSize, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned + // char*. This small discrepancy results in compiler warnings unless we + // reinterpret_cast right before passing in the flexbuffer bytes to the + // KernelRunner. 
+  TfLiteStatus status = runner.InitAndPrepare(
+      reinterpret_cast<const char*>(flexbuffers_data), flexbuffers_data_size);
+  if (status != kTfLiteOk) {
+    return status;
+  }
+
+  status = runner.Invoke();
+  if (status != kTfLiteOk) {
+    return status;
+  }
+
+  for (int i = 0; i < output_len; ++i) {
+    TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]);
+  }
+  return kTfLiteOk;
+}
+
+}  // namespace
+}  // namespace testing
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(EnergyTestMiddle) {
+  int input_shape[] = {1, 16};
+  int output_shape[] = {1, 8};
+  const int16_t input[] = {1, 2,  3,  4,  5,  6,  7,  8,
+                           9, 10, 11, 12, 13, 14, 15, 16};
+  const uint32_t golden[] = {0, 0, 61, 113, 0, 0, 0, 0};
+  uint32_t output[8];
+  memset(output, 0, sizeof(output));
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk, tflite::testing::TestEnergy(
+                     input_shape, input, output_shape, golden,
+                     g_gen_data_start_index_2_end_index_4,
+                     g_gen_data_size_start_index_2_end_index_4, output));
+}
+
+TF_LITE_MICRO_TEST(EnergyTestStart) {
+  int input_shape[] = {1, 16};
+  int output_shape[] = {1, 8};
+  const int16_t input[] = {1, 2,  3,  4,  5,  6,  7,  8,
+                           9, 10, 11, 12, 13, 14, 15, 16};
+  const uint32_t golden[] = {5, 25, 61, 113, 0, 0, 0, 0};
+  uint32_t output[8];
+  memset(output, 0, sizeof(output));
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk, tflite::testing::TestEnergy(
+                     input_shape, input, output_shape, golden,
+                     g_gen_data_start_index_0_end_index_4,
+                     g_gen_data_size_start_index_0_end_index_4, output));
+}
+
+TF_LITE_MICRO_TEST(EnergyTestEnd) {
+  int input_shape[] = {1, 16};
+  int output_shape[] = {1, 8};
+  const int16_t input[] = {1, 2,  3,  4,  5,  6,  7,  8,
+                           9, 10, 11, 12, 13, 14, 15, 16};
+  const uint32_t golden[] = {0, 0, 0, 0, 181, 265, 365, 481};
+  uint32_t output[8];
+  memset(output, 0, sizeof(output));
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk, tflite::testing::TestEnergy(
+                     input_shape, input, output_shape, golden,
+                     g_gen_data_start_index_4_end_index_8,
+                     g_gen_data_size_start_index_4_end_index_8, output));
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/signal/micro/kernels/fft_auto_scale_common.cc b/signal/micro/kernels/fft_auto_scale_common.cc
new file mode 100644
index 00000000000..8703ac600e6
--- /dev/null
+++ b/signal/micro/kernels/fft_auto_scale_common.cc
@@ -0,0 +1,54 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ +#include "signal/micro/kernels/fft_auto_scale_kernel.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; +constexpr int kScaleBitTensor = 1; + +TfLiteStatus FftAutoScalePrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 2); + + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* scale_bit = + micro_context->AllocateTempOutputTensor(node, kScaleBitTensor); + TF_LITE_ENSURE(context, scale_bit != nullptr); + + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1); + TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1); + TF_LITE_ENSURE_EQ(context, NumDimensions(scale_bit), 0); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt16); + TF_LITE_ENSURE_TYPES_EQ(context, scale_bit->type, kTfLiteInt32); + + micro_context->DeallocateTempTfLiteTensor(scale_bit); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +} // namespace tflite diff --git a/signal/micro/kernels/fft_auto_scale_kernel.cc b/signal/micro/kernels/fft_auto_scale_kernel.cc new file mode 100644 index 00000000000..4946fb30a5f --- /dev/null +++ b/signal/micro/kernels/fft_auto_scale_kernel.cc @@ -0,0 +1,64 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include "signal/micro/kernels/fft_auto_scale_kernel.h"
+
+#include <math.h>
+#include <stddef.h>
+#include <stdint.h>
+
+#include "signal/src/fft_auto_scale.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_context.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kOutputTensor = 0;
+constexpr int kScaleBitTensor = 1;
+
+TfLiteStatus FftAutoScaleEval(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+  TfLiteEvalTensor* scale_bit =
+      tflite::micro::GetEvalOutput(context, node, kScaleBitTensor);
+
+  const int16_t* input_data = tflite::micro::GetTensorData<int16_t>(input);
+  int16_t* output_data = tflite::micro::GetTensorData<int16_t>(output);
+  int32_t* scale_bit_data = tflite::micro::GetTensorData<int32_t>(scale_bit);
+
+  *scale_bit_data =
+      tflm_signal::FftAutoScale(input_data, output->dims->data[0], output_data);
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflm_signal {
+
+TFLMRegistration* Register_FFT_AUTO_SCALE() {
+  static TFLMRegistration r =
+      tflite::micro::RegisterOp(nullptr, FftAutoScalePrepare, FftAutoScaleEval);
+  return &r;
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/micro/kernels/fft_auto_scale_kernel.h b/signal/micro/kernels/fft_auto_scale_kernel.h
new file mode 100644
index 00000000000..9461c90fe09
--- /dev/null
+++ b/signal/micro/kernels/fft_auto_scale_kernel.h
@@ -0,0 +1,26 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ +#ifndef SIGNAL_MICRO_KERNELS_FFT_AUTO_SCALE_KERNEL_H_ +#define SIGNAL_MICRO_KERNELS_FFT_AUTO_SCALE_KERNEL_H_ + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +TfLiteStatus FftAutoScalePrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // SIGNAL_MICRO_KERNELS_FFT_AUTO_SCALE_KERNEL_H_ diff --git a/signal/micro/kernels/fft_test.cc b/signal/micro/kernels/fft_test.cc index 15e95b76dc0..bf54d41c3e1 100644 --- a/signal/micro/kernels/fft_test.cc +++ b/signal/micro/kernels/fft_test.cc @@ -85,6 +85,43 @@ TfLiteStatus TestFFT(int* input_dims_data, const T* input_data, return kTfLiteOk; } +TfLiteStatus TestFFTAutoScale(int* input_dims_data, const int16_t* input_data, + int* output_dims_data, const int16_t* golden, + int* scale_bit_dims_data, + const int32_t scale_bit_golden, + const TFLMRegistration registration, + const uint8_t* flexbuffers_data, + const int flexbuffers_data_len, + int16_t* output_data, int32_t* scale_bit) { + TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + TfLiteIntArray* scale_bit_dims = IntArrayFromInts(scale_bit_dims_data); + + constexpr int kInputsSize = 1; + constexpr int kOutputsSize = 2; + constexpr int kTensorsSize = kInputsSize + kOutputsSize; + TfLiteTensor tensors[kTensorsSize] = { + CreateTensor(input_data, input_dims), + CreateTensor(output_data, output_dims), + CreateTensor(scale_bit, scale_bit_dims), + }; + + int inputs_array_data[] = {1, 0}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {2, 1, 2}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const int output_len = ElementCount(*output_dims); + + TF_LITE_ENSURE_STATUS(ValidateFFTGoldens( + tensors, kTensorsSize, inputs_array, outputs_array, output_len, golden, + registration, flexbuffers_data, flexbuffers_data_len, output_data, 0)); + + TF_LITE_MICRO_EXPECT_EQ(scale_bit_golden, *scale_bit); + + return kTfLiteOk; +} + } // namespace } // namespace testing @@ -237,11 +274,11 @@ TF_LITE_MICRO_TEST(RfftTestSize512Int16) { const TFLMRegistration* registration = tflite::tflm_signal::Register_RFFT_INT16(); // See (b/287518815) for why this is needed. 
-#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) int tolerance = 9; -#else // defined(HIFI4) || defined(HIFI5) +#else // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) int tolerance = 3; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TF_LITE_MICRO_EXPECT_EQ( kTfLiteOk, tflite::testing::TestFFT( input_shape, tflite::kRfftInt16Length512Input, @@ -266,4 +303,219 @@ TF_LITE_MICRO_TEST(RfftTestSize512Int32) { g_gen_data_size_fft_length_512_int32, output, 0)); } +TF_LITE_MICRO_TEST(IrfftTestLength64Float) { + constexpr int kOutputLen = 64; + int input_shape[] = {1, 66}; + const float input[] = {256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0}; + int output_shape[] = {1, kOutputLen}; + const float golden[] = {256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + float output[kOutputLen]; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT_FLOAT(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestFFT( + input_shape, input, output_shape, golden, *registration, + g_gen_data_fft_length_64_float, + g_gen_data_size_fft_length_64_int16, output, 1e-7)); +} + +TF_LITE_MICRO_TEST(IrfftTestLength64Int16) { + constexpr int kOutputLen = 64; + int input_shape[] = {1, 66}; + const int16_t input[] = { + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0}; + int output_shape[] = {1, kOutputLen}; + const int16_t golden[] = {256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + int16_t output[kOutputLen]; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT_INT16(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestFFT( + input_shape, input, output_shape, golden, *registration, + g_gen_data_fft_length_64_int16, + g_gen_data_size_fft_length_64_int16, output, 0)); +} + +TF_LITE_MICRO_TEST(IrfftTestLength64Int32) { + constexpr int kOutputLen = 64; + int input_shape[] = {1, 66}; + const int32_t input[] = { + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0}; + int output_shape[] = {1, kOutputLen}; + const int32_t golden[] = {256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + int32_t output[kOutputLen]; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT_INT32(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestFFT( + input_shape, input, output_shape, golden, *registration, + g_gen_data_fft_length_64_int32, + 
g_gen_data_size_fft_length_64_int32, output, 0)); +} + +TF_LITE_MICRO_TEST(IrfftTestLength64Int32OuterDims4) { + constexpr int kOutputLen = 64; + constexpr int kOuterDim = 2; + int input_shape[] = {3, kOuterDim, kOuterDim, 66}; + const int32_t input[] = { + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, + 256, 0, 256, 0, 256, 0, 256, 0, 256, 0, 256, 0}; + int output_shape[] = {3, kOuterDim, kOuterDim, kOutputLen}; + const int32_t golden[] = { + 256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 256, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; + int32_t output[kOuterDim * kOuterDim * kOutputLen]; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT_INT32(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestFFT( + input_shape, input, output_shape, golden, *registration, + g_gen_data_fft_length_64_int32, + g_gen_data_size_fft_length_64_int32, output, 0)); +} + +TF_LITE_MICRO_TEST(IrfftTestLength512Float) { + constexpr int kOutputLen = 512; + int input_shape[] = {1, 514}; + int output_shape[] = {1, kOutputLen}; + float output[kOutputLen]; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT_FLOAT(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, tflite::testing::TestFFT( + input_shape, tflite::kIrfftFloatLength512Input, + output_shape, tflite::kIrfftFloatLength512Golden, + *registration, g_gen_data_fft_length_512_float, + g_gen_data_size_fft_length_512_float, output, 1e-6)); +} + +TF_LITE_MICRO_TEST(IrfftTestLength512Int16) { + constexpr int kOutputLen = 512; + int input_shape[] = {1, 514}; + int output_shape[] = {1, kOutputLen}; + int16_t output[kOutputLen]; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT_INT16(); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::TestFFT( + input_shape, tflite::kIrfftInt16Length512Input, + output_shape, 
tflite::kIrfftInt16Length512Golden, + *registration, g_gen_data_fft_length_512_int16, + g_gen_data_size_fft_length_512_int16, output, 0)); +} + +TF_LITE_MICRO_TEST(IrfftTestLength512Int32) { + constexpr int kOutputLen = 512; + int input_shape[] = {1, 514}; + int output_shape[] = {1, kOutputLen}; + int32_t output[kOutputLen]; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT_INT32(); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, + tflite::testing::TestFFT( + input_shape, tflite::kIrfftInt32Length512Input, + output_shape, tflite::kIrfftInt32Length512Golden, + *registration, g_gen_data_fft_length_512_int32, + g_gen_data_size_fft_length_512_int32, output, 0)); +} + +TF_LITE_MICRO_TEST(FftAutoScaleTestSmall) { + constexpr int kTensorsSize = 8; + int shape[] = {1, 8}; + const int16_t input[] = {0x0000, 0x1111, 0x2222, 0x3333, + 0x3333, 0x2222, 0x1111, 0x0000}; + int16_t output[kTensorsSize]; + int scale_bit_shape[] = {0}; + int32_t scale_bit; + const int16_t golden[] = {0x0000, 0x2222, 0x4444, 0x6666, + 0x6666, 0x4444, 0x2222, 0x0000}; + const int32_t scale_bit_golden = 1; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_FFT_AUTO_SCALE(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFFTAutoScale( + shape, input, shape, golden, scale_bit_shape, scale_bit_golden, + *registration, nullptr, 0, output, &scale_bit)); +} + +TF_LITE_MICRO_TEST(FftAutoScaleTestScaleBit) { + constexpr int kTensorsSize = 8; + int shape[] = {1, 8}; + const int16_t input[] = {238, 113, -88, -243, -5, -130, 159, -70}; + int16_t output[kTensorsSize]; + int scale_bit_shape[] = {0}; + int32_t scale_bit; + const int16_t golden[] = {30464, 14464, -11264, -31104, + -640, -16640, 20352, -8960}; + const int32_t scale_bit_golden = 7; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_FFT_AUTO_SCALE(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFFTAutoScale( + shape, input, shape, golden, scale_bit_shape, scale_bit_golden, + *registration, nullptr, 0, output, &scale_bit)); +} + +TF_LITE_MICRO_TEST(FftAutoScaleTestLarge) { + constexpr int kTensorsSize = 400; + int shape[] = {1, kTensorsSize}; + int16_t output[kTensorsSize]; + int scale_bit_shape[] = {0}; + int32_t scale_bit; + + const int32_t scale_bit_golden = 0; + const TFLMRegistration* registration = + tflite::tflm_signal::Register_FFT_AUTO_SCALE(); + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFFTAutoScale( + shape, tflite::kFftAutoScaleLength512Input, shape, + tflite::kFftAutoScaleLength512Golden, scale_bit_shape, + scale_bit_golden, *registration, nullptr, 0, output, &scale_bit)); +} + TF_LITE_MICRO_TESTS_END diff --git a/signal/micro/kernels/filter_bank.cc b/signal/micro/kernels/filter_bank.cc new file mode 100644 index 00000000000..1cf08d22ce3 --- /dev/null +++ b/signal/micro/kernels/filter_bank.cc @@ -0,0 +1,177 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include "signal/src/filter_bank.h"
+
+#include <stdint.h>
+
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/flatbuffer_utils.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kWeightTensor = 1;
+constexpr int kUnweightTensor = 2;
+constexpr int kChFreqStartsTensor = 3;
+constexpr int kChWeightStartsTensor = 4;
+constexpr int kChannelWidthsTensor = 5;
+constexpr int kOutputTensor = 0;
+
+// Indices into the init flexbuffer's vector.
+// The parameter's name is in the comment that follows.
+// Elements in the vectors are ordered alphabetically by parameter name.
+constexpr int kNumChannelsIndex = 0;  // 'num_channels'
+
+struct TFLMSignalFilterBankParams {
+  tflm_signal::FilterbankConfig config;
+  uint64_t* work_area;
+};
+
+void* FilterBankInit(TfLiteContext* context, const char* buffer,
+                     size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+
+  auto* params = static_cast<TFLMSignalFilterBankParams*>(
+      context->AllocatePersistentBuffer(context,
+                                        sizeof(TFLMSignalFilterBankParams)));
+  if (params == nullptr) {
+    return nullptr;
+  }
+
+  tflite::FlexbufferWrapper fbw(reinterpret_cast<const uint8_t*>(buffer),
+                                length);
+  params->config.num_channels = fbw.ElementAsInt32(kNumChannelsIndex);
+
+  // One extra uint64_t: channel 0 of the work area is scratch (see Eval).
+  params->work_area = static_cast<uint64_t*>(context->AllocatePersistentBuffer(
+      context, (params->config.num_channels + 1) * sizeof(uint64_t)));
+
+  if (params->work_area == nullptr) {
+    return nullptr;
+  }
+
+  return params;
+}
+
+TfLiteStatus FilterBankPrepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 6);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
+
+  MicroContext* micro_context = GetMicroContext(context);
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteUInt32);
+  micro_context->DeallocateTempTfLiteTensor(input);
+
+  input = micro_context->AllocateTempInputTensor(node, kWeightTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  micro_context->DeallocateTempTfLiteTensor(input);
+
+  input = micro_context->AllocateTempInputTensor(node, kUnweightTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  micro_context->DeallocateTempTfLiteTensor(input);
+
+  input = micro_context->AllocateTempInputTensor(node, kChFreqStartsTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  micro_context->DeallocateTempTfLiteTensor(input);
+
+  input = micro_context->AllocateTempInputTensor(node, kChWeightStartsTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  micro_context->DeallocateTempTfLiteTensor(input);
+
+  input =
+      micro_context->AllocateTempInputTensor(node, kChannelWidthsTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  micro_context->DeallocateTempTfLiteTensor(input);
+
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteUInt64);
+  micro_context->DeallocateTempTfLiteTensor(output);
+
+  return kTfLiteOk;
+}
+
+TfLiteStatus FilterBankEval(TfLiteContext* context, TfLiteNode* node) {
+  auto* params =
+      reinterpret_cast<TFLMSignalFilterBankParams*>(node->user_data);
+
+  const TfLiteEvalTensor* input0 =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  const TfLiteEvalTensor* input1 =
+      tflite::micro::GetEvalInput(context, node, kWeightTensor);
+  const TfLiteEvalTensor* input2 =
+      tflite::micro::GetEvalInput(context, node, kUnweightTensor);
+  const TfLiteEvalTensor* input3 =
+      tflite::micro::GetEvalInput(context, node, kChFreqStartsTensor);
+  const TfLiteEvalTensor* input4 =
+      tflite::micro::GetEvalInput(context, node, kChWeightStartsTensor);
+  const TfLiteEvalTensor* input5 =
+      tflite::micro::GetEvalInput(context, node, kChannelWidthsTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+
+  params->config.weights = tflite::micro::GetTensorData<int16_t>(input1);
+  params->config.unweights = tflite::micro::GetTensorData<int16_t>(input2);
+  params->config.channel_frequency_starts =
+      tflite::micro::GetTensorData<int16_t>(input3);
+  params->config.channel_weight_starts =
+      tflite::micro::GetTensorData<int16_t>(input4);
+  params->config.channel_widths =
+      tflite::micro::GetTensorData<int16_t>(input5);
+
+  const uint32_t* input_data = tflite::micro::GetTensorData<uint32_t>(input0);
+  uint64_t* output_data = tflite::micro::GetTensorData<uint64_t>(output);
+  tflm_signal::FilterbankAccumulateChannels(&params->config, input_data,
+                                            params->work_area);
+
+  size_t output_size;
+  TfLiteTypeSizeOf(output->type, &output_size);
+  output_size *= ElementCount(*output->dims);
+  // Discard channel 0, which is just scratch
+  memcpy(output_data, params->work_area + 1, output_size);
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+namespace tflm_signal {
+
+TFLMRegistration* Register_FILTER_BANK() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      FilterBankInit, FilterBankPrepare, FilterBankEval);
+  return &r;
+}
+
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/filter_bank_flexbuffers_generated_data.cc b/signal/micro/kernels/filter_bank_flexbuffers_generated_data.cc
new file mode 100644
index 00000000000..be88488f214
--- /dev/null
+++ b/signal/micro/kernels/filter_bank_flexbuffers_generated_data.cc
@@ -0,0 +1,29 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// This file is generated.
See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/filter_bank_flexbuffers_generated_data.h" + +const int g_gen_data_size_filter_bank_32_channel = 23; +const unsigned char g_gen_data_filter_bank_32_channel[] = { + 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x73, + 0x00, 0x01, 0x0e, 0x01, 0x01, 0x01, 0x20, 0x04, 0x02, 0x24, 0x01, +}; +const int g_gen_data_size_filter_bank_16_channel = 23; +const unsigned char g_gen_data_filter_bank_16_channel[] = { + 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x73, + 0x00, 0x01, 0x0e, 0x01, 0x01, 0x01, 0x10, 0x04, 0x02, 0x24, 0x01, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/model.h b/signal/micro/kernels/filter_bank_flexbuffers_generated_data.h similarity index 54% rename from tensorflow/lite/micro/examples/micro_speech/simple_features/model.h rename to signal/micro/kernels/filter_bank_flexbuffers_generated_data.h index b3e705edd8a..59e74e7fe50 100644 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/model.h +++ b/signal/micro/kernels/filter_bank_flexbuffers_generated_data.h @@ -13,15 +13,13 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -// This is a standard TensorFlow Lite FlatBuffer model file that has been -// converted into a C data array, so it can be easily compiled into a binary -// for devices that don't have a file system. It was created using the command: -// xxd -i model.tflite > model.cc +#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_FLEXBUFFERS_DATA_H_ +#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_FLEXBUFFERS_DATA_H_ -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_MODEL_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_MODEL_H_ +extern const int g_gen_data_size_filter_bank_32_channel; +extern const unsigned char g_gen_data_filter_bank_32_channel[]; -extern const unsigned char g_model[]; -extern const int g_model_len; +extern const int g_gen_data_size_filter_bank_16_channel; +extern const unsigned char g_gen_data_filter_bank_16_channel[]; -#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_MODEL_H_ +#endif // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_FLEXBUFFERS_DATA_H_ diff --git a/signal/micro/kernels/filter_bank_log.cc b/signal/micro/kernels/filter_bank_log.cc new file mode 100644 index 00000000000..3d38e61ba1e --- /dev/null +++ b/signal/micro/kernels/filter_bank_log.cc @@ -0,0 +1,114 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include "signal/src/filter_bank_log.h"
+
+#include <stdint.h>
+
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/flatbuffer_utils.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kOutputTensor = 0;
+
+// Indices into the init flexbuffer's vector.
+// The parameter's name is in the comment that follows.
+// Elements in the vectors are ordered alphabetically by parameter name.
+constexpr int kInputCorrectionBitsIndex = 0;  // 'input_correction_bits'
+constexpr int kOutputScaleIndex = 1;          // 'output_scale'
+
+struct TFLMSignalLogParams {
+  int input_correction_bits;
+  int output_scale;
+};
+
+void* FilterBankLogInit(TfLiteContext* context, const char* buffer,
+                        size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+
+  auto* params = static_cast<TFLMSignalLogParams*>(
+      context->AllocatePersistentBuffer(context, sizeof(TFLMSignalLogParams)));
+
+  if (params == nullptr) {
+    return nullptr;
+  }
+  tflite::FlexbufferWrapper fbw(reinterpret_cast<const uint8_t*>(buffer),
+                                length);
+
+  params->input_correction_bits = fbw.ElementAsInt32(kInputCorrectionBitsIndex);
+  params->output_scale = fbw.ElementAsInt32(kOutputScaleIndex);
+  return params;
+}
+
+TfLiteStatus FilterBankLogPrepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
+
+  MicroContext* micro_context = GetMicroContext(context);
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE(context, output != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteUInt32);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt16);
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  return kTfLiteOk;
+}
+
+TfLiteStatus FilterBankLogEval(TfLiteContext* context, TfLiteNode* node) {
+  auto* params = reinterpret_cast<TFLMSignalLogParams*>(node->user_data);
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+
+  const uint32_t* input_data = tflite::micro::GetTensorData<uint32_t>(input);
+  int16_t* output_data = tflite::micro::GetTensorData<int16_t>(output);
+  int num_channels = input->dims->data[0];
+  tflm_signal::FilterbankLog(input_data, num_channels, params->output_scale,
+                             params->input_correction_bits, output_data);
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+namespace tflm_signal {
+
+TFLMRegistration* Register_FILTER_BANK_LOG() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      FilterBankLogInit, FilterBankLogPrepare, FilterBankLogEval);
+  return &r;
+}
+
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.cc b/signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.cc
new file mode 100644
index
00000000000..09d78795ccf --- /dev/null +++ b/signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.cc @@ -0,0 +1,38 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +// This file is generated. See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.h" + +const int g_gen_data_size_filter_bank_log_scale_1600_correction_bits_3 = 53; +const unsigned char g_gen_data_filter_bank_log_scale_1600_correction_bits_3[] = + { + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x63, 0x61, 0x6c, + 0x65, 0x00, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x72, + 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, + 0x73, 0x00, 0x02, 0x17, 0x25, 0x02, 0x00, 0x01, 0x00, 0x02, 0x00, + 0x03, 0x00, 0x40, 0x06, 0x05, 0x05, 0x06, 0x25, 0x01, +}; +const int g_gen_data_size_filter_bank_log_scale_32768_correction_bits_5 = 65; +const unsigned char g_gen_data_filter_bank_log_scale_32768_correction_bits_5[] = + { + 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x73, 0x63, 0x61, 0x6c, + 0x65, 0x00, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x5f, 0x63, 0x6f, 0x72, + 0x72, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, + 0x73, 0x00, 0x02, 0x17, 0x25, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, + 0x01, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, + 0x00, 0x00, 0x80, 0x00, 0x00, 0x06, 0x06, 0x0a, 0x26, 0x01, +}; diff --git a/signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.h b/signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.h new file mode 100644 index 00000000000..dbc3bd92e59 --- /dev/null +++ b/signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.h @@ -0,0 +1,27 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_LOG_FLEXBUFFERS_DATA_H_
+#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_LOG_FLEXBUFFERS_DATA_H_
+
+extern const int g_gen_data_size_filter_bank_log_scale_1600_correction_bits_3;
+extern const unsigned char
+    g_gen_data_filter_bank_log_scale_1600_correction_bits_3[];
+
+extern const int g_gen_data_size_filter_bank_log_scale_32768_correction_bits_5;
+extern const unsigned char
+    g_gen_data_filter_bank_log_scale_32768_correction_bits_5[];
+
+#endif  // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_LOG_FLEXBUFFERS_DATA_H_
diff --git a/signal/micro/kernels/filter_bank_log_test.cc b/signal/micro/kernels/filter_bank_log_test.cc
new file mode 100644
index 00000000000..546c5d699ef
--- /dev/null
+++ b/signal/micro/kernels/filter_bank_log_test.cc
@@ -0,0 +1,116 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stdint.h>
+
+#include "signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+namespace {
+
+TfLiteStatus TestFilterBankLog(int* input_dims_data, const uint32_t* input_data,
+                               int* output_dims_data, const int16_t* golden,
+                               const uint8_t* flexbuffers_data,
+                               const int flexbuffers_data_len,
+                               int16_t* output_data) {
+  TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data);
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data);
+
+  constexpr int kInputsSize = 1;
+  constexpr int kOutputsSize = 1;
+  constexpr int kTensorsSize = kInputsSize + kOutputsSize;
+  TfLiteTensor tensors[kTensorsSize] = {
+      CreateTensor(input_data, input_dims),
+      CreateTensor(output_data, output_dims),
+  };
+
+  int inputs_array_data[] = {1, 0};
+  TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data);
+  int outputs_array_data[] = {1, 1};
+  TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data);
+
+  const int output_len = ElementCount(*output_dims);
+
+  TFLMRegistration* registration =
+      tflite::tflm_signal::Register_FILTER_BANK_LOG();
+  micro::KernelRunner runner(*registration, tensors, kTensorsSize, inputs_array,
+                             outputs_array,
+                             /*builtin_data=*/nullptr);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
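+  // For reference, the generated blobs used below decode to flexbuffer maps
+  // of the form {input_correction_bits: 3, output_scale: 1600} and
+  // {input_correction_bits: 5, output_scale: 32768}, matching their names.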
+  TF_LITE_ENSURE_STATUS(runner.InitAndPrepare(
+      reinterpret_cast<const char*>(flexbuffers_data), flexbuffers_data_len));
+
+  TF_LITE_ENSURE_STATUS(runner.Invoke());
+
+  for (int i = 0; i < output_len; ++i) {
+    TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]);
+  }
+
+  return kTfLiteOk;
+}
+
+}  // namespace
+}  // namespace testing
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(FilterBankLogTest32Channel) {
+  int input_shape[] = {1, 32};
+  int output_shape[] = {1, 32};
+  const uint32_t input[] = {29, 21, 29, 40, 19, 11, 13, 23, 13, 11, 25,
+                            17, 5,  4,  46, 14, 17, 14, 20, 14, 10, 10,
+                            15, 11, 17, 12, 15, 16, 19, 18, 6,  2};
+  const int16_t golden[] = {8715, 8198, 8715, 9229, 8038, 7164, 7431, 8344,
+                            7431, 7164, 8477, 7860, 5902, 5545, 9453, 7550,
+                            7860, 7550, 8120, 7550, 7011, 7011, 7660, 7164,
+                            7860, 7303, 7660, 7763, 8038, 7952, 6194, 4436};
+  int16_t output[32];
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk,
+      tflite::testing::TestFilterBankLog(
+          input_shape, input, output_shape, golden,
+          g_gen_data_filter_bank_log_scale_1600_correction_bits_3,
+          g_gen_data_size_filter_bank_log_scale_1600_correction_bits_3,
+          output));
+}
+
+TF_LITE_MICRO_TEST(FilterBankLogTest16Channel) {
+  int input_shape[] = {1, 16};
+  int output_shape[] = {1, 16};
+  const uint32_t input[] = {48, 20, 19, 24, 35, 47, 23, 30,
+                            31, 10, 48, 21, 46, 14, 18, 27};
+  const int16_t golden[] = {32767, 15121, 13440, 21095, 32767, 32767,
+                            19701, 28407, 29482, 32767, 32767, 16720,
+                            32767, 3434,  11669, 24955};
+  int16_t output[16];
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk,
+      tflite::testing::TestFilterBankLog(
+          input_shape, input, output_shape, golden,
+          g_gen_data_filter_bank_log_scale_32768_correction_bits_5,
+          g_gen_data_size_filter_bank_log_scale_32768_correction_bits_5,
+          output));
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/signal/micro/kernels/filter_bank_spectral_subtraction.cc b/signal/micro/kernels/filter_bank_spectral_subtraction.cc
new file mode 100644
index 00000000000..e0693235dbd
--- /dev/null
+++ b/signal/micro/kernels/filter_bank_spectral_subtraction.cc
@@ -0,0 +1,184 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/filter_bank_spectral_subtraction.h"
+
+#include <stdint.h>
+
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/flatbuffer_utils.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kOutputTensor = 0;
+constexpr int kNoiseEstimateTensor = 1;
+
+// Indices into the init flexbuffer's vector.
+// The parameter's name is in the comment that follows.
+// Elements in the vectors are ordered alphabetically by parameter name.
+// 'alternate_one_minus_smoothing'
+constexpr int kAlternateOneMinusSmoothingIndex = 0;
+constexpr int kAlternateSmoothingIndex = 1;       // 'alternate_smoothing'
+constexpr int kClampingIndex = 2;                 // 'clamping'
+constexpr int kMinSignalRemainingIndex = 3;       // 'min_signal_remaining'
+constexpr int kNumChannelsIndex = 4;              // 'num_channels'
+constexpr int kOneMinusSmoothingIndex = 5;        // 'one_minus_smoothing'
+constexpr int kSmoothingIndex = 6;                // 'smoothing'
+constexpr int kSmoothingBitsIndex = 7;            // 'smoothing_bits'
+constexpr int kSpectralSubtractionBitsIndex = 8;  // 'spectral_subtraction_bits'
+
+struct TFLMSignalSpectralSubtractionParams {
+  tflm_signal::SpectralSubtractionConfig config;
+  uint32_t* noise_estimate;
+  size_t noise_estimate_size;
+};
+
+void FilterBankSpectralSubtractionResetState(
+    TFLMSignalSpectralSubtractionParams* params) {
+  memset(params->noise_estimate, 0,
+         sizeof(uint32_t) * params->config.num_channels);
+}
+
+void* FilterBankSpectralSubtractionInit(TfLiteContext* context,
+                                        const char* buffer, size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+
+  auto* params = static_cast<TFLMSignalSpectralSubtractionParams*>(
+      context->AllocatePersistentBuffer(
+          context, sizeof(TFLMSignalSpectralSubtractionParams)));
+
+  if (params == nullptr) {
+    return nullptr;
+  }
+
+  tflite::FlexbufferWrapper fbw(reinterpret_cast<const uint8_t*>(buffer),
+                                length);
+  params->config.alternate_one_minus_smoothing =
+      fbw.ElementAsInt32(kAlternateOneMinusSmoothingIndex);
+  params->config.alternate_smoothing =
+      fbw.ElementAsInt32(kAlternateSmoothingIndex);
+  params->config.clamping = fbw.ElementAsBool(kClampingIndex);
+  params->config.min_signal_remaining =
+      fbw.ElementAsInt32(kMinSignalRemainingIndex);
+  params->config.num_channels = fbw.ElementAsInt32(kNumChannelsIndex);
+  params->config.one_minus_smoothing =
+      fbw.ElementAsInt32(kOneMinusSmoothingIndex);
+  params->config.smoothing = fbw.ElementAsInt32(kSmoothingIndex);
+  params->config.smoothing_bits = fbw.ElementAsInt32(kSmoothingBitsIndex);
+  params->config.spectral_subtraction_bits =
+      fbw.ElementAsInt32(kSpectralSubtractionBitsIndex);
+  params->noise_estimate =
+      static_cast<uint32_t*>(context->AllocatePersistentBuffer(
+          context, params->config.num_channels * sizeof(uint32_t)));
+
+  if (params->noise_estimate == nullptr) {
+    return nullptr;
+  }
+
+  return params;
+}
+
+TfLiteStatus FilterBankSpectralSubtractionPrepare(TfLiteContext* context,
+                                                  TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 2);
+
+  MicroContext* micro_context = GetMicroContext(context);
+
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TfLiteTensor* noise_estimate =
+      micro_context->AllocateTempOutputTensor(node, kNoiseEstimateTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TF_LITE_ENSURE(context, output != nullptr);
+  TF_LITE_ENSURE(context, noise_estimate != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(noise_estimate), 1);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteUInt32);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteUInt32);
+  TF_LITE_ENSURE_TYPES_EQ(context, noise_estimate->type, kTfLiteUInt32);
+
+  auto* params =
+      reinterpret_cast<TFLMSignalSpectralSubtractionParams*>(node->user_data);
+
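+  // Cache the byte size of the noise-estimate output (element count *
+  // sizeof(uint32_t)); Eval() memcpy's the persistent estimate into it.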
+  TfLiteTypeSizeOf(output->type, &params->noise_estimate_size);
+  params->noise_estimate_size *= ElementCount(*noise_estimate->dims);
+
+  FilterBankSpectralSubtractionResetState(params);
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  micro_context->DeallocateTempTfLiteTensor(noise_estimate);
+  return kTfLiteOk;
+}
+
+TfLiteStatus FilterBankSpectralSubtractionEval(TfLiteContext* context,
+                                               TfLiteNode* node) {
+  auto* params =
+      reinterpret_cast<TFLMSignalSpectralSubtractionParams*>(node->user_data);
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+  TfLiteEvalTensor* noise_estimate =
+      tflite::micro::GetEvalOutput(context, node, kNoiseEstimateTensor);
+
+  const uint32_t* input_data = tflite::micro::GetTensorData<uint32_t>(input);
+  uint32_t* output_data = tflite::micro::GetTensorData<uint32_t>(output);
+  uint32_t* noise_estimate_data =
+      tflite::micro::GetTensorData<uint32_t>(noise_estimate);
+
+  tflm_signal::FilterbankSpectralSubtraction(
+      &params->config, input_data, output_data, params->noise_estimate);
+
+  memcpy(noise_estimate_data, params->noise_estimate,
+         params->noise_estimate_size);
+
+  return kTfLiteOk;
+}
+
+void FilterBankSpectralSubtractionReset(TfLiteContext* context, void* buffer) {
+  FilterBankSpectralSubtractionResetState(
+      static_cast<TFLMSignalSpectralSubtractionParams*>(buffer));
+}
+
+}  // namespace
+
+namespace tflm_signal {
+
+TFLMRegistration* Register_FILTER_BANK_SPECTRAL_SUBTRACTION() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      FilterBankSpectralSubtractionInit, FilterBankSpectralSubtractionPrepare,
+      FilterBankSpectralSubtractionEval,
+      /*Free*/ nullptr, FilterBankSpectralSubtractionReset);
+  return &r;
+}
+
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.cc b/signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.cc
new file mode 100644
index 00000000000..d085dd22ab6
--- /dev/null
+++ b/signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.cc
@@ -0,0 +1,61 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// This file is generated.
See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.h" + +const int g_gen_data_size_filter_bank_spectral_subtraction_32_channel = 210; +const unsigned char g_gen_data_filter_bank_spectral_subtraction_32_channel[] = { + 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x73, + 0x00, 0x73, 0x6d, 0x6f, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x00, 0x6f, + 0x6e, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x75, 0x73, 0x5f, 0x73, 0x6d, 0x6f, + 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x00, 0x61, 0x6c, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6d, 0x6f, 0x6f, 0x74, 0x68, 0x69, + 0x6e, 0x67, 0x00, 0x61, 0x6c, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x74, 0x65, + 0x5f, 0x6f, 0x6e, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x75, 0x73, 0x5f, 0x73, + 0x6d, 0x6f, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x00, 0x73, 0x6d, 0x6f, + 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x00, + 0x6d, 0x69, 0x6e, 0x5f, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x5f, 0x72, + 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x00, 0x63, 0x6c, 0x61, + 0x6d, 0x70, 0x69, 0x6e, 0x67, 0x00, 0x73, 0x70, 0x65, 0x63, 0x74, 0x72, + 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x62, 0x74, 0x72, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x00, 0x09, 0x66, 0x7b, 0x26, + 0x3c, 0xa9, 0x93, 0x9e, 0x4f, 0x23, 0x09, 0x00, 0x01, 0x00, 0x09, 0x00, + 0x71, 0x3d, 0x8f, 0x02, 0x00, 0x00, 0x33, 0x03, 0x20, 0x00, 0x71, 0x3d, + 0x8f, 0x02, 0x00, 0x00, 0x0e, 0x00, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, + 0x05, 0x05, 0x05, 0x1b, 0x25, 0x01, +}; +const int g_gen_data_size_filter_bank_spectral_subtraction_16_channel = 210; +const unsigned char g_gen_data_filter_bank_spectral_subtraction_16_channel[] = { + 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, 0x73, + 0x00, 0x73, 0x6d, 0x6f, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x00, 0x6f, + 0x6e, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x75, 0x73, 0x5f, 0x73, 0x6d, 0x6f, + 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x00, 0x61, 0x6c, 0x74, 0x65, 0x72, + 0x6e, 0x61, 0x74, 0x65, 0x5f, 0x73, 0x6d, 0x6f, 0x6f, 0x74, 0x68, 0x69, + 0x6e, 0x67, 0x00, 0x61, 0x6c, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x74, 0x65, + 0x5f, 0x6f, 0x6e, 0x65, 0x5f, 0x6d, 0x69, 0x6e, 0x75, 0x73, 0x5f, 0x73, + 0x6d, 0x6f, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x00, 0x73, 0x6d, 0x6f, + 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x00, + 0x6d, 0x69, 0x6e, 0x5f, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x6c, 0x5f, 0x72, + 0x65, 0x6d, 0x61, 0x69, 0x6e, 0x69, 0x6e, 0x67, 0x00, 0x63, 0x6c, 0x61, + 0x6d, 0x70, 0x69, 0x6e, 0x67, 0x00, 0x73, 0x70, 0x65, 0x63, 0x74, 0x72, + 0x61, 0x6c, 0x5f, 0x73, 0x75, 0x62, 0x74, 0x72, 0x61, 0x63, 0x74, 0x69, + 0x6f, 0x6e, 0x5f, 0x62, 0x69, 0x74, 0x73, 0x00, 0x09, 0x66, 0x7b, 0x26, + 0x3c, 0xa9, 0x93, 0x9e, 0x4f, 0x23, 0x09, 0x00, 0x01, 0x00, 0x09, 0x00, + 0x71, 0x3d, 0x8f, 0x02, 0x00, 0x00, 0x33, 0x03, 0x10, 0x00, 0x71, 0x3d, + 0x8f, 0x02, 0x00, 0x00, 0x0e, 0x00, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, + 0x05, 0x05, 0x05, 0x1b, 0x25, 0x01, +}; diff --git a/signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.h b/signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.h new file mode 100644 index 00000000000..175a14e2091 --- /dev/null +++ b/signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.h @@ -0,0 +1,26 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. 
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_SPECTRAL_SUBTRACTION_FLEXBUFFERS_DATA_H_
+#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_SPECTRAL_SUBTRACTION_FLEXBUFFERS_DATA_H_
+
+extern const int g_gen_data_size_filter_bank_spectral_subtraction_32_channel;
+extern const unsigned char
+    g_gen_data_filter_bank_spectral_subtraction_32_channel[];
+extern const int g_gen_data_size_filter_bank_spectral_subtraction_16_channel;
+extern const unsigned char
+    g_gen_data_filter_bank_spectral_subtraction_16_channel[];
+
+#endif  // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FILTER_BANK_SPECTRAL_SUBTRACTION_FLEXBUFFERS_DATA_H_
diff --git a/signal/micro/kernels/filter_bank_spectral_subtraction_test.cc b/signal/micro/kernels/filter_bank_spectral_subtraction_test.cc
new file mode 100644
index 00000000000..885995cddd6
--- /dev/null
+++ b/signal/micro/kernels/filter_bank_spectral_subtraction_test.cc
@@ -0,0 +1,239 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stdint.h>
+
+#include "signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+namespace {
+
+constexpr int kInputsSize = 1;
+constexpr int kOutputsSize = 2;
+constexpr int kTensorsSize = kInputsSize + kOutputsSize;
+
+// Specialized KernelRunner for testing the filter bank spectral subtraction
+// op.
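+// Usage sketch: construct one with the tensor shapes and buffers of a test
+// case, then drive kernel_runner().InitAndPrepare() and Invoke() as the
+// helper functions below do.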
+class FilterBankSpectralSubtractKernelRunner {
+ public:
+  explicit FilterBankSpectralSubtractKernelRunner(int* input_dims_data,
+                                                  const uint32_t* input_data,
+                                                  int* output_dims_data,
+                                                  uint32_t* output_data1,
+                                                  uint32_t* output_data2)
+      : inputs_array_(IntArrayFromInts(inputs_array_data_)),
+        outputs_array_(IntArrayFromInts(outputs_array_data_)),
+        kernel_runner_(*registration_, tensors_, kTensorsSize, inputs_array_,
+                       outputs_array_, nullptr) {
+    tensors_[0] = tflite::testing::CreateTensor(
+        input_data, tflite::testing::IntArrayFromInts(input_dims_data));
+
+    tensors_[1] = tflite::testing::CreateTensor(
+        output_data1, tflite::testing::IntArrayFromInts(output_dims_data));
+
+    tensors_[2] = tflite::testing::CreateTensor(
+        output_data2, tflite::testing::IntArrayFromInts(output_dims_data));
+  }
+
+  tflite::micro::KernelRunner& kernel_runner() { return kernel_runner_; }
+
+ private:
+  int inputs_array_data_[kInputsSize + 1] = {1, 0};
+  int outputs_array_data_[kOutputsSize + 1] = {2, 1, 2};
+  TfLiteTensor tensors_[kTensorsSize] = {};
+  TfLiteIntArray* inputs_array_ = nullptr;
+  TfLiteIntArray* outputs_array_ = nullptr;
+  TFLMRegistration* registration_ =
+      tflite::tflm_signal::Register_FILTER_BANK_SPECTRAL_SUBTRACTION();
+  tflite::micro::KernelRunner kernel_runner_;
+};
+
+TfLiteStatus TestFilterBankSpectralSubtractionInvoke(
+    int* output_dims_data, const uint32_t* golden1, const uint32_t* golden2,
+    uint32_t* output1_data, uint32_t* output2_data,
+    tflite::micro::KernelRunner& kernel_runner) {
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data);
+  const int output_len = ElementCount(*output_dims);
+
+  TF_LITE_ENSURE_STATUS(kernel_runner.Invoke());
+
+  for (int i = 0; i < output_len; ++i) {
+    TF_LITE_MICRO_EXPECT_EQ(golden1[i], output1_data[i]);
+    TF_LITE_MICRO_EXPECT_EQ(golden2[i], output2_data[i]);
+  }
+
+  return kTfLiteOk;
+}
+
+TfLiteStatus TestFilterBankSpectralSubtraction(
+    int* input_dims_data, const uint32_t* input_data, int* output_dims_data,
+    const uint32_t* golden1, const uint32_t* golden2,
+    const uint8_t* flexbuffers_data, const int flexbuffers_data_len,
+    uint32_t* output1_data, uint32_t* output2_data) {
+  FilterBankSpectralSubtractKernelRunner filter_bank_spectral_subtract_runner(
+      input_dims_data, input_data, output_dims_data, output1_data,
+      output2_data);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_ENSURE_STATUS(
+      filter_bank_spectral_subtract_runner.kernel_runner().InitAndPrepare(
+          reinterpret_cast<const char*>(flexbuffers_data),
+          flexbuffers_data_len));
+
+  TF_LITE_ENSURE_STATUS(TestFilterBankSpectralSubtractionInvoke(
+      output_dims_data, golden1, golden2, output1_data, output2_data,
+      filter_bank_spectral_subtract_runner.kernel_runner()));
+
+  return kTfLiteOk;
+}
+
+TfLiteStatus TestFilterBankSpectralSubtractionReset(
+    int* input_dims_data, const uint32_t* input_data, int* output_dims_data,
+    const uint32_t* golden1, const uint32_t* golden2,
+    const uint8_t* flexbuffers_data, const int flexbuffers_data_len,
+    uint32_t* output1_data, uint32_t* output2_data) {
+  FilterBankSpectralSubtractKernelRunner filter_bank_spectral_subtract_runner(
+      input_dims_data, input_data, output_dims_data, output1_data,
+      output2_data);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
This small discrepancy results in compiler warnings unless we + // reinterpret_cast right before passing in the flexbuffer bytes to the + // KernelRunner. + TF_LITE_ENSURE_STATUS( + filter_bank_spectral_subtract_runner.kernel_runner().InitAndPrepare( + reinterpret_cast(flexbuffers_data), + flexbuffers_data_len)); + + TF_LITE_ENSURE_STATUS(TestFilterBankSpectralSubtractionInvoke( + output_dims_data, golden1, golden2, output1_data, output2_data, + filter_bank_spectral_subtract_runner.kernel_runner())); + filter_bank_spectral_subtract_runner.kernel_runner().Reset(); + TF_LITE_ENSURE_STATUS(TestFilterBankSpectralSubtractionInvoke( + output_dims_data, golden1, golden2, output1_data, output2_data, + filter_bank_spectral_subtract_runner.kernel_runner())); + + return kTfLiteOk; +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FilterBankSpectralSubtractionTest32Channel) { + int input_shape[] = {1, 32}; + int output_shape[] = {1, 32}; + + const uint32_t input[] = {322, 308, 210, 212, 181, 251, 403, 259, 65, 48, 76, + 48, 50, 46, 53, 52, 112, 191, 136, 59, 70, 51, + 39, 64, 33, 44, 41, 49, 74, 107, 262, 479}; + const uint32_t golden1[] = {310, 296, 202, 204, 174, 241, 387, 249, + 63, 47, 73, 47, 49, 45, 51, 50, + 108, 184, 131, 57, 68, 49, 38, 62, + 32, 43, 40, 48, 72, 103, 252, 460}; + const uint32_t golden2[] = {12, 12, 8, 8, 7, 10, 16, 10, 2, 1, 3, + 1, 1, 1, 2, 2, 4, 7, 5, 2, 2, 2, + 1, 2, 1, 1, 1, 1, 2, 4, 10, 19}; + uint32_t output1[32]; + uint32_t output2[32]; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFilterBankSpectralSubtraction( + input_shape, input, output_shape, golden1, golden2, + g_gen_data_filter_bank_spectral_subtraction_32_channel, + g_gen_data_size_filter_bank_spectral_subtraction_32_channel, output1, + output2)); +} + +TF_LITE_MICRO_TEST(FilterBankSpectralSubtractionTest16Channel) { + int input_shape[] = {1, 16}; + int output_shape[] = {1, 16}; + + const uint32_t input[] = {393, 213, 408, 1, 361, 385, 386, 326, + 170, 368, 368, 305, 152, 322, 213, 319}; + const uint32_t golden1[] = {378, 205, 392, 1, 347, 370, 371, 313, + 164, 354, 354, 293, 146, 310, 205, 307}; + const uint32_t golden2[] = {15, 8, 16, 0, 14, 15, 15, 13, + 6, 14, 14, 12, 6, 12, 8, 12}; + uint32_t output1[32]; + uint32_t output2[32]; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFilterBankSpectralSubtraction( + input_shape, input, output_shape, golden1, golden2, + g_gen_data_filter_bank_spectral_subtraction_16_channel, + g_gen_data_size_filter_bank_spectral_subtraction_16_channel, output1, + output2)); +} + +TF_LITE_MICRO_TEST(FilterBankSpectralSubtractionTest32ChannelReset) { + int input_shape[] = {1, 32}; + int output_shape[] = {1, 32}; + + const uint32_t input[] = {322, 308, 210, 212, 181, 251, 403, 259, 65, 48, 76, + 48, 50, 46, 53, 52, 112, 191, 136, 59, 70, 51, + 39, 64, 33, 44, 41, 49, 74, 107, 262, 479}; + const uint32_t golden1[] = {310, 296, 202, 204, 174, 241, 387, 249, + 63, 47, 73, 47, 49, 45, 51, 50, + 108, 184, 131, 57, 68, 49, 38, 62, + 32, 43, 40, 48, 72, 103, 252, 460}; + const uint32_t golden2[] = {12, 12, 8, 8, 7, 10, 16, 10, 2, 1, 3, + 1, 1, 1, 2, 2, 4, 7, 5, 2, 2, 2, + 1, 2, 1, 1, 1, 1, 2, 4, 10, 19}; + + uint32_t output1[32]; + uint32_t output2[32]; + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFilterBankSpectralSubtractionReset( + input_shape, input, output_shape, golden1, golden2, + g_gen_data_filter_bank_spectral_subtraction_32_channel, + 
g_gen_data_size_filter_bank_spectral_subtraction_32_channel, output1,
+          output2));
+}
+
+TF_LITE_MICRO_TEST(FilterBankSpectralSubtractionTest16ChannelReset) {
+  int input_shape[] = {1, 16};
+  int output_shape[] = {1, 16};
+
+  const uint32_t input[] = {393, 213, 408, 1,   361, 385, 386, 326,
+                            170, 368, 368, 305, 152, 322, 213, 319};
+  const uint32_t golden1[] = {378, 205, 392, 1,   347, 370, 371, 313,
+                              164, 354, 354, 293, 146, 310, 205, 307};
+  const uint32_t golden2[] = {15, 8,  16, 0,  14, 15, 15, 13,
+                              6,  14, 14, 12, 6,  12, 8,  12};
+  uint32_t output1[32];
+  uint32_t output2[32];
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk,
+      tflite::testing::TestFilterBankSpectralSubtractionReset(
+          input_shape, input, output_shape, golden1, golden2,
+          g_gen_data_filter_bank_spectral_subtraction_16_channel,
+          g_gen_data_size_filter_bank_spectral_subtraction_16_channel, output1,
+          output2));
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/signal/micro/kernels/filter_bank_square_root.cc b/signal/micro/kernels/filter_bank_square_root.cc
new file mode 100644
index 00000000000..bd7eff99748
--- /dev/null
+++ b/signal/micro/kernels/filter_bank_square_root.cc
@@ -0,0 +1,65 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/filter_bank_square_root.h"
+
+#include <stdint.h>
+
+#include "signal/micro/kernels/filter_bank_square_root.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kScaleBitsTensor = 1;
+constexpr int kOutputTensor = 0;
+
+TfLiteStatus FilterBankSquareRootEval(TfLiteContext* context,
+                                      TfLiteNode* node) {
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  const TfLiteEvalTensor* scale_bits =
+      tflite::micro::GetEvalInput(context, node, kScaleBitsTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+
+  const uint64_t* input_data = tflite::micro::GetTensorData<uint64_t>(input);
+  const int32_t* scale_bits_data =
+      tflite::micro::GetTensorData<int32_t>(scale_bits);
+  uint32_t* output_data = tflite::micro::GetTensorData<uint32_t>(output);
+  int32_t num_channels = input->dims->data[0];
+  tflm_signal::FilterbankSqrt(input_data, num_channels, *scale_bits_data,
+                              output_data);
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+namespace tflm_signal {
+
+TFLMRegistration* Register_FILTER_BANK_SQUARE_ROOT() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      nullptr, FilterBankSquareRootPrepare, FilterBankSquareRootEval);
+  return &r;
+}
+
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/filter_bank_square_root.h b/signal/micro/kernels/filter_bank_square_root.h
new file mode 100644
index
00000000000..25b67796b9e --- /dev/null +++ b/signal/micro/kernels/filter_bank_square_root.h @@ -0,0 +1,27 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef SIGNAL_MICRO_KERNELS_FILTER_BANK_SQUARE_ROOT_H_ +#define SIGNAL_MICRO_KERNELS_FILTER_BANK_SQUARE_ROOT_H_ + +#include "tensorflow/lite/c/common.h" + +namespace tflite { + +TfLiteStatus FilterBankSquareRootPrepare(TfLiteContext* context, + TfLiteNode* node); + +} // namespace tflite + +#endif // SIGNAL_MICRO_KERNELS_FILTER_BANK_SQUARE_ROOT_H_ diff --git a/signal/micro/kernels/filter_bank_square_root_common.cc b/signal/micro/kernels/filter_bank_square_root_common.cc new file mode 100644 index 00000000000..b4309015726 --- /dev/null +++ b/signal/micro/kernels/filter_bank_square_root_common.cc @@ -0,0 +1,56 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ +#include "signal/micro/kernels/filter_bank_square_root.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" + +namespace tflite { + +constexpr int kInputTensor = 0; +constexpr int kScaleBitsTensor = 1; +constexpr int kOutputTensor = 0; + +TfLiteStatus FilterBankSquareRootPrepare(TfLiteContext* context, + TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TfLiteTensor* scale_bits = + micro_context->AllocateTempInputTensor(node, kScaleBitsTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TF_LITE_ENSURE(context, scale_bits != nullptr); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1); + TF_LITE_ENSURE_EQ(context, NumDimensions(scale_bits), 0); + TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteUInt64); + TF_LITE_ENSURE_TYPES_EQ(context, scale_bits->type, kTfLiteInt32); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteUInt32); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(scale_bits); + return kTfLiteOk; +} + +} // namespace tflite diff --git a/signal/micro/kernels/filter_bank_square_root_test.cc b/signal/micro/kernels/filter_bank_square_root_test.cc new file mode 100644 index 00000000000..5ae50b53371 --- /dev/null +++ b/signal/micro/kernels/filter_bank_square_root_test.cc @@ -0,0 +1,117 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include <stdint.h>
+
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+namespace {
+
+TfLiteStatus TestFilterBankSquareRoot(
+    int* input1_dims_data, const uint64_t* input1_data, int* input2_dims_data,
+    const int32_t* input2_data, int* output_dims_data, const uint32_t* golden,
+    uint32_t* output_data) {
+  TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data);
+  TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data);
+  TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data);
+
+  constexpr int kInputsSize = 2;
+  constexpr int kOutputsSize = 1;
+  constexpr int kTensorsSize = kInputsSize + kOutputsSize;
+  TfLiteTensor tensors[kTensorsSize] = {
+      CreateTensor(input1_data, input1_dims),
+      CreateTensor(input2_data, input2_dims),
+      CreateTensor(output_data, output_dims),
+  };
+
+  int inputs_array_data[] = {2, 0, 1};
+  TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data);
+  int outputs_array_data[] = {1, 2};
+  TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data);
+
+  const int output_len = ElementCount(*output_dims);
+
+  TFLMRegistration* registration =
+      tflite::tflm_signal::Register_FILTER_BANK_SQUARE_ROOT();
+  micro::KernelRunner runner(*registration, tensors, kTensorsSize,
+                             inputs_array, outputs_array,
+                             /*builtin_data=*/nullptr);
+
+  // This op takes no flexbuffer init params, so there are no raw bytes to
+  // hand to the KernelRunner.
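+  // For reference, the op's math is expected to be an integer square root of
+  // each uint64 energy followed by a right shift by *scale_bits (a sketch
+  // inferred from the golden data below, not necessarily the exact rounding):
+  //
+  //   out[i] = static_cast<uint32_t>(
+  //                std::sqrt(static_cast<double>(in[i]))) >> scale_bits;
+  //
+  // e.g. in the 32-channel test: isqrt(10528000193) == 102606, and
+  // 102606 >> 7 == 801 == golden[0].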
+ TF_LITE_ENSURE_STATUS(runner.InitAndPrepare(nullptr, 0)); + + TF_LITE_ENSURE_STATUS(runner.Invoke()); + + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]); + } + + return kTfLiteOk; +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FilterBankSquareRoot32Channel) { + int input1_shape[] = {1, 32}; + int input2_shape[] = {0}; + int output_shape[] = {1, 32}; + const uint64_t input1[] = { + 10528000193, 28362909357, 47577133750, 8466055850, 5842710800, 2350911449, + 2989811430, 2646718839, 515262774, 276394561, 469831522, 55815334, + 28232446, 11591835, 40329249, 67658028, 183446654, 323189165, + 117473797, 41339272, 25846050, 12428673, 18670978, 22521722, + 78477733, 54207503, 25150296, 43098592, 28211625, 15736687, + 20990296, 17907031}; + const int32_t input2[] = {7}; + const uint32_t golden[] = {801, 1315, 1704, 718, 597, 378, 427, 401, + 177, 129, 169, 58, 41, 26, 49, 64, + 105, 140, 84, 50, 39, 27, 33, 37, + 69, 57, 39, 51, 41, 30, 35, 33}; + uint32_t output[32]; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, tflite::testing::TestFilterBankSquareRoot( + input1_shape, input1, input2_shape, + input2, output_shape, golden, output)); +} + +TF_LITE_MICRO_TEST(FilterBankSquareRoot16Channel) { + int input1_shape[] = {1, 16}; + int input2_shape[] = {0}; + int output_shape[] = {1, 16}; + const uint64_t input1[] = { + 13051415151, 14932650877, 18954728418, 8730126017, + 6529665275, 12952546517, 10314975609, 8919697835, + 8053663348, 17231208421, 7366899760, 1372112200, + 19953434807, 17012385332, 4710443222, 17765594053}; + const int32_t input2[] = {5}; + const uint32_t golden[] = {3570, 3818, 4302, 2919, 2525, 3556, 3173, 2951, + 2804, 4102, 2682, 1157, 4414, 4076, 2144, 4165}; + uint32_t output[16]; + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, tflite::testing::TestFilterBankSquareRoot( + input1_shape, input1, input2_shape, + input2, output_shape, golden, output)); +} + +TF_LITE_MICRO_TESTS_END diff --git a/signal/micro/kernels/filter_bank_test.cc b/signal/micro/kernels/filter_bank_test.cc new file mode 100644 index 00000000000..924fdca6a9f --- /dev/null +++ b/signal/micro/kernels/filter_bank_test.cc @@ -0,0 +1,261 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include + +#include "signal/micro/kernels/filter_bank_flexbuffers_generated_data.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace testing { +namespace { + +TfLiteStatus TestFilterBank(int* input1_dims_data, const uint32_t* input1_data, + int* input2_dims_data, const int16_t* input2_data, + int* input3_dims_data, const int16_t* input3_data, + int* input4_dims_data, const int16_t* input4_data, + int* input5_dims_data, const int16_t* input5_data, + int* input6_dims_data, const int16_t* input6_data, + int* output_dims_data, const uint64_t* golden, + const uint8_t* flexbuffers_data, + const int flexbuffers_data_len, + uint64_t* output_data) { + TfLiteIntArray* input1_dims = IntArrayFromInts(input1_dims_data); + TfLiteIntArray* input2_dims = IntArrayFromInts(input2_dims_data); + TfLiteIntArray* input3_dims = IntArrayFromInts(input3_dims_data); + TfLiteIntArray* input4_dims = IntArrayFromInts(input4_dims_data); + TfLiteIntArray* input5_dims = IntArrayFromInts(input5_dims_data); + TfLiteIntArray* input6_dims = IntArrayFromInts(input6_dims_data); + TfLiteIntArray* output_dims = IntArrayFromInts(output_dims_data); + + constexpr int kInputsSize = 6; + constexpr int kOutputsSize = 1; + constexpr int kTensorsSize = kInputsSize + kOutputsSize; + TfLiteTensor tensors[kTensorsSize] = { + CreateTensor(input1_data, input1_dims), + CreateTensor(input2_data, input2_dims), + CreateTensor(input3_data, input3_dims), + CreateTensor(input4_data, input4_dims), + CreateTensor(input5_data, input5_dims), + CreateTensor(input6_data, input6_dims), + CreateTensor(output_data, output_dims), + }; + + int inputs_array_data[] = {6, 0, 1, 2, 3, 4, 5}; + TfLiteIntArray* inputs_array = IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 6}; + TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); + + const int output_len = ElementCount(*output_dims); + + TFLMRegistration* registration = tflite::tflm_signal::Register_FILTER_BANK(); + micro::KernelRunner runner(*registration, tensors, kTensorsSize, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned + // char*. This small discrepancy results in compiler warnings unless we + // reinterpret_cast right before passing in the flexbuffer bytes to the + // KernelRunner. 
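+  // As a reading aid (an assumption based on the signal library's filter
+  // bank parameters, not asserted by this test): input1 is the uint32
+  // spectral energy, input2/input3 are the int16 filter weights/unweights,
+  // and input4/input5/input6 are the per-channel int16 frequency starts,
+  // weight starts, and widths.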
+ TF_LITE_ENSURE_STATUS(runner.InitAndPrepare( + reinterpret_cast(flexbuffers_data), flexbuffers_data_len)); + + TF_LITE_ENSURE_STATUS(runner.Invoke()); + + for (int i = 0; i < output_len; ++i) { + TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]); + } + return kTfLiteOk; +} + +} // namespace +} // namespace testing +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FilterBankTest32Channel) { + int input1_shape[] = {1, 257}; + int input2_shape[] = {1, 117}; + int input3_shape[] = {1, 117}; + int input4_shape[] = {1, 33}; + int input5_shape[] = {1, 33}; + int input6_shape[] = {1, 33}; + int output_shape[] = {1, 32}; + + uint64_t output[32]; + + const uint32_t input1[] = { + 65451, 11468838, 4280615122, 4283105055, 30080683, 969970, + 1168164, 192770, 344209, 1811809, 1740724, 586130, + 305045, 17981, 169273, 103321, 85277, 529901, + 524660, 116609, 29653, 64345, 13121, 273956, + 593748, 463432, 348169, 77545, 2117, 19277, + 13837, 85, 16322, 1325, 69584, 233930, + 253273, 94180, 8642, 104245, 151937, 231970, + 90405, 95849, 106285, 81938, 76226, 103337, + 303250, 337705, 75140, 43874, 33730, 44761, + 117608, 57322, 9945, 19816, 48674, 19465, + 15696, 52229, 103738, 102541, 126421, 133157, + 33680, 7738, 45029, 57122, 61605, 60138, + 26170, 41444, 210994, 238338, 74324, 21460, + 33125, 3940, 15481, 7709, 24929, 17714, + 170993, 91978, 45965, 214133, 96832, 1800, + 16717, 42341, 87421, 114341, 65161, 26260, + 135077, 245000, 122117, 81188, 107753, 74125, + 86432, 91460, 29648, 2069, 3161, 5002, + 784, 1152, 1424, 277, 452, 2696, + 3610, 2120, 2617, 562, 1153, 4610, + 2906, 65, 786450, 4293722107, 0, 393208, + 2, 196608, 65539, 65537, 4294967295, 65537, + 4294901762, 65535, 4294770689, 65533, 131073, 4294901761, + 131071, 131071, 65535, 4294901764, 4294967295, 0, + 4294901758, 4294901761, 196607, 4294836224, 131070, 4294901762, + 4294901759, 196608, 4294901761, 131071, 131070, 65538, + 0, 4294901761, 65536, 4294836225, 65536, 4294836225, + 4294901757, 65535, 4294901760, 196607, 4294967295, 0, + 131071, 4294901762, 4294836221, 196608, 65536, 1, + 131074, 4294770690, 4294967291, 196611, 4294770687, 262143, + 4294901759, 131071, 1, 4294901759, 196607, 4294705153, + 196607, 4294967294, 65536, 1, 4294901759, 65536, + 0, 65536, 65537, 4294901759, 65536, 3, + 4294836222, 65534, 65536, 65538, 4294836225, 4294901760, + 4294901761, 4294967293, 0, 65534, 131070, 65537, + 4294901762, 65536, 2, 4294836224, 1, 4294901760, + 0, 4294967294, 131073, 4294901760, 65535, 131073, + 4294836224, 65536, 4294901760, 4294901760, 4294967295, 4294901761, + 131071, 4294901760, 131071, 4294836224, 2, 4294901758, + 4294967292, 131073, 0, 65535, 0, 4294901760, + 4294967295, 131073, 4294901764, 4294836223, 4294967295, 65535, + 65537, 65533, 3, 131072, 4294836224, 65537, + 1, 4294967293, 196611, 4294901759, 1}; + + const int16_t input2[] = { + 1133, 2373, 3712, 1047, 2564, 66, 1740, 3486, 1202, 3079, 919, 2913, + 865, 2964, 1015, 3210, 1352, 3633, 1859, 123, 2520, 856, 3323, 1726, + 161, 2722, 1215, 3833, 2382, 956, 3652, 2276, 923, 3689, 2380, 1093, + 3922, 2676, 1448, 239, 3144, 1970, 814, 3770, 2646, 1538, 445, 3463, + 2399, 1349, 313, 3386, 2376, 1379, 394, 3517, 2556, 1607, 668, 3837, + 2920, 2013, 1117, 231, 3450, 2583, 1725, 877, 37, 3302, 2480, 1666, + 861, 63, 3369, 2588, 1813, 1046, 287, 3630, 2885, 2147, 1415, 690, + 4067, 3355, 2650, 1950, 1257, 569, 3984, 3308, 2638, 1973, 1314, 661, + 12, 3465, 2827, 2194, 1566, 943, 325, 3808, 3199, 2595, 1996, 1401, + 810, 224, 3738, 3160, 2586, 2017, 
1451, 890, 332}; + + const int16_t input3[] = { + 2962, 1722, 383, 3048, 1531, 4029, 2355, 609, 2893, 1016, 3176, 1182, + 3230, 1131, 3080, 885, 2743, 462, 2236, 3972, 1575, 3239, 772, 2369, + 3934, 1373, 2880, 262, 1713, 3139, 443, 1819, 3172, 406, 1715, 3002, + 173, 1419, 2647, 3856, 951, 2125, 3281, 325, 1449, 2557, 3650, 632, + 1696, 2746, 3782, 709, 1719, 2716, 3701, 578, 1539, 2488, 3427, 258, + 1175, 2082, 2978, 3864, 645, 1512, 2370, 3218, 4058, 793, 1615, 2429, + 3234, 4032, 726, 1507, 2282, 3049, 3808, 465, 1210, 1948, 2680, 3405, + 28, 740, 1445, 2145, 2838, 3526, 111, 787, 1457, 2122, 2781, 3434, + 4083, 630, 1268, 1901, 2529, 3152, 3770, 287, 896, 1500, 2099, 2694, + 3285, 3871, 357, 935, 1509, 2078, 2644, 3205, 3763}; + + const int16_t input4[] = {5, 6, 7, 9, 11, 12, 14, 16, 18, 20, 22, + 25, 27, 30, 32, 35, 38, 41, 45, 48, 52, 56, + 60, 64, 69, 74, 79, 84, 89, 95, 102, 108, 115}; + + const int16_t input5[] = {0, 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, + 20, 22, 25, 27, 30, 33, 36, 40, 43, 47, 51, + 55, 59, 64, 69, 74, 79, 84, 90, 97, 103, 110}; + + const int16_t input6[] = {1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 3, 3, + 4, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5, 6, 7, 6, 7, 7}; + + const uint64_t golden[] = { + 5645104312, 3087527471, 5883346002, 10807122775, 2465336182, 853935004, + 1206905130, 3485828019, 1134726750, 832725041, 4442875878, 2122064365, + 178483220, 151483681, 1742660113, 1309124116, 1954305288, 1323857378, + 2750861165, 1340947482, 792522630, 669257768, 1659699572, 940652856, + 1957080469, 1034203505, 1541805928, 1710818326, 2432875876, 2254716277, + 275382345, 57293224}; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFilterBank( + input1_shape, input1, input2_shape, input2, input3_shape, input3, + input4_shape, input4, input5_shape, input5, input6_shape, input6, + output_shape, golden, g_gen_data_filter_bank_32_channel, + g_gen_data_size_filter_bank_32_channel, output)); +} + +TF_LITE_MICRO_TEST(FilterBankTest16Channel) { + int input1_shape[] = {1, 129}; + int input2_shape[] = {1, 59}; + int input3_shape[] = {1, 59}; + int input4_shape[] = {1, 17}; + int input5_shape[] = {1, 17}; + int input6_shape[] = {1, 17}; + int output_shape[] = {1, 16}; + + uint64_t output[16]; + + const uint32_t input1[] = { + 645050, 4644, 3653, 24262, 56660, 43260, 50584, 57902, 31702, 5401, + 45555, 34852, 8518, 43556, 13358, 19350, 40221, 18017, 27284, 64491, + 60099, 17863, 11001, 29076, 32666, 65268, 50947, 28694, 32377, 30014, + 25607, 22547, 45086, 10654, 46797, 8622, 47348, 43085, 5747, 51544, + 50364, 6208, 20696, 59782, 14429, 60125, 37079, 32673, 63457, 60142, + 34042, 11280, 1874, 33734, 62118, 13766, 54398, 47818, 50976, 46930, + 25906, 59441, 25958, 59136, 1756, 18652, 29213, 13379, 51845, 1207, + 55626, 27108, 43771, 35236, 3374, 40959, 47707, 41540, 34282, 27094, + 36329, 13593, 65257, 47006, 46857, 1114, 37106, 18738, 25969, 15461, + 2842, 36470, 32489, 61622, 23613, 29624, 32820, 30438, 9543, 6767, + 23037, 52896, 12059, 32264, 11575, 42400, 43344, 27511, 16712, 6877, + 4910, 50047, 61569, 57237, 48558, 2310, 22192, 7874, 46141, 64056, + 61997, 7298, 31372, 25316, 683, 58940, 18755, 17898, 19196}; + + const int16_t input2[] = { + -2210, 1711, 3237, 1247, 2507, 61, 1019, 899, 206, 146, 2849, 2756, + 1260, 1280, 1951, 213, 617, 2047, 211, 347, 2821, 3747, 150, 1924, + 3962, 942, 1430, 2678, 993, 308, 3364, 2491, 954, 1308, 879, 3950, + 1, 3556, 3628, 2104, 78, 1298, 1080, 342, 1337, 1639, 2352, 829, + 1358, 2498, 1647, 2507, 3816, 3767, 3735, 1155, 2221, 
2196, 1160}; + + const int16_t input3[] = { + 408, 3574, 1880, 2561, 2011, 3394, 1019, 445, 3901, 343, 1874, 3846, + 3566, 1830, 327, 111, 623, 1037, 2803, 1947, 1518, 661, 3239, 2351, + 1257, 269, 1574, 3431, 3972, 2487, 2181, 1458, 552, 717, 679, 1031, + 1738, 1782, 128, 2242, 353, 1460, 3305, 1424, 3813, 2895, 164, 272, + 3886, 3135, 141, 747, 3233, 1478, 2612, 3837, 3271, 73, 1746}; + + const int16_t input4[] = {5, 6, 7, 9, 11, 12, 14, 16, 18, + 20, 22, 25, 27, 30, 32, 35, 33}; + + const int16_t input5[] = {0, 1, 2, 4, 6, 7, 9, 11, 13, + 15, 17, 20, 22, 25, 27, 30, 33}; + + const int16_t input6[] = {1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 3, 2, 3, 2, 3, 3, 3}; + + const uint64_t golden[] = {104199304, 407748384, 206363744, 200989269, + 52144406, 230780884, 174394190, 379684049, + 94840835, 57788823, 531528204, 318265707, + 263149795, 188110467, 501443259, 200747781}; + + TF_LITE_MICRO_EXPECT_EQ( + kTfLiteOk, + tflite::testing::TestFilterBank( + input1_shape, input1, input2_shape, input2, input3_shape, input3, + input4_shape, input4, input5_shape, input5, input6_shape, input6, + output_shape, golden, g_gen_data_filter_bank_16_channel, + g_gen_data_size_filter_bank_16_channel, output)); +} + +TF_LITE_MICRO_TESTS_END diff --git a/signal/micro/kernels/framer.cc b/signal/micro/kernels/framer.cc new file mode 100644 index 00000000000..36f189ce538 --- /dev/null +++ b/signal/micro/kernels/framer.cc @@ -0,0 +1,199 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include + +#include "signal/src/circular_buffer.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/memory_helpers.h" +#include "tensorflow/lite/micro/micro_utils.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; +constexpr int kOutputValidTensor = 1; + +// Indices into the init flexbuffer's vector. +// The parameter's name is in the comment that follows. +// Elements in the vectors are ordered alphabetically by parameter name. 
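+
+// For example, g_gen_data_3_1_0_framer in
+// framer_flexbuffers_generated_data.cc appears to encode the map
+// {frame_size: 3, frame_step: 1, prefill: false}; flexbuffer maps keep their
+// keys sorted, so FlexbufferWrapper reads values back in that alphabetical
+// key order via the indices below.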
+constexpr int kFrameSizeIndex = 0;  // 'frame_size'
+constexpr int kFrameStepIndex = 1;  // 'frame_step'
+constexpr int kPrefillIndex = 2;    // 'prefill'
+
+struct TFLMSignalFramerParams {
+  int32_t frame_size;
+  int32_t frame_step;
+  int32_t outer_dims;
+  int32_t n_frames;
+  bool prefill;
+
+  int8_t** state_buffers;
+  tflite::tflm_signal::CircularBuffer** circular_buffers;
+};
+
+void FramerResetState(TFLMSignalFramerParams* params) {
+  for (int i = 0; i < params->outer_dims; ++i) {
+    tflite::tflm_signal::CircularBufferReset(params->circular_buffers[i]);
+    if (params->prefill) {
+      tflite::tflm_signal::CircularBufferWriteZeros(
+          params->circular_buffers[i],
+          params->frame_size - params->frame_step);
+    }
+  }
+}
+
+void* FramerInit(TfLiteContext* context, const char* buffer, size_t length) {
+  const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
+
+  auto* params =
+      static_cast<TFLMSignalFramerParams*>(context->AllocatePersistentBuffer(
+          context, sizeof(TFLMSignalFramerParams)));
+
+  if (params == nullptr) {
+    return nullptr;
+  }
+
+  tflite::FlexbufferWrapper fbw(buffer_t, length);
+  params->frame_size = fbw.ElementAsInt32(kFrameSizeIndex);
+  params->frame_step = fbw.ElementAsInt32(kFrameStepIndex);
+  params->prefill = fbw.ElementAsBool(kPrefillIndex);
+  return params;
+}
+
+TfLiteStatus FramerPrepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 2);
+
+  MicroContext* micro_context = GetMicroContext(context);
+
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+  TfLiteTensor* output_valid =
+      micro_context->AllocateTempOutputTensor(node, kOutputValidTensor);
+  TF_LITE_ENSURE(context, output_valid != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input) + 1, NumDimensions(output));
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output_valid), 0);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt16);
+  TF_LITE_ENSURE_TYPES_EQ(context, output_valid->type, kTfLiteBool);
+
+  auto* params = reinterpret_cast<TFLMSignalFramerParams*>(node->user_data);
+
+  RuntimeShape input_shape = GetTensorShape(input);
+  int innermost_dim = input_shape.Dims(input_shape.DimensionsCount() - 1);
+  TF_LITE_ENSURE(context, innermost_dim >= params->frame_step);
+  TF_LITE_ENSURE_EQ(context, innermost_dim % params->frame_step, 0);
+  params->outer_dims = input_shape.FlatSize() / innermost_dim;
+  params->n_frames = innermost_dim / params->frame_step;
+
+  params->state_buffers =
+      static_cast<int8_t**>(context->AllocatePersistentBuffer(
+          context, params->outer_dims * sizeof(int8_t*)));
+  params->circular_buffers =
+      static_cast<tflite::tflm_signal::CircularBuffer**>(
+          context->AllocatePersistentBuffer(
+              context, params->outer_dims *
+                           sizeof(tflite::tflm_signal::CircularBuffer*)));
+  for (int i = 0; i < params->outer_dims; i++) {
+    // Calculate the capacity of the circular buffer: frame_size rounded up to
+    // a multiple of frame_step. This saves memory relative to the simpler
+    // frame_size + frame_step. For example, with frame_step = 160 and
+    // frame_size = 400, the capacity is 480 vs. 560 for
+    // frame_step + frame_size.
+    size_t capacity = (params->frame_size + params->frame_step - 1) /
+                      params->frame_step * params->frame_step;
+
+    size_t state_size =
+        tflite::tflm_signal::CircularBufferGetNeededMemory(capacity);
+    params->state_buffers[i] =
+        static_cast<int8_t*>(context->AllocatePersistentBuffer(
+            context, state_size * sizeof(int8_t)));
+    params->circular_buffers[i] = tflite::tflm_signal::CircularBufferInit(
+        capacity, params->state_buffers[i], state_size);
+  }
+
+  FramerResetState(params);
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  micro_context->DeallocateTempTfLiteTensor(output_valid);
+
+  return kTfLiteOk;
+}
+
+TfLiteStatus FramerEval(TfLiteContext* context, TfLiteNode* node) {
+  auto* params = reinterpret_cast<TFLMSignalFramerParams*>(node->user_data);
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+  TfLiteEvalTensor* output_valid =
+      tflite::micro::GetEvalOutput(context, node, kOutputValidTensor);
+
+  const int16_t* input_data = tflite::micro::GetTensorData<int16_t>(input);
+  int16_t* output_data = tflite::micro::GetTensorData<int16_t>(output);
+  bool* output_valid_data = tflite::micro::GetTensorData<bool>(output_valid);
+  *output_valid_data = true;
+
+  for (int i = 0; i < params->outer_dims; i++) {
+    for (int frame = 0; frame < params->n_frames; frame++) {
+      int input_idx = (i * params->n_frames + frame) * params->frame_step;
+      int output_idx = (i * params->n_frames + frame) * params->frame_size;
+      tflite::tflm_signal::CircularBufferWrite(params->circular_buffers[i],
+                                               &input_data[input_idx],
+                                               params->frame_step);
+
+      if (tflite::tflm_signal::CircularBufferAvailable(
+              params->circular_buffers[i]) >=
+          static_cast<size_t>(params->frame_size)) {
+        tflite::tflm_signal::CircularBufferGet(params->circular_buffers[i],
+                                               params->frame_size,
+                                               &output_data[output_idx]);
+        tflite::tflm_signal::CircularBufferDiscard(params->circular_buffers[i],
+                                                   params->frame_step);
+      } else {
+        *output_valid_data = false;
+      }
+    }
+  }
+
+  return kTfLiteOk;
+}
+
+void FramerReset(TfLiteContext* context, void* buffer) {
+  FramerResetState(static_cast<TFLMSignalFramerParams*>(buffer));
+}
+
+}  // namespace
+
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+TFLMRegistration* Register_FRAMER() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      FramerInit, FramerPrepare, FramerEval, nullptr, FramerReset);
+  return &r;
+}
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/framer_flexbuffers_generated_data.cc b/signal/micro/kernels/framer_flexbuffers_generated_data.cc
new file mode 100644
index 00000000000..53d08109fc0
--- /dev/null
+++ b/signal/micro/kernels/framer_flexbuffers_generated_data.cc
@@ -0,0 +1,34 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +// This file is generated. See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/framer_flexbuffers_generated_data.h" + +const int g_gen_data_size_3_1_0_framer = 46; +const unsigned char g_gen_data_3_1_0_framer[] = { + 0x66, 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x00, 0x66, + 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x74, 0x65, 0x70, 0x00, 0x70, 0x72, + 0x65, 0x66, 0x69, 0x6c, 0x6c, 0x00, 0x03, 0x1f, 0x15, 0x0b, 0x03, 0x01, + 0x03, 0x03, 0x01, 0x00, 0x04, 0x04, 0x68, 0x06, 0x24, 0x01, +}; + +const int g_gen_data_size_5_2_1_framer = 46; +const unsigned char g_gen_data_5_2_1_framer[] = { + 0x66, 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x00, 0x66, + 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x74, 0x65, 0x70, 0x00, 0x70, 0x72, + 0x65, 0x66, 0x69, 0x6c, 0x6c, 0x00, 0x03, 0x1f, 0x15, 0x0b, 0x03, 0x01, + 0x03, 0x05, 0x02, 0x01, 0x04, 0x04, 0x68, 0x06, 0x24, 0x01, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h b/signal/micro/kernels/framer_flexbuffers_generated_data.h similarity index 53% rename from tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h rename to signal/micro/kernels/framer_flexbuffers_generated_data.h index ff461348d52..655bfa6a3e9 100644 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h +++ b/signal/micro/kernels/framer_flexbuffers_generated_data.h @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -13,11 +13,13 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_SIMPLE_FEATURES_DATA_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_SIMPLE_FEATURES_DATA_H_ +#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FRAMER_FLEXBUFFERS_DATA_H_ +#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FRAMER_FLEXBUFFERS_DATA_H_ -extern const int g_no_simple_f9643d42_nohash_4_width; -extern const int g_no_simple_f9643d42_nohash_4_height; -extern const unsigned char g_no_simple_f9643d42_nohash_4_data[]; +extern const int g_gen_data_size_3_1_0_framer; +extern const unsigned char g_gen_data_3_1_0_framer[]; -#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_SIMPLE_FEATURES_DATA_H_ +extern const int g_gen_data_size_5_2_1_framer; +extern const unsigned char g_gen_data_5_2_1_framer[]; + +#endif // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_FRAMER_FLEXBUFFERS_DATA_H_ diff --git a/signal/micro/kernels/framer_test.cc b/signal/micro/kernels/framer_test.cc new file mode 100644 index 00000000000..f6cc3a036b2 --- /dev/null +++ b/signal/micro/kernels/framer_test.cc @@ -0,0 +1,250 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#include +#include + +#include "signal/micro/kernels/framer_flexbuffers_generated_data.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace { + +constexpr int kFrameSizeIndex = 0; // 'frame_size' +constexpr int kFrameStepIndex = 1; // 'frame_step' +constexpr int kPrefillIndex = 2; // 'prefill' +constexpr int kInputsSize = 1; +constexpr int kOutputsSize = 2; +constexpr int kTensorsSize = kInputsSize + kOutputsSize; + +class FramerKernelRunner { + public: + FramerKernelRunner(int* input_dims_data, int16_t* input_data, + int* output_dims_data, int16_t* output_data, + int* output_ready_dims_data, bool* output_ready) + : inputs_array_{testing::IntArrayFromInts(inputs_array_data_)}, + outputs_array_{testing::IntArrayFromInts(outputs_array_data_)} { + tensors_[0] = testing::CreateTensor( + input_data, testing::IntArrayFromInts(input_dims_data)); + + tensors_[1] = testing::CreateTensor( + output_data, testing::IntArrayFromInts(output_dims_data)); + + tensors_[2] = testing::CreateTensor( + output_ready, testing::IntArrayFromInts(output_ready_dims_data)); + + // go/tflm-static-cleanups for reasoning new is being used like this + kernel_runner_ = new (kernel_runner_buffer) micro::KernelRunner( + *registration_, tensors_, kTensorsSize, inputs_array_, outputs_array_, + /*builtin_data=*/nullptr); + } + + micro::KernelRunner& kernel_runner() { return *kernel_runner_; } + + private: + uint8_t kernel_runner_buffer[sizeof(micro::KernelRunner)]; + int inputs_array_data_[kInputsSize + 1] = {kInputsSize, 0}; + int outputs_array_data_[kOutputsSize + 1] = {kOutputsSize, 1, 2}; + TfLiteTensor tensors_[kTensorsSize] = {}; + TfLiteIntArray* inputs_array_ = nullptr; + TfLiteIntArray* outputs_array_ = nullptr; + TFLMRegistration* registration_ = tflm_signal::Register_FRAMER(); + micro::KernelRunner* kernel_runner_ = nullptr; +}; + +alignas(alignof(FramerKernelRunner)) uint8_t + framer_kernel_runner_buffer[sizeof(FramerKernelRunner)]; + +void TestFramerInvoke(int* input_dims_data, int16_t* input_data, + int* output_dims_data, const int16_t* golden, + int golden_len, int* output_ready_dims_data, + const unsigned char* flexbuffers_data, + const unsigned int flexbuffers_data_size, + int16_t* output_data, bool* output_ready, + micro::KernelRunner* runner) { + FlexbufferWrapper fbw(flexbuffers_data, flexbuffers_data_size); + int frame_size = fbw.ElementAsInt32(kFrameSizeIndex); + int frame_step = fbw.ElementAsInt32(kFrameStepIndex); + bool prefill = fbw.ElementAsBool(kPrefillIndex); + int latency_samples = frame_size - frame_step; + int input_size = input_dims_data[input_dims_data[0]]; + int outer_dims = 1; + for (int i = 1; i < input_dims_data[0]; i++) { + outer_dims *= input_dims_data[i]; + } + int n_frames = output_dims_data[output_dims_data[0] - 1]; + TF_LITE_MICRO_EXPECT_EQ(frame_size, output_dims_data[output_dims_data[0]]); + for (int i = 0; i < golden_len - latency_samples; i += input_size) { + for (int 
outer_dim = 0; outer_dim < outer_dims; outer_dim++) { + memcpy(&input_data[outer_dim * input_size], &golden[latency_samples + i], + input_size * sizeof(int16_t)); + } + TF_LITE_MICRO_EXPECT_EQ(runner->Invoke(), kTfLiteOk); + TF_LITE_MICRO_EXPECT_EQ(*output_ready, (i >= latency_samples) || prefill); + if (*output_ready == true) { + for (int outer_dim = 0; outer_dim < outer_dims; outer_dim++) { + for (int frame = 0; frame < n_frames; frame++) { + int output_idx = + outer_dim * frame_size * n_frames + frame * frame_size; + int golden_idx = i + frame * frame_step; + TF_LITE_MICRO_EXPECT_EQ( + 0, memcmp(&golden[golden_idx], &output_data[output_idx], + frame_size * sizeof(int16_t))); + } + } + } + } +} + +void TestFramer(int* input_dims_data, int16_t* input_data, + int* output_dims_data, const int16_t* golden, int golden_len, + int* output_ready_dims_data, + const unsigned char* flexbuffers_data, + const unsigned int flexbuffers_data_size, + int16_t* output_data) { + bool output_ready = false; + FramerKernelRunner* framer_runner = new (framer_kernel_runner_buffer) + FramerKernelRunner(input_dims_data, input_data, output_dims_data, + output_data, output_ready_dims_data, &output_ready); + // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned + // char*. This small discrepancy results in compiler warnings unless we + // reinterpret_cast right before passing in the flexbuffer bytes to the + // KernelRunner. + TF_LITE_MICRO_EXPECT_EQ(framer_runner->kernel_runner().InitAndPrepare( + reinterpret_cast(flexbuffers_data), + flexbuffers_data_size), + kTfLiteOk); + TestFramerInvoke(input_dims_data, input_data, output_dims_data, golden, + golden_len, output_ready_dims_data, flexbuffers_data, + flexbuffers_data_size, output_data, &output_ready, + &framer_runner->kernel_runner()); +} + +void TestFramerReset(int* input_dims_data, int16_t* input_data, + int* output_dims_data, const int16_t* golden, + int golden_len, int* output_ready_dims_data, + const unsigned char* flexbuffers_data, + const unsigned int flexbuffers_data_size, + int16_t* output_data) { + bool output_ready = false; + FramerKernelRunner* framer_runner = new (framer_kernel_runner_buffer) + FramerKernelRunner(input_dims_data, input_data, output_dims_data, + output_data, output_ready_dims_data, &output_ready); + // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned + // char*. This small discrepancy results in compiler warnings unless we + // reinterpret_cast right before passing in the flexbuffer bytes to the + // KernelRunner. 
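+  // To trace the indexing in TestFramerInvoke above, take the 3/1/0 case
+  // (frame_size 3, frame_step 1, no prefill): latency_samples == 2 and
+  // golden == {0x0, 0x0, 0x1234, 0x5678, 0x4321, 0x7777}, so
+  //   i == 0: writes golden[2]; 1 of 3 samples buffered, output not ready
+  //   i == 1: writes golden[3]; 2 of 3 samples buffered, output not ready
+  //   i == 2: writes golden[4]; frame {golden[2], golden[3], golden[4]} is
+  //           ready and is compared against &golden[i]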
+ TF_LITE_MICRO_EXPECT_EQ(framer_runner->kernel_runner().InitAndPrepare( + reinterpret_cast(flexbuffers_data), + flexbuffers_data_size), + kTfLiteOk); + TestFramerInvoke(input_dims_data, input_data, output_dims_data, golden, + golden_len, output_ready_dims_data, flexbuffers_data, + flexbuffers_data_size, output_data, &output_ready, + &framer_runner->kernel_runner()); + framer_runner->kernel_runner().Reset(); + TestFramerInvoke(input_dims_data, input_data, output_dims_data, golden, + golden_len, output_ready_dims_data, flexbuffers_data, + flexbuffers_data_size, output_data, &output_ready, + &framer_runner->kernel_runner()); +} + +} // namespace +} // namespace tflite + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(FramerTest_3_1_0) { + const int kInputSize = 1; + const int kOutputSize = 3; + int input_dims_data[] = {1, kInputSize}; + int output_dims_data[] = {2, 1, kOutputSize}; + int output_ready_dims_data[] = {0}; + const int16_t golden[] = {0x0, 0x0, 0x1234, 0x5678, 0x4321, 0x7777}; + int16_t input_data; + int16_t output_data[kOutputSize]; + + tflite::TestFramer(input_dims_data, &input_data, output_dims_data, golden, + sizeof(golden) / sizeof(int16_t), output_ready_dims_data, + g_gen_data_3_1_0_framer, g_gen_data_size_3_1_0_framer, + output_data); +} + +TF_LITE_MICRO_TEST(FramerTest_5_2_1) { + const int kInputSize = 2; + const int kOutputSize = 5; + int input_dims_data[] = {1, kInputSize}; + int output_dims_data[] = {2, 1, kOutputSize}; + int output_ready_dims_data[] = {0}; + const int16_t golden[] = {0x0, 0x0, 0x0, 0x1010, 0x0202, 0x7070, 0x0606}; + + int16_t input_data[kInputSize]; + int16_t output_data[kOutputSize]; + + tflite::TestFramer(input_dims_data, input_data, output_dims_data, golden, + sizeof(golden) / sizeof(int16_t), output_ready_dims_data, + g_gen_data_5_2_1_framer, g_gen_data_size_5_2_1_framer, + output_data); +} + +TF_LITE_MICRO_TEST(FramerTest_5_2_1_NFrames2) { + const int kInputSize = 4; + const int kOutputSize = 5; + const int kNFrames = 2; + int input_dims_data[] = {1, kInputSize}; + int output_dims_data[] = {2, kNFrames, kOutputSize}; + int output_ready_dims_data[] = {0}; + const int16_t golden[] = {0x0, 0x0, 0x0, 0x1010, 0x0202, 0x7070, 0x0606}; + + int16_t input_data[kInputSize]; + int16_t output_data[kNFrames * kOutputSize]; + + tflite::TestFramer(input_dims_data, input_data, output_dims_data, golden, + sizeof(golden) / sizeof(int16_t), output_ready_dims_data, + g_gen_data_5_2_1_framer, g_gen_data_size_5_2_1_framer, + output_data); +} + +TF_LITE_MICRO_TEST(FramerTest_5_2_1_NFrames2OuterDims4) { + const int kInputSize = 4; + const int kOutputSize = 5; + int input_dims_data[] = {3, 2, 2, kInputSize}; + int output_dims_data[] = {4, 2, 2, 2, kOutputSize}; + int output_ready_dims_data[] = {0}; + const int16_t golden[] = {0x0, 0x0, 0x0, 0x1010, 0x0202, 0x7070, 0x0606}; + + int16_t input_data[2 * 2 * kInputSize]; + int16_t output_data[2 * 2 * 2 * kOutputSize]; + + tflite::TestFramer(input_dims_data, input_data, output_dims_data, golden, + sizeof(golden) / sizeof(int16_t), output_ready_dims_data, + g_gen_data_5_2_1_framer, g_gen_data_size_5_2_1_framer, + output_data); +} + +TF_LITE_MICRO_TEST(TestReset) { + const int kInputSize = 1; + const int kOutputSize = 3; + int input_dims_data[] = {1, kInputSize}; + int output_dims_data[] = {2, 1, kOutputSize}; + int output_ready_dims_data[] = {0}; + const int16_t golden[] = {0x0, 0x0, 0x1234, 0x5678, 0x4321, 0x7777}; + int16_t input_data; + int16_t output_data[kOutputSize]; + tflite::TestFramerReset(input_dims_data, 
&input_data, output_dims_data, + golden, sizeof(golden) / sizeof(int16_t), + output_ready_dims_data, g_gen_data_3_1_0_framer, + g_gen_data_size_3_1_0_framer, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/signal/micro/kernels/irfft.cc b/signal/micro/kernels/irfft.cc new file mode 100644 index 00000000000..b0d58d59374 --- /dev/null +++ b/signal/micro/kernels/irfft.cc @@ -0,0 +1,230 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "signal/src/irfft.h" + +#include +#include +#include + +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/portable_type_to_tflitetype.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +// Indices into the init flexbuffer's vector. +// The parameter's name is in the comment that follows. +// Elements in the vectors are ordered alphabetically by parameter name. +// 'T' is added implicitly by the TensorFlow framework when the type is resolved +// during graph construction. 
+// constexpr int kTypeIndex = 0; // 'T' (unused) +constexpr int kFftLengthIndex = 1; // 'fft_length' + +struct TfLiteAudioFrontendIrfftParams { + int32_t fft_length; + int32_t input_size; + int32_t input_length; + int32_t output_length; + TfLiteType fft_type; + int8_t* state; +}; + +template +void* IrfftInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + + auto* params = static_cast( + context->AllocatePersistentBuffer( + context, sizeof(TfLiteAudioFrontendIrfftParams))); + + if (params == nullptr) { + return nullptr; + } + + tflite::FlexbufferWrapper fbw(reinterpret_cast(buffer), + length); + params->fft_length = fbw.ElementAsInt32(kFftLengthIndex); + params->fft_type = typeToTfLiteType(); + + size_t state_size = (*get_needed_memory_func)(params->fft_length); + params->state = reinterpret_cast( + context->AllocatePersistentBuffer(context, state_size * sizeof(int8_t))); + + if (params->state == nullptr) { + return nullptr; + } + + (*init_func)(params->fft_length, params->state, state_size); + return params; +} + +template +TfLiteStatus IrfftPrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE_EQ(context, NumDimensions(input), NumDimensions(output)); + + TF_LITE_ENSURE_TYPES_EQ(context, input->type, TfLiteTypeEnum); + TF_LITE_ENSURE_TYPES_EQ(context, output->type, TfLiteTypeEnum); + + auto* params = + reinterpret_cast(node->user_data); + RuntimeShape input_shape = GetTensorShape(input); + RuntimeShape output_shape = GetTensorShape(output); + // Divide by 2 because input is complex. 
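+  // That is, the innermost dimension holds fft_length / 2 + 1 complex bins
+  // stored as interleaved {real, imaginary} scalars (assuming the standard
+  // real-FFT packing), so an fft_length of 512 arrives as 514 scalars and
+  // input_length becomes 257.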
+ params->input_length = + input_shape.Dims(input_shape.DimensionsCount() - 1) / 2; + params->input_size = input_shape.FlatSize() / 2; + params->output_length = output_shape.Dims(output_shape.DimensionsCount() - 1); + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(output); + return kTfLiteOk; +} + +template * input, T*)> +TfLiteStatus IrfftEval(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->user_data); + + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + const Complex* input_data = + tflite::micro::GetTensorData>(input); + T* output_data = tflite::micro::GetTensorData(output); + for (int input_idx = 0, output_idx = 0; input_idx < params->input_size; + input_idx += params->input_length, output_idx += params->output_length) { + (*apply_func)(params->state, &input_data[input_idx], + &output_data[output_idx]); + } + return kTfLiteOk; +} + +void* IrfftInitAll(TfLiteContext* context, const char* buffer, size_t length) { + const uint8_t* buffer_t = reinterpret_cast(buffer); + const flexbuffers::Map& m = flexbuffers::GetRoot(buffer_t, length).AsMap(); + auto tensor_type = static_cast(m["T"].AsInt32()); + + switch (tensor_type) { + case TensorType_INT16: { + return IrfftInit(context, buffer, length); + } + case TensorType_INT32: { + return IrfftInit(context, buffer, length); + } + case TensorType_FLOAT32: { + return IrfftInit(context, buffer, length); + } + default: + return nullptr; + } +} + +TfLiteStatus IrfftPrepareAll(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->user_data); + + switch (params->fft_type) { + case kTfLiteInt16: { + return IrfftPrepare(context, node); + } + case kTfLiteInt32: { + return IrfftPrepare(context, node); + } + case kTfLiteFloat32: { + return IrfftPrepare(context, node); + } + default: + return kTfLiteError; + } +} + +TfLiteStatus IrfftEvalAll(TfLiteContext* context, TfLiteNode* node) { + auto* params = + reinterpret_cast(node->user_data); + + switch (params->fft_type) { + case kTfLiteInt16: { + return IrfftEval(context, node); + } + case kTfLiteInt32: { + return IrfftEval(context, node); + } + case kTfLiteFloat32: { + return IrfftEval(context, node); + } + default: + return kTfLiteError; + } +} + +} // namespace + +// TODO(b/286250473): remove namespace once de-duped libraries +namespace tflm_signal { + +TFLMRegistration* Register_IRFFT() { + static TFLMRegistration r = + tflite::micro::RegisterOp(IrfftInitAll, IrfftPrepareAll, IrfftEvalAll); + return &r; +} + +TFLMRegistration* Register_IRFFT_FLOAT() { + static TFLMRegistration r = tflite::micro::RegisterOp( + IrfftInit, + IrfftPrepare, IrfftEval); + return &r; +} + +TFLMRegistration* Register_IRFFT_INT16() { + static TFLMRegistration r = tflite::micro::RegisterOp( + IrfftInit, + IrfftPrepare, IrfftEval); + return &r; +} + +TFLMRegistration* Register_IRFFT_INT32() { + static TFLMRegistration r = tflite::micro::RegisterOp( + IrfftInit, + IrfftPrepare, IrfftEval); + return &r; +} + +} // namespace tflm_signal +} // namespace tflite \ No newline at end of file diff --git a/signal/micro/kernels/irfft.h b/signal/micro/kernels/irfft.h new file mode 100644 index 00000000000..380bc3e74a9 --- /dev/null +++ b/signal/micro/kernels/irfft.h @@ -0,0 +1,31 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef SIGNAL_MICRO_KERNELS_IRFFT_H_ +#define SIGNAL_MICRO_KERNELS_IRFFT_H_ + +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { +namespace tflm_signal { + +TFLMRegistration* Register_IRFFT(); +TFLMRegistration* Register_IRFFT_FLOAT(); +TFLMRegistration* Register_IRFFT_INT16(); +TFLMRegistration* Register_IRFFT_INT32(); + +} // namespace tflm_signal +} // namespace tflite + +#endif // SIGNAL_MICRO_KERNELS_IRFFT_H_ diff --git a/signal/micro/kernels/overlap_add.cc b/signal/micro/kernels/overlap_add.cc new file mode 100644 index 00000000000..c365cd8ca25 --- /dev/null +++ b/signal/micro/kernels/overlap_add.cc @@ -0,0 +1,244 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "signal/src/overlap_add.h" + +#include + +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/flatbuffer_utils.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/portable_type_to_tflitetype.h" + +namespace tflite { +namespace { + +constexpr int kInputTensor = 0; +constexpr int kOutputTensor = 0; + +// Indices into the init flexbuffer's vector. +// The parameter's name is in the comment that follows. +// Elements in the vectors are ordered alphabetically by parameter name. +// 'T' is added implicitly by the TensorFlow framework when the type is resolved +// during graph construction. 
+// constexpr int kTypeIndex = 0;   // 'T' (unused)
+constexpr int kFrameStepIndex = 1;  // 'frame_step'
+
+template <typename T>
+struct TFLMSignalOverlapAddParams {
+  int32_t frame_size;
+  int32_t frame_step;
+  int32_t outer_dims;
+  int32_t n_frames;
+  TfLiteType type;
+  T** state_buffers;
+};
+
+template <typename T>
+void OverlapAddResetState(TFLMSignalOverlapAddParams<T>* params) {
+  for (int i = 0; i < params->outer_dims; i++) {
+    memset(params->state_buffers[i], 0, sizeof(T) * params->frame_size);
+  }
+}
+
+template <typename T>
+void* OverlapAddInit(TfLiteContext* context, const char* buffer,
+                     size_t length) {
+  const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
+
+  auto* params = static_cast<TFLMSignalOverlapAddParams<T>*>(
+      context->AllocatePersistentBuffer(
+          context, sizeof(TFLMSignalOverlapAddParams<T>)));
+
+  if (params == nullptr) {
+    return nullptr;
+  }
+
+  tflite::FlexbufferWrapper fbw(buffer_t, length);
+  params->type = typeToTfLiteType<T>();
+  params->frame_step = fbw.ElementAsInt32(kFrameStepIndex);
+  return params;
+}
+
+template <typename T, TfLiteType TfLiteTypeEnum>
+TfLiteStatus OverlapAddPrepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
+
+  MicroContext* micro_context = GetMicroContext(context);
+
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), NumDimensions(output) + 1);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, TfLiteTypeEnum);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, TfLiteTypeEnum);
+
+  auto* params =
+      reinterpret_cast<TFLMSignalOverlapAddParams<T>*>(node->user_data);
+  RuntimeShape input_shape = GetTensorShape(input);
+  RuntimeShape output_shape = GetTensorShape(output);
+  TF_LITE_ENSURE(context, input_shape.DimensionsCount() >= 2);
+  TF_LITE_ENSURE_EQ(context, input_shape.DimensionsCount(),
+                    output_shape.DimensionsCount() + 1);
+
+  params->frame_size = input_shape.Dims(input_shape.DimensionsCount() - 1);
+  params->n_frames = input_shape.Dims(input_shape.DimensionsCount() - 2);
+  params->outer_dims =
+      input_shape.FlatSize() / (params->frame_size * params->n_frames);
+  params->state_buffers =
+      static_cast<T**>(context->AllocatePersistentBuffer(
+          context, params->outer_dims * sizeof(T*)));
+  TF_LITE_ENSURE(context, params->state_buffers != nullptr);
+
+  for (int i = 0; i < params->outer_dims; i++) {
+    params->state_buffers[i] =
+        static_cast<T*>(context->AllocatePersistentBuffer(
+            context, params->frame_size * sizeof(T)));
+  }
+  OverlapAddResetState(params);
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  return kTfLiteOk;
+}
+
+template <typename T>
+TfLiteStatus OverlapAddEval(TfLiteContext* context, TfLiteNode* node) {
+  auto* params =
+      reinterpret_cast<TFLMSignalOverlapAddParams<T>*>(node->user_data);
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+
+  const T* input_data = tflite::micro::GetTensorData<T>(input);
+  T* output_data = tflite::micro::GetTensorData<T>(output);
+  for (int i = 0; i < params->outer_dims; i++) {
+    T* buffer = params->state_buffers[i];
+    for (int frame = 0; frame < params->n_frames; frame++) {
+      int input_index = (i * params->n_frames + frame) * params->frame_size;
+      int output_index = (i * params->n_frames + frame) * params->frame_step;
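+      // Note the stride mismatch: input frames are read frame_size samples
+      // apart, while the output pointer advances only frame_step samples per
+      // frame. The remaining frame_size - frame_step samples of each frame
+      // are carried in this outer dim's state buffer and summed into later
+      // frames by OverlapAdd() below.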
+      tflm_signal::OverlapAdd(&input_data[input_index], buffer,
+                              params->frame_size, &output_data[output_index],
+                              params->frame_step);
+    }
+  }
+  return kTfLiteOk;
+}
+
+template <typename T>
+void OverlapAddReset(TfLiteContext* context, void* buffer) {
+  OverlapAddResetState(static_cast<TFLMSignalOverlapAddParams<T>*>(buffer));
+}
+
+void* OverlapAddInitAll(TfLiteContext* context, const char* buffer,
+                        size_t length) {
+  const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
+  const flexbuffers::Map& m = flexbuffers::GetRoot(buffer_t, length).AsMap();
+  auto tensor_type = static_cast<TensorType>(m["T"].AsInt32());
+
+  switch (tensor_type) {
+    case TensorType_INT16: {
+      return OverlapAddInit<int16_t>(context, buffer, length);
+    }
+    case TensorType_FLOAT32: {
+      return OverlapAddInit<float>(context, buffer, length);
+    }
+    default:
+      return nullptr;
+  }
+}
+
+TfLiteStatus OverlapAddPrepareAll(TfLiteContext* context, TfLiteNode* node) {
+  auto* params =
+      reinterpret_cast<TFLMSignalOverlapAddParams<void>*>(node->user_data);
+
+  switch (params->type) {
+    case kTfLiteInt16: {
+      return OverlapAddPrepare<int16_t, kTfLiteInt16>(context, node);
+    }
+    case kTfLiteFloat32: {
+      return OverlapAddPrepare<float, kTfLiteFloat32>(context, node);
+    }
+    default:
+      return kTfLiteError;
+  }
+}
+
+TfLiteStatus OverlapAddEvalAll(TfLiteContext* context, TfLiteNode* node) {
+  auto* params =
+      reinterpret_cast<TFLMSignalOverlapAddParams<void>*>(node->user_data);
+
+  switch (params->type) {
+    case kTfLiteInt16: {
+      return OverlapAddEval<int16_t>(context, node);
+    }
+    case kTfLiteFloat32: {
+      return OverlapAddEval<float>(context, node);
+    }
+    default:
+      return kTfLiteError;
+  }
+}
+
+void OverlapAddResetAll(TfLiteContext* context, void* buffer) {
+  auto* params = reinterpret_cast<TFLMSignalOverlapAddParams<void>*>(buffer);
+
+  switch (params->type) {
+    case kTfLiteInt16: {
+      OverlapAddReset<int16_t>(context, buffer);
+      break;
+    }
+    case kTfLiteFloat32: {
+      OverlapAddReset<float>(context, buffer);
+      break;
+    }
+    default:
+      break;
+  }
+}
+
+}  // namespace
+
+namespace tflm_signal {
+TFLMRegistration* Register_OVERLAP_ADD() {
+  static TFLMRegistration r =
+      tflite::micro::RegisterOp(OverlapAddInitAll, OverlapAddPrepareAll,
+                                OverlapAddEvalAll, nullptr, OverlapAddResetAll);
+  return &r;
+}
+
+TFLMRegistration* Register_OVERLAP_ADD_FLOAT() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      OverlapAddInit<float>, OverlapAddPrepare<float, kTfLiteFloat32>,
+      OverlapAddEval<float>, nullptr, OverlapAddReset<float>);
+  return &r;
+}
+
+TFLMRegistration* Register_OVERLAP_ADD_INT16() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      OverlapAddInit<int16_t>, OverlapAddPrepare<int16_t, kTfLiteInt16>,
+      OverlapAddEval<int16_t>, nullptr, OverlapAddReset<int16_t>);
+  return &r;
+}
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/overlap_add_flexbuffers_generated_data.cc b/signal/micro/kernels/overlap_add_flexbuffers_generated_data.cc
new file mode 100644
index 00000000000..7f810db1ccb
--- /dev/null
+++ b/signal/micro/kernels/overlap_add_flexbuffers_generated_data.cc
@@ -0,0 +1,32 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// This file is generated.
See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/overlap_add_flexbuffers_generated_data.h" + +const int g_gen_data_size_overlap_add_float = 26; +const unsigned char g_gen_data_overlap_add_float[] = { + 0x66, 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x74, 0x65, + 0x70, 0x00, 0x54, 0x00, 0x02, 0x03, 0x0f, 0x02, 0x01, + 0x02, 0x00, 0x01, 0x04, 0x04, 0x04, 0x24, 0x01, +}; + +const int g_gen_data_size_overlap_add_int16 = 26; +const unsigned char g_gen_data_overlap_add_int16[] = { + 0x66, 0x72, 0x61, 0x6d, 0x65, 0x5f, 0x73, 0x74, 0x65, + 0x70, 0x00, 0x54, 0x00, 0x02, 0x03, 0x0f, 0x02, 0x01, + 0x02, 0x07, 0x01, 0x04, 0x04, 0x04, 0x24, 0x01, +}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc b/signal/micro/kernels/overlap_add_flexbuffers_generated_data.h similarity index 52% rename from tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc rename to signal/micro/kernels/overlap_add_flexbuffers_generated_data.h index e3d006a0039..adb4fbab19d 100644 --- a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc +++ b/signal/micro/kernels/overlap_add_flexbuffers_generated_data.h @@ -1,4 +1,4 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -13,13 +13,13 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -// See the header for documentation on the meaning of this data. +#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_OVERLAP_ADD_FLEXBUFFERS_DATA_H_ +#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_OVERLAP_ADD_FLEXBUFFERS_DATA_H_ -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h" +extern const int g_gen_data_size_overlap_add_float; +extern const unsigned char g_gen_data_overlap_add_float[]; -alignas(16) const int8_t - g_yes_feature_data_slice[g_yes_feature_data_slice_size] = { - 86, 88, 108, 75, 108, 76, 98, 64, 75, 61, 71, 66, 85, -1, - -77, -128, 46, 61, 92, 69, 100, 93, 113, 80, 108, 93, 113, 91, - 110, 80, 85, 15, -33, -128, 12, -50, 34, 50, 70, 55, -}; +extern const int g_gen_data_size_overlap_add_int16; +extern const unsigned char g_gen_data_overlap_add_int16[]; + +#endif // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_OVERLAP_ADD_FLEXBUFFERS_DATA_H_ diff --git a/signal/micro/kernels/overlap_add_test.cc b/signal/micro/kernels/overlap_add_test.cc new file mode 100644 index 00000000000..f7b9f522748 --- /dev/null +++ b/signal/micro/kernels/overlap_add_test.cc @@ -0,0 +1,252 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include <string.h>
+
+#include "signal/micro/kernels/overlap_add_flexbuffers_generated_data.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+
+constexpr int kFrameStepIndex = 1;
+constexpr int kInputsSize = 1;
+constexpr int kOutputsSize = 1;
+constexpr int kTensorsSize = kInputsSize + kOutputsSize;
+
+template <typename T>
+class OverlapAddKernelRunner {
+ public:
+  OverlapAddKernelRunner(int* input_dims_data, T* input_data,
+                         int* output_dims_data, T* output_data)
+      : inputs_array_{testing::IntArrayFromInts(inputs_array_data_)},
+        outputs_array_{testing::IntArrayFromInts(outputs_array_data_)} {
+    tensors_[0] = testing::CreateTensor(
+        input_data, testing::IntArrayFromInts(input_dims_data));
+
+    tensors_[1] = tflite::testing::CreateTensor(
+        output_data, testing::IntArrayFromInts(output_dims_data));
+
+    registration_ = tflm_signal::Register_OVERLAP_ADD();
+
+    // go/tflm-static-cleanups for reasoning new is being used like this
+    kernel_runner_ = new (kernel_runner_buffer) tflite::micro::KernelRunner(
+        *registration_, tensors_, kTensorsSize, inputs_array_, outputs_array_,
+        /*builtin_data=*/nullptr);
+  }
+
+  micro::KernelRunner& GetKernelRunner() { return *kernel_runner_; }
+
+ private:
+  uint8_t kernel_runner_buffer[sizeof(micro::KernelRunner)];
+  int inputs_array_data_[kInputsSize + 1] = {1, 0};
+  int outputs_array_data_[kOutputsSize + 1] = {1, 1};
+  TfLiteTensor tensors_[kTensorsSize] = {};
+  TfLiteIntArray* inputs_array_ = nullptr;
+  TfLiteIntArray* outputs_array_ = nullptr;
+  TFLMRegistration* registration_ = nullptr;
+  micro::KernelRunner* kernel_runner_ = nullptr;
+};
+
+// We can use any of the templated types here - int16_t was picked arbitrarily
+alignas(alignof(OverlapAddKernelRunner<int16_t>)) uint8_t
+    overlap_add_kernel_runner_buffer[sizeof(OverlapAddKernelRunner<int16_t>)];
+
+template <typename T>
+void TestOverlapAddInvoke(int* input_dims_data, T* input_data,
+                          int* output_dims_data, const T* golden_input,
+                          const T* golden_output, int iters,
+                          const unsigned char* flexbuffers_data,
+                          const unsigned int flexbuffers_data_size,
+                          T* output_data, tflite::micro::KernelRunner* runner) {
+  tflite::FlexbufferWrapper fbw(flexbuffers_data, flexbuffers_data_size);
+  int frame_step = fbw.ElementAsInt32(kFrameStepIndex);
+  int frame_size = input_dims_data[input_dims_data[0]];
+  int n_frames = input_dims_data[input_dims_data[0] - 1];
+  int outer_dims = 1;
+  for (int i = 1; i < input_dims_data[0] - 1; i++) {
+    outer_dims *= input_dims_data[i];
+  }
+  for (int i = 0; i < iters; i++) {
+    for (int outer_dim = 0; outer_dim < outer_dims; outer_dim++) {
+      int input_idx = outer_dim * n_frames * frame_size;
+      int golden_input_idx = i * n_frames * frame_size;
+      memcpy(&input_data[input_idx], &golden_input[golden_input_idx],
+             n_frames * frame_size * sizeof(T));
+    }
+    TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner->Invoke());
+    for (int outer_dim = 0; outer_dim < outer_dims; outer_dim++) {
+      int output_idx = outer_dim * n_frames * frame_step;
+      int golden_output_idx = i * n_frames * frame_step;
+      TF_LITE_MICRO_EXPECT_EQ(
+          0, memcmp(&output_data[output_idx], &golden_output[golden_output_idx],
+                    n_frames * frame_step * sizeof(T)));
+    }
+  }
+}
+
+template <typename T>
+void TestOverlapAdd(int* input_dims_data, T* input_data, int* output_dims_data,
+                    const T* golden_input, const T* golden_output, int iters,
+                    const unsigned char* flexbuffers_data,
+                    const unsigned int flexbuffers_data_size, T* output_data) {
+  OverlapAddKernelRunner<T>* overlap_add_runner =
+      new (overlap_add_kernel_runner_buffer) OverlapAddKernelRunner<T>(
+          input_dims_data, input_data, output_dims_data, output_data);
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_MICRO_EXPECT_EQ(overlap_add_runner->GetKernelRunner().InitAndPrepare(
+                              reinterpret_cast<const char*>(flexbuffers_data),
+                              flexbuffers_data_size),
+                          kTfLiteOk);
+  TestOverlapAddInvoke(input_dims_data, input_data, output_dims_data,
+                       golden_input, golden_output, iters, flexbuffers_data,
+                       flexbuffers_data_size, output_data,
+                       &overlap_add_runner->GetKernelRunner());
+}
+
+template <typename T>
+void TestOverlapAddReset(int* input_dims_data, T* input_data,
+                         int* output_dims_data, const T* golden_input,
+                         const T* golden_output, int iters,
+                         const unsigned char* flexbuffers_data,
+                         const unsigned int flexbuffers_data_size,
+                         T* output_data) {
+  OverlapAddKernelRunner<T>* overlap_add_runner =
+      new (overlap_add_kernel_runner_buffer) OverlapAddKernelRunner<T>(
+          input_dims_data, input_data, output_dims_data, output_data);
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_MICRO_EXPECT_EQ(overlap_add_runner->GetKernelRunner().InitAndPrepare(
+                              reinterpret_cast<const char*>(flexbuffers_data),
+                              flexbuffers_data_size),
+                          kTfLiteOk);
+  TestOverlapAddInvoke(input_dims_data, input_data, output_dims_data,
+                       golden_input, golden_output, iters, flexbuffers_data,
+                       flexbuffers_data_size, output_data,
+                       &overlap_add_runner->GetKernelRunner());
+  overlap_add_runner->GetKernelRunner().Reset();
+  TestOverlapAddInvoke(input_dims_data, input_data, output_dims_data,
+                       golden_input, golden_output, iters, flexbuffers_data,
+                       flexbuffers_data_size, output_data,
+                       &overlap_add_runner->GetKernelRunner());
+}
+
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(OverlapAddTestInt16) {
+  const int kInputSize = 3;
+  const int kOutputSize = 1;
+  int input_dims_data[] = {2, 1, kInputSize};
+  int output_dims_data[] = {1, kOutputSize};
+  int16_t input_data[kInputSize];
+  int16_t output_data = 0;
+  const int16_t golden_input[] = {125, -12, -895, 1000, 65, -212,
+                                  63,  71,  52,   1,    -17, 32};
+  const int16_t golden_output[] = {125, 988, -767, -140};
+
+  tflite::TestOverlapAdd(input_dims_data, input_data, output_dims_data,
+                         golden_input, golden_output,
+                         sizeof(golden_output) / sizeof(int16_t),
+                         g_gen_data_overlap_add_int16,
+                         g_gen_data_size_overlap_add_int16, &output_data);
+}
+
+TF_LITE_MICRO_TEST(OverlapAddTestFloat) {
+  const int kInputSize = 3;
+  const int kOutputSize = 1;
+  int input_dims_data[] = {2, 1, kInputSize};
+  int output_dims_data[] = {1, kOutputSize};
+  float input_data[kInputSize];
+  float output_data = 0;
+  const float golden_input[] = {12.5, -1.2, -89.5, 100.0, 6.5, -21.2,
+                                6.3,  7.1,  5.2,   0.1,   -1.7, 3.2};
+  const float golden_output[] = {12.5, 98.8, -76.7, -14.0};
+
+  tflite::TestOverlapAdd(input_dims_data, input_data, output_dims_data,
+                         golden_input, golden_output,
+                         sizeof(golden_output) / sizeof(float),
+                         g_gen_data_overlap_add_float,
+                         g_gen_data_size_overlap_add_float, &output_data);
+}
+
+TF_LITE_MICRO_TEST(OverlapAddTestNframes4Int16) {
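+  // Batched variant of the test above: each Invoke() consumes kNFrames frames
+  // at once, so the 12-element golden input is covered in a single iteration
+  // rather than four.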
const int kInputSize = 3; + const int kOutputSize = 1; + const int kNFrames = 4; + int input_dims_data[] = {2, kNFrames, kInputSize}; + int output_dims_data[] = {1, kNFrames * kOutputSize}; + int16_t input_data[kNFrames * kInputSize]; + int16_t output_data[kNFrames * kOutputSize]; + const int16_t golden_input[] = {125, -12, -895, 1000, 65, -212, + 63, 71, 52, 1, -17, 32}; + const int16_t golden_output[] = {125, 988, -767, -140}; + + const int kIters = + sizeof(golden_input) / kInputSize / kNFrames / sizeof(int16_t); + tflite::TestOverlapAdd(input_dims_data, input_data, output_dims_data, + golden_input, golden_output, kIters, + g_gen_data_overlap_add_int16, + g_gen_data_size_overlap_add_int16, output_data); +} + +TF_LITE_MICRO_TEST(OverlapAddTestNframes4OuterDims4Int16) { + const int kInputSize = 3; + const int kOutputSize = 1; + const int kNFrames = 4; + int input_dims_data[] = {4, 2, 2, kNFrames, kInputSize}; + int output_dims_data[] = {3, 2, 2, kNFrames * kOutputSize}; + int16_t input_data[2 * 2 * kNFrames * kInputSize]; + int16_t output_data[2 * 2 * kNFrames * kOutputSize]; + const int16_t golden_input[] = {125, -12, -895, 1000, 65, -212, + 63, 71, 52, 1, -17, 32}; + const int16_t golden_output[] = {125, 988, -767, -140}; + + const int kIters = + sizeof(golden_input) / kInputSize / kNFrames / sizeof(int16_t); + tflite::TestOverlapAdd(input_dims_data, input_data, output_dims_data, + golden_input, golden_output, kIters, + g_gen_data_overlap_add_int16, + g_gen_data_size_overlap_add_int16, output_data); +} + +TF_LITE_MICRO_TEST(testReset) { + const int kInputSize = 3; + const int kOutputSize = 1; + const int kNFrames = 4; + int input_dims_data[] = {4, 2, 2, kNFrames, kInputSize}; + int output_dims_data[] = {3, 2, 2, kNFrames * kOutputSize}; + int16_t input_data[2 * 2 * kNFrames * kInputSize]; + int16_t output_data[2 * 2 * kNFrames * kOutputSize]; + const int16_t golden_input[] = {125, -12, -895, 1000, 65, -212, + 63, 71, 52, 1, -17, 32}; + const int16_t golden_output[] = {125, 988, -767, -140}; + + const int kIters = + sizeof(golden_input) / kInputSize / kNFrames / sizeof(int16_t); + tflite::TestOverlapAddReset(input_dims_data, input_data, output_dims_data, + golden_input, golden_output, kIters, + g_gen_data_overlap_add_int16, + g_gen_data_size_overlap_add_int16, output_data); +} + +TF_LITE_MICRO_TESTS_END diff --git a/signal/micro/kernels/pcan.cc b/signal/micro/kernels/pcan.cc new file mode 100644 index 00000000000..9473e1b4fab --- /dev/null +++ b/signal/micro/kernels/pcan.cc @@ -0,0 +1,135 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include <stdint.h>
+#include <string.h>
+
+#include "signal/src/pcan_argc_fixed.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/flatbuffer_utils.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_context.h"
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+constexpr int kInputTensor = 0;
+constexpr int kNoiseEstimateTensor = 1;
+constexpr int kGainLutTensor = 2;
+constexpr int kOutputTensor = 0;
+
+// Indices into the init flexbuffer's vector.
+// The parameter's name is in the comment that follows.
+// Elements in the vectors are ordered alphabetically by parameter name.
+constexpr int kSnrShiftIndex = 0;  // 'snr_shift'
+
+struct TfLitePcanParams {
+  int snr_shift;
+};
+
+void* PcanInit(TfLiteContext* context, const char* buffer, size_t length) {
+  auto* params = static_cast<TfLitePcanParams*>(
+      context->AllocatePersistentBuffer(context, sizeof(TfLitePcanParams)));
+
+  tflite::FlexbufferWrapper fbw(reinterpret_cast<const uint8_t*>(buffer),
+                                length);
+  params->snr_shift = fbw.ElementAsInt32(kSnrShiftIndex);
+  return params;
+}
+
+TfLiteStatus PcanPrepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 3);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
+
+  MicroContext* micro_context = GetMicroContext(context);
+
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TfLiteTensor* noise_estimate =
+      micro_context->AllocateTempInputTensor(node, kNoiseEstimateTensor);
+  TF_LITE_ENSURE(context, noise_estimate != nullptr);
+  TfLiteTensor* gain_lut =
+      micro_context->AllocateTempInputTensor(node, kGainLutTensor);
+  TF_LITE_ENSURE(context, gain_lut != nullptr);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(noise_estimate), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(gain_lut), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteUInt32);
+  TF_LITE_ENSURE_TYPES_EQ(context, noise_estimate->type, kTfLiteUInt32);
+  TF_LITE_ENSURE_TYPES_EQ(context, gain_lut->type, kTfLiteInt16);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteUInt32);
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  micro_context->DeallocateTempTfLiteTensor(noise_estimate);
+  micro_context->DeallocateTempTfLiteTensor(gain_lut);
+  return kTfLiteOk;
+}
+
+TfLiteStatus PcanEval(TfLiteContext* context, TfLiteNode* node) {
+  auto* params = reinterpret_cast<TfLitePcanParams*>(node->user_data);
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  const TfLiteEvalTensor* noise_estimate =
+      tflite::micro::GetEvalInput(context, node, kNoiseEstimateTensor);
+  TF_LITE_ENSURE(context, noise_estimate != nullptr);
+  const TfLiteEvalTensor* gain_lut =
+      tflite::micro::GetEvalInput(context, node, kGainLutTensor);
+  TF_LITE_ENSURE(context, gain_lut != nullptr);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+
+  const uint32_t* input_data = tflite::micro::GetTensorData<uint32_t>(input);
+  const uint32_t* noise_estimate_data =
+      tflite::micro::GetTensorData<uint32_t>(noise_estimate);
+  const int16_t* gain_lut_data =
+      tflite::micro::GetTensorData<int16_t>(gain_lut);
+  uint32_t* output_data = tflite::micro::GetTensorData<uint32_t>(output);
+
+  int num_channels = input->dims->data[0];
+
+  size_t output_byte_size;
+  TF_LITE_ENSURE_OK(
+      context, tflite::TfLiteEvalTensorByteLength(output, &output_byte_size));
+
+  memcpy(output_data, input_data, output_byte_size);
+
+  tflite::tflm_signal::ApplyPcanAutoGainControlFixed(
+      gain_lut_data, params->snr_shift, noise_estimate_data, output_data,
+      num_channels);
+  return kTfLiteOk;
+}
+
+TFLMRegistration* Register_PCAN() {
+  static TFLMRegistration r =
+      tflite::micro::RegisterOp(PcanInit, PcanPrepare, PcanEval);
+  return &r;
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/micro/kernels/pcan_flexbuffers_generated_data.cc b/signal/micro/kernels/pcan_flexbuffers_generated_data.cc
new file mode 100644
index 00000000000..5fc9a9f740c
--- /dev/null
+++ b/signal/micro/kernels/pcan_flexbuffers_generated_data.cc
@@ -0,0 +1,7 @@
+#include "signal/micro/kernels/pcan_flexbuffers_generated_data.h"
+
+const int g_gen_data_size_snr_shift_6_test = 20;
+const unsigned char g_gen_data_snr_shift_6_test[] = {
+    0x73, 0x6e, 0x72, 0x5f, 0x73, 0x68, 0x69, 0x66, 0x74, 0x00,
+    0x01, 0x0b, 0x01, 0x01, 0x01, 0x06, 0x04, 0x02, 0x24, 0x01,
+};
diff --git a/signal/micro/kernels/pcan_flexbuffers_generated_data.h b/signal/micro/kernels/pcan_flexbuffers_generated_data.h
new file mode 100644
index 00000000000..32b4cd721bd
--- /dev/null
+++ b/signal/micro/kernels/pcan_flexbuffers_generated_data.h
@@ -0,0 +1,7 @@
+#ifndef SIGNAL_MICRO_KERNELS_PCAN_FLEXBUFFERS_GENERATED_DATA_H_
+#define SIGNAL_MICRO_KERNELS_PCAN_FLEXBUFFERS_GENERATED_DATA_H_
+
+extern const int g_gen_data_size_snr_shift_6_test;
+extern const unsigned char g_gen_data_snr_shift_6_test[];
+
+#endif  // SIGNAL_MICRO_KERNELS_PCAN_FLEXBUFFERS_GENERATED_DATA_H_
diff --git a/signal/micro/kernels/pcan_test.cc b/signal/micro/kernels/pcan_test.cc
new file mode 100644
index 00000000000..aecb5d17620
--- /dev/null
+++ b/signal/micro/kernels/pcan_test.cc
@@ -0,0 +1,132 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +#include "signal/micro/kernels/pcan_flexbuffers_generated_data.h" +#include "tensorflow/lite/micro/kernels/kernel_runner.h" +#include "tensorflow/lite/micro/test_helpers.h" +#include "tensorflow/lite/micro/testing/micro_test.h" + +namespace tflite { +namespace tflm_signal { +namespace { + +TfLiteStatus TestPCAN(const unsigned char* init_data, int init_data_size, + int* input_dims_data, const uint32_t* input_data, + int* noise_estimate_dims_data, + const uint32_t* noise_estimate_data, + int* gain_lut_dims_data, const int16_t* gain_lut_data, + int* output_dims_data, const uint32_t* golden, + uint32_t* output_data) { + TfLiteIntArray* input_dims = + ::tflite::testing::IntArrayFromInts(input_dims_data); + TfLiteIntArray* noise_estimate_dims = + ::tflite::testing::IntArrayFromInts(noise_estimate_dims_data); + TfLiteIntArray* gain_lut_dims = + ::tflite::testing::IntArrayFromInts(gain_lut_dims_data); + TfLiteIntArray* output_dims = + ::tflite::testing::IntArrayFromInts(output_dims_data); + const int output_len = ElementCount(*output_dims); + constexpr int kInputsSize = 3; + constexpr int kOutputsSize = 1; + constexpr int kTensorsSize = kInputsSize + kOutputsSize; + TfLiteTensor tensors[kTensorsSize] = { + tflite::testing::CreateTensor(input_data, input_dims), + tflite::testing::CreateTensor(noise_estimate_data, noise_estimate_dims), + tflite::testing::CreateTensor(gain_lut_data, gain_lut_dims), + tflite::testing::CreateTensor(output_data, output_dims), + }; + int inputs_array_data[] = {3, 0, 1, 2}; + TfLiteIntArray* inputs_array = + ::tflite::testing::IntArrayFromInts(inputs_array_data); + int outputs_array_data[] = {1, 3}; + TfLiteIntArray* outputs_array = + ::tflite::testing::IntArrayFromInts(outputs_array_data); + + const TFLMRegistration* registration = tflite::tflm_signal::Register_PCAN(); + micro::KernelRunner runner(*registration, tensors, kTensorsSize, inputs_array, + outputs_array, + /*builtin_data=*/nullptr); + + // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned + // char*. This small discrepancy results in compiler warnings unless we + // reinterpret_cast right before passing in the flexbuffer bytes to the + // KernelRunner. 
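+  // InitAndPrepare() forwards these bytes to the op's Init() and then runs
+  // Prepare(), so a bad init buffer or a tensor shape/type mismatch is
+  // reported here, before Invoke() is ever called.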
+  TfLiteStatus status = runner.InitAndPrepare(
+      reinterpret_cast<const char*>(init_data), init_data_size);
+  if (status != kTfLiteOk) {
+    return status;
+  }
+  status = runner.Invoke();
+  if (status != kTfLiteOk) {
+    return status;
+  }
+  for (int i = 0; i < output_len; ++i) {
+    TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]);
+  }
+  return kTfLiteOk;
+}
+
+}  // namespace
+}  // namespace tflm_signal
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(Mix1Ref1Case) {
+  int input_shape[] = {1, 40};
+  int noise_estimate_shape[] = {1, 40};
+  int gain_lut_shape[] = {1, 125};
+  int output_shape[] = {1, 40};
+  const uint32_t input[] = {286, 298, 305, 291, 290, 279, 273, 257, 250, 240,
+                            240, 233, 234, 230, 221, 205, 183, 159, 156, 188,
+                            239, 298, 345, 374, 380, 369, 359, 364, 372, 354,
+                            302, 243, 194, 135, 64,  72,  171, 245, 277, 304};
+  const uint32_t noise_estimate[] = {
+      7310, 18308, 7796, 17878, 7413, 17141, 6978, 15789, 6390, 14745,
+      6135, 14314, 5981, 14130, 5649, 12594, 4677, 9768,  3987, 11550,
+      6109, 18308, 8819, 22977, 9713, 22670, 9176, 22363, 9509, 21748,
+      7719, 14929, 4959, 8294,  1636, 4423,  4371, 15052, 7080, 18677};
+
+  const int16_t gain_lut[] = {
+      32636, 32633, 32630, -6,    0,    -21589, 32624, -12,   0,    -21589,
+      32612, -23,   -2,    -21589, 32587, -48,   0,     -21589, 32539, -96,
+      0,     -21589, 32443, -190,  0,    -21589, 32253, -378,  4,    -21589,
+      31879, -739,  18,    -21589, 31158, -1409, 62,    -21589, 29811, -2567,
+      202,   -21589, 27446, -4301, 562,  -21589, 23707, -6265, 1230, -21589,
+      18672, -7458, 1952,  -21589, 13166, -7030, 2212,  -21589, 8348,  -5342,
+      1868,  -21589, 4874,  -3459, 1282, -21589, 2697,  -2025, 774,  -21589,
+      1446,  -1120, 436,   -21589, 762,   -596,  232,   -21589, 398,   -313,
+      122,   -21589, 207,   -164,  64,   -21589, 107,   -85,   34,   -21589,
+      56,    -45,   18,    -21589, 29,    -22,   8,     -21589, 15,    -13,
+      6,     -21589, 8,     -8,    4,    -21589, 4,     -2,    0,    -21589,
+      2,     -3,    2,     -21589, 1,     0,     0,     -21589, 1,     -3,
+      2,     -21589, 0,     0,     0};
+
+  uint32_t output[40];
+  const uint32_t golden[] = {1301, 836, 1354, 827, 1312, 811, 1263, 779,
+                             1192, 753, 1160, 743, 1140, 738, 1096, 698,
+                             956,  607, 845,  667, 1157, 836, 1461, 912,
+                             1546, 908, 1496, 904, 1527, 895, 1346, 758,
+                             999,  548, 378,  344, 908,  761, 1274, 843};
+  memset(output, 0, sizeof(output));
+  TF_LITE_MICRO_EXPECT_EQ(
+      kTfLiteOk,
+      tflite::tflm_signal::TestPCAN(
+          g_gen_data_snr_shift_6_test, g_gen_data_size_snr_shift_6_test,
+          input_shape, input, noise_estimate_shape, noise_estimate,
+          gain_lut_shape, gain_lut, output_shape, golden, output));
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/signal/micro/kernels/rfft.cc b/signal/micro/kernels/rfft.cc
index fccc6eba8d8..c9472b05657 100644
--- a/signal/micro/kernels/rfft.cc
+++ b/signal/micro/kernels/rfft.cc
@@ -48,12 +48,13 @@ struct TfLiteAudioFrontendRfftParams {
   int32_t output_length;
   TfLiteType fft_type;
   T* work_area;
+  int scratch_buffer_index;
   int8_t* state;
 };
 
 template <typename T, size_t (*get_needed_memory_func)(int32_t),
           void* (*init_func)(int32_t, void*, size_t)>
-void* Init(TfLiteContext* context, const char* buffer, size_t length) {
+void* RfftInit(TfLiteContext* context, const char* buffer, size_t length) {
   TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
 
   const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
@@ -65,9 +66,6 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) {
   params->fft_length = fbw.ElementAsInt32(kFftLengthIndex);
   params->fft_type = typeToTfLiteType<T>();
 
-  params->work_area = static_cast<T*>(context->AllocatePersistentBuffer(
-      context, params->fft_length * sizeof(T)));
-
   size_t state_size = (*get_needed_memory_func)(params->fft_length);
   params->state =
       static_cast<int8_t*>(context->AllocatePersistentBuffer(
           context, state_size * sizeof(int8_t)));
@@ -76,7 +74,7 @@
 }
 
 template <typename T, TfLiteType TfLiteTypeEnum>
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus RfftPrepare(TfLiteContext* context, TfLiteNode* node) {
   TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
   TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
 
@@ -103,13 +101,15 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
   params->output_length =
       output_shape.Dims(output_shape.DimensionsCount() - 1) / 2;
 
+  context->RequestScratchBufferInArena(context, params->fft_length * sizeof(T),
+                                       &params->scratch_buffer_index);
   micro_context->DeallocateTempTfLiteTensor(input);
   micro_context->DeallocateTempTfLiteTensor(output);
   return kTfLiteOk;
 }
 
 template <typename T, void (*apply_func)(void*, T* input, Complex<T>*)>
-TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus RfftEval(TfLiteContext* context, TfLiteNode* node) {
   auto* params =
       reinterpret_cast<TfLiteAudioFrontendRfftParams<T>*>(node->user_data);
 
@@ -122,74 +122,76 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
       tflite::micro::GetEvalOutput(context, node, kOutputTensor);
   Complex<T>* output_data = tflite::micro::GetTensorData<Complex<T>>(output);
 
+  T* work_area = static_cast<T*>(
+      context->GetScratchBuffer(context, params->scratch_buffer_index));
+
   for (int input_idx = 0, output_idx = 0; input_idx < params->input_size;
        input_idx += params->input_length, output_idx += params->output_length) {
-    memcpy(params->work_area, &input_data[input_idx],
-           sizeof(T) * params->input_length);
+    memcpy(work_area, &input_data[input_idx], sizeof(T) * params->input_length);
     // Zero pad input to FFT length
-    memset(&params->work_area[params->input_length], 0,
+    memset(&work_area[params->input_length], 0,
            sizeof(T) * (params->fft_length - params->input_length));
-    (*apply_func)(params->state, params->work_area, &output_data[output_idx]);
+    (*apply_func)(params->state, work_area, &output_data[output_idx]);
   }
   return kTfLiteOk;
 }
 
-void* InitAll(TfLiteContext* context, const char* buffer, size_t length) {
+void* RfftInitAll(TfLiteContext* context, const char* buffer, size_t length) {
   const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
   const flexbuffers::Map& m = flexbuffers::GetRoot(buffer_t, length).AsMap();
   auto tensor_type = static_cast<TensorType>(m["T"].AsInt32());
 
   switch (tensor_type) {
     case TensorType_INT16: {
-      return Init<int16_t, tflm_signal::RfftInt16GetNeededMemory,
-                  tflm_signal::RfftInt16Init>(context, buffer, length);
+      return RfftInit<int16_t, tflm_signal::RfftInt16GetNeededMemory,
+                      tflm_signal::RfftInt16Init>(context, buffer, length);
     }
     case TensorType_INT32: {
-      return Init<int32_t, tflm_signal::RfftInt32GetNeededMemory,
-                  tflm_signal::RfftInt32Init>(context, buffer, length);
+      return RfftInit<int32_t, tflm_signal::RfftInt32GetNeededMemory,
+                      tflm_signal::RfftInt32Init>(context, buffer, length);
    }
     case TensorType_FLOAT32: {
-      return Init<float, tflm_signal::RfftFloatGetNeededMemory,
-                  tflm_signal::RfftFloatInit>(context, buffer, length);
+      return RfftInit<float, tflm_signal::RfftFloatGetNeededMemory,
+                      tflm_signal::RfftFloatInit>(context, buffer, length);
    }
     default:
       return nullptr;
   }
 }
 
-TfLiteStatus PrepareAll(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus RfftPrepareAll(TfLiteContext* context, TfLiteNode* node) {
   auto* params =
       reinterpret_cast<TfLiteAudioFrontendRfftParams<void>*>(node->user_data);
 
   switch (params->fft_type) {
     case kTfLiteInt16: {
-      return Prepare<int16_t, kTfLiteInt16>(context, node);
+      return RfftPrepare<int16_t, kTfLiteInt16>(context, node);
     }
     case kTfLiteInt32: {
-      return Prepare<int32_t, kTfLiteInt32>(context, node);
+      return RfftPrepare<int32_t, kTfLiteInt32>(context, node);
     }
     case kTfLiteFloat32: {
-      return Prepare<float, kTfLiteFloat32>(context, node);
+      return RfftPrepare<float, kTfLiteFloat32>(context, node);
     }
     default:
       return kTfLiteError;
   }
 }
 
-TfLiteStatus EvalAll(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus RfftEvalAll(TfLiteContext* context, TfLiteNode* node) {
   auto* params =
       reinterpret_cast<TfLiteAudioFrontendRfftParams<void>*>(node->user_data);
 
   switch (params->fft_type) {
     case kTfLiteInt16: {
-      return Eval<int16_t, tflm_signal::RfftInt16Apply>(context, node);
+      return RfftEval<int16_t, tflm_signal::RfftInt16Apply>(context, node);
     }
     case kTfLiteInt32: {
-      return Eval<int32_t, tflm_signal::RfftInt32Apply>(context, node);
+      return RfftEval<int32_t, tflm_signal::RfftInt32Apply>(context, node);
     }
     case kTfLiteFloat32: {
-      return Eval<float, tflm_signal::RfftFloatApply>(context, node);
+      return RfftEval<float, tflm_signal::RfftFloatApply>(context, node);
     }
     default:
       return kTfLiteError;
@@ -202,34 +204,34 @@ namespace tflm_signal {
 
 TFLMRegistration* Register_RFFT() {
   static TFLMRegistration r =
-      tflite::micro::RegisterOp(InitAll, PrepareAll, EvalAll);
+      tflite::micro::RegisterOp(RfftInitAll, RfftPrepareAll, RfftEvalAll);
   return &r;
 }
 
 TFLMRegistration* Register_RFFT_FLOAT() {
   static TFLMRegistration r = tflite::micro::RegisterOp(
-      Init<float, tflm_signal::RfftFloatGetNeededMemory,
-           tflm_signal::RfftFloatInit>,
-      Prepare<float, kTfLiteFloat32>,
-      Eval<float, tflm_signal::RfftFloatApply>);
+      RfftInit<float, tflm_signal::RfftFloatGetNeededMemory,
+               tflm_signal::RfftFloatInit>,
+      RfftPrepare<float, kTfLiteFloat32>,
+      RfftEval<float, tflm_signal::RfftFloatApply>);
   return &r;
 }
 
 TFLMRegistration* Register_RFFT_INT16() {
   static TFLMRegistration r = tflite::micro::RegisterOp(
-      Init<int16_t, tflm_signal::RfftInt16GetNeededMemory,
-           tflm_signal::RfftInt16Init>,
-      Prepare<int16_t, kTfLiteInt16>,
-      Eval<int16_t, tflm_signal::RfftInt16Apply>);
+      RfftInit<int16_t, tflm_signal::RfftInt16GetNeededMemory,
+               tflm_signal::RfftInt16Init>,
+      RfftPrepare<int16_t, kTfLiteInt16>,
+      RfftEval<int16_t, tflm_signal::RfftInt16Apply>);
   return &r;
 }
 
 TFLMRegistration* Register_RFFT_INT32() {
   static TFLMRegistration r = tflite::micro::RegisterOp(
-      Init<int32_t, tflm_signal::RfftInt32GetNeededMemory,
-           tflm_signal::RfftInt32Init>,
-      Prepare<int32_t, kTfLiteInt32>,
-      Eval<int32_t, tflm_signal::RfftInt32Apply>);
+      RfftInit<int32_t, tflm_signal::RfftInt32GetNeededMemory,
+               tflm_signal::RfftInt32Init>,
+      RfftPrepare<int32_t, kTfLiteInt32>,
+      RfftEval<int32_t, tflm_signal::RfftInt32Apply>);
   return &r;
 }
 
diff --git a/signal/micro/kernels/stacker.cc b/signal/micro/kernels/stacker.cc
new file mode 100644
index 00000000000..fc1a4a3769d
--- /dev/null
+++ b/signal/micro/kernels/stacker.cc
@@ -0,0 +1,176 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stdint.h>
+
+#include "signal/src/circular_buffer.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/flatbuffer_utils.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kOutputTensor = 0;
+constexpr int kOutputValidTensor = 1;
+
+// Indices into the init flexbuffer's vector.
+// The parameter's name is in the comment that follows.
+// Elements in the vectors are ordered alphabetically by parameter name.
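+//
+// Per output frame, the op emits num_channels * (stacker_left_context + 1 +
+// stacker_right_context) stacked values, then advances its circular buffer by
+// num_channels * stacker_step values, so consecutive outputs overlap whenever
+// the step is smaller than the stacked width.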
+constexpr int kNumChannelsIndex = 0;          // 'num_channels'
+constexpr int kStackerLeftContextIndex = 1;   // 'stacker_left_context'
+constexpr int kStackerRightContextIndex = 2;  // 'stacker_right_context'
+constexpr int kStackerStepIndex = 3;          // 'stacker_step'
+
+struct TFLMSignalStackerParams {
+  int32_t num_channels;
+  int32_t stacker_left_context;
+  int32_t stacker_right_context;
+  int32_t stacker_step;
+
+  size_t buffer_size;
+  size_t step_size;
+  bool stacker_has_first_frame;
+
+  int8_t* state;
+  tflm_signal::CircularBuffer* circular_buffer;
+};
+
+void* StackerInit(TfLiteContext* context, const char* buffer, size_t length) {
+  const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
+
+  auto* params =
+      static_cast<TFLMSignalStackerParams*>(context->AllocatePersistentBuffer(
+          context, sizeof(TFLMSignalStackerParams)));
+  if (params == nullptr) {
+    return nullptr;
+  }
+
+  tflite::FlexbufferWrapper fbw(buffer_t, length);
+  params->num_channels = fbw.ElementAsInt32(kNumChannelsIndex);
+  params->stacker_left_context = fbw.ElementAsInt32(kStackerLeftContextIndex);
+  params->stacker_right_context = fbw.ElementAsInt32(kStackerRightContextIndex);
+  params->stacker_step = fbw.ElementAsInt32(kStackerStepIndex);
+
+  params->buffer_size =
+      params->num_channels *
+      (params->stacker_left_context + params->stacker_right_context + 1);
+  params->step_size = params->num_channels * params->stacker_step;
+  params->stacker_has_first_frame = false;
+
+  size_t state_size =
+      tflm_signal::CircularBufferGetNeededMemory(params->buffer_size);
+  params->state = static_cast<int8_t*>(
+      context->AllocatePersistentBuffer(context, sizeof(int8_t) * state_size));
+
+  if (params->state == nullptr) {
+    return nullptr;
+  }
+
+  params->circular_buffer = tflm_signal::CircularBufferInit(
+      params->buffer_size, params->state, state_size);
+  return params;
+}
+
+TfLiteStatus StackerPrepare(TfLiteContext* context, TfLiteNode* node) {
+  TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
+  TF_LITE_ENSURE_EQ(context, NumOutputs(node), 2);
+
+  MicroContext* micro_context = GetMicroContext(context);
+
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+  TfLiteTensor* output =
+      micro_context->AllocateTempOutputTensor(node, kOutputTensor);
+  TF_LITE_ENSURE(context, output != nullptr);
+  TfLiteTensor* output_valid =
+      micro_context->AllocateTempOutputTensor(node, kOutputValidTensor);
+  TF_LITE_ENSURE(context, output_valid != nullptr);
+
+  TF_LITE_ENSURE_EQ(context, NumDimensions(input), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output), 1);
+  TF_LITE_ENSURE_EQ(context, NumDimensions(output_valid), 0);
+
+  TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteInt16);
+  TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt16);
+  TF_LITE_ENSURE_TYPES_EQ(context, output_valid->type, kTfLiteBool);
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(output);
+  micro_context->DeallocateTempTfLiteTensor(output_valid);
+  return kTfLiteOk;
+}
+
+TfLiteStatus StackerEval(TfLiteContext* context, TfLiteNode* node) {
+  auto* params = reinterpret_cast<TFLMSignalStackerParams*>(node->user_data);
+  TF_LITE_ENSURE(context, params != nullptr);
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+  TfLiteEvalTensor* output_valid =
+      tflite::micro::GetEvalOutput(context, node, kOutputValidTensor);
+
+  const int16_t* input_data =
+      tflite::micro::GetTensorData<int16_t>(input);
+
+  tflm_signal::CircularBufferWrite(params->circular_buffer, input_data,
+                                   params->num_channels);
+
+  // The first frame is replicated an extra left_context times to pad.
+  if (params->stacker_has_first_frame == false) {
+    tflm_signal::CircularBufferExtend(params->circular_buffer,
+                                      params->num_channels,
+                                      params->stacker_left_context);
+    params->stacker_has_first_frame = true;
+  }
+
+  int16_t* output_data = tflite::micro::GetTensorData<int16_t>(output);
+  bool* output_valid_data = tflite::micro::GetTensorData<bool>(output_valid);
+  if (tflm_signal::CircularBufferAvailable(params->circular_buffer) >=
+      params->buffer_size) {
+    tflm_signal::CircularBufferGet(params->circular_buffer, params->buffer_size,
+                                   output_data);
+    tflm_signal::CircularBufferDiscard(params->circular_buffer,
+                                       params->step_size);
+    *output_valid_data = true;
+  } else {
+    *output_valid_data = false;
+  }
+  return kTfLiteOk;
+}
+
+void StackerReset(TfLiteContext* context, void* buffer) {
+  auto* params = static_cast<TFLMSignalStackerParams*>(buffer);
+  tflm_signal::CircularBufferReset(params->circular_buffer);
+  params->stacker_has_first_frame = false;
+}
+
+}  // namespace
+
+namespace tflm_signal {
+TFLMRegistration* Register_STACKER() {
+  static TFLMRegistration r = tflite::micro::RegisterOp(
+      StackerInit, StackerPrepare, StackerEval, /*Free*/ nullptr, StackerReset);
+  return &r;
+}
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/stacker_flexbuffers_generated_data.cc b/signal/micro/kernels/stacker_flexbuffers_generated_data.cc
new file mode 100644
index 00000000000..654e4b7f65b
--- /dev/null
+++ b/signal/micro/kernels/stacker_flexbuffers_generated_data.cc
@@ -0,0 +1,41 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+// This file is generated.
See: +// tensorflow/lite/micro/kernels/test_data_generation/README.md + +#include "signal/micro/kernels/stacker_flexbuffers_generated_data.h" + +const int g_gen_data_size_stacker_3_channels_step_1 = 88; +const unsigned char g_gen_data_stacker_3_channels_step_1[] = { + 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, + 0x73, 0x00, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x6c, + 0x65, 0x66, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x00, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x72, 0x69, + 0x67, 0x68, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x00, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x73, 0x74, + 0x65, 0x70, 0x00, 0x04, 0x46, 0x3a, 0x26, 0x11, 0x04, 0x01, 0x04, + 0x03, 0x01, 0x00, 0x01, 0x04, 0x04, 0x04, 0x04, 0x08, 0x24, 0x01, +}; +const int g_gen_data_size_stacker_10_channels_step_2 = 88; +const unsigned char g_gen_data_stacker_10_channels_step_2[] = { + 0x6e, 0x75, 0x6d, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x6e, 0x65, 0x6c, + 0x73, 0x00, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x6c, + 0x65, 0x66, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x00, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x72, 0x69, + 0x67, 0x68, 0x74, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, + 0x00, 0x73, 0x74, 0x61, 0x63, 0x6b, 0x65, 0x72, 0x5f, 0x73, 0x74, + 0x65, 0x70, 0x00, 0x04, 0x46, 0x3a, 0x26, 0x11, 0x04, 0x01, 0x04, + 0x0a, 0x01, 0x00, 0x02, 0x04, 0x04, 0x04, 0x04, 0x08, 0x24, 0x01, +}; diff --git a/signal/micro/kernels/stacker_flexbuffers_generated_data.h b/signal/micro/kernels/stacker_flexbuffers_generated_data.h new file mode 100644 index 00000000000..47a38277ba3 --- /dev/null +++ b/signal/micro/kernels/stacker_flexbuffers_generated_data.h @@ -0,0 +1,25 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_STACKER_FLEXBUFFERS_DATA_H_ +#define SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_STACKER_FLEXBUFFERS_DATA_H_ + +extern const int g_gen_data_size_stacker_3_channels_step_1; +extern const unsigned char g_gen_data_stacker_3_channels_step_1[]; + +extern const int g_gen_data_size_stacker_10_channels_step_2; +extern const unsigned char g_gen_data_stacker_10_channels_step_2[]; + +#endif // SIGNAL_MICRO_KERNELS_TEST_DATA_GENERATION_GENERATE_STACKER_FLEXBUFFERS_DATA_H_ diff --git a/signal/micro/kernels/stacker_test.cc b/signal/micro/kernels/stacker_test.cc new file mode 100644 index 00000000000..d236c7539a8 --- /dev/null +++ b/signal/micro/kernels/stacker_test.cc @@ -0,0 +1,243 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stdint.h>
+
+#include "signal/micro/kernels/stacker_flexbuffers_generated_data.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace {
+
+constexpr int kInputsSize = 1;
+constexpr int kOutputsSize = 2;
+constexpr int kTensorsSize = kInputsSize + kOutputsSize;
+
+class StackerKernelRunner {
+ public:
+  StackerKernelRunner(int* input_dims_data, const int16_t* input_data,
+                      int* output_dims_data, int16_t* output_data,
+                      int* output_ready_dims_data, bool* output_ready_data)
+      : tensors_{testing::CreateTensor(
+                     input_data,
+                     tflite::testing::IntArrayFromInts(input_dims_data)),
+                 testing::CreateTensor(
+                     output_data,
+                     testing::IntArrayFromInts(output_dims_data)),
+                 testing::CreateTensor(
+                     output_ready_data,
+                     testing::IntArrayFromInts(output_ready_dims_data))},
+        inputs_array_{testing::IntArrayFromInts(inputs_array_data_)},
+        outputs_array_{testing::IntArrayFromInts(outputs_array_data_)},
+        kernel_runner_{*registration_, tensors_,       kTensorsSize,
+                       inputs_array_,  outputs_array_, nullptr} {}
+
+  micro::KernelRunner* kernel_runner() { return &kernel_runner_; }
+
+ private:
+  int inputs_array_data_[2] = {1, 0};
+  int outputs_array_data_[3] = {2, 1, 2};
+  TfLiteTensor tensors_[kTensorsSize] = {};
+  TfLiteIntArray* inputs_array_ = nullptr;
+  TfLiteIntArray* outputs_array_ = nullptr;
+  TFLMRegistration* registration_ = tflm_signal::Register_STACKER();
+  micro::KernelRunner kernel_runner_;
+};
+
+void TestStackerInvoke(int* output_dims_data, int16_t* output_data,
+                       bool* output_ready_data, const int16_t* golden,
+                       micro::KernelRunner* kernel_runner) {
+  TfLiteIntArray* output_dims = testing::IntArrayFromInts(output_dims_data);
+
+  const int output_len = ElementCount(*output_dims);
+
+  TF_LITE_MICRO_EXPECT_EQ(kernel_runner->Invoke(), kTfLiteOk);
+  TF_LITE_MICRO_EXPECT_EQ(*output_ready_data, 1);
+
+  for (int i = 0; i < output_len; ++i) {
+    TF_LITE_MICRO_EXPECT_EQ(golden[i], output_data[i]);
+  }
+}
+
+void TestStacker(int* input_dims_data, const int16_t* input_data,
+                 int* output_dims_data, int16_t* output_data,
+                 int* output_ready_dims_data, bool* output_ready_data,
+                 const int16_t* golden, const unsigned char* flexbuffers_data,
+                 const unsigned int flexbuffers_data_size) {
+  StackerKernelRunner stacker_runner(input_dims_data, input_data,
+                                     output_dims_data, output_data,
+                                     output_ready_dims_data, output_ready_data);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_MICRO_EXPECT_EQ(stacker_runner.kernel_runner()->InitAndPrepare(
+                              reinterpret_cast<const char*>(flexbuffers_data),
+                              flexbuffers_data_size),
+                          kTfLiteOk);
+  TestStackerInvoke(output_dims_data, output_data, output_ready_data, golden,
+                    stacker_runner.kernel_runner());
+}
+
+// TestStackerReset() runs a test with the given inputs twice with a reset with
+// the main purpose of testing the Stacker's Reset functionality. If you just
+// want to make sure Stacker's Op output matches a set of golden values for an
+// input use TestStacker() instead.
+void TestStackerReset(int* input_dims_data, const int16_t* input_data,
+                      int* output_dims_data, int16_t* output_data,
+                      int* output_ready_dims_data, bool* output_ready_data,
+                      const int16_t* golden,
+                      const unsigned char* flexbuffers_data,
+                      const unsigned int flexbuffers_data_size) {
+  StackerKernelRunner stacker_runner(input_dims_data, input_data,
+                                     output_dims_data, output_data,
+                                     output_ready_dims_data, output_ready_data);
+
+  // TfLite uses a char* for the raw bytes whereas flexbuffers use an unsigned
+  // char*. This small discrepancy results in compiler warnings unless we
+  // reinterpret_cast right before passing in the flexbuffer bytes to the
+  // KernelRunner.
+  TF_LITE_MICRO_EXPECT_EQ(stacker_runner.kernel_runner()->InitAndPrepare(
+                              reinterpret_cast<const char*>(flexbuffers_data),
+                              flexbuffers_data_size),
+                          kTfLiteOk);
+  TestStackerInvoke(output_dims_data, output_data, output_ready_data, golden,
+                    stacker_runner.kernel_runner());
+  stacker_runner.kernel_runner()->Reset();
+  TestStackerInvoke(output_dims_data, output_data, output_ready_data, golden,
+                    stacker_runner.kernel_runner());
+}
+
+}  // namespace
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
+
+TF_LITE_MICRO_TEST(StackerTest3ChannelStep1) {
+  int input_shape[] = {1, 3};
+  int output_shape[] = {1, 6};
+  int output_ready_shape[] = {0};
+  const int16_t input[] = {0x1234, 0x5678, 0x4321};
+  const int16_t golden[] = {0x1234, 0x5678, 0x4321, 0x1234, 0x5678, 0x4321};
+
+  int16_t output[6];
+  bool output_ready = false;
+
+  tflite::TestStacker(input_shape, input, output_shape, output,
+                      output_ready_shape, &output_ready, golden,
+                      g_gen_data_stacker_3_channels_step_1,
+                      g_gen_data_size_stacker_3_channels_step_1);
+}
+
+TF_LITE_MICRO_TEST(StackerTest10ChannelStep2_1stTest) {
+  int input_shape[] = {1, 10};
+  int output_shape[] = {1, 20};
+  int output_ready_shape[] = {0};
+
+  int16_t output[20];
+  bool output_ready = false;
+
+  const int16_t input[10] = {252, 477,  1071, 166,  1022,
+                             312, 1171, 1586, 1491, 145};
+
+  const int16_t golden[] = {252,  477,  1071, 166,  1022, 312,  1171,
+                            1586, 1491, 145,  252,  477,  1071, 166,
+                            1022, 312,  1171, 1586, 1491, 145};
+  tflite::TestStacker(input_shape, input, output_shape, output,
+                      output_ready_shape, &output_ready, golden,
+                      g_gen_data_stacker_10_channels_step_2,
+                      g_gen_data_size_stacker_10_channels_step_2);
+}
+
+TF_LITE_MICRO_TEST(StackerTest10ChannelStep2_2ndTest) {
+  int input_shape[] = {1, 10};
+  int output_shape[] = {1, 20};
+  int output_ready_shape[] = {0};
+
+  int16_t output[20];
+  bool output_ready = false;
+
+  const int16_t input[10] = {1060, 200, 69,  1519, 883,
+                             1317, 182, 724, 143,  334};
+
+  const int16_t golden[] = {1060, 200, 69, 1519, 883, 1317, 182, 724, 143, 334,
+                            1060, 200, 69, 1519, 883, 1317, 182, 724, 143, 334};
+
+  tflite::TestStacker(input_shape, input, output_shape, output,
+                      output_ready_shape, &output_ready, golden,
+                      g_gen_data_stacker_10_channels_step_2,
+                      g_gen_data_size_stacker_10_channels_step_2);
+}
+
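+// The remaining tests run the same inputs and goldens twice around a call to
+// Reset(). They only pass if Reset() clears the stacker's circular buffer and
+// its has-first-frame flag, so that the second run reproduces the first run's
+// output exactly.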
+TF_LITE_MICRO_TEST(StackerTestReset3ChannelStep1) {
+  int input_shape[] = {1, 3};
+  int output_shape[] = {1, 6};
+  int output_ready_shape[] = {0};
+  const int16_t input[] = {0x1234, 0x5678, 0x4321};
+  const int16_t golden[] = {0x1234, 0x5678, 0x4321, 0x1234, 0x5678, 0x4321};
+
+  int16_t output[6];
+  bool output_ready = false;
+
+  tflite::TestStackerReset(input_shape, input, output_shape, output,
+                           output_ready_shape, &output_ready, golden,
+                           g_gen_data_stacker_3_channels_step_1,
+                           g_gen_data_size_stacker_3_channels_step_1);
+}
+
+TF_LITE_MICRO_TEST(StackerTestReset10ChannelStep2_1stTest) {
+  int input_shape[] = {1, 10};
+  int output_shape[] = {1, 20};
+  int output_ready_shape[] = {0};
+
+  int16_t output[20];
+  bool output_ready = false;
+
+  const int16_t input[10] = {252, 477,  1071, 166,  1022,
+                             312, 1171, 1586, 1491, 145};
+
+  const int16_t golden[] = {252,  477,  1071, 166,  1022, 312,  1171,
+                            1586, 1491, 145,  252,  477,  1071, 166,
+                            1022, 312,  1171, 1586, 1491, 145};
+  tflite::TestStackerReset(input_shape, input, output_shape, output,
+                           output_ready_shape, &output_ready, golden,
+                           g_gen_data_stacker_10_channels_step_2,
+                           g_gen_data_size_stacker_10_channels_step_2);
+}
+
+TF_LITE_MICRO_TEST(StackerTestReset10ChannelStep2_2ndTest) {
+  int input_shape[] = {1, 10};
+  int output_shape[] = {1, 20};
+  int output_ready_shape[] = {0};
+
+  int16_t output[20];
+  bool output_ready = false;
+
+  const int16_t input[10] = {1060, 200, 69,  1519, 883,
+                             1317, 182, 724, 143,  334};
+
+  const int16_t golden[] = {1060, 200, 69, 1519, 883, 1317, 182, 724, 143, 334,
+                            1060, 200, 69, 1519, 883, 1317, 182, 724, 143, 334};
+
+  tflite::TestStackerReset(input_shape, input, output_shape, output,
+                           output_ready_shape, &output_ready, golden,
+                           g_gen_data_stacker_10_channels_step_2,
+                           g_gen_data_size_stacker_10_channels_step_2);
+}
+
+TF_LITE_MICRO_TESTS_END
diff --git a/signal/micro/kernels/window.cc b/signal/micro/kernels/window.cc
index e8508988164..cd9c4623768 100644
--- a/signal/micro/kernels/window.cc
+++ b/signal/micro/kernels/window.cc
@@ -41,7 +41,7 @@ struct TFLMSignalWindowParams {
   int32_t input_size;
 };
 
-void* Init(TfLiteContext* context, const char* buffer, size_t length) {
+void* WindowInit(TfLiteContext* context, const char* buffer, size_t length) {
   const uint8_t* buffer_t = reinterpret_cast<const uint8_t*>(buffer);
 
   auto* params =
@@ -53,7 +53,7 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) {
   return params;
 }
 
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus WindowPrepare(TfLiteContext* context, TfLiteNode* node) {
   TF_LITE_ENSURE_EQ(context, NumInputs(node), 2);
   TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
 
@@ -87,7 +87,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
   return kTfLiteOk;
 }
 
-TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus WindowEval(TfLiteContext* context, TfLiteNode* node) {
   auto* params = reinterpret_cast<TFLMSignalWindowParams*>(node->user_data);
 
   const TfLiteEvalTensor* input =
@@ -114,7 +114,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
 namespace tflm_signal {
 
 TFLMRegistration* Register_WINDOW() {
-  static TFLMRegistration r = tflite::micro::RegisterOp(Init, Prepare, Eval);
+  static TFLMRegistration r =
+      tflite::micro::RegisterOp(WindowInit, WindowPrepare, WindowEval);
   return &r;
 }
 
diff --git a/signal/micro/kernels/xtensa/fft_auto_scale_kernel.cc b/signal/micro/kernels/xtensa/fft_auto_scale_kernel.cc
new file mode 100644
index 00000000000..fbd739bca7a
--- /dev/null
+++
b/signal/micro/kernels/xtensa/fft_auto_scale_kernel.cc
@@ -0,0 +1,103 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/micro/kernels/fft_auto_scale_kernel.h"
+
+#include <stddef.h>
+#include <stdint.h>
+#include <string.h>
+
+#include "signal/src/fft_auto_scale.h"
+#include "signal/src/max_abs.h"
+#include "signal/src/msb.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_context.h"
+
+#if XCHAL_HAVE_HIFI3
+#include <xtensa/tie/xt_hifi2.h>
+namespace {
+// Implementation for DSPs that support the Hifi3 ISA. Bit exact with the
+// portable implementation in signal/src/fft_auto_scale.cc.
+int XtensaFftAutoScale(const int16_t* input, int size, int16_t* output) {
+  const int16_t max = tflite::tflm_signal::MaxAbs16(input, size);
+  int scale_bits = (sizeof(int16_t) * 8) -
+                   tflite::tflm_signal::MostSignificantBit32(max) - 1;
+  int i;
+  if (scale_bits > 0) {
+    const ae_int16x4* input_16x4_ptr =
+        reinterpret_cast<const ae_int16x4*>(input);
+    ae_int16x4* output_16x4_ptr = reinterpret_cast<ae_int16x4*>(output);
+    const int num_iterations = ((size + 3) >> 2);
+    for (i = 0; i < num_iterations; ++i) {
+      ae_int16x4 input_16x4;
+      AE_L16X4_IP(input_16x4, input_16x4_ptr, 8);
+      ae_f16x4 input_f16x4 = *reinterpret_cast<ae_f16x4*>(&input_16x4);
+      input_f16x4 = AE_SLAA16S(input_f16x4, scale_bits);
+      input_16x4 = *reinterpret_cast<ae_int16x4*>(&input_f16x4);
+      AE_S16X4_IP(input_16x4, output_16x4_ptr, 8);
+    }
+  } else {
+    memcpy(output, input, size * sizeof(output[0]));
+    scale_bits = 0;
+  }
+  return scale_bits;
+}
+}  // namespace
+#endif
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kOutputTensor = 0;
+constexpr int kScaleBitTensor = 1;
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+  TfLiteEvalTensor* scale_bit =
+      tflite::micro::GetEvalOutput(context, node, kScaleBitTensor);
+
+  const int16_t* input_data = tflite::micro::GetTensorData<int16_t>(input);
+  int16_t* output_data = tflite::micro::GetTensorData<int16_t>(output);
+  int32_t* scale_bit_data = tflite::micro::GetTensorData<int32_t>(scale_bit);
+
+#if XCHAL_HAVE_HIFI3
+  *scale_bit_data =
+      XtensaFftAutoScale(input_data, output->dims->data[0], output_data);
+#else
+  *scale_bit_data = tflm_signal::FftAutoScale(input_data,
+                                              output->dims->data[0],
+                                              output_data);
+#endif
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflm_signal {
+
+TFLMRegistration* Register_FFT_AUTO_SCALE() {
+  static TFLMRegistration r =
+      tflite::micro::RegisterOp(nullptr, FftAutoScalePrepare, Eval);
+  return &r;
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/micro/kernels/xtensa/filter_bank_square_root.cc 
b/signal/micro/kernels/xtensa/filter_bank_square_root.cc
new file mode 100644
index 00000000000..60e4119a578
--- /dev/null
+++ b/signal/micro/kernels/xtensa/filter_bank_square_root.cc
@@ -0,0 +1,72 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/micro/kernels/filter_bank_square_root.h"
+
+#include <stdint.h>
+
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/memory_helpers.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+
+// Defined in xtensa_square_root.S
+extern "C" uint32_t xtensa_sqrt_64(const uint64_t num);
+
+namespace tflite {
+namespace {
+
+constexpr int kInputTensor = 0;
+constexpr int kScaleBitsTensor = 1;
+constexpr int kOutputTensor = 0;
+
+void ApplyFilterbankSqrt(const uint64_t* input, int num_channels,
+                         int scale_down_bits, uint32_t* output) {
+  for (int i = 0; i < num_channels; ++i) {
+    output[i] = xtensa_sqrt_64(input[i]) >> scale_down_bits;
+  }
+}
+
+TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kInputTensor);
+  const TfLiteEvalTensor* scale_bits =
+      tflite::micro::GetEvalInput(context, node, kScaleBitsTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kOutputTensor);
+
+  const uint64_t* input_data = tflite::micro::GetTensorData<uint64_t>(input);
+  const int32_t* scale_bits_data =
+      tflite::micro::GetTensorData<int32_t>(scale_bits);
+  uint32_t* output_data = tflite::micro::GetTensorData<uint32_t>(output);
+  int32_t num_channels = input->dims->data[0];
+  ApplyFilterbankSqrt(input_data, num_channels, *scale_bits_data, output_data);
+  return kTfLiteOk;
+}
+
+}  // namespace
+
+namespace tflm_signal {
+
+TFLMRegistration* Register_FILTER_BANK_SQUARE_ROOT() {
+  static TFLMRegistration r =
+      tflite::micro::RegisterOp(nullptr, FilterBankSquareRootPrepare, Eval);
+  return &r;
+}
+
+}  // namespace tflm_signal
+
+}  // namespace tflite
diff --git a/signal/micro/kernels/xtensa/xtensa_square_root.S b/signal/micro/kernels/xtensa/xtensa_square_root.S
new file mode 100644
index 00000000000..054d72fee94
--- /dev/null
+++ b/signal/micro/kernels/xtensa/xtensa_square_root.S
@@ -0,0 +1,400 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+.section .note.GNU-stack,"",@progbits
+
+#include "xtensa/config/core-isa.h"
+
+#ifdef __XTENSA_CALL0_ABI__
+#define NO_REGISTER_WINDOW (1)
+#endif
+
+#if XCHAL_HAVE_WINDOWED == 0
+#define NO_REGISTER_WINDOW
+#endif
+
+// Since the 64 bit sqrt jumps into the middle of the 32 bit sqrt under certain
+// conditions, both functions should reserve the same amount of stack space.
+#define XTENSA_SQRT_STACK_SIZE 32
+
+.text
+.type xtensa_sqrt_64, @function
+.align 4
+.global xtensa_sqrt_64
+
+// Macros for our 64 bit operations. Since we don't have a carry/borrow bit
+// in the base ISA, these take up way more cycles than they should. These are
+// the "preferred instruction idioms" from 8.9.2 of the base ISA manual. Since
+// these macros define a jump (and I couldn't find a way to be clever and use
+// something like __LINE__/__FILE__ to define these automatically), you may
+// also have to provide an 'opname' that contains a unique string to define a
+// label for the macro.
+
+// dest must not be the same as num2, or this macro will not work!
+#define ADD_64(dest, num1, num2, opname) \
+  add.n dest##_low, num1##_low, num2##_low; \
+  add.n dest##_high, num1##_high, num2##_high; \
+  bgeu dest##_low, num2##_low, .add_64_jump_##opname; \
+  addi.n dest##_high, dest##_high, 1; \
+  .add_64_jump_##opname:
+
+// All three registers must be unique, or this macro will not work!
+#define SUB_64(dest, num1, num2, opname) \
+  sub dest##_low, num1##_low, num2##_low; \
+  sub dest##_high, num1##_high, num2##_high; \
+  bgeu num1##_low, num2##_low, .sub_64_jump_##opname; \
+  addi.n dest##_high, dest##_high, -1; \
+  .sub_64_jump_##opname:
+
+#define SRLI_64(dest, val, imm) \
+  slli scratch4, val##_high, (32 - imm); \
+  srli dest##_high, val##_high, imm; \
+  srli dest##_low, val##_low, imm; \
+  or dest##_low, dest##_low, scratch4;
+
+#define COND_MOV_64(op, dest, val, test) \
+  mov##op dest##_low, val##_low, test; \
+  mov##op dest##_high, val##_high, test
+
+#define num_low a2
+#define num_high a3
+#define bit_low a4
+#define bit_high a5
+#define res_low a6
+#define res_high a7
+#define temp1_low a8
+#define temp1_high a9
+#define temp2_low a10
+#define temp2_high a11
+#define scratch1 a12
+#define scratch2 a13
+#define scratch3 a14
+#define scratch4 a15
+#define temp3_low scratch1
+#define temp3_high scratch2
+
+.align 4
+xtensa_sqrt_64:
+#ifdef NO_REGISTER_WINDOW
+addi.n a1, a1, -XTENSA_SQRT_STACK_SIZE
+s32i.n a0, a1, 4
+s32i.n a11, a1, 8
+s32i.n a12, a1, 12
+s32i.n a13, a1, 16
+s32i.n a14, a1, 20
+s32i.n a15, a1, 24
+#else
+entry a1, XTENSA_SQRT_STACK_SIZE
+#endif
+// In the event that the upper word of the number is all zero, we can just
+// pretend that we're doing a 32 bit sqrt (but the rounding condition at the
+// end is slightly different, so we've got a bit of an anomaly there. Such is
+// life).
+beqz.n num_high, .xtensa_sqrt_32_start
+// ** uint64 res = 0;
+movi.n res_low, 0
+movi.n res_high, 0
+
+movi.n scratch2, 1
+
+// Setup 'bit' - first we need to know what bit to set it to.
+// ** int max_bit_number = 64 - MostSignificantBit_64(num);
+movi.n bit_low, 0
+nsau scratch1, num_high
+
+// ** max_bit_number |= 1;
+or scratch1, scratch2, scratch1
+
+// The amount we shift by is 31 - what's in scratch1 for the max bit number.
+// This is because we've got the two words, so we can't do a 64 bit shift.
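+// In C terms, the setup so far is roughly (a hedged sketch following the
+// `**` pseudocode above; count_leading_zeros() stands in for nsau):
+//   int shift = 31 - (count_leading_zeros(num_high) | 1);
+//   uint64_t bit = 1ull << (32 + shift);  // lands in the upper word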
+movi.n scratch3, 31
+sub scratch1, scratch3, scratch1
+
+// Do the shift.
+// ** uint64 bit = 1ull << (63 - max_bit_number);
+ssl scratch1
+sll bit_high, scratch2
+
+// Figure out how many iterations we're going to need. However, we already have
+// 31 - max_bit_number in scratch1, so just add 32 to that.
+// ** int iterations = (63 - max_bit_number) / 2 + 1;
+addi.n scratch1, scratch1, 32
+srli scratch1, scratch1, 1
+add scratch1, scratch1, scratch2
+
+// If the number of iterations is equal to 32, we're likely in an overflow
+// spot if we try to do a subtraction (since the uppermost bit is going to be
+// set, because 'bit' had to be shifted up so high). We have to do one
+// iteration of the loop using the pipeline-destroying branch instructions
+// that can compare two unsigned numbers. If we need fewer than 32 iterations,
+// we can skip this slow path and jump to the tight inner loop.
+blti scratch1, 32, .xtensa_sqrt_64_inner_loop_start
+
+// Cache bit + res.
+ADD_64(temp1, bit, res, temp1_bit_res)
+// Since we've stored a copy of bit + res, we can right shift res (since both
+// branches of the conditional are going to need it, one branch just needs to
+// perform an extra addition).
+// ** res >>= 1;
+SRLI_64(res, res, 1);
+
+// ** if (num >= res_plus_bit) {
+bltu num_high, temp1_high, .xtensa_sqrt_64_branch_skip
+bne num_high, temp1_high, .xtensa_sqrt_64_comparison_failed
+bltu num_low, temp1_low, .xtensa_sqrt_64_branch_skip
+.xtensa_sqrt_64_comparison_failed:
+
+// ** num -= res + bit;
+SUB_64(temp2, num, temp1, temp2_num_temp1_early_branch)
+// Since the sub can't use the same registers, we have to move it back to where
+// it belongs.
+mov.n num_low, temp2_low
+mov.n num_high, temp2_high
+// ** res += bit;
+ADD_64(res, res, bit, res_res_bit_early_branch)
+// ** }
+.xtensa_sqrt_64_branch_skip:
+
+// ** bit >>= 2;
+SRLI_64(bit, bit, 2)
+// Make sure we knock off this iteration when we fall into the inner loop.
+sub scratch1, scratch1, scratch2
+
+.xtensa_sqrt_64_inner_loop_start:
+loop scratch1, .xtensa_sqrt_64_round
+
+// We don't have enough registers to be as verbose as the 32 bit version, so
+// this version is not as easy to read. Instead of having the two operations in
+// the same style of conditional move, we compute both sides of the 'if' up
+// front, then fix up whichever result was incorrect at the end.
+SRLI_64(temp1, res, 1)
+ADD_64(res, res, bit, res_res_bit)
+
+SUB_64(temp2, num, res, num_res_temp2)
+ADD_64(res, temp1, bit, res_temp1_bit)
+
+COND_MOV_64(gez, num, temp2, temp2_high)
+COND_MOV_64(ltz, res, temp1, temp2_high)
+
+// ** bit >>= 2;
+SRLI_64(bit, bit, 2)
+
+.xtensa_sqrt_64_round:
+
+// Need to do if (num > res) { ++res; }, but we'll do it with conditional moves
+// again. Except we're going to do it slightly backwards, since we need to move
+// the result into the num register to be returned. We'll do this by setting
+// the return value to res + 1, but in the event that it was a mistake, we'll
+// conditionally move the raw result back into place.
+SUB_64(temp1, res, num, res_num_temp1)
+addi.n num_low, res_low, 1
+movgez num_low, res_low, temp1_high
+
+// But we may have overflowed num_low - set it back to res_low if it's been
+// zeroed out.
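+// (num_low was set to res_low + 1 above; if that increment wrapped to zero,
+// res_low was the all-ones pattern, rounding up is impossible, and the
+// increment must be undone.)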
+moveqz num_low, res_low, num_low
+
+#ifdef NO_REGISTER_WINDOW
+l32i.n a0, a1, 4
+l32i.n a11, a1, 8
+l32i.n a12, a1, 12
+l32i.n a13, a1, 16
+l32i.n a14, a1, 20
+l32i.n a15, a1, 24
+addi a1, a1, XTENSA_SQRT_STACK_SIZE
+ret.n
+#else
+retw.n
+#endif
+.xtensa_sqrt_64_end:
+  .size xtensa_sqrt_64, . - xtensa_sqrt_64
+
+
+#undef ADD_64
+#undef SUB_64
+#undef SRLI_64
+#undef COND_MOV_64
+
+#undef num_low
+#undef num_high
+#undef bit_low
+#undef bit_high
+#undef res_low
+#undef res_high
+#undef temp1_low
+#undef temp1_high
+#undef temp2_low
+#undef temp2_high
+#undef scratch1
+#undef scratch2
+#undef scratch3
+#undef scratch4
+#undef temp3_low
+#undef temp3_high
+.text
+.type xtensa_sqrt_32, @function
+.align 4
+.global xtensa_sqrt_32
+
+// Make the program more readable...
+#define num a2
+#define bit a4
+#define res a5
+#define one a6
+#define max_bit_number a7
+#define iterations max_bit_number
+#define bit_plus_res a8
+#define num_minus_bit_plus_res a9
+#define res_shift_left_plus_bit a10
+#define res_minus_num res_shift_left_plus_bit
+
+xtensa_sqrt_32:
+#ifdef NO_REGISTER_WINDOW
+addi.n a1, a1, -XTENSA_SQRT_STACK_SIZE
+s32i.n a0, a1, 4
+s32i.n a11, a1, 8
+s32i.n a12, a1, 12
+s32i.n a13, a1, 16
+s32i.n a14, a1, 20
+s32i.n a15, a1, 24
+#else
+entry a1, XTENSA_SQRT_STACK_SIZE
+#endif
+
+.xtensa_sqrt_32_start:
+// If the number is zero, just quickly exit without doing anything.
+beqz.n num, .xtensa_sqrt_32_return
+
+// ** uint32 res = 0;
+movi.n res, 0
+// Also, setup the handy constant we need a few times.
+movi.n one, 1
+
+// This will give us (32 - index of the first bit that is set).
+// ** int max_bit_number = 32 - MostSignificantBit_32(num);
+nsau max_bit_number, num
+
+// ** max_bit_number |= one;
+or max_bit_number, max_bit_number, one
+
+// The amount we shift by is 31 - what we stored in max_bit_number.
+movi.n a15, 31
+sub max_bit_number, a15, max_bit_number
+
+// Do the shift.
+// ** uint32 bit = 1 << (31 - max_bit_number);
+ssl max_bit_number
+sll bit, one
+
+// Compute the number of iterations we're going to need.
+// ** int iterations = (31 - max_bit_number) / 2 + 1;
+srli iterations, max_bit_number, 1
+add iterations, iterations, one
+
+// If the number of iterations is equal to 16, we're likely in an overflow
+// spot if we try to do a subtraction (since the uppermost bit is going to be
+// set, because 'bit' had to be shifted up so high). We have to do one
+// iteration of the loop using the pipeline-destroying branch instructions
+// that can compare two unsigned numbers. If we need fewer than 16 iterations,
+// we can skip this slow path and jump to the tight inner loop.
+blti iterations, 16, .xtensa_sqrt_32_inner_loop_start
+
+// Cache bit + res into another register.
+add.n bit_plus_res, bit, res
+// Since we've stored a copy of bit + res, we can right shift res (since both
+// branches of the conditional are going to need it, one branch just needs to
+// perform an extra addition).
+// ** res >>= 1;
+srli res, res, 1
+// ** if (num >= res_plus_bit) {
+bltu num, bit_plus_res, .xtensa_sqrt_32_branch_skip
+// ** num -= res + bit;
+sub num, num, bit_plus_res
+// ** res += bit;
+add res, res, bit
+// ** }
+.xtensa_sqrt_32_branch_skip:
+
+// ** bit >>= 2;
+srli bit, bit, 2
+// Make sure we knock off this iteration when we fall into the inner loop.
+sub iterations, iterations, one
+
+.xtensa_sqrt_32_inner_loop_start:
+// Start a zero overhead loop for the number of remaining iterations.
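+// ('loop' is Xtensa's zero-overhead loop instruction: the hardware repeats
+// the block up to the given label 'iterations' times with no per-iteration
+// branch.)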
+loop iterations, .xtensa_sqrt_32_round
+
+// Cache bit + res into another register.
+add.n bit_plus_res, bit, res
+// ** res >>= 1;
+srli res, res, 1
+
+// We can dodge a hefty branch penalty by doing conditional moves - so we need
+// to compute the values that num and res would take if the 'if' branch were
+// taken. If the condition is true, then we'll copy them across.
+
+// Compute num - bit_plus_res. We can use this for the conditional check
+// against zero.
+sub num_minus_bit_plus_res, num, bit_plus_res
+// Compute the shifted res + bit.
+add res_shift_left_plus_bit, res, bit
+
+// Copy the values if the condition is true.
+movgez num, num_minus_bit_plus_res, num_minus_bit_plus_res
+movgez res, res_shift_left_plus_bit, num_minus_bit_plus_res
+
+// ** bit >>= 2;
+srli bit, bit, 2
+
+.xtensa_sqrt_32_round:
+
+// Need to do if (num > res) { ++res; }, but we'll do it with conditional moves
+// again. Except we're going to do it slightly backwards, since we need to move
+// the result into the num register to be returned. We'll do this by setting
+// the return value to res + 1, but in the event that it was a mistake, we'll
+// conditionally move the raw result back into place.
+sub res_minus_num, res, num
+add.n num, res, one
+movgez num, res, res_minus_num
+
+// But we might have also pooched the rounding by adding an extra bit; make
+// sure we don't explode when we overflow.
+clamps num, num, 16
+
+.xtensa_sqrt_32_return:
+#ifdef NO_REGISTER_WINDOW
+l32i.n a0, a1, 4
+l32i.n a11, a1, 8
+l32i.n a12, a1, 12
+l32i.n a13, a1, 16
+l32i.n a14, a1, 20
+l32i.n a15, a1, 24
+addi a1, a1, XTENSA_SQRT_STACK_SIZE
+ret.n
+#else
+retw.n
+#endif
+
+#undef num
+#undef bit
+#undef res
+#undef one
+#undef max_bit_number
+#undef iterations
+#undef bit_plus_res
+#undef num_minus_bit_plus_res
+#undef res_shift_left_plus_bit
+#undef res_minus_num
diff --git a/signal/src/BUILD b/signal/src/BUILD
index 9b5e4c6d7ab..e5c10600705 100644
--- a/signal/src/BUILD
+++ b/signal/src/BUILD
@@ -8,6 +8,72 @@ cc_library(
     hdrs = ["complex.h"],
 )
 
+cc_library(
+    name = "fft_auto_scale",
+    srcs = ["fft_auto_scale.cc"],
+    hdrs = ["fft_auto_scale.h"],
+    deps = [
+        ":max_abs",
+        ":msb_32",
+    ],
+)
+
+cc_library(
+    name = "irfft",
+    srcs = [
+        "irfft_float.cc",
+        "irfft_int16.cc",
+        "irfft_int32.cc",
+    ],
+    hdrs = ["irfft.h"],
+    deps = [
+        ":complex",
+        "//signal/src/kiss_fft_wrappers",
+    ],
+)
+
+cc_library(
+    name = "max_abs",
+    srcs = ["max_abs.cc"],
+    hdrs = ["max_abs.h"],
+)
+
+cc_library(
+    name = "square_root_32",
+    srcs = ["square_root_32.cc"],
+    hdrs = ["square_root.h"],
+    deps = [":msb_32"],
+)
+
+cc_library(
+    name = "square_root_64",
+    srcs = ["square_root_64.cc"],
+    hdrs = ["square_root.h"],
+    deps = [
+        ":msb_64",
+        ":square_root_32",
+    ],
+)
+
+cc_library(
+    name = "log",
+    srcs = ["log.cc"],
+    hdrs = ["log.h"],
+    deps = [":msb_32"],
+)
+
+cc_library(
+    name = "msb_32",
+    srcs = ["msb_32.cc"],
+    hdrs = ["msb.h"],
+)
+
+cc_library(
+    name = "msb_64",
+    srcs = ["msb_64.cc"],
+    hdrs = ["msb.h"],
+)
+
 cc_library(
     name = "rfft",
     srcs = [
@@ -27,3 +93,62 @@ cc_library(
     srcs = ["window.cc"],
     hdrs = ["window.h"],
 )
+
+cc_library(
+    name = "circular_buffer",
+    srcs = ["circular_buffer.cc"],
+    hdrs = ["circular_buffer.h"],
+)
+
+cc_library(
+    name = "overlap_add",
+    srcs = ["overlap_add.cc"],
+    hdrs = ["overlap_add.h"],
+)
+
+cc_library(
+    name = "energy",
+    srcs = ["energy.cc"],
+    hdrs = ["energy.h"],
+    deps = [":complex"],
+)
+
+cc_library(
+    name = "filter_bank",
+    srcs = ["filter_bank.cc"],
+    hdrs = 
["filter_bank.h"], +) + +cc_library( + name = "filter_bank_log", + srcs = ["filter_bank_log.cc"], + hdrs = ["filter_bank_log.h"], + deps = [ + ":log", + ], +) + +cc_library( + name = "filter_bank_spectral_subtraction", + srcs = ["filter_bank_spectral_subtraction.cc"], + hdrs = ["filter_bank_spectral_subtraction.h"], +) + +cc_library( + name = "filter_bank_square_root", + srcs = ["filter_bank_square_root.cc"], + hdrs = ["filter_bank_square_root.h"], + deps = [ + ":square_root_64", + ], +) + +cc_library( + name = "pcan_argc_fixed", + srcs = ["pcan_argc_fixed.cc"], + hdrs = ["pcan_argc_fixed.h"], + deps = [ + ":msb_32", + "//tensorflow/lite/kernels/internal:compatibility", + ], +) diff --git a/signal/src/circular_buffer.cc b/signal/src/circular_buffer.cc new file mode 100644 index 00000000000..7638d912e39 --- /dev/null +++ b/signal/src/circular_buffer.cc @@ -0,0 +1,290 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "signal/src/circular_buffer.h" + +#include +#include +#include + +#define ASSERT assert + +namespace tflite { +namespace tflm_signal { +// TODO(b/286250473): remove namespace once de-duped libraries above +void CircularBufferReset(tflm_signal::CircularBuffer* cb) { + cb->read = 0; + cb->write = 0; + cb->empty = 1; + cb->buffer = (int16_t*)(cb + 1); + memset(cb->buffer, 0, sizeof(cb->buffer[0]) * cb->buffer_size); +} + +size_t CircularBufferGetNeededMemory(size_t capacity) { + return sizeof(CircularBuffer) + sizeof(int16_t) * 2 * capacity; +} + +CircularBuffer* CircularBufferInit(size_t capacity, void* state, + size_t state_size) { + ASSERT(CircularBufferGetNeededMemory(capacity) >= state_size); + CircularBuffer* cb = (CircularBuffer*)state; + cb->buffer_size = 2 * capacity; + cb->capacity = capacity; + CircularBufferReset(cb); + return cb; +} + +size_t CircularBufferCapacity(const tflm_signal::CircularBuffer* cb) { + return cb->capacity; +} + +bool CircularBufferFull(const tflm_signal::CircularBuffer* cb) { + return cb->read == cb->write && cb->empty == 0; +} + +bool CircularBufferEmpty(const tflm_signal::CircularBuffer* cb) { + return cb->empty == 1; +} + +size_t CircularBufferAvailable(const tflm_signal::CircularBuffer* cb) { + const int32_t diff = cb->write - cb->read; + if (diff > 0) { + return diff; + } else if (diff < 0) { + return cb->capacity + diff; + } else if (cb->empty == 1) { + return 0; + } else { + return cb->capacity; + } +} + +size_t CircularBufferCanWrite(const tflm_signal::CircularBuffer* cb) { + return cb->capacity - CircularBufferAvailable(cb); +} + +void CircularBufferAdd(tflm_signal::CircularBuffer* cb, int16_t value) { + ASSERT(!CircularBufferFull(cb)); + cb->buffer[cb->write] = value; + cb->buffer[cb->write + cb->capacity] = value; + if (++cb->write == cb->capacity) { + cb->write = 0; + } + cb->empty = 0; +} + +void CircularBufferWrite(tflm_signal::CircularBuffer* cb, const int16_t* values, + size_t n) { + if (n > 0) { + 
ASSERT(CircularBufferCanWrite(cb) >= n); + size_t write = cb->write; + int16_t* buffer = cb->buffer; + const size_t capacity = cb->capacity; + const size_t end = write + n; + + memcpy(buffer + write, values, n * sizeof(int16_t)); + if (end < capacity) { + memcpy(buffer + capacity + write, values, n * sizeof(int16_t)); + write += n; + } else { + const size_t n1 = capacity - write; + const size_t nbytes1 = n1 * sizeof(int16_t); + memcpy(buffer + capacity + write, values, nbytes1); + const size_t n2 = end - capacity; + if (n2 > 0) { + const size_t nbytes2 = n2 * sizeof(int16_t); + memcpy(buffer, values + n1, nbytes2); + } + write = n2; + } + cb->write = write; + cb->empty = 0; + } +} + +void CircularBufferWriteZeros(tflm_signal::CircularBuffer* cb, size_t n) { + if (n > 0) { + ASSERT(CircularBufferCanWrite(cb) >= n); + size_t write = cb->write; + int16_t* buffer = cb->buffer; + const size_t capacity = cb->capacity; + const size_t end = write + n; + + memset(buffer + write, 0, n * sizeof(int16_t)); + if (end < capacity) { + memset(buffer + capacity + write, 0, n * sizeof(int16_t)); + write += n; + } else { + const size_t n1 = capacity - write; + const size_t nbytes1 = n1 * sizeof(int16_t); + memset(buffer + capacity + write, 0, nbytes1); + const size_t n2 = end - capacity; + if (n2 > 0) { + const size_t nbytes2 = n2 * sizeof(int16_t); + memset(buffer, 0, nbytes2); + } + write = n2; + } + cb->write = write; + cb->empty = 0; + } +} + +int16_t* CircularBufferReserveForWrite(tflm_signal::CircularBuffer* cb, + size_t n) { + ASSERT(cb->write + n <= cb->capacity); + int16_t* write_ptr = cb->buffer + cb->write; + cb->write += n; + if (cb->write == cb->capacity) { + cb->write = 0; + } + cb->empty = cb->empty && n == 0; + return write_ptr; +} + +void CircularBufferExtend(tflm_signal::CircularBuffer* cb, size_t count, + int32_t n) { + if (n > 0 && count > 0) { + ASSERT(CircularBufferCanWrite(cb) >= count * n); + ASSERT(CircularBufferAvailable(cb) >= count); + const size_t capacity = cb->capacity; + // start pos of region to copy + const size_t start = + (count > cb->write) ? cb->write + capacity - count : cb->write - count; + const size_t end = start + count; + int i; + if (end <= capacity) { + // the source elements are contiguous + for (i = 0; i < n; ++i) { + CircularBufferWrite(cb, cb->buffer + start, count); + } + } else { + // the source elements wrap around the end of the buffer + for (i = 0; i < n; ++i) { + const size_t n1 = capacity - start; + const size_t n2 = count - n1; + CircularBufferWrite(cb, cb->buffer + start, n1); + CircularBufferWrite(cb, cb->buffer, n2); + } + } + } + // Note: no need to update empty flag +} + +int16_t CircularBufferRemove(tflm_signal::CircularBuffer* cb) { + ASSERT(!CircularBufferEmpty(cb)); + const int16_t result = cb->buffer[cb->read]; + if (++cb->read == cb->capacity) { + cb->read = 0; + } + if (cb->read == cb->write) { + cb->empty = 1; + } + return result; +} + +int16_t CircularBufferPeek(const tflm_signal::CircularBuffer* cb, + size_t index) { + ASSERT(CircularBufferAvailable(cb) > index); + size_t target = cb->read + index; + while (target >= cb->capacity) { + target -= cb->capacity; + } + return cb->buffer[target]; +} + +void CircularBufferRewind(tflm_signal::CircularBuffer* cb, size_t n) { + ASSERT(n <= CircularBufferCanWrite(cb)); + if (n > cb->read) { + // Must add before subtracting because types are unsigned. 
+    cb->read = (cb->read + cb->capacity) - n;
+  } else {
+    cb->read -= n;
+  }
+  if (n > 0) cb->empty = 0;
+}
+
+const int16_t* CircularBufferPeekDirect(const tflm_signal::CircularBuffer* cb,
+                                        size_t index) {
+  ASSERT(CircularBufferAvailable(cb) > index);
+  size_t target = cb->read + index;
+  while (target >= cb->capacity) {
+    target -= cb->capacity;
+  }
+  return cb->buffer + target;
+}
+
+const int16_t* CircularBufferPeekMax(const tflm_signal::CircularBuffer* cb,
+                                     size_t* n) {
+  if (CircularBufferAvailable(cb) > 0) {
+    *n = (cb->write <= cb->read) ? cb->capacity - cb->read
+                                 : cb->write - cb->read;
+    return cb->buffer + cb->read;
+  } else {
+    *n = 0;
+    return NULL;
+  }
+}
+
+void CircularBufferGet(tflm_signal::CircularBuffer* cb, size_t n,
+                       int16_t* values) {
+  ASSERT(CircularBufferAvailable(cb) >= n);
+  const int16_t* buffer = cb->buffer;
+  const size_t read = cb->read;
+  const size_t end = read + n;
+  const size_t capacity = cb->capacity;
+  if (end <= capacity) {
+    memcpy(values, buffer + read, n * sizeof(int16_t));
+  } else {
+    const size_t n1 = capacity - read;
+    const size_t n2 = end - capacity;
+    const size_t nbytes1 = n1 * sizeof(int16_t);
+    const size_t nbytes2 = n2 * sizeof(int16_t);
+    memcpy(values, buffer + read, nbytes1);
+    memcpy(values + n1, buffer, nbytes2);
+  }
+}
+
+void CircularBufferDiscard(tflm_signal::CircularBuffer* cb, size_t n) {
+  ASSERT(n > 0);
+  ASSERT(CircularBufferAvailable(cb) >= n);
+  cb->read += n;
+  if (cb->read >= cb->capacity) {
+    cb->read -= cb->capacity;
+  }
+  if (cb->read == cb->write) {
+    cb->empty = 1;
+  }
+}
+
+void CircularBufferShift(tflm_signal::CircularBuffer* cb, int n) {
+  if (n < 0) {
+    ASSERT(-n <= (int)cb->capacity);
+    if ((int)cb->read < -n) {
+      // First add then subtract to ensure positivity, as the types are
+      // unsigned.
+      cb->read += cb->capacity;
+    }
+    cb->read += n;
+  } else {
+    ASSERT(n <= (int)cb->capacity);
+    cb->read += n;
+    if (cb->read >= cb->capacity) {
+      cb->read -= cb->capacity;
+    }
+  }
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/circular_buffer.h b/signal/src/circular_buffer.h
new file mode 100644
index 00000000000..d175a9b99f3
--- /dev/null
+++ b/signal/src/circular_buffer.h
@@ -0,0 +1,118 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_CIRCULAR_BUFFER_H_
+#define SIGNAL_SRC_CIRCULAR_BUFFER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+struct CircularBuffer {
+  // Max number of elements, the value passed-in to CircularBufferInit.
+  size_t capacity;
+  // Next position to read.
+  size_t read;
+  // Next position to write.
+  size_t write;
+  // Flag to indicate emptiness.
+  int32_t empty;
+  // Size of `buffer` in elements: always 2 * capacity.
+  int32_t buffer_size;
+  // Array of the circular buffer elements (integers). Points into the same
+  // allocation as the struct itself, directly after it (see
+  // CircularBufferReset).
+  int16_t* buffer;
+};
+
+// Returns the size of the memory that the circular buffer needs
+// in order to hold `capacity` items.
+size_t CircularBufferGetNeededMemory(size_t capacity);
+
+// Initializes an instance of the circular buffer that holds `capacity` items.
+// `state` points to a memory allocation of size `state_size`. The size
+// must be greater than or equal to the value returned by
+// CircularBufferGetNeededMemory(capacity). Fails if it isn't.
+// On success, returns a pointer to the circular buffer's object.
+CircularBuffer* CircularBufferInit(size_t capacity, void* state,
+                                   size_t state_size);
+
+// Resets a circular buffer to its initial empty state.
+void CircularBufferReset(CircularBuffer* cb);
+
+size_t CircularBufferCapacity(const CircularBuffer* cb);
+
+bool CircularBufferFull(const CircularBuffer* cb);
+
+bool CircularBufferEmpty(const CircularBuffer* cb);
+
+// Returns the number of elements ready to read.
+size_t CircularBufferAvailable(const CircularBuffer* cb);
+
+// Returns the number of elements available to write.
+size_t CircularBufferCanWrite(const CircularBuffer* cb);
+
+// Adds a single `value` to the buffer and advances the write pointer.
+void CircularBufferAdd(CircularBuffer* cb, int16_t value);
+
+// Writes `n` `values` into the buffer and advances the write pointer.
+void CircularBufferWrite(CircularBuffer* cb, const int16_t* values, size_t n);
+
+// Writes `n` zeros into the buffer and advances the write pointer.
+void CircularBufferWriteZeros(CircularBuffer* cb, size_t n);
+
+// Returns a pointer to a buffer where elements can be written, and
+// advances the write pointer as though they have already been written.
+// Fails if `n` elements are not available contiguously at the current
+// write position.
+int16_t* CircularBufferReserveForWrite(CircularBuffer* cb, size_t n);
+
+// Copies the final region (`count` elements) of the buffer `n` times to
+// the end of the buffer.
+void CircularBufferExtend(CircularBuffer* cb, size_t count, int32_t n);
+
+// Reads a single value from the buffer and advances the read pointer.
+int16_t CircularBufferRemove(CircularBuffer* cb);
+
+// Reads the value at the given `index`; does not modify the read pointer.
+int16_t CircularBufferPeek(const CircularBuffer* cb, size_t index);
+
+// Rewinds the read pointer to restore the previous `n` values read.
+void CircularBufferRewind(CircularBuffer* cb, size_t n);
+
+// Returns a pointer directly into the circular buffer at the given `index`.
+// Caller is responsible for not reading past the end.
+const int16_t* CircularBufferPeekDirect(const CircularBuffer* cb,
+                                        size_t index);
+
+// Returns a pointer into the circular buffer at the current read pointer,
+// setting `n` to the number of values available to be read from here.
+const int16_t* CircularBufferPeekMax(const CircularBuffer* cb, size_t* n);
+
+// Copies `n` `values` from the buffer without advancing the read pointer
+// and without updating the empty flag.
+void CircularBufferGet(CircularBuffer* cb, size_t n, int16_t* values);
+
+// Discards the next `n` values by advancing the read index.
+// Valid only for n > 0.
+void CircularBufferDiscard(CircularBuffer* cb, size_t n);
+
+// Shifts the read index by `n` values (`n` can be negative), i.e. moves
+// the read pointer without copying any data.
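+// For example (hypothetical usage; the sample counts are illustrative only):
+//   CircularBufferShift(cb, 2);   // skip the two oldest unread samples
+//   CircularBufferShift(cb, -2);  // step back to make them readable again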
+void CircularBufferShift(CircularBuffer* cb, int n);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_CIRCULAR_BUFFER_H_
diff --git a/signal/src/energy.cc b/signal/src/energy.cc
new file mode 100644
index 00000000000..3ea5fc09bfc
--- /dev/null
+++ b/signal/src/energy.cc
@@ -0,0 +1,33 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#include "signal/src/energy.h"
+
+#include "signal/src/complex.h"
+
+namespace tflite {
+namespace tflm_signal {
+void SpectrumToEnergy(const Complex<int16_t>* input, int start_index,
+                      int end_index, uint32_t* output) {
+  for (int i = start_index; i < end_index; i++) {
+    const int16_t real = input[i].real;  // 15 bits
+    const int16_t imag = input[i].imag;  // 15 bits
+    // 31 bits
+    output[i] = (static_cast<int32_t>(real) * real) +
+                (static_cast<int32_t>(imag) * imag);
+  }
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/energy.h b/signal/src/energy.h
new file mode 100644
index 00000000000..5a1cf37ee65
--- /dev/null
+++ b/signal/src/energy.h
@@ -0,0 +1,38 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_ENERGY_H_
+#define SIGNAL_ENERGY_H_
+
+#include <stdint.h>
+
+#include "signal/src/complex.h"
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+// Calculates the power spectrum from a DFT output between start and end
+// indices.
+//
+// * `start_index` and `end_index` must be valid indices into `input`
+// * `output` must be the same size as `input`. Only the values at indices
+//   `start_index` (inclusive) up to `end_index` (exclusive) are written and
+//   should be considered valid.
+void SpectrumToEnergy(const Complex<int16_t>* input, int start_index,
+                      int end_index, uint32_t* output);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_ENERGY_H_
diff --git a/signal/src/fft_auto_scale.cc b/signal/src/fft_auto_scale.cc
new file mode 100644
index 00000000000..7e783978183
--- /dev/null
+++ b/signal/src/fft_auto_scale.cc
@@ -0,0 +1,42 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/fft_auto_scale.h"
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "signal/src/max_abs.h"
+#include "signal/src/msb.h"
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflite {
+namespace tflm_signal {
+
+int FftAutoScale(const int16_t* input, int size, int16_t* output) {
+  const int16_t max = MaxAbs16(input, size);
+  int scale_bits = (sizeof(int16_t) * 8) - MostSignificantBit32(max) - 1;
+  if (scale_bits <= 0) {
+    scale_bits = 0;
+  }
+  for (int i = 0; i < size; i++) {
+    // (input[i] << scale_bits) is undefined if input[i] is negative.
+    // Multiply explicitly to make the code portable.
+    output[i] = input[i] * (1 << scale_bits);
+  }
+  return scale_bits;
+}
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/fft_auto_scale.h b/signal/src/fft_auto_scale.h
new file mode 100644
index 00000000000..c566a0e9de0
--- /dev/null
+++ b/signal/src/fft_auto_scale.h
@@ -0,0 +1,35 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_FFT_AUTO_SCALE_H_
+#define SIGNAL_SRC_FFT_AUTO_SCALE_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflite {
+namespace tflm_signal {
+
+// Auto-scales `input` and writes the result to `output`.
+// Elements in `input` are left-shifted to maximize the amplitude without
+// clipping.
+// * Both `input` and `output` must be of size `size`.
+// Returns the number of bits by which the input was left-shifted.
+int FftAutoScale(const int16_t* input, int size, int16_t* output);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_FFT_AUTO_SCALE_H_
diff --git a/signal/src/filter_bank.cc b/signal/src/filter_bank.cc
new file mode 100644
index 00000000000..8517e4530ea
--- /dev/null
+++ b/signal/src/filter_bank.cc
@@ -0,0 +1,53 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/filter_bank.h"
+
+namespace tflite {
+namespace tflm_signal {
+
+void FilterbankAccumulateChannels(const FilterbankConfig* config,
+                                  const uint32_t* input, uint64_t* output) {
+  // With a log mel filterbank, the energy at each frequency gets added to
+  // two adjacent filterbank filters/channels.
+  // For the first filterbank channel, its energy is first multiplied by
+  // some weight 'w', then gets accumulated.
+  // For the subsequent filterbank channel, its energy is first multiplied by
+  // 1 - 'w' (called the unweight here), then gets accumulated.
+  // For this reason, we need to calculate (config->num_channels + 1) outputs,
+  // where element 0 is only used as scratch storage for the unweights of
+  // element 1 (channel 0). The caller should discard element 0.
+  // Writing the code like this doesn't save multiplications, but it lends
+  // itself better to optimization, because input[freq_start + j] only needs
+  // to be loaded once.
+  uint64_t weight_accumulator = 0;
+  uint64_t unweight_accumulator = 0;
+  for (int i = 0; i < config->num_channels + 1; i++) {
+    const int16_t freq_start = config->channel_frequency_starts[i];
+    const int16_t weight_start = config->channel_weight_starts[i];
+    for (int j = 0; j < config->channel_widths[i]; ++j) {
+      weight_accumulator += config->weights[weight_start + j] *
+                            static_cast<uint64_t>(input[freq_start + j]);
+      unweight_accumulator += config->unweights[weight_start + j] *
+                              static_cast<uint64_t>(input[freq_start + j]);
+    }
+    output[i] = weight_accumulator;
+    weight_accumulator = unweight_accumulator;
+    unweight_accumulator = 0;
+  }
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
\ No newline at end of file
diff --git a/signal/src/filter_bank.h b/signal/src/filter_bank.h
new file mode 100644
index 00000000000..95b2168a7ca
--- /dev/null
+++ b/signal/src/filter_bank.h
@@ -0,0 +1,69 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_FILTER_BANK_H_
+#define SIGNAL_SRC_FILTER_BANK_H_
+
+#include <stdint.h>
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+struct FilterbankConfig {
+  // Number of filterbank channels
+  int32_t num_channels;
+
+  // Each of the following three arrays is of size (num_channels + 1).
+  // An extra channel is needed for scratch. 
See implementation of
+  // FilterbankAccumulateChannels() for more details.
+
+  // For each channel, the index in the input (spectrum) where its band starts
+  const int16_t* channel_frequency_starts;
+  // For each channel, the index in the weights/unweights arrays where
+  // its filter weights start
+  const int16_t* channel_weight_starts;
+  // For each channel, the number of bins in the input (spectrum) that span
+  // its band
+  const int16_t* channel_widths;
+
+  // The weights array holds the triangular filter weights of all the filters
+  // in the bank. The output of each filter in the bank is calculated by
+  // multiplying the elements in the input spectrum that are in its band
+  // (see above: channel_frequency_starts, channel_widths) by the filter
+  // weights, then accumulating. Each element in the unweights array holds
+  // one minus the corresponding element in the weights array, and is used to
+  // make this operation more efficient. For more details, see the
+  // documentation in FilterbankAccumulateChannels().
+  const int16_t* weights;
+  const int16_t* unweights;
+  int32_t output_scale;
+
+  int32_t input_correction_bits;
+};
+
+// Accumulate the energy spectrum bins in `input` into filter bank channels
+// contained in `output`.
+// * `input` - Spectral energy array
+// * `output` - of size `config.num_channels` + 1.
+//   Elements [1:num_channels] contain the filter bank channels.
+//   Element 0 is used as scratch and should be ignored.
+void FilterbankAccumulateChannels(const FilterbankConfig* config,
+                                  const uint32_t* input, uint64_t* output);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_FILTER_BANK_H_
diff --git a/signal/src/filter_bank_log.cc b/signal/src/filter_bank_log.cc
new file mode 100644
index 00000000000..c670a7c94e1
--- /dev/null
+++ b/signal/src/filter_bank_log.cc
@@ -0,0 +1,40 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/filter_bank_log.h"
+
+#include "signal/src/log.h"
+
+namespace tflite {
+namespace tflm_signal {
+
+void FilterbankLog(const uint32_t* input, int num_channels,
+                   int32_t output_scale, uint32_t correction_bits,
+                   int16_t* output) {
+  for (int i = 0; i < num_channels; ++i) {
+    const uint32_t scaled = input[i] << correction_bits;
+    if (scaled > 1) {
+      const uint32_t log_value = Log32(scaled, output_scale);
+      output[i] = ((log_value < static_cast<uint32_t>(INT16_MAX))
+                       ? log_value
+                       : static_cast<uint32_t>(INT16_MAX));
+    } else {
+      output[i] = 0;
+    }
+  }
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/filter_bank_log.h b/signal/src/filter_bank_log.h
new file mode 100644
index 00000000000..e8514c7348d
--- /dev/null
+++ b/signal/src/filter_bank_log.h
@@ -0,0 +1,38 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_FILTER_BANK_LOG_H_
+#define SIGNAL_SRC_FILTER_BANK_LOG_H_
+
+#include <stdint.h>
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+// Applies natural log to each element in the array `input` of size
+// `num_channels`, with a pre-shift and post-scaling.
+// The operation is roughly equivalent to:
+// `output` = min(Log(`input` << `correction_bits`) * `output_scale`,
+//                INT16_MAX)
+// where, if (`input` << `correction_bits`) is 1 or 0, the output element
+// is 0.
+void FilterbankLog(const uint32_t* input, int num_channels,
+                   int32_t output_scale, uint32_t correction_bits,
+                   int16_t* output);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_FILTER_BANK_LOG_H_
diff --git a/signal/src/filter_bank_spectral_subtraction.cc b/signal/src/filter_bank_spectral_subtraction.cc
new file mode 100644
index 00000000000..bbad86b9b40
--- /dev/null
+++ b/signal/src/filter_bank_spectral_subtraction.cc
@@ -0,0 +1,64 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/filter_bank_spectral_subtraction.h"
+
+namespace tflite {
+namespace tflm_signal {
+
+void FilterbankSpectralSubtraction(const SpectralSubtractionConfig* config,
+                                   const uint32_t* input, uint32_t* output,
+                                   uint32_t* noise_estimate) {
+  const bool data_clamping = config->clamping;
+  const int smoothing_bits = config->smoothing_bits;
+  const int num_channels = config->num_channels;
+
+  for (int i = 0; i < num_channels; ++i) {
+    uint32_t smoothing;
+    uint32_t one_minus_smoothing;
+    if ((i & 1) == 0) {
+      smoothing = config->smoothing;
+      one_minus_smoothing = config->one_minus_smoothing;
+    } else {  // Use alternate smoothing coefficient on odd-index channels.
+      smoothing = config->alternate_smoothing;
+      one_minus_smoothing = config->alternate_one_minus_smoothing;
+    }
+
+    // Scale up signal[i] for smoothing filter computation.
+    const uint32_t signal_scaled_up = input[i] << smoothing_bits;
+    noise_estimate[i] =
+        ((static_cast<uint64_t>(signal_scaled_up) * smoothing) +
+         (static_cast<uint64_t>(noise_estimate[i]) * one_minus_smoothing)) >>
+        config->spectral_subtraction_bits;
+
+    uint32_t estimate_scaled_up = noise_estimate[i];
+    // Make sure that we can't get a negative value for (signal - estimate).
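+    // In other words, clamp the estimate to the scaled-up signal so that the
+    // unsigned subtraction below can never underflow.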
+    if (estimate_scaled_up > signal_scaled_up) {
+      estimate_scaled_up = signal_scaled_up;
+      if (data_clamping) {
+        noise_estimate[i] = estimate_scaled_up;
+      }
+    }
+    const uint32_t floor =
+        (static_cast<uint64_t>(input[i]) * config->min_signal_remaining) >>
+        config->spectral_subtraction_bits;
+    const uint32_t subtracted =
+        (signal_scaled_up - estimate_scaled_up) >> smoothing_bits;
+    output[i] = subtracted > floor ? subtracted : floor;
+  }
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/filter_bank_spectral_subtraction.h b/signal/src/filter_bank_spectral_subtraction.h
new file mode 100644
index 00000000000..e862d773a14
--- /dev/null
+++ b/signal/src/filter_bank_spectral_subtraction.h
@@ -0,0 +1,73 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_FILTER_BANK_SPECTRAL_SUBTRACTION_H_
+#define SIGNAL_SRC_FILTER_BANK_SPECTRAL_SUBTRACTION_H_
+
+#include <stdint.h>
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+struct SpectralSubtractionConfig {
+  // Number of filterbank channels in input and output
+  int32_t num_channels;
+  // The constant used for the lowpass filter for finding the noise.
+  // Higher values correspond to more aggressively adapting estimates
+  // of the noise.
+  // Scale is 1 << spectral_subtraction_bits
+  uint32_t smoothing;
+  // One minus the smoothing constant for the low pass filter.
+  // Scale is 1 << spectral_subtraction_bits
+  uint32_t one_minus_smoothing;
+  // The maximum cap to subtract away from the signal (i.e., if this is
+  // 0.2, then the result of spectral subtraction will not go below
+  // 0.2 * signal).
+  // Scale is 1 << spectral_subtraction_bits
+  uint32_t min_signal_remaining;
+  // If positive, specifies the filter coefficient for odd-index
+  // channels, while 'smoothing' is used as the coefficient for
+  // even-index channels. Otherwise, the same filter coefficient is
+  // used on all channels.
+  // Scale is 1 << spectral_subtraction_bits
+  uint32_t alternate_smoothing;
+  // Alternate one-minus-smoothing constant for the low pass filter.
+  // Scale is 1 << spectral_subtraction_bits
+  uint32_t alternate_one_minus_smoothing;
+  // Extra fractional bits for the noise_estimate smoothing filter.
+  uint32_t smoothing_bits;
+  // Scaling bits for some members of this struct
+  uint32_t spectral_subtraction_bits;
+  // If true, when the filterbank level drops below the output,
+  // the noise estimate will be forced down to the new noise level.
+  // If false, the noise estimate will remain above the current
+  // filterbank output (but the subtraction will still keep the
+  // output non-negative).
+  bool clamping;
+};
+
+// Applies spectral subtraction to each element in `input`, then writes the
+// result to `output` and `noise_estimate`. `input`, `output` and
+// `noise_estimate` must all be of size `config.num_channels`. 
`config` holds the +// parameters of the spectral subtraction algorithm. +void FilterbankSpectralSubtraction(const SpectralSubtractionConfig* config, + const uint32_t* input, uint32_t* output, + uint32_t* noise_estimate); + +} // namespace tflm_signal +} // namespace tflite + +#endif // SIGNAL_SRC_FILTER_BANK_SPECTRAL_SUBTRACTION_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/main.cc b/signal/src/filter_bank_square_root.cc similarity index 61% rename from tensorflow/lite/micro/examples/micro_speech/main.cc rename to signal/src/filter_bank_square_root.cc index f35c4726a27..25e8b23ca90 100644 --- a/tensorflow/lite/micro/examples/micro_speech/main.cc +++ b/signal/src/filter_bank_square_root.cc @@ -13,15 +13,19 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -#include "tensorflow/lite/micro/examples/micro_speech/main_functions.h" - -// This is the default main used on systems that have the standard C entry -// point. Other devices (for example FreeRTOS or ESP32) that have different -// requirements for entry code (like an app_main function) should specialize -// this main.cc file in a target-specific subfolder. -int main(int argc, char* argv[]) { - setup(); - while (true) { - loop(); +#include "signal/src/filter_bank_square_root.h" + +#include "signal/src/square_root.h" + +namespace tflite { +namespace tflm_signal { + +void FilterbankSqrt(const uint64_t* input, int num_channels, + int scale_down_bits, uint32_t* output) { + for (int i = 0; i < num_channels; ++i) { + output[i] = Sqrt64(input[i]) >> scale_down_bits; } } + +} // namespace tflm_signal +} // namespace tflite diff --git a/signal/src/filter_bank_square_root.h b/signal/src/filter_bank_square_root.h new file mode 100644 index 00000000000..7d484b9f0c9 --- /dev/null +++ b/signal/src/filter_bank_square_root.h @@ -0,0 +1,34 @@ +/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef SIGNAL_SRC_FILTER_BANK_SQUARE_ROOT_H_ +#define SIGNAL_SRC_FILTER_BANK_SQUARE_ROOT_H_ + +#include + +namespace tflite { +namespace tflm_signal { +// TODO(b/286250473): remove namespace once de-duped libraries above + +// Apply square root to each element in `input`, then shift right by +// `scale_down_bits` before writing the result to `output`, +// `input` and `output` must both be of size `num_channels` +void FilterbankSqrt(const uint64_t* input, int num_channels, + int scale_down_bits, uint32_t* output); + +} // namespace tflm_signal +} // namespace tflite + +#endif // SIGNAL_SRC_FILTER_BANK_SQUARE_ROOT_H_ diff --git a/signal/src/irfft.h b/signal/src/irfft.h new file mode 100644 index 00000000000..c2b54d789e4 --- /dev/null +++ b/signal/src/irfft.h @@ -0,0 +1,84 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
diff --git a/signal/src/irfft.h b/signal/src/irfft.h
new file mode 100644
index 00000000000..c2b54d789e4
--- /dev/null
+++ b/signal/src/irfft.h
@@ -0,0 +1,84 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_IRFFT_H_
+#define SIGNAL_SRC_IRFFT_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "signal/src/complex.h"
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflite {
+namespace tflm_signal {
+
+// IRFFT (Inverse Real Fast Fourier Transform)
+// IFFT for real valued time domain outputs.
+
+// 16-bit Integer input/output
+
+// Returns the size of the memory that an IRFFT of `fft_length` needs
+size_t IrfftInt16GetNeededMemory(int32_t fft_length);
+
+// Initialize the state of an IRFFT of `fft_length`
+// `state` points to an opaque state of size `state_size`, which
+// must be greater than or equal to the value returned by
+// IrfftInt16GetNeededMemory(fft_length). Fails if it isn't.
+void* IrfftInt16Init(int32_t fft_length, void* state, size_t state_size);
+
+// Applies IRFFT to `input` and writes the result to `output`
+// * `input` must hold `fft_length` / 2 + 1 complex elements
+// * `output` must be of size `fft_length` elements
+void IrfftInt16Apply(void* state, const Complex<int16_t>* input,
+                     int16_t* output);
+
+// 32-bit Integer input/output
+
+// Returns the size of the memory that an IRFFT of `fft_length` needs
+size_t IrfftInt32GetNeededMemory(int32_t fft_length);
+
+// Initialize the state of an IRFFT of `fft_length`
+// `state` points to an opaque state of size `state_size`, which
+// must be greater than or equal to the value returned by
+// IrfftInt32GetNeededMemory(fft_length). Fails if it isn't.
+void* IrfftInt32Init(int32_t fft_length, void* state, size_t state_size);
+
+// Applies IRFFT to `input` and writes the result to `output`
+// * `input` must hold `fft_length` / 2 + 1 complex elements
+// * `output` must be of size `fft_length` elements
+void IrfftInt32Apply(void* state, const Complex<int32_t>* input,
+                     int32_t* output);
+
+// Floating point input/output
+
+// Returns the size of the memory that an IRFFT of `fft_length` needs
+size_t IrfftFloatGetNeededMemory(int32_t fft_length);
+
+// Initialize the state of an IRFFT of `fft_length`
+// `state` points to an opaque state of size `state_size`, which
+// must be greater than or equal to the value returned by
+// IrfftFloatGetNeededMemory(fft_length). Fails if it isn't.
+void* IrfftFloatInit(int32_t fft_length, void* state, size_t state_size);
+
+// Applies IRFFT to `input` and writes the result to `output`
+// * `input` must hold `fft_length` / 2 + 1 complex elements
+// * `output` must be of size `fft_length` elements
+void IrfftFloatApply(void* state, const Complex<float>* input, float* output);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_IRFFT_H_
\ No newline at end of file
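A minimal usage sketch of the float IRFFT API above (editor's addition, not part of the patch). The heap allocation is illustrative; the state can live in any buffer of the required size, and `Complex` is assumed to live in tflite::tflm_signal as in this patch's headers:

#include <stdlib.h>

#include "signal/src/complex.h"
#include "signal/src/irfft.h"

void IrfftFloatExample(const tflite::tflm_signal::Complex<float>* spectrum,
                       float* time_domain) {
  const int32_t kFftLength = 512;  // hypothetical frame size
  const size_t state_size =
      tflite::tflm_signal::IrfftFloatGetNeededMemory(kFftLength);
  void* state = malloc(state_size);
  tflite::tflm_signal::IrfftFloatInit(kFftLength, state, state_size);
  // `spectrum` holds kFftLength / 2 + 1 complex bins;
  // `time_domain` receives kFftLength real samples.
  tflite::tflm_signal::IrfftFloatApply(state, spectrum, time_domain);
  free(state);
}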
diff --git a/signal/src/irfft_float.cc b/signal/src/irfft_float.cc
new file mode 100644
index 00000000000..53ce0fe4eb0
--- /dev/null
+++ b/signal/src/irfft_float.cc
@@ -0,0 +1,64 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "signal/src/complex.h"
+#include "signal/src/irfft.h"
+#include "signal/src/kiss_fft_wrappers/kiss_fft_float.h"
+
+// TODO(b/286250473): remove namespace once de-duped libraries
namespace tflite {
+namespace tflm_signal {
+
+struct IrfftFloatState {
+  int32_t fft_length;
+  kiss_fft_float::kiss_fftr_cfg cfg;
+};
+
+size_t IrfftFloatGetNeededMemory(int32_t fft_length) {
+  size_t cfg_size = 0;
+  kiss_fft_float::kiss_fftr_alloc(fft_length, 1, nullptr, &cfg_size);
+  return sizeof(IrfftFloatState) + cfg_size;
+}
+
+void* IrfftFloatInit(int32_t fft_length, void* state, size_t state_size) {
+  IrfftFloatState* irfft_float_state = static_cast<IrfftFloatState*>(state);
+  irfft_float_state->cfg =
+      reinterpret_cast<kiss_fft_float::kiss_fftr_cfg>(irfft_float_state + 1);
+  irfft_float_state->fft_length = fft_length;
+  size_t cfg_size = state_size - sizeof(IrfftFloatState);
+  return kiss_fft_float::kiss_fftr_alloc(fft_length, 1, irfft_float_state->cfg,
+                                         &cfg_size);
+}
+
+void IrfftFloatApply(void* state, const Complex<float>* input, float* output) {
+  IrfftFloatState* irfft_float_state = static_cast<IrfftFloatState*>(state);
+  kiss_fft_float::kiss_fftri(
+      static_cast<kiss_fft_float::kiss_fftr_cfg>(irfft_float_state->cfg),
+      reinterpret_cast<const kiss_fft_float::kiss_fft_cpx*>(input),
+      reinterpret_cast<kiss_fft_float::kiss_fft_scalar*>(output));
+  // KissFFT scales the IRFFT output by the FFT length.
+  // KissFFT's nfft is the complex FFT length, which is half the real FFT's
+  // length. Compensate.
+  const int fft_length = irfft_float_state->fft_length;
+  for (int i = 0; i < fft_length; i++) {
+    output[i] /= fft_length;
+  }
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/irfft_int16.cc b/signal/src/irfft_int16.cc
new file mode 100644
index 00000000000..f92b3c5ff46
--- /dev/null
+++ b/signal/src/irfft_int16.cc
@@ -0,0 +1,46 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "signal/src/complex.h"
+#include "signal/src/irfft.h"
+#include "signal/src/kiss_fft_wrappers/kiss_fft_int16.h"
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflite {
+namespace tflm_signal {
+
+size_t IrfftInt16GetNeededMemory(int32_t fft_length) {
+  size_t state_size = 0;
+  kiss_fft_fixed16::kiss_fftr_alloc(fft_length, 1, nullptr, &state_size);
+  return state_size;
+}
+
+void* IrfftInt16Init(int32_t fft_length, void* state, size_t state_size) {
+  return kiss_fft_fixed16::kiss_fftr_alloc(fft_length, 1, state, &state_size);
+}
+
+void IrfftInt16Apply(void* state, const Complex<int16_t>* input,
+                     int16_t* output) {
+  kiss_fft_fixed16::kiss_fftri(
+      static_cast<kiss_fft_fixed16::kiss_fftr_cfg>(state),
+      reinterpret_cast<const kiss_fft_fixed16::kiss_fft_cpx*>(input),
+      reinterpret_cast<kiss_fft_fixed16::kiss_fft_scalar*>(output));
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/irfft_int32.cc b/signal/src/irfft_int32.cc
new file mode 100644
index 00000000000..d4802a6358f
--- /dev/null
+++ b/signal/src/irfft_int32.cc
@@ -0,0 +1,46 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "signal/src/complex.h"
+#include "signal/src/irfft.h"
+#include "signal/src/kiss_fft_wrappers/kiss_fft_int32.h"
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflite {
+namespace tflm_signal {
+
+size_t IrfftInt32GetNeededMemory(int32_t fft_length) {
+  size_t state_size = 0;
+  kiss_fft_fixed32::kiss_fftr_alloc(fft_length, 1, nullptr, &state_size);
+  return state_size;
+}
+
+void* IrfftInt32Init(int32_t fft_length, void* state, size_t state_size) {
+  return kiss_fft_fixed32::kiss_fftr_alloc(fft_length, 1, state, &state_size);
+}
+
+void IrfftInt32Apply(void* state, const Complex<int32_t>* input,
+                     int32_t* output) {
+  kiss_fft_fixed32::kiss_fftri(
+      static_cast<kiss_fft_fixed32::kiss_fftr_cfg>(state),
+      reinterpret_cast<const kiss_fft_fixed32::kiss_fft_cpx*>(input),
+      reinterpret_cast<kiss_fft_fixed32::kiss_fft_scalar*>(output));
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/log.cc b/signal/src/log.cc
new file mode 100644
index 00000000000..86f8e72b59d
--- /dev/null
+++ b/signal/src/log.cc
@@ -0,0 +1,84 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/log.h"
+
+#include "signal/src/msb.h"
+
+namespace tflite {
+namespace tflm_signal {
+namespace {
+
+const uint16_t kLogLut[] = {
+    0,    224,  442,  654,  861,  1063, 1259, 1450, 1636, 1817, 1992, 2163,
+    2329, 2490, 2646, 2797, 2944, 3087, 3224, 3358, 3487, 3611, 3732, 3848,
+    3960, 4068, 4172, 4272, 4368, 4460, 4549, 4633, 4714, 4791, 4864, 4934,
+    5001, 5063, 5123, 5178, 5231, 5280, 5326, 5368, 5408, 5444, 5477, 5507,
+    5533, 5557, 5578, 5595, 5610, 5622, 5631, 5637, 5640, 5641, 5638, 5633,
+    5626, 5615, 5602, 5586, 5568, 5547, 5524, 5498, 5470, 5439, 5406, 5370,
+    5332, 5291, 5249, 5203, 5156, 5106, 5054, 5000, 4944, 4885, 4825, 4762,
+    4697, 4630, 4561, 4490, 4416, 4341, 4264, 4184, 4103, 4020, 3935, 3848,
+    3759, 3668, 3575, 3481, 3384, 3286, 3186, 3084, 2981, 2875, 2768, 2659,
+    2549, 2437, 2323, 2207, 2090, 1971, 1851, 1729, 1605, 1480, 1353, 1224,
+    1094, 963,  830,  695,  559,  421,  282,  142,  0,    0};
+
+// Number of segments in the log lookup table. The table will be kLogSegments+1
+// in length (with some padding).
+// constexpr int kLogSegments = 128;
+constexpr int kLogSegmentsLog2 = 7;
+
+// Scale used by lookup table.
+constexpr int kLogScale = 65536;
+constexpr int kLogScaleLog2 = 16;
+constexpr int kLogCoeff = 45426;
+
+uint32_t Log2FractionPart32(uint32_t x, uint32_t log2x) {
+  // Part 1
+  int32_t frac = x - (1LL << log2x);
+  if (log2x < kLogScaleLog2) {
+    frac <<= kLogScaleLog2 - log2x;
+  } else {
+    frac >>= log2x - kLogScaleLog2;
+  }
+  // Part 2
+  const uint32_t base_seg = frac >> (kLogScaleLog2 - kLogSegmentsLog2);
+  const uint32_t seg_unit = (UINT32_C(1) << kLogScaleLog2) >> kLogSegmentsLog2;
+
+  // ASSERT(base_seg < kLogSegments);
+  const int32_t c0 = kLogLut[base_seg];
+  const int32_t c1 = kLogLut[base_seg + 1];
+  const int32_t seg_base = seg_unit * base_seg;
+  const int32_t rel_pos = ((c1 - c0) * (frac - seg_base)) >> kLogScaleLog2;
+  return frac + c0 + rel_pos;
+}
+
+}  // namespace
+
+// Calculate integer logarithm, 32 Bit version
+uint32_t Log32(uint32_t x, uint32_t out_scale) {
+  // ASSERT(x != 0);
+  const uint32_t integer = MostSignificantBit32(x) - 1;
+  const uint32_t fraction = Log2FractionPart32(x, integer);
+  const uint32_t log2 = (integer << kLogScaleLog2) + fraction;
+  const uint32_t round = kLogScale / 2;
+  const uint32_t loge =
+      (static_cast<uint64_t>(kLogCoeff) * log2 + round) >> kLogScaleLog2;
+  // Finally scale to our output scale
+  const uint32_t loge_scaled = (out_scale * loge + round) >> kLogScaleLog2;
+  return loge_scaled;
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
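A note on the constants above: kLogCoeff = 45426 is ln(2) in Q16, since round(0.693147 * 65536) = 45426, so scaling the Q16 base-2 logarithm by it yields a natural logarithm. A quick sanity check (editor's sketch):

#include <math.h>
#include <stdint.h>
#include <stdio.h>

#include "signal/src/log.h"

int main() {
  // ln(1000) = 6.9078; with out_scale = 1 << 16 we expect ~452,674.
  const uint32_t scaled = tflite::tflm_signal::Log32(1000, 1u << 16);
  printf("Log32: %u, float reference: %.0f\n", (unsigned)scaled,
         log(1000.0) * 65536.0);
  return 0;
}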
diff --git a/tensorflow/lite/micro/examples/micro_speech/command_responder_test.cc b/signal/src/log.h
similarity index 59%
rename from tensorflow/lite/micro/examples/micro_speech/command_responder_test.cc
rename to signal/src/log.h
index e02f7ae2f69..13045f46107 100644
--- a/tensorflow/lite/micro/examples/micro_speech/command_responder_test.cc
+++ b/signal/src/log.h
@@ -13,17 +13,18 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
-#include "tensorflow/lite/micro/examples/micro_speech/command_responder.h"
+#ifndef SIGNAL_SRC_LOG_H_
+#define SIGNAL_SRC_LOG_H_
 
-#include "tensorflow/lite/micro/testing/micro_test.h"
+#include <stdint.h>
 
-TF_LITE_MICRO_TESTS_BEGIN
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
 
-TF_LITE_MICRO_TEST(TestCallability) {
-  // This will have external side-effects (like printing to the debug console
-  // or lighting an LED) that are hard to observe, so the most we can do is
-  // make sure the call doesn't crash.
-  RespondToCommand(0, "foo", 0, true);
-}
+// Natural logarithm of an integer. The result is multiplied by out_scale
+uint32_t Log32(uint32_t x, uint32_t out_scale);
 
-TF_LITE_MICRO_TESTS_END
+}  // namespace tflm_signal
+}  // namespace tflite
+#endif  // SIGNAL_SRC_LOG_H_
diff --git a/signal/src/max_abs.cc b/signal/src/max_abs.cc
new file mode 100644
index 00000000000..0ad117a5006
--- /dev/null
+++ b/signal/src/max_abs.cc
@@ -0,0 +1,84 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/max_abs.h"
+
+#if defined(XTENSA)
+#include <xtensa/config/core-isa.h>
+#include <xtensa/tie/xt_misc.h>
+#endif
+#if XCHAL_HAVE_HIFI3
+#include <xtensa/tie/xt_hifi3.h>
+static inline ae_p24x2s MaxAbs16Single(ae_p24x2s max, ae_p24x2s current) {
+  return AE_MAXABSSP24S(max, current);
+}
+#elif XCHAL_HAVE_HIFI_MINI || XCHAL_HAVE_HIFI2 || XCHAL_HAVE_HIFI_EP
+#include <xtensa/tie/xt_hifi2.h>
+static inline ae_p24x2s MaxAbs16Single(ae_p24x2s max, ae_p24x2s current) {
+  current = AE_ABSSP24S(current);
+  return AE_MAXP24S(max, current);
+}
+#endif
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflite {
+namespace tflm_signal {
+
+#if XCHAL_HAVE_HIFI_MINI || XCHAL_HAVE_HIFI2 || XCHAL_HAVE_HIFI_EP || \
+    XCHAL_HAVE_HIFI3
+int16_t XtensaMaxAbs16(const int16_t* input, int size) {
+  int i;
+  ae_p24x2s current_24x2;
+  // AE_LP16X2F_IU() effectively pre-increments the address in input_16x2 by 4
+  // bytes before loading, so we need to initialize it accordingly.
+  const ae_p16x2s* input_16x2 = (const ae_p16x2s*)(input - 2);
+  ae_p24x2s max = AE_ZEROP48();
+  const int num_iterations = size / 2;
+  for (i = 0; i < num_iterations; i++) {
+    // Advancing the pointer by 2 x 16-bits.
+    AE_LP16X2F_IU(current_24x2, input_16x2, 4);
+    max = MaxAbs16Single(max, current_24x2);
+  }
+  if (size & 1) {  // size is odd
+    // Advancing the pointer by 2 x 16-bits.
+    current_24x2 = AE_LP16F_I((ae_p16s*)input_16x2, 4);
+    max = MaxAbs16Single(max, current_24x2);
+  }
+  const int max_L = AE_TRUNCA16P24S_L(max);
+  const int max_H = AE_TRUNCA16P24S_H(max);
+  return (max_L >= max_H) ? max_L : max_H;
+}
+#endif
+
+int16_t MaxAbs16(const int16_t* input, int size) {
+#if XCHAL_HAVE_HIFI_MINI || XCHAL_HAVE_HIFI2 || XCHAL_HAVE_HIFI_EP || \
+    XCHAL_HAVE_HIFI3
+  return XtensaMaxAbs16(input, size);
+#else
+  int16_t max = 0;
+  for (int i = 0; i < size; i++) {
+    const int16_t value = input[i];
+    if (value > max) {
+      max = value;
+    } else if (-value > max) {
+      max = -value;
+    }
+  }
+  return max;
+#endif
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/max_abs.h b/signal/src/max_abs.h
new file mode 100644
index 00000000000..538f796588b
--- /dev/null
+++ b/signal/src/max_abs.h
@@ -0,0 +1,31 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_SRC_MAX_ABS_H_
+#define SIGNAL_SRC_MAX_ABS_H_
+
+#include <stdint.h>
+
+// TODO(b/286250473): remove namespace once de-duped libraries
+namespace tflite {
+namespace tflm_signal {
+
+// Returns the maximum absolute value of the `size` elements in `input`
+int16_t MaxAbs16(const int16_t* input, int size);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_MAX_ABS_H_
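A usage sketch of the portable path (editor's addition). One caveat visible in the code above: for an input containing INT16_MIN, `-value` narrows back to -32768 on assignment, so full-scale negative inputs deserve care:

#include <stdio.h>

#include "signal/src/max_abs.h"

int main() {
  const int16_t samples[] = {12, -340, 77, -1000, 5};
  printf("%d\n", tflite::tflm_signal::MaxAbs16(samples, 5));  // 1000
  return 0;
}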
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h b/signal/src/msb.h
similarity index 59%
rename from tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h
rename to signal/src/msb.h
index 8c1b6d5b57b..2bdcb47b843 100644
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h
+++ b/signal/src/msb.h
@@ -13,11 +13,20 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_
+#ifndef SIGNAL_SRC_MSB_H_
+#define SIGNAL_SRC_MSB_H_
 
-extern const int g_no_micro_f9643d42_nohash_4_width;
-extern const int g_no_micro_f9643d42_nohash_4_height;
-extern const signed char g_no_micro_f9643d42_nohash_4_data[];
+#include <stdint.h>
 
-#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_MICRO_FEATURES_DATA_H_
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+// Index of the most significant bit
+uint32_t MostSignificantBit32(uint32_t x);
+uint32_t MostSignificantBit64(uint64_t x);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_MSB_H_
diff --git a/signal/src/msb_32.cc b/signal/src/msb_32.cc
new file mode 100644
index 00000000000..67f26642bef
--- /dev/null
+++ b/signal/src/msb_32.cc
@@ -0,0 +1,48 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/msb.h"
+
+#if defined(XTENSA)
+#include <xtensa/tie/xt_misc.h>
+#endif
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+// TODO(b/291167350): can allow __builtin_clz to be used in more cases here
+uint32_t MostSignificantBit32(uint32_t x) {
+#if defined(XTENSA)
+  // XT_NSAU returns the number of left shifts needed to put the MSB in the
+  // leftmost position. Returns 32 if the argument is 0.
+  return 32 - XT_NSAU(x);
+#elif defined(__GNUC__)
+  if (x) {
+    return 32 - __builtin_clz(x);
+  }
+  return 32;
+#else
+  uint32_t temp = 0;
+  while (x) {
+    x = x >> 1;
+    ++temp;
+  }
+  return temp;
+#endif
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/msb_64.cc b/signal/src/msb_64.cc
new file mode 100644
index 00000000000..7416438cab4
--- /dev/null
+++ b/signal/src/msb_64.cc
@@ -0,0 +1,52 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/msb.h"
+
+#if defined(XTENSA)
+#include <xtensa/tie/xt_misc.h>
+#endif
+
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+uint32_t MostSignificantBit64(uint64_t x) {
+#if defined(XTENSA)
+  // XT_NSAU returns the number of left shifts needed to put the MSB in the
+  // leftmost position. Returns 32 if the argument is 0.
+  uint32_t upper = 64 - XT_NSAU((uint32_t)(x >> 32));
+  if (upper != 32) {
+    return upper;
+  }
+  // Only if the upper bits are all clear do we want to look at the lower bits.
+  return 32 - XT_NSAU((uint32_t)x);
+#elif defined(__GNUC__)
+  if (x) {
+    return 64 - __builtin_clzll(x);
+  }
+  return 64;
+#else
+  uint32_t temp = 0;
+  while (x) {
+    x = x >> 1;
+    ++temp;
+  }
+  return temp;
+#endif
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
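Quick reference for the helpers above (editor's sketch): the result is the 1-based index of the highest set bit. Note that the __builtin_clz branch returns 32 (or 64) for x == 0 while the portable loop returns 0; callers in this patch (Log32, Sqrt32, WideDynamicFunction) never pass 0.

#include <assert.h>

#include "signal/src/msb.h"

int main() {
  assert(tflite::tflm_signal::MostSignificantBit32(1) == 1);
  assert(tflite::tflm_signal::MostSignificantBit32(8) == 4);
  assert(tflite::tflm_signal::MostSignificantBit32(0x80000000u) == 32);
  assert(tflite::tflm_signal::MostSignificantBit64(1ull << 40) == 41);
  return 0;
}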
diff --git a/signal/src/overlap_add.cc b/signal/src/overlap_add.cc
new file mode 100644
index 00000000000..fe6e6646ab8
--- /dev/null
+++ b/signal/src/overlap_add.cc
@@ -0,0 +1,51 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#include "signal/src/overlap_add.h"
+
+#include <stdint.h>
+#include <string.h>
+
+namespace tflm_signal {
+
+void OverlapAdd(const int16_t* input, int16_t* buffer, int input_size,
+                int16_t* output, int output_size) {
+  for (int i = 0; i < input_size; ++i) {
+    int32_t overlap_added_sample = input[i] + buffer[i];
+    if (overlap_added_sample < INT16_MIN) {
+      buffer[i] = INT16_MIN;
+    } else if (overlap_added_sample > INT16_MAX) {
+      buffer[i] = INT16_MAX;
+    } else {
+      buffer[i] = (int16_t)overlap_added_sample;
+    }
+  }
+  memcpy(output, buffer, output_size * sizeof(output[0]));
+  memmove(buffer, &buffer[output_size],
+          (input_size - output_size) * sizeof(buffer[0]));
+  memset(&buffer[input_size - output_size], 0,
+         output_size * sizeof(buffer[0]));
+}
+
+void OverlapAdd(const float* input, float* buffer, int input_size,
+                float* output, int output_size) {
+  for (int i = 0; i < input_size; ++i) {
+    buffer[i] += input[i];
+  }
+  memcpy(output, buffer, output_size * sizeof(output[0]));
+  memmove(buffer, &buffer[output_size],
+          (input_size - output_size) * sizeof(buffer[0]));
+  memset(&buffer[input_size - output_size], 0,
+         output_size * sizeof(buffer[0]));
+}
+
+}  // namespace tflm_signal
diff --git a/signal/src/overlap_add.h b/signal/src/overlap_add.h
new file mode 100644
index 00000000000..61898094c99
--- /dev/null
+++ b/signal/src/overlap_add.h
@@ -0,0 +1,46 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+#ifndef SIGNAL_SRC_OVERLAP_ADD_H_
+#define SIGNAL_SRC_OVERLAP_ADD_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+namespace tflm_signal {
+// Adds (with saturation) the contents of `input` to the contents of `buffer`,
+// both of size `input_size`, then copies the first `output_size` elements of
+// `buffer` to `output`, shifts the last `input_size`-`output_size` elements of
+// `buffer` to the beginning of `buffer` and fills the trailing `output_size`
+// samples in `buffer` with zeros.
+// input:  {input[0] ... input[input_size-1]}
+// buffer: {buffer[0] ... buffer[input_size-1]}
+// After invocation:
+// output: {saturate(input[0] + buffer[0]),
+//           ... ,
+//          saturate(input[output_size-1] + buffer[output_size-1])}
+// buffer: {saturate(input[output_size] + buffer[output_size]),
+//           ...
+//          saturate(input[input_size-output_size-1]
+//                   + buffer[input_size-output_size-1]),
+//          zeros(output_size)}
+void OverlapAdd(const int16_t* input, int16_t* buffer, int input_size,
+                int16_t* output, int output_size);
+
+// The same as the int16_t variant above, but without saturation
+void OverlapAdd(const float* input, float* buffer, int input_size,
+                float* output, int output_size);
+
+}  // namespace tflm_signal
+#endif  // SIGNAL_SRC_OVERLAP_ADD_H_
diff --git a/signal/src/pcan_argc_fixed.cc b/signal/src/pcan_argc_fixed.cc
new file mode 100644
index 00000000000..2700b28cd49
--- /dev/null
+++ b/signal/src/pcan_argc_fixed.cc
@@ -0,0 +1,75 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "pcan_argc_fixed.h"
+
+namespace tflite {
+namespace tflm_signal {
+
+int16_t WideDynamicFunction(const uint32_t x, const int16_t* lut) {
+  if (x <= 2) {
+    return lut[x];
+  }
+
+  const int16_t interval = MostSignificantBit32(x);
+  lut += 4 * interval - 6;
+
+  const int16_t frac =
+      ((interval < 11) ? (x << (11 - interval)) : (x >> (interval - 11))) &
+      0x3FF;
+
+  int32_t result = ((int32_t)lut[2] * frac) >> 5;
+  result += (int32_t)((uint32_t)lut[1] << 5);
+  result *= frac;
+  result = (result + (1 << 14)) >> 15;
+  result += lut[0];
+  return (int16_t)result;
+}
+
+// Evaluate the piecewise polynomial "shrink" function defined by
+// shrink(x) = x^2 / 4 for x < 2,
+// shrink(x) = x - 1 for x >= 2.
+// The input x has kPcanSnrBits fractional bits, and the output has
+// kPcanOutputBits fractional bits.
+uint32_t PcanShrink(const uint32_t x) {
+  TFLITE_DCHECK(kPcanSnrBits >= kPcanOutputBits);
+  if (x < (2 << kPcanSnrBits)) {
+    // Compute x^2 / 4.
+    return (x * x) >> (2 + 2 * kPcanSnrBits - kPcanOutputBits);
+  } else {
+    // Compute x - 1.
+    return (x >> (kPcanSnrBits - kPcanOutputBits)) - (1 << kPcanOutputBits);
+  }
+}
+
+void ApplyPcanAutoGainControlFixed(const int16_t* gain_lut, int32_t snr_shift,
+                                   const uint32_t* noise_estimate,
+                                   uint32_t* filterbank_output,
+                                   int num_channels) {
+  int i;
+  for (i = 0; i < num_channels; ++i) {
+    // The gain has gain_bits fractional bits, and filterbank_output[i] has
+    // -input_correction_bits fractional bits. The product is shifted so that
+    // the resulting snr has kPcanSnrBits fractional bits.
+    const uint32_t gain = WideDynamicFunction(noise_estimate[i], gain_lut);
+    const uint32_t snr = ((uint64_t)filterbank_output[i] * gain) >> snr_shift;
+    // Result has kPcanOutputBits fractional bits.
+    // NOTE: This assumes filterbank_output_scale = 1 << kPcanOutputBits.
+    filterbank_output[i] = PcanShrink(snr);
+  }
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
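A quick numeric check of the PcanShrink knee above, with kPcanSnrBits = 12 and kPcanOutputBits = 6 as defined in pcan_argc_fixed.h below (editor's sketch):

#include <stdint.h>
#include <stdio.h>

int main() {
  const int kSnrBits = 12, kOutBits = 6;
  // x = 1.0 in Q12 is below the 2.0 knee: shrink(1.0) = 1.0^2 / 4 = 0.25.
  uint32_t x = 1u << kSnrBits;
  uint32_t y = (x * x) >> (2 + 2 * kSnrBits - kOutBits);
  printf("%u\n", (unsigned)y);  // 16, i.e. 0.25 in Q6
  // x = 4.0 in Q12 is above the knee: shrink(4.0) = 4.0 - 1.0 = 3.0.
  x = 4u << kSnrBits;
  y = (x >> (kSnrBits - kOutBits)) - (1u << kOutBits);
  printf("%u\n", (unsigned)y);  // 192, i.e. 3.0 in Q6
  return 0;
}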
diff --git a/signal/src/pcan_argc_fixed.h b/signal/src/pcan_argc_fixed.h
new file mode 100644
index 00000000000..36eaf3d4bc7
--- /dev/null
+++ b/signal/src/pcan_argc_fixed.h
@@ -0,0 +1,41 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef SIGNAL_MICRO_KERNELS__SRC_PCAN_AGC_FIXED_H
+#define SIGNAL_MICRO_KERNELS__SRC_PCAN_AGC_FIXED_H
+#include <stdint.h>
+
+#include "msb.h"
+#include "tensorflow/lite/kernels/internal/compatibility.h"
+
+namespace tflite {
+namespace tflm_signal {
+
+#define kPcanSnrBits 12
+#define kPcanOutputBits 6
+
+int16_t WideDynamicFunction(const uint32_t x, const int16_t* lut);
+
+uint32_t PcanShrink(const uint32_t x);
+
+void ApplyPcanAutoGainControlFixed(const int16_t* gain_lut, int32_t snr_shift,
+                                   const uint32_t* noise_estimate,
+                                   uint32_t* filterbank_output,
+                                   int num_channels);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_MICRO_KERNELS__SRC_PCAN_AGC_FIXED_H
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc b/signal/src/square_root.h
similarity index 59%
rename from tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc
rename to signal/src/square_root.h
index 684f702fd4f..b32855c554d 100644
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc
+++ b/signal/src/square_root.h
@@ -13,13 +13,20 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
-// See the header for documentation on the meaning of this data.
+#ifndef SIGNAL_SRC_SQUARE_ROOT_H_
+#define SIGNAL_SRC_SQUARE_ROOT_H_
 
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h"
+#include <stdint.h>
 
-alignas(16) const int8_t
-    g_no_feature_data_slice[g_no_feature_data_slice_size] = {
-        89,  68, 96,  83, 111, 96, 115, 87, 99,  76, 105, 84, 105, 86,
-        113, 91, 108, 87, 110, 78, 80,  46, 22,  74, 88,  72, 103, 86,
-        80,  68, 48,  24, 68,  48, 55,  36, 108, 90, 90,  63,
-};
+namespace tflite {
+namespace tflm_signal {
+// TODO(b/286250473): remove namespace once de-duped libraries above
+
+// Square root
+uint16_t Sqrt32(uint32_t num);
+uint32_t Sqrt64(uint64_t num);
+
+}  // namespace tflm_signal
+}  // namespace tflite
+
+#endif  // SIGNAL_SRC_SQUARE_ROOT_H_
diff --git a/tensorflow/lite/micro/examples/micro_speech/command_responder.cc b/signal/src/square_root_32.cc
similarity index 50%
rename from tensorflow/lite/micro/examples/micro_speech/command_responder.cc
rename to signal/src/square_root_32.cc
index 2184478d3f6..3ca11ef6b2f 100644
--- a/tensorflow/lite/micro/examples/micro_speech/command_responder.cc
+++ b/signal/src/square_root_32.cc
@@ -13,16 +13,34 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
-#include "tensorflow/lite/micro/examples/micro_speech/command_responder.h"
+#include "signal/src/msb.h"
+#include "signal/src/square_root.h"
 
-#include "tensorflow/lite/micro/micro_log.h"
+namespace tflite {
+namespace tflm_signal {
 
-// The default implementation writes out the name of the recognized command
-// to the error console. Real applications will want to take some custom
-// action instead, and should implement their own versions of this function.
-void RespondToCommand(int32_t current_time, const char* found_command,
-                      uint8_t score, bool is_new_command) {
-  if (is_new_command) {
-    MicroPrintf("Heard %s (%d) @%dms", found_command, score, current_time);
+uint16_t Sqrt32(uint32_t num) {
+  if (num == 0) {
+    return 0;
+  }
+  uint32_t res = 0;
+  int max_bit_number = 32 - MostSignificantBit32(num);
+  max_bit_number |= 1;
+  uint32_t bit = 1u << (31 - max_bit_number);
+  int iterations = (31 - max_bit_number) / 2 + 1;
+  while (iterations--) {
+    if (num >= res + bit) {
+      num -= res + bit;
+      res = (res >> 1U) + bit;
+    } else {
+      res >>= 1U;
+    }
+    bit >>= 2U;
   }
+  // Do rounding - if we have the bits.
+  if (num > res && res != 0xFFFF) ++res;
+  return res;
 }
+
+}  // namespace tflm_signal
+}  // namespace tflite
diff --git a/signal/src/square_root_64.cc b/signal/src/square_root_64.cc
new file mode 100644
index 00000000000..54e8439440a
--- /dev/null
+++ b/signal/src/square_root_64.cc
@@ -0,0 +1,49 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/msb.h"
+#include "signal/src/square_root.h"
+
+namespace tflite {
+namespace tflm_signal {
+
+uint32_t Sqrt64(uint64_t num) {
+  // Take a shortcut and just use 32 bit operations if the upper word is all
+  // clear. This will cause a slight off-by-one issue for numbers close to
+  // 2^32, but it probably isn't going to matter (and gives us a big
+  // performance win).
+  if ((num >> 32) == 0) {
+    return Sqrt32(static_cast<uint32_t>(num));
+  }
+  uint64_t res = 0;
+  int max_bit_number = 64 - MostSignificantBit64(num);
+  max_bit_number |= 1;
+  uint64_t bit = UINT64_C(1) << (63 - max_bit_number);
+  int iterations = (63 - max_bit_number) / 2 + 1;
+  while (iterations--) {
+    if (num >= res + bit) {
+      num -= res + bit;
+      res = (res >> 1U) + bit;
+    } else {
+      res >>= 1U;
+    }
+    bit >>= 2U;
+  }
+  // Do rounding - if we have the bits.
+  if (num > res && res != 0xFFFFFFFFLL) ++res;
+  return res;
+}
+
+}  // namespace tflm_signal
+}  // namespace tflite
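The remainder-based rounding above rounds to nearest rather than truncating (editor's sketch): the loop leaves `res` at floor(sqrt(n)) and `num` at the remainder n - res^2, and the remainder exceeds `res` exactly when the true root's fraction exceeds one half.

#include <stdint.h>
#include <stdio.h>

#include "signal/src/square_root.h"

int main() {
  // sqrt(12) = 3.46 -> 3, sqrt(15) = 3.87 -> 4, sqrt(1e12) = 1000000.
  printf("%u %u %u\n", (unsigned)tflite::tflm_signal::Sqrt32(12),
         (unsigned)tflite::tflm_signal::Sqrt32(15),
         (unsigned)tflite::tflm_signal::Sqrt64(UINT64_C(1000000000000)));
  return 0;
}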
diff --git a/signal/tensorflow_core/kernels/BUILD b/signal/tensorflow_core/kernels/BUILD
index f15daaa11f9..33f5d08dbf4 100644
--- a/signal/tensorflow_core/kernels/BUILD
+++ b/signal/tensorflow_core/kernels/BUILD
@@ -5,15 +5,84 @@ package(
     licenses = ["notice"],
 )
 
+tflm_signal_kernel_library(
+    name = "delay_kernel",
+    srcs = ["delay_kernel.cc"],
+    deps = [
+        "//signal/src:circular_buffer",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "energy_kernel",
+    srcs = ["energy_kernel.cc"],
+    deps = [
+        "//signal/src:complex",
+        "//signal/src:energy",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
 tflm_signal_kernel_library(
     name = "fft_kernel",
     srcs = ["fft_kernels.cc"],
     deps = [
+        "//signal/src:fft_auto_scale",
+        "//signal/src:irfft",
         "//signal/src:rfft",
         "@tensorflow_cc_deps//:cc_library",
     ],
 )
 
+tflm_signal_kernel_library(
+    name = "filter_bank_kernels",
+    srcs = ["filter_bank_kernels.cc"],
+    deps = [
+        "//signal/src:filter_bank",
+        "//signal/src:filter_bank_log",
+        "//signal/src:filter_bank_spectral_subtraction",
+        "//signal/src:filter_bank_square_root",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "framer_kernel",
+    srcs = ["framer_kernel.cc"],
+    deps = [
+        "//signal/src:circular_buffer",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "overlap_add_kernel",
+    srcs = ["overlap_add_kernel.cc"],
+    deps = [
+        "//signal/src:overlap_add",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "pcan_kernel",
+    srcs = ["pcan_kernel.cc"],
+    deps = [
+        "//signal/src:pcan_argc_fixed",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "stacker_kernel",
+    srcs = ["stacker_kernel.cc"],
+    deps = [
+        "//signal/src:circular_buffer",
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
 tflm_signal_kernel_library(
     name = "window_kernel",
     srcs = ["window_kernel.cc"],
diff --git a/signal/tensorflow_core/kernels/delay_kernel.cc b/signal/tensorflow_core/kernels/delay_kernel.cc
new file mode 100644
index 00000000000..8c5c505e8e0
--- /dev/null
+++ b/signal/tensorflow_core/kernels/delay_kernel.cc
@@ -0,0 +1,94 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <vector>
+
+#include "signal/src/circular_buffer.h"
+#include "tensorflow/core/framework/op_kernel.h"
+
+namespace tensorflow {
+namespace signal {
+
+class DelayOp : public tensorflow::OpKernel {
+ public:
+  explicit DelayOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("delay_length", &delay_length_));
+    initialized_ = false;
+  }
+
+  ~DelayOp() {}
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    if (!initialized_) {
+      frame_size_ =
+          input_tensor.flat_inner_dims<int16_t>().dimensions().at(1);
+      outer_dims_ =
+          input_tensor.flat_inner_dims<int16_t>().dimensions().at(0);
+
+      state_tensors_.resize(outer_dims_);
+      circular_buffers_.resize(outer_dims_);
+
+      // Calculate the capacity of the circular buffer.
+      size_t capacity = frame_size_ + delay_length_;
+      size_t state_size =
+          tflite::tflm_signal::CircularBufferGetNeededMemory(capacity);
+      for (int i = 0; i < outer_dims_; i++) {
+        OP_REQUIRES_OK(
+            context,
+            context->allocate_temp(
+                DT_INT8, TensorShape({static_cast<int32_t>(state_size)}),
+                &state_tensors_[i]));
+        int8_t* state_ = state_tensors_[i].flat<int8_t>().data();
+        circular_buffers_[i] = tflite::tflm_signal::CircularBufferInit(
+            capacity, state_, state_size);
+        tflite::tflm_signal::CircularBufferWriteZeros(circular_buffers_[i],
+                                                      delay_length_);
+      }
+      initialized_ = true;
+    }
+
+    TensorShape output_shape = input_tensor.shape();
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, output_shape, &output_tensor));
+
+    for (int dim_index = 0, sample_index = 0; dim_index < outer_dims_;
+         dim_index++, sample_index += frame_size_) {
+      tflite::tflm_signal::CircularBufferWrite(
+          circular_buffers_[dim_index],
+          &input_tensor.flat<int16_t>().data()[sample_index], frame_size_);
+      tflite::tflm_signal::CircularBufferGet(
+          circular_buffers_[dim_index], frame_size_,
+          &(reinterpret_cast<int16_t*>(output_tensor->data()))[sample_index]);
+      tflite::tflm_signal::CircularBufferDiscard(circular_buffers_[dim_index],
+                                                 frame_size_);
+    }
+  }
+
+ private:
+  bool initialized_;
+  int frame_size_;
+  int delay_length_;
+  int outer_dims_;
+  std::vector<Tensor> state_tensors_;
+  std::vector<tflite::tflm_signal::CircularBuffer*> circular_buffers_;
+};
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_KERNEL_BUILDER(Name("SignalDelay").Device(tensorflow::DEVICE_CPU),
+                        DelayOp);
+
+}  // namespace signal
+}  // namespace tensorflow
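DelayOp is a thin wrapper over the circular-buffer helpers in signal/src. A single-stream sketch of the same delay-by-N pattern (editor's addition; a real caller would allocate the state once, not per frame):

#include <stdint.h>
#include <stdlib.h>

#include "signal/src/circular_buffer.h"

// Delays a stream by kDelay samples, one frame at a time, mirroring
// DelayOp::Compute above.
void DelayFrame(const int16_t* frame, int16_t* delayed, int frame_size) {
  const int kDelay = 128;  // hypothetical delay length
  const size_t capacity = frame_size + kDelay;
  const size_t state_size =
      tflite::tflm_signal::CircularBufferGetNeededMemory(capacity);
  int8_t* state = static_cast<int8_t*>(malloc(state_size));
  auto* cb =
      tflite::tflm_signal::CircularBufferInit(capacity, state, state_size);
  tflite::tflm_signal::CircularBufferWriteZeros(cb, kDelay);  // initial gap
  tflite::tflm_signal::CircularBufferWrite(cb, frame, frame_size);
  tflite::tflm_signal::CircularBufferGet(cb, frame_size, delayed);
  tflite::tflm_signal::CircularBufferDiscard(cb, frame_size);
  free(state);
}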
diff --git a/signal/tensorflow_core/kernels/energy_kernel.cc b/signal/tensorflow_core/kernels/energy_kernel.cc
new file mode 100644
index 00000000000..11fda5e4379
--- /dev/null
+++ b/signal/tensorflow_core/kernels/energy_kernel.cc
@@ -0,0 +1,56 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/complex.h"
+#include "signal/src/energy.h"
+#include "tensorflow/core/framework/op_kernel.h"
+
+namespace tensorflow {
+namespace signal {
+
+class EnergyOp : public tensorflow::OpKernel {
+ public:
+  explicit EnergyOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("start_index", &start_index_));
+    OP_REQUIRES_OK(context, context->GetAttr("end_index", &end_index_));
+  }
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    const int16_t* input = input_tensor.flat<int16_t>().data();
+    tensorflow::Tensor* output_tensor = nullptr;
+    // The input is complex. The output is real.
+    int output_size = input_tensor.flat<int16_t>().size() >> 1;
+
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, {output_size}, &output_tensor));
+    uint32* output = output_tensor->flat<uint32>().data();
+
+    tflite::tflm_signal::SpectrumToEnergy(
+        reinterpret_cast<const tflite::tflm_signal::Complex<int16_t>*>(input),
+        start_index_, end_index_, output);
+  }
+
+ private:
+  int start_index_;
+  int end_index_;
+};
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_KERNEL_BUILDER(Name("SignalEnergy").Device(tensorflow::DEVICE_CPU),
+                        EnergyOp);
+
+}  // namespace signal
+}  // namespace tensorflow
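For context (editor's note): SpectrumToEnergy computes |X[k]|^2 = re^2 + im^2 for each complex bin in [start_index, end_index), which is why the op's output has half as many elements as its interleaved re/im int16 input. A hypothetical call for a 512-point RFFT's 257-bin spectrum, per the declaration used by the kernel above:

#include <stdint.h>

#include "signal/src/complex.h"
#include "signal/src/energy.h"

void EnergyExample(const tflite::tflm_signal::Complex<int16_t>* spectrum,
                   uint32_t* energy) {
  // Writes energy[k] = re^2 + im^2 for k in [1, 256); other bins untouched.
  tflite::tflm_signal::SpectrumToEnergy(spectrum, /*start_index=*/1,
                                        /*end_index=*/256, energy);
}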
diff --git a/signal/tensorflow_core/kernels/fft_kernels.cc b/signal/tensorflow_core/kernels/fft_kernels.cc
index b7831d982bd..48db4e3ff78 100644
--- a/signal/tensorflow_core/kernels/fft_kernels.cc
+++ b/signal/tensorflow_core/kernels/fft_kernels.cc
@@ -13,6 +13,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
+#include "signal/src/fft_auto_scale.h"
+#include "signal/src/irfft.h"
 #include "signal/src/rfft.h"
 #include "tensorflow/core/framework/op_kernel.h"
 
@@ -81,7 +83,82 @@ class RfftOp : public tensorflow::OpKernel {
   Tensor state_tensor_;
 };
 
+// get_needed_memory_func(), init_func(), apply_func()
+// are type specific implementations of the IRFFT functions.
+// See irfft.h included above for documentation
+template <typename T, DataType E, size_t (*get_needed_memory_func)(int32_t),
+          void* (*init_func)(int32_t, void*, size_t),
+          void (*apply_func)(void*,
+                             const tflite::tflm_signal::Complex<T>* input,
+                             T*)>
+class IrfftOp : public tensorflow::OpKernel {
+ public:
+  explicit IrfftOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("fft_length", &fft_length_));
+    // Subband array size is the number of subbands * 2 because each
+    // coefficient is complex.
+    subband_array_size_ = ((fft_length_ / 2) + 1) * 2;
+
+    size_t state_size = (*get_needed_memory_func)(fft_length_);
+    OP_REQUIRES_OK(context, context->allocate_temp(
+                                DT_INT8, TensorShape({(int32_t)state_size}),
+                                &state_handle_));
+    state_ = state_handle_.flat<int8_t>().data();
+    (*init_func)(fft_length_, state_, state_size);
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    const T* input = input_tensor.flat<T>().data();
+
+    TensorShape output_shape = input_tensor.shape();
+    output_shape.set_dim(output_shape.dims() - 1, fft_length_);
+
+    // Create an output tensor
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, output_shape, &output_tensor));
+    T* output = output_tensor->flat<T>().data();
+
+    int outer_dims = input_tensor.flat_inner_dims<T>().dimensions().at(0);
+    for (int i = 0; i < outer_dims; i++) {
+      (*apply_func)(
+          state_,
+          reinterpret_cast<const tflite::tflm_signal::Complex<T>*>(
+              &input[i * subband_array_size_]),
+          &output[i * fft_length_]);
+    }
+  }
+
+ private:
+  int fft_length_;
+  int subband_array_size_;
+  int8_t* state_;
+  Tensor state_handle_;
+};
+
+class FftAutoScaleOp : public tensorflow::OpKernel {
+ public:
+  explicit FftAutoScaleOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {}
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    const int16_t* input = input_tensor.flat<int16_t>().data();
+
+    // Create an output tensor
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context, context->allocate_output(0, input_tensor.shape(),
+                                                     &output_tensor));
+    int16_t* output = output_tensor->flat<int16_t>().data();
+
+    tensorflow::Tensor* scale_bit_tensor = nullptr;
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(1, {}, &scale_bit_tensor));
+    scale_bit_tensor->scalar<int32_t>()() = tflite::tflm_signal::FftAutoScale(
+        input, output_tensor->NumElements(), output);
+  }
+};
+
 // TODO(b/286250473): change back name after name clash resolved
+REGISTER_KERNEL_BUILDER(
+    Name("SignalFftAutoScale").Device(tensorflow::DEVICE_CPU), FftAutoScaleOp);
 REGISTER_KERNEL_BUILDER(
     Name("SignalRfft")
         .Device(tensorflow::DEVICE_CPU)
@@ -101,5 +178,27 @@ REGISTER_KERNEL_BUILDER(
     RfftOp<float, DT_FLOAT, tflite::tflm_signal::RfftFloatGetNeededMemory,
            tflite::tflm_signal::RfftFloatInit,
            tflite::tflm_signal::RfftFloatApply>);
 
+REGISTER_KERNEL_BUILDER(
+    Name("SignalIrfft")
+        .Device(tensorflow::DEVICE_CPU)
+        .TypeConstraint<int16_t>("T"),
+    IrfftOp<int16_t, DT_INT16, tflite::tflm_signal::IrfftInt16GetNeededMemory,
+            tflite::tflm_signal::IrfftInt16Init,
+            tflite::tflm_signal::IrfftInt16Apply>);
+REGISTER_KERNEL_BUILDER(
+    Name("SignalIrfft")
+        .Device(tensorflow::DEVICE_CPU)
+        .TypeConstraint<int32_t>("T"),
+    IrfftOp<int32_t, DT_INT32, tflite::tflm_signal::IrfftInt32GetNeededMemory,
+            tflite::tflm_signal::IrfftInt32Init,
+            tflite::tflm_signal::IrfftInt32Apply>);
+REGISTER_KERNEL_BUILDER(
+    Name("SignalIrfft")
+        .Device(tensorflow::DEVICE_CPU)
+        .TypeConstraint<float>("T"),
+    IrfftOp<float, DT_FLOAT, tflite::tflm_signal::IrfftFloatGetNeededMemory,
+            tflite::tflm_signal::IrfftFloatInit,
+            tflite::tflm_signal::IrfftFloatApply>);
+
 }  // namespace signal
 }  // namespace tensorflow
\ No newline at end of file
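FftAutoScale pairs with the fixed-point FFTs: judging by its use above, it rescales an int16_t buffer toward full 16-bit range and returns the number of shift bits applied, so later stages can undo the gain. A hedged sketch of that contract (editor's addition; the exact rounding behavior lives in signal/src/fft_auto_scale.cc, which is not part of this excerpt):

#include <stdint.h>
#include <stdio.h>

#include "signal/src/fft_auto_scale.h"

int main() {
  int16_t buf[4] = {1000, -2000, 500, 250};
  int16_t scaled[4];
  const int scale_bits = tflite::tflm_signal::FftAutoScale(buf, 4, scaled);
  // With max |x| = 2000 (< 2^11), 4 bits of headroom remain, so we would
  // expect scale_bits == 4 and scaled[1] == -32000.
  printf("scale_bits=%d scaled[1]=%d\n", scale_bits, scaled[1]);
  return 0;
}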
diff --git a/signal/tensorflow_core/kernels/filter_bank_kernels.cc b/signal/tensorflow_core/kernels/filter_bank_kernels.cc
new file mode 100644
index 00000000000..24a71493762
--- /dev/null
+++ b/signal/tensorflow_core/kernels/filter_bank_kernels.cc
@@ -0,0 +1,182 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/filter_bank.h"
+#include "signal/src/filter_bank_log.h"
+#include "signal/src/filter_bank_spectral_subtraction.h"
+#include "signal/src/filter_bank_square_root.h"
+#include "tensorflow/core/framework/op_kernel.h"
+
+namespace tensorflow {
+namespace signal {
+
+class FilterBankOp : public tensorflow::OpKernel {
+ public:
+  explicit FilterBankOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    int32_t num_channels;
+    OP_REQUIRES_OK(context, context->GetAttr("num_channels", &num_channels));
+    OP_REQUIRES_OK(context, context->allocate_temp(
+                                DT_UINT64, TensorShape({num_channels + 1}),
+                                &work_area_tensor_));
+    work_area_ = work_area_tensor_.flat<uint64>().data();
+    config_.num_channels = num_channels;
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    const uint32_t* input = input_tensor.flat<uint32>().data();
+
+    config_.weights = context->input(1).flat<int16>().data();
+    config_.unweights = context->input(2).flat<int16>().data();
+    config_.channel_frequency_starts = context->input(3).flat<int16>().data();
+    config_.channel_weight_starts = context->input(4).flat<int16>().data();
+    config_.channel_widths = context->input(5).flat<int16>().data();
+
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context, context->allocate_output(0, {config_.num_channels},
+                                                     &output_tensor));
+    tflite::tflm_signal::FilterbankAccumulateChannels(&config_, input,
+                                                      work_area_);
+
+    uint64_t* output = output_tensor->flat<uint64>().data();
+    // Discard channel 0, which is just scratch
+    memcpy(output, work_area_ + 1, sizeof(*output) * config_.num_channels);
+  }
+
+ private:
+  tflite::tflm_signal::FilterbankConfig config_;
+  uint64_t* work_area_;
+  Tensor work_area_tensor_;
+};
+
+class FilterBankSquareRootOp : public tensorflow::OpKernel {
+ public:
+  explicit FilterBankSquareRootOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {}
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const uint64_t* input = context->input(0).flat<uint64>().data();
+    int32_t scale_bits = context->input(1).scalar<int32>()();
+    int32_t num_channels = context->input(0).NumElements();
+
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, {num_channels}, &output_tensor));
+    uint32_t* output = output_tensor->flat<uint32>().data();
+    tflite::tflm_signal::FilterbankSqrt(input, num_channels, scale_bits,
+                                        output);
+  }
+
+ private:
+};
+
+class FilterBankSpectralSubtractionOp : public tensorflow::OpKernel {
+ public:
+  explicit FilterBankSpectralSubtractionOp(
+      tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    int attr_int;
+    bool attr_bool;
+    OP_REQUIRES_OK(context, context->GetAttr("smoothing", &attr_int));
+    config_.smoothing = attr_int;
+    OP_REQUIRES_OK(context, context->GetAttr("one_minus_smoothing", &attr_int));
+    config_.one_minus_smoothing = attr_int;
+    OP_REQUIRES_OK(context, context->GetAttr("alternate_smoothing", &attr_int));
+    config_.alternate_smoothing = attr_int;
+    OP_REQUIRES_OK(
+        context, context->GetAttr("alternate_one_minus_smoothing", &attr_int));
+    config_.alternate_one_minus_smoothing = attr_int;
+    OP_REQUIRES_OK(context, context->GetAttr("smoothing_bits", &attr_int));
+    config_.smoothing_bits = attr_int;
+    OP_REQUIRES_OK(context,
+                   context->GetAttr("min_signal_remaining", &attr_int));
+    config_.min_signal_remaining = attr_int;
+    OP_REQUIRES_OK(context, context->GetAttr("clamping", &attr_bool));
+    config_.clamping = attr_bool;
+    OP_REQUIRES_OK(context, context->GetAttr("num_channels", &attr_int));
+    config_.num_channels = attr_int;
+    OP_REQUIRES_OK(context,
+                   context->GetAttr("spectral_subtraction_bits", &attr_int));
+    config_.spectral_subtraction_bits = attr_int;
+    OP_REQUIRES_OK(context, context->allocate_temp(
+                                DT_UINT32, TensorShape({config_.num_channels}),
+                                &noise_estimate_tensor_));
+    noise_estimate_ = (uint32_t*)noise_estimate_tensor_.flat<uint32>().data();
+    memset(noise_estimate_, 0, sizeof(uint32_t) * config_.num_channels);
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    tensorflow::Tensor* output_tensor = nullptr;
+    const uint32_t* input = context->input(0).flat<uint32>().data();
+    OP_REQUIRES_OK(context, context->allocate_output(0, {config_.num_channels},
+                                                     &output_tensor));
+    uint32_t* output = output_tensor->flat<uint32>().data();
+    OP_REQUIRES_OK(context, context->allocate_output(1, {config_.num_channels},
+                                                     &output_tensor));
+    uint32_t* noise_estimate = output_tensor->flat<uint32>().data();
+
+    tflite::tflm_signal::FilterbankSpectralSubtraction(&config_, input, output,
+                                                       noise_estimate_);
+    memcpy(noise_estimate, noise_estimate_,
+           sizeof(*noise_estimate) * config_.num_channels);
+  }
+
+ private:
+  Tensor noise_estimate_tensor_;
+  tflite::tflm_signal::SpectralSubtractionConfig config_;
+  uint32_t* noise_estimate_;
+};
+
+class FilterBankLogOp : public tensorflow::OpKernel {
+ public:
+  explicit FilterBankLogOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("output_scale", &output_scale_));
+    OP_REQUIRES_OK(context, context->GetAttr("input_correction_bits",
+                                             &input_correction_bits_));
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const uint32_t* input = context->input(0).flat<uint32>().data();
+    int num_channels = context->input(0).NumElements();
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, {num_channels}, &output_tensor));
+    int16_t* output = output_tensor->flat<int16>().data();
+    tflite::tflm_signal::FilterbankLog(input, num_channels, output_scale_,
+                                       input_correction_bits_, output);
+  }
+
+ private:
+  int output_scale_;
+  int input_correction_bits_;
+};
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_KERNEL_BUILDER(Name("SignalFilterBank").Device(tensorflow::DEVICE_CPU),
+                        FilterBankOp);
+REGISTER_KERNEL_BUILDER(
+    Name("SignalFilterBankSquareRoot").Device(tensorflow::DEVICE_CPU),
+    FilterBankSquareRootOp);
+REGISTER_KERNEL_BUILDER(
+    Name("SignalFilterBankSpectralSubtraction").Device(tensorflow::DEVICE_CPU),
+    FilterBankSpectralSubtractionOp);
+REGISTER_KERNEL_BUILDER(
+    Name("SignalFilterBankLog").Device(tensorflow::DEVICE_CPU),
+    FilterBankLogOp);
+
+}  // namespace signal
+}  // namespace tensorflow
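Taken together, the four ops mirror the classic micro-frontend chain: filter-bank accumulation, square-root compression, spectral subtraction, then log scaling. A sketch of one frame through the underlying signal/src calls in that order (editor's addition; the numeric parameters are hypothetical and the buffers are caller-provided):

#include <stdint.h>

#include "signal/src/filter_bank.h"
#include "signal/src/filter_bank_log.h"
#include "signal/src/filter_bank_spectral_subtraction.h"
#include "signal/src/filter_bank_square_root.h"

void FilterBankFrame(const tflite::tflm_signal::FilterbankConfig* fb,
                     const tflite::tflm_signal::SpectralSubtractionConfig* ss,
                     const uint32_t* energy, int num_channels,
                     uint64_t* work_area, uint32_t* sqrt_out,
                     uint32_t* subtracted, uint32_t* noise_estimate,
                     int16_t* log_out) {
  // work_area has num_channels + 1 slots; slot 0 is scratch (see FilterBankOp).
  tflite::tflm_signal::FilterbankAccumulateChannels(fb, energy, work_area);
  tflite::tflm_signal::FilterbankSqrt(work_area + 1, num_channels,
                                      /*scale_down_bits=*/7, sqrt_out);
  tflite::tflm_signal::FilterbankSpectralSubtraction(ss, sqrt_out, subtracted,
                                                     noise_estimate);
  tflite::tflm_signal::FilterbankLog(subtracted, num_channels,
                                     /*output_scale=*/1600,
                                     /*input_correction_bits=*/3, log_out);
}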
diff --git a/signal/tensorflow_core/kernels/framer_kernel.cc b/signal/tensorflow_core/kernels/framer_kernel.cc
new file mode 100644
index 00000000000..e858de3042f
--- /dev/null
+++ b/signal/tensorflow_core/kernels/framer_kernel.cc
@@ -0,0 +1,126 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <vector>
+
+#include "signal/src/circular_buffer.h"
+#include "tensorflow/core/framework/op_kernel.h"
+
+namespace tensorflow {
+namespace signal {
+
+class FramerOp : public tensorflow::OpKernel {
+ public:
+  explicit FramerOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("frame_size", &frame_size_));
+    OP_REQUIRES_OK(context, context->GetAttr("frame_step", &frame_step_));
+    OP_REQUIRES_OK(context, context->GetAttr("prefill", &prefill_));
+    initialized_ = false;
+  }
+
+  ~FramerOp() {}
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    if (!initialized_) {
+      n_frames_ = input_tensor.flat_inner_dims<int16_t>().dimensions().at(1) /
+                  frame_step_;
+      outer_dims_ =
+          input_tensor.flat_inner_dims<int16_t>().dimensions().at(0);
+
+      state_tensors_.resize(outer_dims_);
+      circular_buffers_.resize(outer_dims_);
+
+      // Calculate the capacity of the circular buffer. Round up the frame
+      // size to a multiple of frame step. Saves memory relative to the
+      // simpler frame_size + frame_step. For example:
+      // step_size = 160, frame_size = 400
+      // capacity = 480 vs. step_size + frame_size = 560
+      size_t capacity =
+          (frame_size_ + frame_step_ - 1) / frame_step_ * frame_step_;
+      size_t state_size =
+          tflite::tflm_signal::CircularBufferGetNeededMemory(capacity);
+      for (int i = 0; i < outer_dims_; i++) {
+        OP_REQUIRES_OK(
+            context,
+            context->allocate_temp(
+                DT_INT8, TensorShape({static_cast<int32_t>(state_size)}),
+                &state_tensors_[i]));
+        int8_t* state_ = state_tensors_[i].flat<int8_t>().data();
+        circular_buffers_[i] = tflite::tflm_signal::CircularBufferInit(
+            capacity, state_, state_size);
+        if (prefill_) {
+          tflite::tflm_signal::CircularBufferWriteZeros(
+              circular_buffers_[i], frame_size_ - frame_step_);
+        }
+      }
+
+      initialized_ = true;
+    }
+
+    // Split the last dimension of the input into {n_frames_, frame_size_}.
+    TensorShape output_shape = input_tensor.shape();
+    output_shape.AddDim(frame_size_);
+    output_shape.set_dim(output_shape.dims() - 2, n_frames_);
+
+    tensorflow::Tensor* output_tensor = nullptr;
+    tensorflow::Tensor* output_valid_tensor = nullptr;
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, output_shape, &output_tensor));
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(1, {}, &output_valid_tensor));
+
+    bool output_valid = true;
+    for (int i = 0; i < outer_dims_; i++) {
+      for (int frame = 0; frame < n_frames_; frame++) {
+        int input_index = (i * n_frames_ + frame) * frame_step_;
+        int output_index = (i * n_frames_ + frame) * frame_size_;
+        tflite::tflm_signal::CircularBufferWrite(
+            circular_buffers_[i],
+            &(input_tensor.flat<int16_t>().data())[input_index], frame_step_);
+        if (tflite::tflm_signal::CircularBufferAvailable(
+                circular_buffers_[i]) >= (unsigned)frame_size_) {
+          tflite::tflm_signal::CircularBufferGet(
+              circular_buffers_[i], frame_size_,
+              &(reinterpret_cast<int16_t*>(
+                  output_tensor->data()))[output_index]);
+          tflite::tflm_signal::CircularBufferDiscard(circular_buffers_[i],
+                                                     frame_step_);
+        } else {
+          output_valid = false;
+        }
+      }
+      *output_valid_tensor->flat<bool>().data() = output_valid;
+    }
+  }
+
+ private:
+  bool initialized_;
+  int frame_size_;
+  int frame_step_;
+  int outer_dims_;
+  bool prefill_;
+  int n_frames_;
+  std::vector<Tensor> state_tensors_;
+  std::vector<tflite::tflm_signal::CircularBuffer*> circular_buffers_;
+};
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_KERNEL_BUILDER(Name("SignalFramer").Device(tensorflow::DEVICE_CPU),
+                        FramerOp);
+
+}  // namespace signal
+}  // namespace tensorflow
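Reviewer note: a self-contained sketch of the framing behavior implemented by
FramerOp, with std::deque standing in for the library's CircularBuffer
(illustrative only; it reproduces the SignalFramer doc example with
frame_size=3, frame_step=2):

#include <cstdint>
#include <cstdio>
#include <deque>

int main() {
  const int kFrameSize = 3, kFrameStep = 2;
  const int16_t signal[10] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
  std::deque<int16_t> buffer;  // stands in for the circular buffer state
  for (int i = 0; i < 10; i += kFrameStep) {
    // Each invocation writes frame_step new samples.
    buffer.insert(buffer.end(), signal + i, signal + i + kFrameStep);
    if (static_cast<int>(buffer.size()) >= kFrameSize) {
      printf("frame:");  // emit one frame of frame_size samples
      for (int j = 0; j < kFrameSize; ++j) printf(" %d", buffer[j]);
      printf("\n");
      // Advance by frame_step; the rest overlaps into the next frame.
      buffer.erase(buffer.begin(), buffer.begin() + kFrameStep);
    } else {
      printf("output_valid: false\n");  // not enough samples buffered yet
    }
  }
  return 0;
}

This prints the frames [1 2 3], [3 4 5], [5 6 7], [7 8 9], with the first
invocation reporting output_valid false, matching the SignalFramer
documentation later in this change.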
diff --git a/signal/tensorflow_core/kernels/overlap_add_kernel.cc b/signal/tensorflow_core/kernels/overlap_add_kernel.cc
new file mode 100644
index 00000000000..40dbad80132
--- /dev/null
+++ b/signal/tensorflow_core/kernels/overlap_add_kernel.cc
@@ -0,0 +1,91 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stdint.h>
+
+#include "signal/src/overlap_add.h"
+#include "tensorflow/core/framework/op_kernel.h"
+
+namespace tensorflow {
+namespace signal {
+
+template <typename T, DataType E>
+class OverlapAddOp : public tensorflow::OpKernel {
+ public:
+  explicit OverlapAddOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("frame_step", &frame_step_));
+    initialized_ = false;
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    if (!initialized_) {
+      outer_dims_ = input_tensor.flat_inner_dims<T, 3>().dimensions().at(0);
+      n_frames_ = input_tensor.flat_inner_dims<T, 3>().dimensions().at(1);
+      frame_size_ = input_tensor.flat_inner_dims<T, 3>().dimensions().at(2);
+
+      state_tensors_.resize(outer_dims_);
+      for (int i = 0; i < outer_dims_; i++) {
+        OP_REQUIRES_OK(context,
+                       context->allocate_temp(
+                           E, TensorShape({static_cast<int32_t>(frame_size_)}),
+                           &state_tensors_[i]));
+        memset(state_tensors_[i].flat<T>().data(), 0, sizeof(T) * frame_size_);
+      }
+      initialized_ = true;
+    }
+
+    TensorShape output_shape = input_tensor.shape();
+    output_shape.RemoveDim(output_shape.dims() - 1);
+    output_shape.set_dim(output_shape.dims() - 1, n_frames_ * frame_step_);
+
+    tensorflow::Tensor* output_tensor = nullptr;
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, output_shape, &output_tensor));
+    const T* input = input_tensor.flat<T>().data();
+    T* output = output_tensor->flat<T>().data();
+    for (int i = 0; i < outer_dims_; i++) {
+      T* buffer = state_tensors_[i].flat<T>().data();
+      for (int frame = 0; frame < n_frames_; frame++) {
+        int input_index = (i * n_frames_ + frame) * frame_size_;
+        int output_index = (i * n_frames_ + frame) * frame_step_;
+        tflm_signal::OverlapAdd(&input[input_index], buffer, frame_size_,
+                                &output[output_index], frame_step_);
+      }
+    }
+  }
+
+ private:
+  int frame_size_;
+  int frame_step_;
+  int n_frames_;
+  int outer_dims_;
+  bool initialized_;
+  std::vector<Tensor> state_tensors_;
+};
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_KERNEL_BUILDER(Name("SignalOverlapAdd")
+                            .Device(tensorflow::DEVICE_CPU)
+                            .TypeConstraint<float>("T"),
+                        OverlapAddOp<float, DT_FLOAT>);
+REGISTER_KERNEL_BUILDER(Name("SignalOverlapAdd")
+                            .Device(tensorflow::DEVICE_CPU)
+                            .TypeConstraint<int16_t>("T"),
+                        OverlapAddOp<int16_t, DT_INT16>);
+
+}  // namespace signal
+}  // namespace tensorflow
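Reviewer note: the overlap-add recurrence used by OverlapAddOp can be seen in
this self-contained sketch (illustrative only; it reproduces the
SignalOverlapAdd doc example with frame_size=3, frame_step=2):

#include <cstdint>
#include <cstdio>

int main() {
  const int kFrameSize = 3, kFrameStep = 2, kNumFrames = 5;
  const int16_t frames[kNumFrames][kFrameSize] = {
      {1, 2, 3}, {4, 5, 6}, {7, 8, 9}, {0, 1, 2}, {3, 4, 5}};
  int16_t buffer[kFrameSize] = {0};  // carries the overlap between frames
  for (int f = 0; f < kNumFrames; ++f) {
    // Add the new frame on top of the carried overlap.
    for (int i = 0; i < kFrameSize; ++i) buffer[i] += frames[f][i];
    // Emit the first frame_step samples.
    printf("[%d, %d]\n", buffer[0], buffer[1]);
    // Shift the unemitted tail down and clear the vacated slots.
    for (int i = 0; i < kFrameSize - kFrameStep; ++i)
      buffer[i] = buffer[i + kFrameStep];
    for (int i = kFrameSize - kFrameStep; i < kFrameSize; ++i) buffer[i] = 0;
  }
  return 0;
}

This prints [1, 2], [7, 5], [13, 8], [9, 1], [5, 4], the output frames listed
in the SignalOverlapAdd documentation later in this change.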
diff --git a/signal/tensorflow_core/kernels/pcan_kernel.cc b/signal/tensorflow_core/kernels/pcan_kernel.cc
new file mode 100644
index 00000000000..a1adde04822
--- /dev/null
+++ b/signal/tensorflow_core/kernels/pcan_kernel.cc
@@ -0,0 +1,53 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/pcan_argc_fixed.h"
+#include "tensorflow/core/framework/op_kernel.h"
+#include "tensorflow/core/framework/types.h"
+
+namespace tensorflow {
+namespace signal {
+
+class PcanOp : public tensorflow::OpKernel {
+ public:
+  explicit PcanOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("snr_shift", &snr_shift_));
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    tensorflow::Tensor* output_tensor = nullptr;
+    const uint32_t* input = context->input(0).flat<uint32_t>().data();
+    const uint32_t* noise_estimate = context->input(1).flat<uint32_t>().data();
+    const int16_t* gain_lut = context->input(2).flat<int16_t>().data();
+    int32_t num_channels = context->input(0).NumElements();
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(0, {num_channels}, &output_tensor));
+    uint32_t* output = output_tensor->flat<uint32_t>().data();
+
+    memcpy(output, input, sizeof(uint32_t) * num_channels);
+    tflite::tflm_signal::ApplyPcanAutoGainControlFixed(
+        gain_lut, snr_shift_, noise_estimate, output, num_channels);
+  }
+
+ private:
+  int snr_shift_;
+};
+
+REGISTER_KERNEL_BUILDER(Name("SignalPCAN").Device(tensorflow::DEVICE_CPU),
+                        PcanOp);
+
+}  // namespace signal
+}  // namespace tensorflow
diff --git a/signal/tensorflow_core/kernels/stacker_kernel.cc b/signal/tensorflow_core/kernels/stacker_kernel.cc
new file mode 100644
index 00000000000..3e1a3893a21
--- /dev/null
+++ b/signal/tensorflow_core/kernels/stacker_kernel.cc
@@ -0,0 +1,100 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "signal/src/circular_buffer.h"
+#include "tensorflow/core/framework/op_kernel.h"
+
+namespace tensorflow {
+namespace signal {
+
+class StackerOp : public tensorflow::OpKernel {
+ public:
+  explicit StackerOp(tensorflow::OpKernelConstruction* context)
+      : tensorflow::OpKernel(context) {
+    OP_REQUIRES_OK(context, context->GetAttr("num_channels", &num_channels_));
+    OP_REQUIRES_OK(context, context->GetAttr("stacker_left_context",
+                                             &stacker_left_context_));
+    OP_REQUIRES_OK(context, context->GetAttr("stacker_right_context",
+                                             &stacker_right_context_));
+    OP_REQUIRES_OK(context, context->GetAttr("stacker_step", &stacker_step_));
+    buffer_size_ =
+        num_channels_ * (stacker_left_context_ + stacker_right_context_ + 1);
+    step_size_ = num_channels_ * stacker_step_;
+    stacker_has_first_frame_ = false;
+
+    size_t state_size =
+        tflite::tflm_signal::CircularBufferGetNeededMemory(buffer_size_);
+    OP_REQUIRES_OK(context,
+                   context->allocate_temp(
+                       DT_INT8, TensorShape({static_cast<int32_t>(state_size)}),
+                       &state_tensor_));
+    state_ = state_tensor_.flat<int8_t>().data();
+    circular_buffer = tflite::tflm_signal::CircularBufferInit(
+        buffer_size_, state_, state_size);
+  }
+
+  void Compute(tensorflow::OpKernelContext* context) override {
+    const tensorflow::Tensor& input_tensor = context->input(0);
+    const int16_t* input = input_tensor.flat<int16_t>().data();
+
+    tflite::tflm_signal::CircularBufferWrite(circular_buffer, input,
+                                             num_channels_);
+
+    // The first frame is replicated an extra left_context times to pad.
+    if (stacker_has_first_frame_ == false) {
+      tflite::tflm_signal::CircularBufferExtend(circular_buffer, num_channels_,
+                                                stacker_left_context_);
+      stacker_has_first_frame_ = true;
+    }
+
+    tensorflow::Tensor* output_tensor = nullptr;
+    tensorflow::Tensor* output_valid_tensor = nullptr;
+
+    OP_REQUIRES_OK(
+        context, context->allocate_output(
+                     0, {static_cast<int32_t>(buffer_size_)}, &output_tensor));
+    OP_REQUIRES_OK(context,
+                   context->allocate_output(1, {}, &output_valid_tensor));
+
+    if (tflite::tflm_signal::CircularBufferAvailable(circular_buffer) >=
+        buffer_size_) {
+      tflite::tflm_signal::CircularBufferGet(
+          circular_buffer, buffer_size_,
+          output_tensor->flat<int16_t>().data());
+      tflite::tflm_signal::CircularBufferDiscard(circular_buffer, step_size_);
+      *output_valid_tensor->flat<bool>().data() = true;
+    } else {
+      *output_valid_tensor->flat<bool>().data() = false;
+    }
+  }
+
+ private:
+  int num_channels_;
+  int stacker_left_context_;
+  int stacker_right_context_;
+  int stacker_step_;
+  size_t buffer_size_;
+  size_t step_size_;
+  bool stacker_has_first_frame_;
+
+  int8_t* state_;
+  Tensor state_tensor_;
+  tflite::tflm_signal::CircularBuffer* circular_buffer;
+};
+
+REGISTER_KERNEL_BUILDER(Name("SignalStacker").Device(tensorflow::DEVICE_CPU),
+                        StackerOp);
+
+}  // namespace signal
+}  // namespace tensorflow
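Reviewer note: a sketch of the stacker's windowing (illustrative only; the
replication of the first frame is assumed to mimic CircularBufferExtend, and
the sketch reproduces the SignalStacker doc example with num_channels=1,
stacker_left_context=0, stacker_right_context=1, stacker_step=2):

#include <cstdint>
#include <cstdio>
#include <deque>

int main() {
  const int kLeft = 0, kRight = 1, kStep = 2;
  const int kStacked = kLeft + 1 + kRight;  // frames per stacked output
  const int16_t frames[10] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 0};
  std::deque<int16_t> buffer;
  for (int i = 0; i < 10; ++i) {
    buffer.push_back(frames[i]);
    // Pad by replicating the first frame an extra kLeft times.
    if (i == 0)
      for (int j = 0; j < kLeft; ++j) buffer.push_front(frames[0]);
    if (static_cast<int>(buffer.size()) >= kStacked) {
      printf("[%d,%d]\n", buffer[0], buffer[1]);  // kStacked == 2 here
      buffer.erase(buffer.begin(), buffer.begin() + kStep);
    }
  }
  return 0;
}

This prints the five stacked frames [1,2] [3,4] [5,6] [7,8] [9,0] given in the
SignalStacker documentation later in this change.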
diff --git a/signal/tensorflow_core/ops/BUILD b/signal/tensorflow_core/ops/BUILD
index 8c24f22816f..8ef47d8ff2c 100644
--- a/signal/tensorflow_core/ops/BUILD
+++ b/signal/tensorflow_core/ops/BUILD
@@ -5,6 +5,22 @@ package(
     licenses = ["notice"],
 )
 
+tflm_signal_kernel_library(
+    name = "delay_op",
+    srcs = ["delay_op.cc"],
+    deps = [
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "energy_op",
+    srcs = ["energy_op.cc"],
+    deps = [
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
 tflm_signal_kernel_library(
     name = "fft_ops",
     srcs = ["fft_ops.cc"],
     deps = [
         "@tensorflow_cc_deps//:cc_library",
     ],
 )
 
+tflm_signal_kernel_library(
+    name = "filter_bank_ops",
+    srcs = ["filter_bank_ops.cc"],
+    deps = [
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "framer_op",
+    srcs = ["framer_op.cc"],
+    deps = [
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "overlap_add_op",
+    srcs = ["overlap_add_op.cc"],
+    deps = [
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "pcan_op",
+    srcs = ["pcan_op.cc"],
+    deps = [
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
+tflm_signal_kernel_library(
+    name = "stacker_op",
+    srcs = ["stacker_op.cc"],
+    deps = [
+        "@tensorflow_cc_deps//:cc_library",
+    ],
+)
+
 tflm_signal_kernel_library(
     name = "window_op",
     srcs = ["window_op.cc"],
diff --git a/signal/tensorflow_core/ops/delay_op.cc b/signal/tensorflow_core/ops/delay_op.cc
new file mode 100644
index 00000000000..fb816a6a3e2
--- /dev/null
+++ b/signal/tensorflow_core/ops/delay_op.cc
@@ -0,0 +1,58 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/framework/shape_inference.h"
+
+using tensorflow::shape_inference::InferenceContext;
+using tensorflow::shape_inference::ShapeHandle;
+
+namespace tensorflow {
+namespace signal {
+
+absl::Status DelayShape(InferenceContext* c) {
+  ShapeHandle out;
+  TF_RETURN_IF_ERROR(c->WithRankAtLeast(c->input(0), 1, &out));
+  c->set_output(0, out);
+  return absl::OkStatus();
+}
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_OP("SignalDelay")
+    .Attr("delay_length: int >= 0")
+    .Input("input: int16")
+    .Output("output: int16")
+    .SetShapeFn(DelayShape)
+    .Doc(R"doc(
+Delay the innermost dimension of the input signal by delay_length samples.
+
+For example, assuming an input signal of 10 samples,
+[1 2 3 4 5 6 7 8 9 0]
+If we input the signal to a delay op configured with delay_length=3, the op
+will produce the following output:
+[0 0 0 1 2 3 4 5 6 7]
+To retrieve the remainder of the input signal, call the delay op again with
+zeros as input:
+[0 0 0 0 0 0 0 0 0 0]
+to get the output:
+[8 9 0 0 0 0 0 0 0 0]
+
+input: A multidimensional input signal.
+output: An output signal of the same shape as the input signal. The innermost
+  dimension is delayed by delay_length samples.
+)doc");
+
+}  // namespace signal
+}  // namespace tensorflow
diff --git a/signal/tensorflow_core/ops/energy_op.cc b/signal/tensorflow_core/ops/energy_op.cc
new file mode 100644
index 00000000000..d4f7cb723c9
--- /dev/null
+++ b/signal/tensorflow_core/ops/energy_op.cc
@@ -0,0 +1,56 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/framework/shape_inference.h"
+
+using tensorflow::shape_inference::InferenceContext;
+using tensorflow::shape_inference::ShapeHandle;
+
+namespace tensorflow {
+namespace signal {
+
+absl::Status EnergyShape(InferenceContext* c) {
+  ShapeHandle out;
+
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out));
+  int64_t length = InferenceContext::Value(c->Dim(out, 0)) / 2;
+
+  TF_RETURN_IF_ERROR(c->ReplaceDim(out, 0, c->MakeDim(length), &out));
+  c->set_output(0, out);
+  return absl::OkStatus();
+}
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_OP("SignalEnergy")
+    .Attr("start_index: int")
+    .Attr("end_index: int")
+    .Input("input: int16")
+    .Output("output: uint32")
+    .SetShapeFn([](InferenceContext* c) { return EnergyShape(c); })
+    .Doc(R"doc(
+Calculate the energy of a spectral array. Only elements in the index range
+[start_index, end_index] are calculated, and the rest are set to zero.
+
+input: A 1-D frequency domain signal of complex int16 elements
+output: A 1-D array of uint32 elements containing the square of the absolute
+  value of each element in input.
+
+start_index: index in input to start calculating from. Default: 0
+end_index: last index in the input to calculate. Default: last element of input
+)doc");
+
+}  // namespace signal
+}  // namespace tensorflow
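Reviewer note: the energy computed by SignalEnergy above is the squared
magnitude of each complex bin. A sketch of the arithmetic (illustrative only;
EnergyReference is not part of this change):

#include <cstdint>

// input holds interleaved complex int16 values {re, im, re, im, ...};
// output receives one uint32 squared magnitude per bin. Bins outside
// [start_index, end_index] are set to zero, as the op doc specifies.
void EnergyReference(const int16_t* input, int num_bins, int start_index,
                     int end_index, uint32_t* output) {
  for (int i = 0; i < num_bins; ++i) {
    if (i < start_index || i > end_index) {
      output[i] = 0;
      continue;
    }
    const int32_t re = input[2 * i];
    const int32_t im = input[2 * i + 1];
    output[i] = static_cast<uint32_t>(re * re) + static_cast<uint32_t>(im * im);
  }
}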
)doc"); +// TODO(b/286250473): change back name after name clash resolved +REGISTER_OP("SignalIrfft") + .Attr("T: {float, int16, int32}") + .Attr("fft_length: int >= 2") + .Input("input: T") + .Output("output: T") + .SetShapeFn(IrfftShape) + .Doc(R"doc( +Computes the inverse 1-dimensional discrete Fourier transform of a real-valued +signal over the inner-most dimension of input. + +The inner-most dimension of input is assumed to be the result of RFFT: +the fft_length / 2 + 1 unique components of the DFT of a real-valued signal. +fft_length must be provided. + +input: A tensor containing ((fft_length / 2) + 1) complex spectral + components along its innermost dimension. + Since there's no TF integer complex type, the array is represented using + ((fft_length / 2) + 1) * 2 real elements. +output: A tensor containing fft_length time domain elements along its innermost + dimension. + +fft_length: The length of the IFFT operation. +)doc"); + +// TODO(b/286250473): change back name after name clash resolved +REGISTER_OP("SignalFftAutoScale") + .Input("input: int16") + .Output("output: int16") + .Output("scale_bits: int32") + .SetShapeFn([](shape_inference::InferenceContext* c) { + ShapeHandle out; + TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out)); + c->set_output(0, out); + c->set_output(1, c->Scalar()); + return absl::OkStatus(); + }) + .Doc(R"doc( +Shifts the input left until the amplitude is maximized without clipping. Returns +the amount of left shift for compensation later. This op can be used to maximize +precision of integer FFT implementations, especially 16-bit. + +input: A 1-D time domain signal. +output: A 1-D time domain signal after auto scaling. +scale_bits: Scalar. The number of left shifts applied to the input signal. +)doc"); + } // namespace signal } // namespace tensorflow diff --git a/signal/tensorflow_core/ops/filter_bank_ops.cc b/signal/tensorflow_core/ops/filter_bank_ops.cc new file mode 100644 index 00000000000..7e021ffd9cf --- /dev/null +++ b/signal/tensorflow_core/ops/filter_bank_ops.cc @@ -0,0 +1,171 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/signal/tensorflow_core/ops/filter_bank_ops.cc b/signal/tensorflow_core/ops/filter_bank_ops.cc
new file mode 100644
index 00000000000..7e021ffd9cf
--- /dev/null
+++ b/signal/tensorflow_core/ops/filter_bank_ops.cc
@@ -0,0 +1,171 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/framework/shape_inference.h"
+
+using tensorflow::shape_inference::InferenceContext;
+using tensorflow::shape_inference::ShapeHandle;
+
+namespace tensorflow {
+namespace signal {
+
+absl::Status FilterBankShape(InferenceContext* c) {
+  ShapeHandle out;
+  shape_inference::DimensionHandle unused;
+  int num_channels;
+  TF_RETURN_IF_ERROR(c->GetAttr("num_channels", &num_channels));
+
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out));
+
+  // Inputs 1,2 must have the same shape
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &out));
+  auto num_weights = InferenceContext::Value(c->Dim(out, 0));
+
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(2), 1, &out));
+  TF_RETURN_IF_ERROR(
+      c->WithValue(c->Dim(c->input(2), 0), num_weights, &unused));
+  // Inputs 3,4,5 must have the same shape
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(3), 1, &out));
+  TF_RETURN_IF_ERROR(
+      c->WithValue(c->Dim(c->input(3), 0), num_channels + 1, &unused));
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(4), 1, &out));
+  TF_RETURN_IF_ERROR(
+      c->WithValue(c->Dim(c->input(4), 0), num_channels + 1, &unused));
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(5), 1, &out));
+  TF_RETURN_IF_ERROR(
+      c->WithValue(c->Dim(c->input(5), 0), num_channels + 1, &unused));
+  TF_RETURN_IF_ERROR(c->ReplaceDim(out, 0, c->MakeDim(num_channels), &out));
+  c->set_output(0, out);
+  return absl::OkStatus();
+}
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_OP("SignalFilterBank")
+    .Attr("num_channels: int >= 0")
+    .Input("input: uint32")
+    .Input("weights: int16")
+    .Input("unweights: int16")
+    .Input("channel_frequency_starts: int16")
+    .Input("channel_weight_starts: int16")
+    .Input("channel_widths: int16")
+    .Output("output: uint64")
+    .SetShapeFn([](InferenceContext* c) { return FilterBankShape(c); })
+    .Doc(R"doc(
+Applies a mel filter bank of size num_channels to an input spectral energy
+array. See filter_bank_ops.py for how weights, unweights,
+channel_frequency_starts, channel_weight_starts, and channel_widths are
+pre-calculated.
+
+input: A 1-D spectral energy array.
+weights: A 1-D filter weight array of num_channels + 1 elements.
+unweights: A 1-D filter unweights array of num_channels + 1 elements.
+channel_frequency_starts: A 1-D array of size num_channels + 1 elements.
+  Start index in input for each channel.
+channel_weight_starts: A 1-D array of size num_channels + 1 elements.
+  Start index in (un)weights array for each channel.
+channel_widths: A 1-D array of num_channels + 1 elements.
+  Number of bins for each channel.
+output: A 1-D array of num_channels elements. Each element
+  contains the output of a single channel/filter in the bank.
+
+num_channels: Number of channels in the filter bank
+)doc");
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_OP("SignalFilterBankSquareRoot")
+    .Input("input: uint64")
+    .Input("scale_bits: int32")
+    .Output("output: uint32")
+    .SetShapeFn([](InferenceContext* c) {
+      ShapeHandle out;
+      TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out));
+      c->set_output(0, out);
+      return absl::OkStatus();
+    })
+    .Doc(R"doc(
+Applies a square root to each element in the input, then shifts right by
+scale_bits before writing the result to the output.
+
+input: A 1-D array of filter bank channels.
+scale_bits: A scalar. Number of bits to shift right.
+output: A 1-D array of num_channels elements.
+)doc"); + +// TODO(b/286250473): change back name after name clash resolved +REGISTER_OP("SignalFilterBankSpectralSubtraction") + .Attr("num_channels: int >= 0") + .Attr("smoothing: int >= 0") + .Attr("one_minus_smoothing: int >= 0") + .Attr("alternate_smoothing: int >= 0") + .Attr("alternate_one_minus_smoothing: int >= 0") + .Attr("smoothing_bits: int >= 0") + .Attr("min_signal_remaining: int >= 0") + .Attr("clamping: bool") + .Attr("spectral_subtraction_bits: int") + .Input("input: uint32") + .Output("output: uint32") + .Output("noise_estimate: uint32") + .SetShapeFn([](InferenceContext* c) { + ShapeHandle out; + TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out)); + c->set_output(0, out); + c->set_output(1, out); + return absl::OkStatus(); + }) + .Doc(R"doc( +Applies spectral subtraction to a filter bank output of size num_channels +Outputs the clean filter bank channels and the noise estimate for each channel. + +input: A 1-D array of filter bank channels +output: A 1-D array of clean filter bank channels +noise_estimate: A 1-D array per-channel noise estimate + +num_channels: Number of filter bank channels in input, output, noise_estimate +smoothing: Smoothing constant for noise LPF +one_minus_smoothing: (1 - smoothing) for noise LPF +min_signal_remaining: minimum amount of signal after subtraction +alternate_smoothing: if positive, noise LPF for odd-index channels, else ignored +alternate_one_minus_smoothing: (1 - alernate_smoothing), if in use +smoothing_bits: extra fractional bits for the noise_estimate smoothing filter +spectral_subtraction_bits: scaling bits for smoothing and min_signal_remaining +)doc"); + +// TODO(b/286250473): change back name after name clash resolved +REGISTER_OP("SignalFilterBankLog") + .Attr("output_scale: int >= 1") + .Attr("input_correction_bits: int >= 0") + .Input("input: uint32") + .Output("output: int16") + .SetShapeFn([](InferenceContext* c) { + ShapeHandle out; + TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out)); + c->set_output(0, out); + return absl::OkStatus(); + }) + .Doc(R"doc( +Applies natural log to each element in input with pre-shift and post scaling. +The operation is roughly equivalent to: +output = min(Log(input << input_correction_bits) * output_scale, int16max) + If (input << input_correction_bits) is 1 or 0, the function returns 0 + +input: A 1-D array of filter bank channels. +output: A 1-D array of filter bank channels. + +output_scale: A scaler. +input_correction_bits: A scalar +)doc"); + +} // namespace signal +} // namespace tensorflow diff --git a/signal/tensorflow_core/ops/framer_op.cc b/signal/tensorflow_core/ops/framer_op.cc new file mode 100644 index 00000000000..bf7769962e8 --- /dev/null +++ b/signal/tensorflow_core/ops/framer_op.cc @@ -0,0 +1,91 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/
+
+#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/framework/shape_inference.h"
+
+using tensorflow::shape_inference::InferenceContext;
+using tensorflow::shape_inference::ShapeHandle;
+
+namespace tensorflow {
+namespace signal {
+
+absl::Status FramerShape(InferenceContext* c) {
+  ShapeHandle unused;
+  ShapeHandle in;
+  int frame_step, frame_size;
+
+  TF_RETURN_IF_ERROR(c->GetAttr("frame_step", &frame_step));
+  TF_RETURN_IF_ERROR(c->GetAttr("frame_size", &frame_size));
+  TF_RETURN_IF_ERROR(c->WithRankAtLeast(c->input(0), 1, &in));
+  int n_frames = c->Value(c->Dim(in, -1)) / frame_step;
+
+  shape_inference::DimensionHandle extra_dim = c->MakeDim({frame_size});
+  ShapeHandle extra_dim_shape =
+      c->MakeShape(std::vector<shape_inference::DimensionHandle>({extra_dim}));
+  ShapeHandle out;
+  TF_RETURN_IF_ERROR(c->Concatenate(in, extra_dim_shape, &out));
+  TF_RETURN_IF_ERROR(c->ReplaceDim(out, -2, c->MakeDim(n_frames), &out));
+  TF_RETURN_IF_ERROR(c->ReplaceDim(out, -1, c->MakeDim(frame_size), &out));
+  c->set_output(0, out);
+  c->set_output(1, c->Scalar());
+  return absl::OkStatus();
+}
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_OP("SignalFramer")
+    .Attr("frame_size: int >= 1")
+    .Attr("frame_step: int >= 1")
+    .Attr("prefill: bool")
+    .Input("input: int16")
+    .Output("output: int16")
+    .Output("output_valid: bool")
+    .SetShapeFn([](InferenceContext* c) { return FramerShape(c); })
+    .Doc(R"doc(
+Transform an input signal into a series of overlapping frames, each of
+size frame_size. The frame_step determines how many samples the framer
+progresses on each invocation. When the framer has enough samples to produce
+a frame, it writes it to the output tensor and sets output_valid to True.
+If the framer doesn't have enough samples to produce a frame, it doesn't
+initialize the contents of the output tensor and sets the output_valid boolean
+to False.
+For example, assuming an input signal of 10 samples,
+[1 2 3 4 5 6 7 8 9 0]
+and the framer is invoked 5 times, each time with two samples.
+For a framer with the configuration:
+frame_size=3
+frame_step=2
+The framer will produce the following output:
+input: [1, 2], output: [undefined, undefined, undefined], output_valid: False
+input: [3, 4], output: [1, 2, 3], output_valid: True
+input: [5, 6], output: [3, 4, 5], output_valid: True
+input: [7, 8], output: [5, 6, 7], output_valid: True
+input: [9, 0], output: [7, 8, 9], output_valid: True
+
+input: A 1-D input signal frame. Size must be a multiple of frame_step
+output: A 2-D output frame with innermost dimension frame_size
+output_valid: A boolean scalar.
+  If True, the output is a valid output frame.
+  If False, the output is an invalid output frame and its contents are
+  undefined.
+
+frame_size: The number of samples in each output frame
+frame_step: The number of input samples to progress the framer's 'current'
+sample on each invocation.
+prefill: If true, initialize the framer with (frame_size - frame_step) zeros.
+Can be used to guarantee a valid output starting with the first input.
+)doc");
+
+}  // namespace signal
+}  // namespace tensorflow
diff --git a/signal/tensorflow_core/ops/overlap_add_op.cc b/signal/tensorflow_core/ops/overlap_add_op.cc
new file mode 100644
index 00000000000..74ccb1fc5ad
--- /dev/null
+++ b/signal/tensorflow_core/ops/overlap_add_op.cc
@@ -0,0 +1,85 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/framework/shape_inference.h"
+
+using tensorflow::shape_inference::InferenceContext;
+using tensorflow::shape_inference::ShapeHandle;
+
+namespace tensorflow {
+namespace signal {
+
+absl::Status OverlapAddShape(InferenceContext* c) {
+  shape_inference::DimensionHandle unused;
+  ShapeHandle in;
+  ShapeHandle out;
+  int frame_step;
+
+  TF_RETURN_IF_ERROR(c->WithRankAtLeast(c->input(0), 2, &in));
+  TF_RETURN_IF_ERROR(c->GetAttr("frame_step", &frame_step));
+  TF_RETURN_IF_ERROR(c->Subshape(in, 0, -1, &out));
+  if (!c->ValueKnown(c->Dim(out, -1))) {
+    TF_RETURN_IF_ERROR(c->ReplaceDim(out, -1, c->UnknownDim(), &out));
+  } else {
+    int n_frames = c->Value(c->Dim(out, -1));
+    TF_RETURN_IF_ERROR(
+        c->ReplaceDim(out, -1, c->MakeDim(n_frames * frame_step), &out));
+  }
+  c->set_output(0, out);
+  return absl::OkStatus();
+}
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_OP("SignalOverlapAdd")
+    .Attr("T: {float, int16}")
+    .Attr("frame_step: int >= 1")
+    .Input("input: T")
+    .Output("output: T")
+    .SetShapeFn([](InferenceContext* c) { return OverlapAddShape(c); })
+    .Doc(R"doc(
+Transform an input signal made of overlapping frames of size frame_size into
+an output signal made of frames of size frame_step.
+The overlapping input frames are spaced frame_step apart in time.
+The op adds the overlapping frames into the output frame.
+For example, for a series of 5 input frames, with frame_size=3, frame_step=2:
+[1, 2, 3]
+[4, 5, 6]
+[7, 8, 9]
+[0, 1, 2]
+[3, 4, 5]
+The op will overlap the frames as follows:
+[1, 2, 3]
+      [4, 5, 6]
+            [7, 8, 9]
+                  [0, 1, 2]
+                        [3, 4, 5]
+Then add the samples that are aligned vertically to produce output frames of
+size frame_step=2:
+[1, 2]
+[7, 5]
+[13, 8]
+[9, 1]
+[5, 4]
+
+input: A [..., frames, frame_length] Tensor. Rank must be at least 2.
+output: A Tensor with shape [..., output_size] containing the overlap-added
+  frames of the signal's inner-most two dimensions.
+
+frame_step: The number of output samples to progress the 'current'
+sample on each invocation. Also the number of samples in each output frame.
+)doc");
+
+}  // namespace signal
+}  // namespace tensorflow
diff --git a/signal/tensorflow_core/ops/pcan_op.cc b/signal/tensorflow_core/ops/pcan_op.cc
new file mode 100644
index 00000000000..2a8490fbdbe
--- /dev/null
+++ b/signal/tensorflow_core/ops/pcan_op.cc
@@ -0,0 +1,60 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/framework/shape_inference.h"
+
+using ::tensorflow::shape_inference::InferenceContext;
+using ::tensorflow::shape_inference::ShapeHandle;
+
+namespace tensorflow {
+namespace signal {
+
+namespace {
+
+absl::Status PcanShape(InferenceContext* c) {
+  ShapeHandle out, lut;
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out));
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &out));
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(2), 1, &lut));
+
+  c->set_output(0, out);
+  return absl::OkStatus();
+}
+
+}  // namespace
+
+REGISTER_OP("SignalPCAN")
+    .Attr("snr_shift: int")
+    .Input("input: uint32")
+    .Input("noise_estimate: uint32")
+    .Input("gain_lut: int16")
+    .Output("output: uint32")
+    .SetShapeFn(PcanShape)
+    .Doc(R"doc(
+Applies per-channel amplitude-normalized (PCAN) auto gain control, using
+fixed-point computation. The dynamic range of the filterbank output is
+compressed by dividing it by a power of the noise estimate.
+
+input: A 1-D array of mel-spectrum subband filter bank outputs.
+noise_estimate: A 1-D array of mel-spectrum subband noise estimates.
+gain_lut: A 1-D lookup table for gain calculation.
+output: A 1-D array of processed subband filter bank outputs.
+snr_shift: Amount of right shift when calculating the SNR.
+)doc");
+
+}  // namespace signal
+}  // namespace tensorflow
diff --git a/signal/tensorflow_core/ops/stacker_op.cc b/signal/tensorflow_core/ops/stacker_op.cc
new file mode 100644
index 00000000000..5fd2acac66e
--- /dev/null
+++ b/signal/tensorflow_core/ops/stacker_op.cc
@@ -0,0 +1,91 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/core/framework/op.h"
+#include "tensorflow/core/framework/shape_inference.h"
+
+using tensorflow::shape_inference::InferenceContext;
+using tensorflow::shape_inference::ShapeHandle;
+
+namespace tensorflow {
+namespace signal {
+
+absl::Status StackerShape(InferenceContext* c) {
+  int num_channels, stacker_left_context, stacker_right_context;
+  TF_RETURN_IF_ERROR(c->GetAttr("num_channels", &num_channels));
+  TF_RETURN_IF_ERROR(
+      c->GetAttr("stacker_left_context", &stacker_left_context));
+  TF_RETURN_IF_ERROR(
+      c->GetAttr("stacker_right_context", &stacker_right_context));
+
+  int output_frames = stacker_left_context + 1 + stacker_right_context;
+
+  ShapeHandle out;
+  shape_inference::DimensionHandle dim_in;
+  TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &out));
+  TF_RETURN_IF_ERROR(
+      c->WithValue(c->Dim(c->input(0), 0), num_channels, &dim_in));
+  TF_RETURN_IF_ERROR(
+      c->ReplaceDim(out, 0, c->MakeDim(num_channels * output_frames), &out));
+  c->set_output(0, out);
+  c->set_output(1, c->Scalar());
+  return absl::OkStatus();
+}
+
+// TODO(b/286250473): change back name after name clash resolved
+REGISTER_OP("SignalStacker")
+    .Attr("num_channels: int >= 1")
+    .Attr("stacker_left_context: int >= 0")
+    .Attr("stacker_right_context: int >= 0")
+    .Attr("stacker_step: int >= 1")
+    .Input("input: int16")
+    .Output("output: int16")
+    .Output("output_valid: bool")
+    .SetShapeFn([](InferenceContext* c) { return StackerShape(c); })
+    .Doc(R"doc(
+Stack several input frames into a single stacked frame. On each invocation, it
+generates a stacked frame that contains:
+(stacker_left_context + 1 + stacker_right_context)
+consecutive unstacked frames. The stacked frame becomes the input to the
+neural network. The stacker then moves forward in steps of one or more input
+frames.
+For example, assuming a sequence of 10 input frames, where each input frame
+is itself a vector of size num_channels:
+[1 2 3 4 5 6 7 8 9 0], and the current frame is 9, the following configuration:
+stacker_left_context=0
+stacker_right_context=1
+stacker_step=2
+will produce 5 stacked frames:
+[1,2] [3,4] [5,6] [7,8] [9,0].
+
+input: A 1-D input frame of size num_channels
+output: A 1-D output frame of size
+  (stacker_left_context + 1 + stacker_right_context) * num_channels
+output_valid: A boolean scalar.
+  If true, the output is a valid output frame.
+  If false, the output is an invalid output frame of all zeros.
+  Once the stacker produces its first output frame, its output will
+  be valid every stacker_step input frames.
+num_channels: the number of filter bank channels in each stacker input frame
+stacker_left_context: The number of input frames to the left of the current
+  frame to include in the output frame.
+stacker_right_context: The number of input frames to the right of the current
+  frame to include in the output frame.
+stacker_step: The number of input frames to increment the stacker's 'current'
+  frame on each invocation.
+)doc"); + +} // namespace signal +} // namespace tensorflow diff --git a/signal/tensorflow_core/ops/window_op.cc b/signal/tensorflow_core/ops/window_op.cc index 24e51b2ff71..5a08da8348c 100644 --- a/signal/tensorflow_core/ops/window_op.cc +++ b/signal/tensorflow_core/ops/window_op.cc @@ -22,7 +22,7 @@ using tensorflow::shape_inference::ShapeHandle; namespace tensorflow { namespace signal { -Status WindowShape(InferenceContext* c) { +absl::Status WindowShape(InferenceContext* c) { ShapeHandle out; TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &out)); TF_RETURN_IF_ERROR(c->WithRankAtLeast(c->input(0), 1, &out)); @@ -33,7 +33,7 @@ Status WindowShape(InferenceContext* c) { TF_RETURN_IF_ERROR(c->WithValue(c->Dim(c->input(1), 0), InferenceContext::Value(dim_in), &dim_in)); c->set_output(0, out); - return OkStatus(); + return absl::OkStatus(); } // TODO(b/286250473): change back name to "Window" after name clash resolved @@ -55,4 +55,4 @@ to the output )doc"); } // namespace signal -} // namespace tensorflow \ No newline at end of file +} // namespace tensorflow diff --git a/signal/testdata/fft_test_data.cc b/signal/testdata/fft_test_data.cc index bb24938b9a6..335a343324f 100644 --- a/signal/testdata/fft_test_data.cc +++ b/signal/testdata/fft_test_data.cc @@ -1,4 +1,4 @@ -/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -571,4 +571,541 @@ const float kRfftFloatLength512Golden[] = { -5.16014671e+00, 6.53038979e+00, 5.11271954e-02, -3.40430737e-01, -2.98720551e+00, 0.00000000e+00}; +const int16_t kIrfftInt16Length512Input[] = { + 8073, 0, -69, -149, 45, -261, -83, -150, -101, -24, -45, 206, + 159, -84, -11, 1, 107, -34, -116, 253, 124, -132, -56, 112, + -213, -197, 45, 106, 14, -251, -66, 53, 99, -129, -101, -86, + 72, -147, -52, -11, -142, 128, 55, -192, 116, 94, 36, 97, + 242, -401, 89, -163, -177, 93, 194, 147, 49, -104, -198, -141, + -133, -126, 108, -198, 90, 101, -2, 142, 62, 37, 47, -224, + 47, 132, -98, -388, -230, 186, 36, 282, -152, -242, -112, -100, + 303, -22, 63, 9, 43, 108, 38, 106, 254, -275, 25, -44, + -2, 139, 236, -133, 51, -233, 25, -125, -92, 112, 48, 58, + -193, 121, 181, 146, 6, 114, 7, 181, 22, -336, 162, 218, + 56, -256, 3, -205, 325, -276, -166, -173, -25, -64, 74, 95, + 125, -40, 156, 122, 83, -118, -7, -109, -64, -8, 13, 180, + 267, 144, -131, 6, 11, -10, -165, -262, -162, 84, 85, -112, + 171, -160, -62, 133, -15, -58, 128, -198, 393, -6, 224, -297, + -9, 341, -73, 104, -68, -311, -172, -31, -290, -133, -56, -40, + -43, -93, -170, -43, 120, -63, -104, -94, 53, -243, 79, -73, + -8, -139, -56, 49, -103, -87, 188, -322, 78, -85, -50, -179, + -106, -34, -91, -50, 37, -96, -28, -181, -180, 61, 170, 195, + 92, -6, -17, 251, -106, 122, 173, 31, 35, 233, -8, 91, + 226, 139, -82, 215, 17, 146, -169, 60, -94, -119, -40, -69, + 15, 213, 28, -103, 206, -23, 13, 161, 112, -315, 139, -78, + -71, 3, 103, -1, 77, -213, -7, -78, 89, 112, -111, -221, + 58, 61, -71, -38, -84, 1, 34, 42, 167, -77, 96, 107, + 3, -11, 56, -133, -110, 191, -340, -77, 36, 101, 55, 49, + 113, 105, -211, -28, -47, 175, 42, -19, 57, 244, -98, 155, + 66, 165, -200, -25, 49, 96, -228, -52, 24, 154, 190, -225, + 104, 67, -38, 81, -2, 60, 32, 95, -42, -171, -228, 261, + 11, 77, 206, -185, -181, 99, 60, -62, -231, 96, 90, -85, + 284, 94, 90, 73, -49, -141, 100, -110, -83, -107, -127, -198, + 268, 116, -86, 228, 
-67, 48, 118, -106, 148, 48, -122, -78, + -20, -191, 121, 96, 158, 161, -51, 57, -267, 156, -74, -21, + 277, 194, -157, -141, -365, 160, -274, -79, 234, 29, 38, 278, + -181, 4, -54, 49, 207, -338, 235, 160, 5, 100, 169, 52, + -1, -58, -429, -26, -41, -54, 160, 189, -37, 2, -248, -113, + 120, 69, -32, -256, -269, 66, 48, 279, -225, -295, 147, 191, + 78, -9, 32, -109, 169, -72, 139, 93, -32, 9, -190, -83, + 389, 207, -192, -172, 311, -188, 124, -39, -90, -86, -28, -85, + 0, -138, 90, -71, 107, 133, -175, -32, -162, -135, -51, -123, + -156, 30, 295, -141, -196, 53, 101, -181, -142, 162, 159, -16, + 209, 9, 37, 171, 14, 122, -38, 63, 181, 66, 178, 118, + -73, -56, -436, 80, -69, 140, -68, -32, -105, 67, 98, -192, + 74, 5, 42, -135, 41, -190, 178, -173, 43, 84, -497, -43, + -340, 40, -289, 117, 14, -37, 117, -238, 125, 0}; + +const int16_t kIrfftInt16Length512Golden[] = { + 19, 26, 27, 23, 4, 13, 10, 26, 21, 26, 10, 3, 25, 28, 1, 5, 13, 26, 9, + 21, 7, 32, 11, 14, 19, 12, 29, 17, 6, 6, 3, 19, 15, 21, 20, 13, 20, 28, + 11, 1, 8, 10, 27, 27, 28, 28, 28, 4, 16, 10, 24, 6, 24, 8, 22, 10, 25, + 5, 25, 4, 25, 27, 19, 27, 25, 3, 23, 17, 20, 19, 16, 26, 21, 20, 7, 28, + 27, 9, 27, 13, 5, 3, 14, 20, 19, 21, 19, 10, 20, 19, 22, 25, 29, 28, 28, + 3, 11, 11, 2, 25, 12, 26, 6, 19, 17, 22, 9, 30, 9, 19, 16, 6, 16, 32, + 25, 17, 17, 2, 12, 17, 9, 5, 2, 9, 9, 22, 16, 21, 7, 24, 9, 3, 28, + 25, 1, 23, 3, 14, 12, 7, 4, 3, 1, 27, 14, 25, 21, 5, 27, 22, 15, 14, + 17, 16, 18, 16, 28, 24, 31, 3, 0, 18, 3, 8, 2, 20, 22, 24, 28, 14, 18, + 28, 8, 30, 9, 3, 5, 5, 19, 5, 27, 19, 22, 16, 3, 23, 24, 13, 17, 9, + 17, 7, 25, 29, 9, 29, 19, 22, 23, 29, 17, 12, 0, 7, 8, 30, 25, 7, 20, + 12, 2, 18, 21, 19, 1, 24, 5, 10, 11, 0, 26, 15, 27, 28, 27, 27, 10, 3, + 22, 8, 22, 9, 14, 25, 14, 27, 4, 8, 2, 20, 10, 18, 25, 3, 13, 4, 5, + 4, 19, 27, 9, 20, 20, 11, 6, 19, 29, 12, 21, 25, 16, 13, 28, 14, 27, 16, + 28, 25, 23, 28, 29, 5, 15, 26, 13, 17, 9, 18, 11, 18, 27, 8, 27, 17, 14, + 6, 25, 1, 5, 17, 2, 29, 6, 6, 17, 17, 18, 12, 9, 27, 6, 18, 22, 12, + 16, 24, 24, 8, 10, 4, 30, 30, -1, 19, 11, 24, 5, 19, 23, 7, 19, 21, 25, + 27, 26, 3, 16, 28, 27, 22, 1, 12, 3, 15, 1, 11, 31, 3, 14, 16, 7, 25, + 5, 8, 14, 15, 26, 23, 31, 8, 22, 19, 7, 27, 18, 5, 24, 10, 8, 1, 11, + 10, 31, 2, 27, 31, 14, 12, 24, 24, 23, 3, 27, 8, 28, 7, 3, 31, 4, 17, + 29, 16, 16, 25, 5, 6, -1, 13, 28, 13, 21, 21, 5, 20, 30, 5, 12, 17, 1, + 27, 22, 15, 29, 5, 17, 24, 3, 18, 1, 12, 6, 14, 20, 24, 17, 25, 12, 4, + 31, 2, 24, 2, 2, 2, 6, 16, 30, 18, 6, 8, 29, 17, 23, 17, 13, 5, 23, + 5, 14, 16, 17, 13, 12, 3, 29, 13, 17, 11, 19, 19, 27, 7, 27, 12, 25, 5, + 27, 6, 4, 5, 10, 6, 23, 17, 16, 2, 18, 18, 13, 11, 19, 2, 5, 24, 9, + 28, 2, 1, 3, 22, 23, 27, 6, 19, 6, 0, 16, 11, 30, 19, 14, 13, 28, 22, + 16, 2, 14, 30, 23, 1, 11, 22, 7, 28, 9, 13, 5, 14, 14, 3, 2, 19}; + +const int32_t kIrfftInt32Length512Input[] = { + 839741641, 0, -810680242, 314550700, -188375954, -646098985, + 661169493, 40565493, -727406866, -519447225, -601282802, 610281511, + -564231793, -658060998, 940197365, -556253639, 166244617, 855736360, + -352017590, -472086, -512664325, -29124321, 318599191, -1008807581, + -1045052452, 654632137, 262024233, -650480570, -328981456, -305628084, + -311905089, 643377399, -788794832, -697362958, 195418849, -450270913, + 450274221, -132831079, 954455789, 627087353, 443800117, 581556743, + 721497498, 312576274, 917731184, -119726646, -478746763, -890057048, + 1010851532, -192880372, 311180932, -504532787, 671816327, -655964390, + 791656581, -446266278, -666354330, 
-196814596, -825177916, 1066606457, + -396384718, 253036626, 1023033179, -536587536, 439268736, 56644895, + 490515745, -918121214, -399642466, 733537340, 550908995, -710689258, + -811550611, -1019377090, 812667220, -627899586, 36487869, -407910896, + -681856276, 176316030, -467027200, -109112451, 95733568, -196757477, + 640458216, 649718695, 947089218, -106860637, -608229423, -264877883, + 1070169157, -864364665, -940770876, 903330942, -858862934, -874506798, + -1024456516, 335288045, -584203446, -681487864, 174713039, 562868290, + -909240239, -303892838, 342417869, 720484621, -262340309, -36238617, + -143340078, 756047229, 977080186, -601657929, -929420577, 909799187, + -483272786, 569161464, 237577775, -170309809, -140492719, 235865688, + -2584061, 738538012, 899139379, -514114503, 92589619, -2186856, + -96582858, 406814228, 938450837, 483109475, -717748004, -97252289, + 456879462, -182572080, 901717931, 460303603, -478409198, 13082064, + 883057018, 191217882, -971905039, -633268629, -161171179, 40177094, + 311234817, 469147409, -35952715, -699453017, -491443043, -266920521, + -84504724, -218798599, -1051333289, -319487510, 464546366, 193917497, + 958715420, 445681947, -779515017, 870946655, 798123864, 375422608, + -49151236, 228702869, -586909787, 956237786, -817062805, 49259295, + 617384758, -568956241, -676306782, -236673390, 593537713, -846550375, + -725553331, -755214027, -960900289, -600629057, -992578727, 860277560, + 91143911, 325074385, -904098818, 523032277, 330057148, -200544851, + -534746367, 134080917, -704766790, -449694111, -871675761, -736092292, + -470441479, 649973251, 184524500, -940746981, -853401135, 292173632, + -351333019, 629238289, -923163881, 55704296, 67245329, 321644032, + -150272289, 1016711520, 685132142, 1036980538, 909268554, 341575246, + -679580804, -613922226, 219169932, 151510727, -31229903, -1023519836, + 488301536, -605189867, -847401037, -711134196, -389183573, 125069877, + 925139073, 740652197, 366040867, 846434837, 1027886779, -846326106, + -966541876, 460961009, -904504464, -667422902, 595819763, 548076246, + 658533296, 831784878, 1018563409, 657002928, 1018623967, -312804064, + -609590993, -632152641, 282353650, 23124988, 842050529, 331536663, + -74218221, 188032000, 912832168, -256904460, -834426583, 989582172, + -606592232, -1011932019, -317445477, -981620621, -509888019, 411955653, + -889686515, -450075112, -369653544, 124736966, 189435676, -107810364, + 881698145, -753413726, 421033515, 373856924, -782249185, 927665742, + 335053664, -291584084, 23011324, -561481341, -659027643, -157231698, + 325630443, -766882742, 445882611, -93371261, 839265875, -82182728, + -1066287105, -151409292, 934991293, 464520547, -125074754, -221788043, + -1053884159, -866025647, -369369912, 106829399, -245123315, 20663469, + -806806422, 552869387, -9053710, -491056522, -686021696, -1038270895, + -125006845, 266429965, 1050001488, -406006535, -365326480, -966938945, + 110681188, -1021078355, 254091936, 121108495, 1027070826, -165470882, + 393915655, 1045537081, 6669269, 919678730, 281326605, -680536894, + 98307878, -230289089, -1060913156, -784329119, -870588988, 886334526, + 185033052, 523745961, -524629656, -233118673, 1015175033, -229774783, + -220068882, 328818397, -728044888, 123650067, -866623422, -371919688, + -140437309, -573584939, -13729139, 477487249, -39276346, 574137018, + 875867641, -873697644, -651969810, -485128371, -438795254, 686955274, + -1025030795, -644894666, -1056623940, 937246325, 627094533, -88519660, + -293430525, 60332339, -783010005, 
693207812, -531488433, -439215652, + -652852140, 235512897, 416183897, 602327072, 217920384, -262296603, + 702674563, 323600358, -819073332, 660032637, 859607695, -736547587, + 899965281, 705990533, 700629412, 12532222, 867184362, -460331313, + -244276307, 56341729, 156969129, -322096971, -783707309, 276427177, + 1056930269, -765923003, -949113397, -163145401, -892934786, -442050707, + 137838318, 217340531, 285489000, -39973091, -267480349, -310094927, + -976524654, 895571227, -659112325, -228839076, -242345827, 876628479, + 511335719, -310742223, 1039081559, 512276681, 705312780, 578575064, + -598271655, 762867207, 912370057, 98648217, 503193745, 864713114, + -889332887, -439505808, -544266224, -751632504, 956453792, -692262886, + -91642341, -894188411, 876053978, 199193167, 604157121, -383140183, + -342950535, -943692226, 496727456, -237254976, -50419762, 947862958, + -996090040, 997591942, -413303925, -710352945, -579101581, 177736107, + 672137235, -891910735, -764102135, -963051915, -635556501, -946768907, + -796637162, 733678632, -630011293, 544470901, 736121924, 203332022, + -164480811, 414318422, 1023442078, -800490289, -85638979, 520154045, + -797352137, 1028499935, -405997043, -159904358, -697856807, 339622536, + -745174445, -250872450, -15376813, -81848980, 159907818, -604901490, + 449408277, -199909799, 747859167, -780656655, 495243813, -305468980, + 379086080, -339583326, -234468731, 715773698, -140870306, -297028384, + 796524920, -812985836, 805155911, 551254707, 390593322, -471644369, + -642074940, -688579515, 660561660, 961580918, -205220858, -585147906, + 538115796, 157278134, 585918447, 539709994, -683717882, -169044522, + -555579704, 358607056, -197892766, 903698583, -388508755, 462213245, + 874204812, -712527806, -757899104, 402482925, 786917228, -798119015, + -476098139, 552677178, 300423706, 0}; + +extern const int32_t kIrfftInt32Length512Golden[] = { + -34280174, 8022307, 19780085, 9662521, 43799799, 40100445, + -41211729, -11739138, 58486622, -22044189, -44097652, -79357304, + -72117147, 15744462, 13915851, 28166395, -15520437, 57070719, + -7305489, 27500351, 28306007, -86631061, 67560922, -28806729, + 39817325, 95989071, 897984, 3648535, -30375028, -37041021, + 88781593, -23843719, -57361166, 9011180, 1666315, -17359414, + -35138121, -28118417, 22293673, -5471988, -33560810, -66596308, + -37497959, 28878058, 43901353, 56802879, 14536133, 15417154, + 41299987, 67932041, 63865770, -12329743, -41649482, -55364898, + -9358326, 20864173, -35866369, -8116787, -7470926, 56999653, + 1043351, 49630166, -15330039, 5309138, -21684440, 1281894, + 44467472, -72973186, 50966857, 39752660, -14061082, -21241948, + 7736123, 49012458, -38765179, 32821490, -91494618, 9082358, + 80530972, 34091460, -80566348, -15556794, 44506352, 6554114, + 46780869, 26642366, -45175459, 34982841, -76742845, -63052738, + -12151434, 38025909, -41759399, -67582145, 27092456, -14738429, + 77549391, -13529057, 5800415, 6508249, 32040133, 20067661, + 6739354, -75492401, 49035483, 31075816, -59397318, -13887185, + -37275775, -28354015, 14549583, -60431580, 5713520, 5774464, + 48008598, -6982879, -37770644, 7652162, 36293574, 19866120, + 8689099, -6952246, 2529586, -915507, -45824611, 4908732, + 14144902, -66462468, 5204479, 21154766, -31563680, -63787764, + 19742867, 66808977, -10940667, 944126, 15969624, -24865057, + -16783943, 1317981, -33103183, -30551484, 34187135, 32679187, + 6747414, -18842426, -44799486, 43191326, 4603856, 28972218, + -2997445, -14440478, 55584797, -8586169, 12792009, 32599422, + 
-44154192, -11805483, -25614802, 101394256, 18403401, -9366015, + -2773809, 27905480, -15595130, 25214356, 18003057, -3796290, + 31428249, 35181313, 11896714, -6089797, 30417284, -51721564, + -23623684, -34906421, 18630637, 50253333, 38298996, -6032889, + 5008328, -77010982, -73036463, 14698362, 38033709, -40956695, + 101755464, 16586257, -66754901, 26886084, 37055264, 49973097, + 38835881, 26088109, 29315128, 23485156, -34186633, 27365092, + -4305884, -16934542, -52437371, 30437566, -40186223, 15784414, + 30092258, 12742434, 72775874, 37936704, 41446581, -47950761, + -40043570, -25891900, 22904631, -19131832, -67828932, -4494550, + 38761919, -51880330, 56229588, -42618597, -38294494, -67942872, + 47777144, -43953283, 38847552, -41095030, -78497360, 45354768, + 8883342, -56316876, -8391759, 12931546, 75368371, 4571884, + 24159982, 30656523, 3463399, 19137413, 58400688, -40423089, + 29534607, -21556205, -7211564, -9219271, 18973775, 45833761, + -7272585, -69115849, -18959925, -29283534, 45159378, 22606489, + 14732883, 4940580, 142802, -6593094, 3439368, -76696477, + -52430789, -89970783, 33615669, -48017657, 28880767, 32409006, + 49693514, -3426601, -9611900, -22412354, -5194037, -7028292, + -45522423, -56836163, 6162975, 11247381, 64776667, 64968977, + -16816131, -7031433, 13389996, -10859561, 20066035, 74140789, + -2595790, 8986611, -21203364, 38877279, 67752097, -42426157, + 41011742, 4298962, -24038197, 12191958, -39762301, -1825759, + 34953189, 62321632, 63258494, 79493646, 6154979, 72713374, + 5751929, -44326151, 27859895, -11950382, 36015249, 21090111, + -20606384, 732179, -7758986, 22196782, -9918264, 3238981, + -24966605, 24182609, -20739710, 13524349, -41468505, -8935286, + 116405313, 48070952, -14947048, 2481984, 19035184, 11853862, + -54909833, 15799738, -38720324, 113225658, 14960079, -1700298, + -10592205, 41679128, -55354432, 7485444, -22756394, 115271330, + -2697232, -81339788, 2763460, -7145026, 18287951, -1434620, + -1147363, 909555, -4734645, 7471510, 4204346, 47711049, + -34546617, -65808093, 48449712, -71939373, 10686041, -23107713, + 45728005, 11280853, -42519241, -397607, -14902756, -27840989, + 45312015, 26630552, 4078644, 60485257, 25279575, 66459123, + 40351793, 45709228, 21003916, 15565038, -54737790, 7196745, + 54625336, 39327144, 12209500, 51127752, 43333991, -25539938, + 29762930, 9115261, 38835591, 8559430, -28015932, -7811926, + -6752397, 18832848, -35364284, 25481402, 21986597, -25030237, + 43594329, -20793566, -7029448, 10848687, -86344081, -27910439, + -18685609, 52777722, -37713487, 51928065, 52133060, -46594790, + 32997528, 23793214, -35724240, -20062160, -15586165, 2879812, + -29894045, -84401847, -9628799, -35762356, 26153072, -16698747, + -55021068, -23994604, 43170951, -17771303, -52726245, 43538460, + -11882492, 3401144, -41231423, -26190758, -18363885, -47011479, + -27288266, 21942417, 47541674, 29862620, -3911136, 57336809, + -6790237, 72562751, 19857054, 3022121, 10589312, 2359890, + 7953397, -4637060, -29962427, -25230703, 45532452, -28510971, + -32665965, -42952368, 48927802, 4331065, -61462593, 45265693, + -38315556, 7308700, 39647849, 38045194, 53011682, -2051408, + 6745501, -433214, -15448637, 13422836, -10231156, 21668412, + 38837116, -100216222, 33292355, 9284671, 72602174, -81000636, + -45456135, -9439562, -56228566, 37764314, -9751637, 43380666, + -29511356, -19398853, 31603418, -68044438, 16906880, 8372405, + -25911348, -37697844, 37349908, -42433050, -19502746, 42059402, + 56373933, -917272, -17142837, 40227000, 8460368, 1281257, + 
-15998615, 40858839, -42157880, 24580073, -28704215, -2332549, + 3896928, 35426477, -66365675, -42157899, -871713, -4490567, + -1425633, -42870670, -32070326, -54410387, -1836127, 4115382, + 34466160, -13424784}; + +const float kIrfftFloatLength512Input[] = { + -30.237963, 0., -0.8870227, -16.70994, -16.188946, -7.837169, + -11.782056, -9.808029, -9.120637, -16.412685, 2.2228887, 6.0699377, + 0.8525604, -7.8473816, -3.728829, 11.6801, -5.047578, -8.966889, + -8.341611, -12.121087, -2.014905, -1.1014754, 5.350008, -6.0458236, + -7.680677, -0.7340172, -2.153996, -11.128706, 6.927712, 15.9215555, + -3.3578932, -2.3223252, 10.998084, 3.0689626, -7.26421, 1.7153641, + 5.2599764, 14.555982, -3.2174952, 3.122578, 3.540863, -0.07593922, + 9.771112, 12.02481, 2.2330835, -5.610757, 8.759598, -11.710923, + -0.933854, 3.487685, -7.4957643, 3.510248, -5.6749268, -8.372976, + 7.5864186, 8.883432, -3.238809, -2.7004075, 0.13749301, -0.21802016, + 1.5948814, 10.145043, -4.777312, -16.068268, 3.108986, -12.656892, + 8.464096, -1.3835825, 9.3863, 0.08924354, -5.8932724, -0.83083385, + -8.587713, -6.48424, -0.44568732, -5.9496527, 2.3838944, -0.09875517, + 10.801603, 3.9324691, 3.17346, 13.255524, 3.966665, -5.839878, + -3.3029196, 0.43704662, -3.6640527, 16.11661, 16.218079, 1.9934207, + 2.6740897, -3.9145885, 18.744654, -8.516122, 0.2746967, 16.56623, + -10.398819, 6.5646763, 13.364323, -18.21934, -12.42878, 10.884242, + 7.111711, 5.42487, -2.685089, -10.922701, -2.1506147, 3.0457373, + -21.245033, 12.652777, 7.518374, -3.6027377, -12.345767, -10.408322, + -0.45039272, -13.521866, 12.301129, -7.3925786, 3.6123064, -27.47004, + 0.06317914, -15.495375, 14.397725, -2.7989936, 1.4894583, -1.1805921, + 5.011081, -4.577404, -15.920375, -3.943976, -11.309841, -8.585617, + -2.277856, -8.83419, -15.505757, -0.9834521, -5.5173364, -16.236353, + -22.883762, 14.5666, -1.1154052, -4.2594385, 18.848545, 11.113267, + -7.9029818, -7.9614487, -8.73291, 9.273985, 2.7331648, -4.3628683, + 8.791882, -19.129395, 0.40214744, 12.770975, -0.38946837, -11.569212, + -3.261552, -10.017079, 12.063712, -8.527597, 0.8519457, 5.638602, + -0.72109884, 7.438573, -4.971101, -9.484696, 9.00311, 12.11723, + 2.113377, -0.46248603, -20.24738, 0.4090347, -0.6018856, 9.518283, + -17.040405, -2.8654642, 27.70458, -10.235462, -12.795219, -1.599763, + -0.7757828, -9.712138, -18.633558, -21.917002, -7.086175, 1.6413045, + -1.0107656, 9.344263, 12.549336, 8.784375, -4.48422, 3.1921742, + -9.637408, 5.126716, -9.416565, 0.34341943, 9.598896, -4.811465, + 7.468513, -5.018967, -10.314449, -12.268959, 16.794994, 3.6410797, + -30.120907, 2.0361273, 0.28818926, -14.163082, -4.502911, 4.268754, + -1.3731043, -8.439607, 11.7638235, 0.58137584, 8.78262, -4.1122336, + -14.586955, -0.6115846, 5.721377, 3.9643157, 3.9812503, -6.7656417, + -8.288948, 4.735382, 9.98983, 0.2826367, 6.6927032, -7.960859, + -1.972462, -3.5999422, 4.6700177, 7.5491548, -13.338051, 5.372325, + 4.1976047, -2.290505, -2.4447436, 2.6495414, -0.1646954, -14.162679, + -13.492944, -10.066321, 4.8790317, 3.0568714, 25.861683, 7.7827787, + -11.021672, -1.0221997, -12.1969385, 4.14404, 2.7906785, 1.2038546, + -12.202317, 2.6063862, 0.47595504, 3.833782, 5.459406, -1.1311115, + 2.1462953, -9.798795, 0.5092611, -1.7780461, 10.847251, 4.7227964, + 3.6416945, -6.267557, 1.8228995, 7.153564, -8.377606, -1.7705457, + -8.363819, 16.272038, 10.777103, 6.118911, 18.839758, -11.079164, + 8.046534, 6.3055205, -13.714934, 6.8472624, -0.5491477, 10.665407, + 9.522111, -2.2350516, -2.151148, 13.554076, 
-0.7024065, 24.318254, + 11.075853, -5.6310024, 1.792298, 0.8913419, -3.198663, -3.4226098, + 3.294247, 6.4590416, -3.6266904, -15.735353, 21.041748, -4.4707165, + -10.486909, 7.0881767, 2.338044, 8.936833, 20.289284, -15.834369, + -6.27957, -2.51897, 7.2220874, -13.127552, -13.336818, -1.617663, + 3.2618418, 14.497984, 10.053407, 7.88479, -10.618643, 15.016601, + -8.472935, 26.100227, -6.0125732, 1.8380505, 3.5892081, 2.7994452, + 7.5647445, -6.7973204, -2.226884, -11.008526, 10.130876, 20.727829, + -2.8447893, 1.2786244, 4.782782, 8.294365, -1.3854954, 0.8973222, + 5.240392, -2.2681866, 0.06863499, 7.980012, -5.467104, -13.789669, + 4.4801216, 0.5506569, -2.4283018, -1.9574064, 9.8034, -7.8538375, + -1.3211169, -6.0888257, -8.8751, 0.18552934, 0.32884428, 4.6578417, + -17.806314, 4.283429, 2.8675492, 5.5130286, 3.292803, 8.645252, + 2.487952, 6.951849, -4.8785276, 11.139262, 10.62336, 1.9926109, + -12.535589, -14.7336235, -1.9293493, 12.427337, 3.9960144, -0.23418498, + -0.2729544, 7.5376987, -9.694464, 3.6965284, -6.12123, -0.21720974, + 6.542181, 16.833439, -0.5240893, -3.9085722, 2.2004304, -19.755825, + 4.3837647, 1.4074985, 5.2781444, 3.5348754, -12.352237, -1.0562972, + -3.0493338, -11.641172, 19.914968, 3.4238126, 0.04836255, -15.394404, + -8.4243555, 13.519465, 24.039753, 1.8650069, 1.4495964, -12.118086, + -2.9114485, -3.8421676, 9.094493, 10.687737, 10.193869, 12.889424, + 1.5644586, 13.14207, 8.611605, -16.365925, 7.35201, -12.98773, + -4.5951047, -9.6856575, 7.538099, 18.039642, -4.5050077, -0.17203662, + 7.9955363, -3.8898702, -20.541094, -8.317646, 9.275174, -6.687724, + 13.356739, -8.14045, 2.304177, 4.748354, 1.789078, 2.6287572, + 5.2520447, -4.5307264, 0.7956459, -5.7576833, -8.241041, 1.2976674, + 0.9751119, -1.9878949, -0.28155094, 15.819839, -13.472967, -1.2520658, + -6.4520116, -8.267247, 14.64433, 1.4102879, -2.906583, 6.742353, + -5.8997235, -21.359184, 10.482071, -6.8071265, -4.318058, 0.61369, + 5.1886477, 0.20163095, -4.9314857, -6.725615, -6.1213217, -3.3096085, + -5.5520887, -21.814417, -0.45328552, -5.7754235, 9.9567795, -6.1869125, + -1.71723, 11.038221, -4.8242254, 7.3808165, 2.751095, 15.0145, + 3.2840607, 1.0826187, -2.1640933, -2.6346276, -12.111668, -1.9144021, + 13.436047, 9.108638, 14.142464, -5.0205646, 0.3507184, 7.511046, + 11.356239, -14.513865, 5.2194257, -0.9989464, -5.064783, -1.1407975, + 2.210063, -4.8482714, -11.814263, 6.240137, 23.587172, 12.757468, + -6.7067065, 20.231493, 8.244574, 13.610065, -0.93495035, -26.816973, + 7.8878307, -10.253602, -1.8279042, -7.402954, 4.1620474, 11.353777, + 14.372957, 8.991498, 4.456106, 0.}; + +const float kIrfftFloatLength512Golden[] = { + 0.29982752, -0.5864359, 0.8097371, 0.6964922, -0.56286454, + -0.65686744, -0.67477304, -0.68269503, -0.039257757, -0.8854418, + 0.8713772, -0.13664988, 0.07726803, -0.2444637, -0.31025574, + -0.40032005, -0.6156, -0.5220901, 0.67751265, 0.701784, + 0.12668014, 0.74386156, 0.20080262, -0.5919206, -0.11498103, + 0.84945625, -0.30784115, 0.47987163, 0.8023895, -0.83630186, + -0.27433932, -0.2790887, 0.26171675, -0.5266397, 0.69136655, + 0.38648325, 0.49699098, -0.15258673, -0.6069461, -0.96661866, + -0.71334004, 0.19891304, 0.02852476, 0.5386789, -0.7791545, + 0.4138639, -0.35693097, 0.49731624, 0.9427581, 0.03335625, + -0.4437948, 0.6297895, -0.08721806, 0.50569916, 0.18831411, + -0.08267924, -0.45396042, 0.8013768, -0.2406537, -0.29676312, + 0.21887437, -0.72747946, -0.2328647, 0.40174174, 0.91527927, + 0.28560296, -0.74036396, 0.27275884, 0.7930955, -0.59212196, + 
0.74610806, 0.6267803, 0.30080235, -0.35472846, 0.7505894, + -0.22452751, 0.41838533, -0.6592845, 0.92467225, -0.65168047, + 0.8869605, -0.33984357, 0.60530996, 0.01847987, -0.90627366, + -0.88029194, 0.23383015, 0.19796571, 0.61636496, -0.42383894, + -0.04506364, 0.0021984428, 0.9393822, 0.7032502, 0.06971514, + -0.21672109, -0.48139384, -0.07382688, 0.65374225, -0.7207461, + 0.801925, 0.07295595, 0.16882896, -0.3152274, 0.35198689, + 0.98689663, -0.6578901, -0.95485187, 0.88367236, -0.36234283, + 0.3330723, -0.79546535, -0.64958787, 0.23861185, -0.09088708, + 0.45268735, 0.30978698, 0.3372508, 0.29718608, -0.8574079, + -0.6692782, 0.98987687, -0.8064702, -0.2624596, 0.28159115, + 0.16167887, -0.8035746, -0.17733437, 0.30414638, -0.77497596, + 0.8938695, 0.8159308, 0.32125, -0.920919, -0.6462177, + 0.6036651, 0.15566209, -0.03488487, 0.73239374, 0.9670948, + 0.47241682, 0.21456543, -0.02800268, -0.22529918, -0.84989154, + -0.7086431, 0.33786222, 0.6490326, -0.8384799, 0.985114, + -0.4101424, 0.52377456, 0.5921588, -0.66899574, -0.11188361, + -0.9822213, -0.84196454, 0.14232291, -0.49214405, 0.8419358, + 0.8897716, -0.8406141, -0.8259247, -0.6908918, 0.8471942, + 0.06140751, -0.23359495, 0.7398038, 0.40779343, 0.6734735, + -0.6377568, -0.7646455, 0.5685402, 0.04187375, -0.73311013, + -0.00512114, -0.5814765, 0.5903504, 0.09171531, -0.60802567, + 0.61080986, -0.16245437, -0.25096723, 0.6190915, -0.7142277, + -0.1600728, 0.8851507, 0.22585803, 0.9604035, 0.32320845, + 0.14618382, -0.774544, 0.54813004, 0.69257534, -0.61747944, + -0.6833777, 0.8879458, -0.02535102, -0.21824743, -0.12814242, + 0.95946985, -0.31030154, 0.8825775, 0.7767198, -0.8839602, + -0.5129694, 0.02521834, -0.27255273, -0.55642307, 0.7032525, + -0.30640435, 0.5164734, -0.7563333, 0.42620268, -0.15225667, + 0.41989022, -0.6318542, -0.941096, -0.9745337, 0.9156874, + -0.3268398, 0.6331186, 0.6416864, -0.20460913, -0.76695883, + -0.38052237, -0.41319558, 0.798747, -0.6520171, -0.09593515, + -0.8481627, -0.83059865, -0.85499376, 0.80575806, 0.34889475, + -0.8980855, -0.6174965, -0.5200427, -0.6607752, 0.3307703, + -0.24054858, -0.5670337, -0.16636248, 0.23672634, 0.62885606, + 0.10166144, -0.53354776, -0.56768334, -0.7641135, 0.11442101, + 0.96315515, 0.37573636, -0.67078173, 0.70057774, -0.24168986, + -0.44688666, 0.16544406, -0.88838553, 0.76889133, -0.8447937, + 0.62266135, -0.48719126, 0.32826036, -0.02810439, -0.05936452, + -0.8128048, -0.3129079, -0.34860566, 0.6167735, -0.5991243, + -0.3610302, -0.08637966, -0.16619909, 0.63647974, -0.09442633, + -0.5581384, -0.9811046, -0.904086, 0.61326873, 0.7258482, + 0.7736206, 0.8774062, -0.5009506, 0.1289413, 0.16396654, + 0.6514738, -0.28721195, 0.2236729, 0.45609203, 0.8358965, + 0.61307096, -0.84580654, 0.01509422, 0.7686187, -0.3121697, + 0.19145107, -0.55658674, -0.829964, -0.97910535, 0.13230643, + -0.07734007, -0.9984317, 0.81237555, 0.9640697, -0.840312, + -0.540831, -0.5787534, -0.96849644, 0.3838457, -0.12022042, + 0.05475989, -0.8009591, -0.94171214, -0.9488497, 0.9111394, + 0.7597446, 0.16971052, 0.45299768, -0.45736474, 0.5264327, + 0.21037662, -0.6699108, -0.9796896, 0.16691494, 0.45532888, + 0.01740798, 0.04102591, 0.03489509, -0.72302973, -0.52516407, + -0.47465584, -0.63662803, -0.8904173, 0.57405806, 0.23835278, + 0.360155, -0.55155194, 0.4478792, -0.0044243336, 0.46832967, + 0.08410877, -0.8455095, -0.7263971, -0.3361347, 0.97136617, + -0.44052413, -0.5978838, -0.25358623, -0.79874134, 0.23619038, + -0.6827136, -0.29381457, -0.8461834, -0.12056583, -0.13932818, 
+ 0.61767286, 0.8580666, 0.26251537, 0.5273094, -0.48985696, + 0.88013256, -0.18916833, -0.7348702, 0.7566979, -0.7458619, + 0.58176005, -0.78658605, 0.08736417, 0.7823663, -0.1001814, + -0.22595143, -0.19375435, -0.83492655, -0.929291, 0.12090784, + -0.5844915, 0.13852149, 0.02745366, 0.92406005, -0.71191204, + -0.8899623, 0.24317105, 0.32496578, -0.98552567, -0.04174611, + -0.35492605, 0.07938504, -0.8233126, 0.3008612, 0.71700615, + 0.8915528, -0.54795, -0.16213202, -0.829112, -0.8182144, + 0.6964699, 0.011892006, 0.03424544, -0.42607868, -0.46533448, + -0.20733991, 0.1982342, 0.6151171, 0.92954826, 0.37436712, + -0.15674892, 0.09278384, -0.23512769, 0.3352527, -0.0011435747, + -0.6721728, 0.8579683, -0.9896936, 0.42651695, -0.3006128, + 0.6946872, -0.48047653, -0.6129973, 0.9572243, 0.7357151, + 0.43445703, 0.7255762, -0.8190884, 0.9089385, 0.09615627, + -0.16862187, -0.07288834, -0.49054247, -0.9336225, 0.01190421, + 0.09707063, 0.45260486, -0.8358802, 0.79366636, -0.26704386, + 0.95498896, -0.02372235, 0.319471, -0.976424, -0.79633594, + 0.66040134, -0.54345655, -0.66035926, -0.17302102, 0.37924033, + 0.1720984, 0.56285584, -0.8860738, -0.5180471, -0.0959073, + 0.25083944, -0.82610506, -0.29493344, 0.9925902, -0.5277084, + -0.4763626, -0.47927397, -0.36397856, -0.8729958, -0.52953005, + 0.00983614, 0.01398262, 0.9101522, -0.88808525, -0.664688, + -0.73161906, -0.0665354, -0.6483232, 0.63685423, 0.2964374, + 0.09099909, -0.23442702, 0.12380677, -0.5504641, 0.16258523, + -0.541228, -0.3874128, -0.6291951, 0.58925194, 0.8938799, + -0.3282175, 0.02617104, -0.6951823, -0.5767481, -0.48668635, + 0.08838533, 0.7840834, -0.5293897, 0.18453985, 0.26299042, + -0.8357302, -0.97141516, -0.29869047, -0.8049868, -0.6597811, + -0.6783894, -0.742311, 0.35633904, -0.18960063, 0.37161434, + -0.21649535, -0.639291, -0.38328207, -0.88131666, -0.1099996, + -0.20276618, -0.59563124, -0.45079857, 0.8347442, -0.60460365, + -0.5660544, -0.82730365}; + +const int16_t kFftAutoScaleLength512Input[] = { + 27728, 28180, -23037, -999, 7627, 19097, 4809, -28251, 25421, + 21584, 5775, -514, 31389, -13221, 28700, 3928, -32678, 9413, + 21553, -11903, 19367, 18168, 3923, 13968, -19808, -8946, 9707, + 4996, -2033, 26133, 8465, -29982, 145, 4190, -27992, 18817, + -7428, 24828, 23734, -1823, -24872, -14335, 12368, 24986, 26849, + 14131, -21011, 26386, 22377, -11729, 18306, -9073, 14716, -14157, + 17728, -6062, 21593, -5285, -20960, -5598, -10791, 2156, -1026, + 12427, 8910, 12726, 26340, 20350, -28668, 5619, 29298, 10801, + 11427, -5965, 6491, -21680, 1107, 9373, 14507, -16623, -17628, + -3940, 3115, -28103, 8256, 23231, 13811, -27905, 20126, 7647, + -31981, 4270, -2447, -24855, -15303, 29773, 22757, 13823, -2623, + -18288, -29900, -32254, 2751, 21161, 13991, -4786, 2118, -5267, + -13969, -7855, 17443, -4976, 26066, 29508, -22460, -27974, 28652, + -24058, 25214, 18340, 15171, 3101, 28870, 11533, -3481, 3047, + 6802, 23452, 15039, -32103, -29734, -9678, 11514, 29903, -30983, + -30655, -19125, 18490, 14868, 18846, -3849, 18431, 31245, 12547, + 24499, -15543, 8004, 30226, -17948, -27011, -10040, -21747, -4077, + 29493, 25322, -10680, -23061, 16320, -262, 19493, -29407, -30065, + -29158, -14538, 17318, 5900, 22016, -8523, -3482, 8226, 6269, + -9888, -18543, 11548, 32126, 716, 12042, -2344, 28403, 16777, + 18536, 19821, -13224, -32000, 18846, 29756, 15152, -25000, -2854, + 2894, 5817, -20644, -14967, -22120, 19426, 24206, -10895, 9359, + 22642, 15413, -544, 11456, 29289, -29467, -7007, 1946, -12714, + 29003, -9754, -11247, 
-8743, -26529, 12775, 17819, -4068, 10532, + 7749, -15982, -3309, -15015, -14728, -5061, 4456, -23045, 4875, + 18613, -26598, 20189, -2459, -1949, -27655, 10714, -25641, -31826, + -14901, 26440, -29853, -21380, -9872, 7330, -24977, 28143, 22335, + -24296, 1775, 19950, -31505, -23314, -7708, 8747, -14274, 30659, + -31703, -16215, -7103, -7876, 25772, 773, 28262, 16517, 26455, + -15645, 18958, -1342, 30649, 6825, 8075, -13666, 16635, 31946, + -22845, -27888, 11845, 7597, -20615, -27995, 11419, -2343, -6894, + 7419, 30308, 15120, 24538, -25659, -26220, 25970, -11688, 26728, + -27865, -8426, 24771, 30570, 27041, -20003, 13894, 16227, -32113, + -4925, -7249, -27491, 743, 11549, -18304, 6082, -27239, 22277, + -914, 5237, -30772, -6916, 15278, -28297, 9274, 14611, 23071, + -9831, 1675, -31961, -17243, 16597, -21968, 12045, -16939, 9563, + 7989, 1251, 22767, 28480, 31961, 31297, 30398, -2645, 1837, + -15697, -19268, -15887, 29292, -10900, 812, -10870, -2759, 20450, + -20981, 28539, -30402, -17263, -19693, -32710, 6172, -30003, 27373, + 24939, -28543, -8928, 1198, -326, -3504, -23640, 24945, -24141, + 17787, 20449, -7981, -10926, -26171, -20678, 4107, -32513, 8184, + -12479, 16854, -18552, -21534, -8804, -30278, 18573, -16409, 14746, + -17123, 24656, 25243, 4516, 19254, -2165, 24230, -7639, -19385, + -15505, -15386, 21841, -12507, 9168, 4469, -2649, 21013, 23788, + 21282, 27991, -31716, 22753}; + +const int16_t kFftAutoScaleLength512Golden[] = { + 27728, 28180, -23037, -999, 7627, 19097, 4809, -28251, 25421, + 21584, 5775, -514, 31389, -13221, 28700, 3928, -32678, 9413, + 21553, -11903, 19367, 18168, 3923, 13968, -19808, -8946, 9707, + 4996, -2033, 26133, 8465, -29982, 145, 4190, -27992, 18817, + -7428, 24828, 23734, -1823, -24872, -14335, 12368, 24986, 26849, + 14131, -21011, 26386, 22377, -11729, 18306, -9073, 14716, -14157, + 17728, -6062, 21593, -5285, -20960, -5598, -10791, 2156, -1026, + 12427, 8910, 12726, 26340, 20350, -28668, 5619, 29298, 10801, + 11427, -5965, 6491, -21680, 1107, 9373, 14507, -16623, -17628, + -3940, 3115, -28103, 8256, 23231, 13811, -27905, 20126, 7647, + -31981, 4270, -2447, -24855, -15303, 29773, 22757, 13823, -2623, + -18288, -29900, -32254, 2751, 21161, 13991, -4786, 2118, -5267, + -13969, -7855, 17443, -4976, 26066, 29508, -22460, -27974, 28652, + -24058, 25214, 18340, 15171, 3101, 28870, 11533, -3481, 3047, + 6802, 23452, 15039, -32103, -29734, -9678, 11514, 29903, -30983, + -30655, -19125, 18490, 14868, 18846, -3849, 18431, 31245, 12547, + 24499, -15543, 8004, 30226, -17948, -27011, -10040, -21747, -4077, + 29493, 25322, -10680, -23061, 16320, -262, 19493, -29407, -30065, + -29158, -14538, 17318, 5900, 22016, -8523, -3482, 8226, 6269, + -9888, -18543, 11548, 32126, 716, 12042, -2344, 28403, 16777, + 18536, 19821, -13224, -32000, 18846, 29756, 15152, -25000, -2854, + 2894, 5817, -20644, -14967, -22120, 19426, 24206, -10895, 9359, + 22642, 15413, -544, 11456, 29289, -29467, -7007, 1946, -12714, + 29003, -9754, -11247, -8743, -26529, 12775, 17819, -4068, 10532, + 7749, -15982, -3309, -15015, -14728, -5061, 4456, -23045, 4875, + 18613, -26598, 20189, -2459, -1949, -27655, 10714, -25641, -31826, + -14901, 26440, -29853, -21380, -9872, 7330, -24977, 28143, 22335, + -24296, 1775, 19950, -31505, -23314, -7708, 8747, -14274, 30659, + -31703, -16215, -7103, -7876, 25772, 773, 28262, 16517, 26455, + -15645, 18958, -1342, 30649, 6825, 8075, -13666, 16635, 31946, + -22845, -27888, 11845, 7597, -20615, -27995, 11419, -2343, -6894, + 7419, 30308, 15120, 24538, -25659, -26220, 
25970, -11688, 26728, + -27865, -8426, 24771, 30570, 27041, -20003, 13894, 16227, -32113, + -4925, -7249, -27491, 743, 11549, -18304, 6082, -27239, 22277, + -914, 5237, -30772, -6916, 15278, -28297, 9274, 14611, 23071, + -9831, 1675, -31961, -17243, 16597, -21968, 12045, -16939, 9563, + 7989, 1251, 22767, 28480, 31961, 31297, 30398, -2645, 1837, + -15697, -19268, -15887, 29292, -10900, 812, -10870, -2759, 20450, + -20981, 28539, -30402, -17263, -19693, -32710, 6172, -30003, 27373, + 24939, -28543, -8928, 1198, -326, -3504, -23640, 24945, -24141, + 17787, 20449, -7981, -10926, -26171, -20678, 4107, -32513, 8184, + -12479, 16854, -18552, -21534, -8804, -30278, 18573, -16409, 14746, + -17123, 24656, 25243, 4516, 19254, -2165, 24230, -7639, -19385, + -15505, -15386, 21841, -12507, 9168, 4469, -2649, 21013, 23788, + 21282, 27991, -31716, 22753}; + } // namespace tflite diff --git a/signal/testdata/fft_test_data.h b/signal/testdata/fft_test_data.h index 5d720c20515..35a0b46aa14 100644 --- a/signal/testdata/fft_test_data.h +++ b/signal/testdata/fft_test_data.h @@ -1,4 +1,4 @@ -/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -31,6 +31,18 @@ extern const int32_t kRfftInt32Length512Golden[]; extern const float kRfftFloatLength512Input[]; extern const float kRfftFloatLength512Golden[]; +extern const int16_t kIrfftInt16Length512Input[]; +extern const int16_t kIrfftInt16Length512Golden[]; + +extern const int32_t kIrfftInt32Length512Input[]; +extern const int32_t kIrfftInt32Length512Golden[]; + +extern const float kIrfftFloatLength512Input[]; +extern const float kIrfftFloatLength512Golden[]; + +extern const int16_t kFftAutoScaleLength512Input[]; +extern const int16_t kFftAutoScaleLength512Golden[]; + } // namespace tflite #endif // SIGNAL_TESTDATA_FFT_TEST_DATA_H_ diff --git a/tensorflow/extra_rules.bzl b/tensorflow/extra_rules.bzl index 9e20edadefe..4a111dc9c53 100644 --- a/tensorflow/extra_rules.bzl +++ b/tensorflow/extra_rules.bzl @@ -17,6 +17,10 @@ def xtensa_fusion_f1_config(): """Config setting for all Fusion F1 based cores.""" return "//tensorflow/lite/micro/kernels:xtensa_fusion_f1_default" +def xtensa_hifi_3_config(): + """Config setting for all HiFi 3 based cores.""" + return "//tensorflow/lite/micro/kernels:xtensa_hifi_3_default" + def xtensa_hifi_3z_config(): """Config setting for all HiFi 3z based cores.""" return "//tensorflow/lite/micro/kernels:xtensa_hifi_3z_default" diff --git a/tensorflow/lite/build_def.bzl b/tensorflow/lite/build_def.bzl index e8fc49c9114..e65ec230bdf 100644 --- a/tensorflow/lite/build_def.bzl +++ b/tensorflow/lite/build_def.bzl @@ -3,6 +3,7 @@ def tflite_copts(): copts = [ "-DFARMHASH_NO_CXX_STRING", "-Wno-sign-compare", + "-Wno-unused-parameter", "-fno-exceptions", # Exceptions are unused in TFLite. 
] return copts diff --git a/tensorflow/lite/builtin_ops.h b/tensorflow/lite/builtin_ops.h index f9871add248..5dba0f6e2e8 100644 --- a/tensorflow/lite/builtin_ops.h +++ b/tensorflow/lite/builtin_ops.h @@ -189,6 +189,51 @@ typedef enum { kTfLiteBuiltinBitcast = 159, kTfLiteBuiltinBitwiseXor = 160, kTfLiteBuiltinRightShift = 161, + kTfLiteBuiltinStablehloLogistic = 162, + kTfLiteBuiltinStablehloAdd = 163, + kTfLiteBuiltinStablehloDivide = 164, + kTfLiteBuiltinStablehloMultiply = 165, + kTfLiteBuiltinStablehloMaximum = 166, + kTfLiteBuiltinStablehloReshape = 167, + kTfLiteBuiltinStablehloClamp = 168, + kTfLiteBuiltinStablehloConcatenate = 169, + kTfLiteBuiltinStablehloBroadcastInDim = 170, + kTfLiteBuiltinStablehloConvolution = 171, + kTfLiteBuiltinStablehloSlice = 172, + kTfLiteBuiltinStablehloCustomCall = 173, + kTfLiteBuiltinStablehloReduce = 174, + kTfLiteBuiltinStablehloAbs = 175, + kTfLiteBuiltinStablehloAnd = 176, + kTfLiteBuiltinStablehloCosine = 177, + kTfLiteBuiltinStablehloExponential = 178, + kTfLiteBuiltinStablehloFloor = 179, + kTfLiteBuiltinStablehloLog = 180, + kTfLiteBuiltinStablehloMinimum = 181, + kTfLiteBuiltinStablehloNegate = 182, + kTfLiteBuiltinStablehloOr = 183, + kTfLiteBuiltinStablehloPower = 184, + kTfLiteBuiltinStablehloRemainder = 185, + kTfLiteBuiltinStablehloRsqrt = 186, + kTfLiteBuiltinStablehloSelect = 187, + kTfLiteBuiltinStablehloSubtract = 188, + kTfLiteBuiltinStablehloTanh = 189, + kTfLiteBuiltinStablehloScatter = 190, + kTfLiteBuiltinStablehloCompare = 191, + kTfLiteBuiltinStablehloConvert = 192, + kTfLiteBuiltinStablehloDynamicSlice = 193, + kTfLiteBuiltinStablehloDynamicUpdateSlice = 194, + kTfLiteBuiltinStablehloPad = 195, + kTfLiteBuiltinStablehloIota = 196, + kTfLiteBuiltinStablehloDotGeneral = 197, + kTfLiteBuiltinStablehloReduceWindow = 198, + kTfLiteBuiltinStablehloSort = 199, + kTfLiteBuiltinStablehloWhile = 200, + kTfLiteBuiltinStablehloGather = 201, + kTfLiteBuiltinStablehloTranspose = 202, + kTfLiteBuiltinDilate = 203, + kTfLiteBuiltinStablehloRngBitGenerator = 204, + kTfLiteBuiltinReduceWindow = 205, + kTfLiteBuiltinStablehloComposite = 206, } TfLiteBuiltinOperator; #ifdef __cplusplus diff --git a/tensorflow/lite/c/builtin_op_data.h b/tensorflow/lite/c/builtin_op_data.h index 7628e5ad1f9..0606819288b 100644 --- a/tensorflow/lite/c/builtin_op_data.h +++ b/tensorflow/lite/c/builtin_op_data.h @@ -15,6 +15,9 @@ limitations under the License. #ifndef TENSORFLOW_LITE_C_BUILTIN_OP_DATA_H_ #define TENSORFLOW_LITE_C_BUILTIN_OP_DATA_H_ +/// For documentation, see +/// third_party/tensorflow/lite/core/c/builtin_op_data.h + #include "tensorflow/lite/core/c/builtin_op_data.h" #endif // TENSORFLOW_LITE_C_BUILTIN_OP_DATA_H_ diff --git a/tensorflow/lite/c/c_api_types.h b/tensorflow/lite/c/c_api_types.h index cdbf1fd3237..05cda07e759 100644 --- a/tensorflow/lite/c/c_api_types.h +++ b/tensorflow/lite/c/c_api_types.h @@ -15,6 +15,12 @@ limitations under the License. #ifndef TENSORFLOW_LITE_C_C_API_TYPES_H_ #define TENSORFLOW_LITE_C_C_API_TYPES_H_ +/// \file +/// +/// C API types for TensorFlow Lite. +/// +/// For documentation, see tensorflow/lite/core/c/c_api_types.h + #include "tensorflow/lite/core/c/c_api_types.h" #endif // TENSORFLOW_LITE_C_C_API_TYPES_H_ diff --git a/tensorflow/lite/c/common.h b/tensorflow/lite/c/common.h index e3e8001cbd7..8a8b51331c4 100644 --- a/tensorflow/lite/c/common.h +++ b/tensorflow/lite/c/common.h @@ -13,25 +13,17 @@ See the License for the specific language governing permissions and limitations under the License. 
==============================================================================*/ -// This file defines common C types and APIs for implementing operations, -// delegates and other constructs in TensorFlow Lite. The actual operations and -// delegates can be defined using C++, but the interface between the interpreter -// and the operations are C. -// -// Summary of abstractions -// TF_LITE_ENSURE - Self-sufficient error checking -// TfLiteStatus - Status reporting -// TfLiteIntArray - stores tensor shapes (dims), -// TfLiteContext - allows an op to access the tensors -// TfLiteTensor - tensor (a multidimensional array) -// TfLiteNode - a single node or operation -// TfLiteRegistration - the implementation of a conceptual operation. -// TfLiteDelegate - allows delegation of nodes to alternative backends. -// -// Some abstractions in this file are created and managed by Interpreter. -// -// NOTE: The order of values in these structs are "semi-ABI stable". New values -// should be added only to the end of structs and never reordered. +/// \file +/// +/// This file defines common C types and APIs for implementing operations, +/// delegates and other constructs in TensorFlow Lite. The actual operations and +/// delegates can be defined using C++, but the interface between the +/// interpreter and the operations are C. +/// +/// For documentation, see tensorflow/lite/core/c/common.h. +/// +/// See also c_api_opaque.h which has more ABI-stable variants of some of these +/// APIs. #ifndef TENSORFLOW_LITE_C_COMMON_H_ #define TENSORFLOW_LITE_C_COMMON_H_ diff --git a/tensorflow/lite/core/api/BUILD b/tensorflow/lite/core/api/BUILD index ce6cd1ae395..5457f2ee7f8 100644 --- a/tensorflow/lite/core/api/BUILD +++ b/tensorflow/lite/core/api/BUILD @@ -15,14 +15,12 @@ cc_library( hdrs = [ "error_reporter.h", "flatbuffer_conversions.h", - "op_resolver.h", "tensor_utils.h", ], copts = tflite_copts() + micro_copts(), visibility = ["//visibility:public"], deps = [ ":error_reporter", - ":op_resolver", "//tensorflow/lite/c:common", "//tensorflow/lite/kernels/internal:compatibility", "//tensorflow/lite/schema:schema_fbs", @@ -31,29 +29,10 @@ cc_library( ], ) -# We define separate targets for "op_resolver" and "error_reporter", -# even though those headers are also exported by the "api" target, -# so that targets which only want to depend on these small abstract base -# class modules can express more fine-grained dependencies without -# pulling in tensor_utils and flatbuffer_conversions. - -cc_library( - name = "op_resolver", - srcs = ["op_resolver.cc"], - hdrs = ["op_resolver.h"], - copts = tflite_copts() + micro_copts(), - visibility = [ - "//visibility:public", - ], - deps = [ - ":error_reporter", - "//tensorflow/lite/c:common", - "//tensorflow/lite/schema:schema_fbs", - "//tensorflow/lite/schema:schema_utils", - "@flatbuffers//:runtime_cc", - ], -) - +# We define separate targets for "error_reporter", even though those headers are +# also exported by the "api" target, so that targets which only want to depend +# on these small abstract base class modules can express more fine-grained +# dependencies without pulling in tensor_utils and flatbuffer_conversions. 
cc_library( name = "error_reporter", srcs = ["error_reporter.cc"], diff --git a/tensorflow/lite/core/api/error_reporter.h b/tensorflow/lite/core/api/error_reporter.h index 99ab8cf3657..1e0ef7dc913 100644 --- a/tensorflow/lite/core/api/error_reporter.h +++ b/tensorflow/lite/core/api/error_reporter.h @@ -61,9 +61,9 @@ class ErrorReporter { // reduce binary size, define TF_LITE_STRIP_ERROR_STRINGS when compiling and // every call will be stubbed out, taking no memory. #ifndef TF_LITE_STRIP_ERROR_STRINGS -#define TF_LITE_REPORT_ERROR(reporter, ...) \ - do { \ - static_cast(reporter)->Report(__VA_ARGS__); \ +#define TF_LITE_REPORT_ERROR(reporter, ...) \ + do { \ + static_cast<::tflite::ErrorReporter*>(reporter)->Report(__VA_ARGS__); \ } while (false) #else // TF_LITE_STRIP_ERROR_STRINGS #define TF_LITE_REPORT_ERROR(reporter, ...) diff --git a/tensorflow/lite/core/api/flatbuffer_conversions.cc b/tensorflow/lite/core/api/flatbuffer_conversions.cc index 9f955df1a6d..35268103be8 100644 --- a/tensorflow/lite/core/api/flatbuffer_conversions.cc +++ b/tensorflow/lite/core/api/flatbuffer_conversions.cc @@ -15,11 +15,13 @@ limitations under the License. #include "tensorflow/lite/core/api/flatbuffer_conversions.h" +#include #include #include #include #include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "flatbuffers/vector.h" // from @flatbuffers #include "tensorflow/lite/core/api/error_reporter.h" #include "tensorflow/lite/core/c/builtin_op_data.h" #include "tensorflow/lite/core/c/common.h" @@ -76,9 +78,10 @@ void CheckParsePointerParams(const Operator* op, ErrorReporter* error_reporter, // Copies the contents from the flatbuffer int vector `flatbuffer` into the // int array `buffer`. `flat_vector` and `buffer` represent the same // configuration operation for a given operation. 
diff --git a/tensorflow/lite/core/api/flatbuffer_conversions.cc b/tensorflow/lite/core/api/flatbuffer_conversions.cc index 9f955df1a6d..35268103be8 100644 --- a/tensorflow/lite/core/api/flatbuffer_conversions.cc +++ b/tensorflow/lite/core/api/flatbuffer_conversions.cc @@ -15,11 +15,13 @@ limitations under the License. #include "tensorflow/lite/core/api/flatbuffer_conversions.h" +#include <algorithm> #include <cstddef> #include <cstdint> #include <memory> #include "flatbuffers/flatbuffers.h" // from @flatbuffers +#include "flatbuffers/vector.h" // from @flatbuffers #include "tensorflow/lite/core/api/error_reporter.h" #include "tensorflow/lite/core/c/builtin_op_data.h" #include "tensorflow/lite/core/c/common.h" @@ -76,9 +78,10 @@ void CheckParsePointerParams(const Operator* op, ErrorReporter* error_reporter, // Copies the contents from the flatbuffer int vector `flatbuffer` into the // int array `buffer`. `flat_vector` and `buffer` represent the same // configuration operation for a given operation. -TfLiteStatus FlatBufferIntVectorToArray( - int max_size_of_buffer, const flatbuffers::Vector<int32_t>* flat_vector, - int* buffer, ErrorReporter* error_reporter, const char* op_name) { +template <typename DataType> +static TfLiteStatus FlatBufferIntVectorToArray( + int max_size_of_buffer, const flatbuffers::Vector<DataType>* flat_vector, + DataType* buffer, ErrorReporter* error_reporter, const char* op_name) { if (!flat_vector) { TF_LITE_REPORT_ERROR(error_reporter, "Input array not provided for operation '%s'.\n", @@ -86,7 +89,7 @@ TfLiteStatus FlatBufferIntVectorToArray( return kTfLiteError; } else { size_t num_dimensions = flat_vector->size(); - if (num_dimensions > max_size_of_buffer / sizeof(int)) { + if (num_dimensions > max_size_of_buffer / sizeof(DataType)) { TF_LITE_REPORT_ERROR( error_reporter, "Found too many dimensions in the input array of operation '%s'.\n", @@ -142,6 +145,18 @@ TfLiteMirrorPaddingMode ConvertMirrorPadding(MirrorPadMode padding) { return kTfLiteMirrorPaddingUnknown; } +TfLiteRngAlgorithm ConvertRngAlgorithm(RngAlgorithm algorithm) { + switch (algorithm) { + case RngAlgorithm_THREEFRY: + return kTfLiteRngAlgorithmThreefry; + case RngAlgorithm_PHILOX: + return kTfLiteRngAlgorithmPhilox; + case RngAlgorithm_DEFAULT: + return kTfLiteRngAlgorithmDefault; + } + return kTfLiteRngAlgorithmUnknown; +} + #ifndef TF_LITE_STATIC_MEMORY TfLiteStatus ParseOpDataTfLite(const Operator* op, BuiltinOperator op_type, ErrorReporter* error_reporter, @@ -857,6 +872,97 @@ TfLiteStatus ParseOpDataTfLite(const Operator* op, BuiltinOperator op_type, *builtin_data = params.release(); return kTfLiteOk; } + case BuiltinOperator_STABLEHLO_SCATTER: { + return ParseStablehloScatter(op, error_reporter, allocator, builtin_data); + } + case BuiltinOperator_STABLEHLO_RNG_BIT_GENERATOR: { + return ParseStablehloRngBitGenerator(op, error_reporter, allocator, + builtin_data); + } + case BuiltinOperator_STABLEHLO_GATHER: { + return ParseStablehloGather(op, error_reporter, allocator, builtin_data); + } + case BuiltinOperator_STABLEHLO_REDUCE_WINDOW: { + return ParseStablehloReduceWindow(op, error_reporter, allocator, + builtin_data); + } + case BuiltinOperator_REDUCE_WINDOW: { + auto params = safe_allocator.Allocate<TfLiteReduceWindowParams>(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + if (const auto* reduce_params = + op->builtin_options_2_as_ReduceWindowOptions()) { + switch (reduce_params->reduce_function()) { + case ReduceWindowFunction_ADD: + params->reduce_function = TfLiteReduceWindowFunctionAdd; + break; + case ReduceWindowFunction_MUL: + params->reduce_function = TfLiteReduceWindowFunctionMul; + break; + case ReduceWindowFunction_MINIMUM: + params->reduce_function = TfLiteReduceWindowFunctionMin; + break; + case ReduceWindowFunction_MAXIMUM: + params->reduce_function = TfLiteReduceWindowFunctionMax; + break; + case ReduceWindowFunction_ALL: + params->reduce_function = TfLiteReduceWindowFunctionAll; + break; + case ReduceWindowFunction_ANY: + params->reduce_function = TfLiteReduceWindowFunctionAny; + break; + case ReduceWindowFunction_UNSUPPORTED: + default: + return kTfLiteError; + } + } + *builtin_data = params.release(); + return kTfLiteOk; + } + case BuiltinOperator_STABLEHLO_PAD: { + return ParseStablehloPad(op, error_reporter, allocator, builtin_data); + } + case BuiltinOperator_STABLEHLO_COMPOSITE: { + return ParseStablehloComposite(op, error_reporter, allocator, + builtin_data); + } + // TODO: skip param parsing for now since ops below don't have kernels + case BuiltinOperator_STABLEHLO_SLICE: + case
BuiltinOperator_STABLEHLO_BROADCAST_IN_DIM: + case BuiltinOperator_STABLEHLO_CONVOLUTION: + case BuiltinOperator_STABLEHLO_LOGISTIC: + case BuiltinOperator_STABLEHLO_ADD: + case BuiltinOperator_STABLEHLO_DIVIDE: + case BuiltinOperator_STABLEHLO_MULTIPLY: + case BuiltinOperator_STABLEHLO_MAXIMUM: + case BuiltinOperator_STABLEHLO_RESHAPE: + case BuiltinOperator_STABLEHLO_CLAMP: + case BuiltinOperator_STABLEHLO_CONCATENATE: + case BuiltinOperator_STABLEHLO_CUSTOM_CALL: + case BuiltinOperator_STABLEHLO_REDUCE: + case BuiltinOperator_STABLEHLO_ABS: + case BuiltinOperator_STABLEHLO_AND: + case BuiltinOperator_STABLEHLO_COSINE: + case BuiltinOperator_STABLEHLO_EXPONENTIAL: + case BuiltinOperator_STABLEHLO_FLOOR: + case BuiltinOperator_STABLEHLO_LOG: + case BuiltinOperator_STABLEHLO_MINIMUM: + case BuiltinOperator_STABLEHLO_NEGATE: + case BuiltinOperator_STABLEHLO_OR: + case BuiltinOperator_STABLEHLO_POWER: + case BuiltinOperator_STABLEHLO_REMAINDER: + case BuiltinOperator_STABLEHLO_RSQRT: + case BuiltinOperator_STABLEHLO_SELECT: + case BuiltinOperator_STABLEHLO_SUBTRACT: + case BuiltinOperator_STABLEHLO_TANH: + case BuiltinOperator_STABLEHLO_DYNAMIC_SLICE: + case BuiltinOperator_STABLEHLO_DYNAMIC_UPDATE_SLICE: + case BuiltinOperator_STABLEHLO_IOTA: + case BuiltinOperator_STABLEHLO_COMPARE: + case BuiltinOperator_STABLEHLO_CONVERT: + case BuiltinOperator_STABLEHLO_DOT_GENERAL: + case BuiltinOperator_STABLEHLO_SORT: + case BuiltinOperator_STABLEHLO_WHILE: + case BuiltinOperator_STABLEHLO_TRANSPOSE: // Below are the ops with no builtin_data structure. // TODO(aselle): Implement call in BuiltinOptions, but nullptrs are @@ -899,6 +1005,7 @@ TfLiteStatus ParseOpDataTfLite(const Operator* op, BuiltinOperator op_type, case BuiltinOperator_SIGN: case BuiltinOperator_BITCAST: case BuiltinOperator_WHERE: + case BuiltinOperator_DILATE: return kTfLiteOk; case BuiltinOperator_PLACEHOLDER_FOR_GREATER_OP_CODES: return kTfLiteError; @@ -914,6 +1021,9 @@ TfLiteStatus ConvertTensorType(TensorType tensor_type, TfLiteType* type, case TensorType_FLOAT16: *type = kTfLiteFloat16; return kTfLiteOk; + case TensorType_BFLOAT16: + *type = kTfLiteBFloat16; + return kTfLiteOk; case TensorType_FLOAT32: *type = kTfLiteFloat32; return kTfLiteOk; @@ -999,7 +1109,7 @@ TfLiteStatus ParseAdd(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1029,7 +1139,7 @@ TfLiteStatus ParseArgMax(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1054,7 +1164,7 @@ TfLiteStatus ParseArgMin(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. 
} *builtin_data = params.release(); @@ -1134,7 +1244,7 @@ TfLiteStatus ParseCallOnce(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1192,7 +1302,7 @@ TfLiteStatus ParseConcatenation(const Operator* op, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1220,10 +1330,13 @@ TfLiteStatus ParseConv2D(const Operator* op, ErrorReporter* error_reporter, params->dilation_width_factor = schema_params->dilation_w_factor(); params->dilation_height_factor = schema_params->dilation_h_factor(); + TF_LITE_ENSURE_STATUS( + ConvertTensorType(schema_params->quantized_bias_type(), + &params->quantized_bias_type, error_reporter)); } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1274,7 +1387,7 @@ TfLiteStatus ParseDepthToSpace(const Operator* op, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1310,7 +1423,7 @@ TfLiteStatus ParseDepthwiseConv2D(const Operator* op, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1434,7 +1547,9 @@ TfLiteStatus ParseFullyConnected(const Operator* op, params->keep_num_dims = schema_params->keep_num_dims(); params->asymmetric_quantize_inputs = schema_params->asymmetric_quantize_inputs(); - + TF_LITE_ENSURE_STATUS( + ConvertTensorType(schema_params->quantized_bias_type(), + &params->quantized_bias_type, error_reporter)); switch (schema_params->weights_format()) { case FullyConnectedOptionsWeightsFormat_DEFAULT: params->weights_format = kTfLiteFullyConnectedWeightsFormatDefault; @@ -1451,7 +1566,7 @@ TfLiteStatus ParseFullyConnected(const Operator* op, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior.
} *builtin_data = params.release(); @@ -1528,7 +1643,7 @@ TfLiteStatus ParseIf(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1555,7 +1670,7 @@ TfLiteStatus ParseL2Normalization(const Operator* op, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1711,7 +1826,7 @@ TfLiteStatus ParseMirrorPad(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1735,7 +1850,7 @@ TfLiteStatus ParseMul(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1776,7 +1891,7 @@ TfLiteStatus ParsePack(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1822,7 +1937,7 @@ TfLiteStatus ParsePool(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1880,7 +1995,7 @@ TfLiteStatus ParseReducer(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -1933,7 +2048,7 @@ TfLiteStatus ParseReshape(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. 
} *builtin_data = params.release(); @@ -1994,6 +2109,307 @@ TfLiteStatus ParseResizeNearestNeighbor(const Operator* op, return kTfLiteOk; } +TfLiteStatus ParseStablehloReduceWindow(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate<TfLiteStablehloReduceWindowParams>(); + + const StablehloReduceWindowOptions* schema_params = + op->builtin_options_2_as_StablehloReduceWindowOptions(); + if (schema_params) { + if (!schema_params->window_dimensions() || + schema_params->window_dimensions()->size() == 0) { + TF_LITE_REPORT_ERROR(error_reporter, + "'window_dimensions' attribute is not optional for " + "'stablehlo.reduce_window' and cannot be empty."); + return kTfLiteError; + } + + const size_t rank = schema_params->window_dimensions()->size(); + + auto LoadAttr = [&error_reporter]( + int64_t* params_array, size_t params_array_size_bytes, + const flatbuffers::Vector<int64_t>* flatbuffer_vector, + const char* attr_name, const size_t expected_size, + const int64_t fill_value) -> TfLiteStatus { + if (flatbuffer_vector && flatbuffer_vector->size()) { + if (expected_size != 0 && flatbuffer_vector->size() != expected_size) { + TF_LITE_REPORT_ERROR( + error_reporter, + "'%s' attribute of 'stablehlo.reduce_window' does not have the " + "expected size (%llu != %llu).", + attr_name, flatbuffer_vector->size(), expected_size); + return kTfLiteError; + } + TfLiteStatus status = FlatBufferIntVectorToArray( + params_array_size_bytes, flatbuffer_vector, params_array, + error_reporter, "stablehlo.reduce_window"); + if (status != kTfLiteOk) { + TF_LITE_REPORT_ERROR(error_reporter, "Check the '%s' attribute.", + attr_name); + return status; + } + } else { + std::fill_n(params_array, params_array_size_bytes / sizeof(int64_t), + fill_value); + } + return kTfLiteOk; + }; + + TF_LITE_ENSURE_STATUS( + LoadAttr(params->window_dimensions, sizeof(params->window_dimensions), + schema_params->window_dimensions(), "window_dimensions", + /*expected_size=*/rank, /*fill_value=*/1)); + TF_LITE_ENSURE_STATUS( + LoadAttr(params->window_strides, sizeof(params->window_strides), + schema_params->window_strides(), "window_strides", + /*expected_size=*/rank, /*fill_value=*/1)); + TF_LITE_ENSURE_STATUS( + LoadAttr(params->base_dilations, sizeof(params->base_dilations), + schema_params->base_dilations(), "base_dilations", + /*expected_size=*/rank, /*fill_value=*/1)); + TF_LITE_ENSURE_STATUS( + LoadAttr(params->window_dilations, sizeof(params->window_dilations), + schema_params->window_dilations(), "window_dilations", + /*expected_size=*/rank, /*fill_value=*/1)); + TF_LITE_ENSURE_STATUS(LoadAttr(params->padding, sizeof(params->padding), + schema_params->padding(), "padding", + /*expected_size=*/2 * rank, + /*fill_value=*/0)); + + params->body_subgraph_index = schema_params->body_subgraph_index(); + *builtin_data = params.release(); + return kTfLiteOk; + } + TF_LITE_REPORT_ERROR( + error_reporter, + "Could not get 'stablehlo.reduce_window' operation parameters."); + return kTfLiteError; +}
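ParseStablehloReduceWindow above treats a missing or empty attribute vector as a request for defaults: strides and dilations are filled with 1 and padding with 0, replicated to the operator's rank (2 * rank entries for padding, a low/high pair per dimension). A standalone sketch of that fill semantics, assuming a rank-2 window (everything here is illustrative, not the real parser):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  constexpr std::size_t kRank = 2;
  int64_t window_strides[kRank];
  int64_t padding[2 * kRank];  // low/high pair per dimension

  // Absent 'window_strides' attribute: every dimension defaults to stride 1.
  std::fill_n(window_strides, kRank, static_cast<int64_t>(1));
  // Absent 'padding' attribute: no padding on either side of any dimension.
  std::fill_n(padding, 2 * kRank, static_cast<int64_t>(0));

  std::printf("stride[0]=%lld padding[0]=%lld\n",
              static_cast<long long>(window_strides[0]),
              static_cast<long long>(padding[0]));
  return 0;
}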
+ +TfLiteStatus ParseStablehloScatter(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr<TfLiteStablehloScatterParams, + SafeBuiltinDataAllocator::BuiltinDataDeleter> + params = safe_allocator.Allocate<TfLiteStablehloScatterParams>(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const StablehloScatterOptions* schema_params = + op->builtin_options_2_as_StablehloScatterOptions(); + if (schema_params) { + params->indices_are_sorted = schema_params->indices_are_sorted(); + + if (schema_params->update_window_dims()) { + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + schema_params->update_window_dims()->size() * sizeof(int64_t), + schema_params->update_window_dims(), params->update_window_dims, + error_reporter, "stablehlo_scatter")); + params->num_update_window_dims = + schema_params->update_window_dims()->size(); + } + + if (schema_params->inserted_window_dims()) { + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + schema_params->inserted_window_dims()->size() * sizeof(int64_t), + schema_params->inserted_window_dims(), params->inserted_window_dims, + error_reporter, "stablehlo_scatter")); + params->num_inserted_window_dims = + schema_params->inserted_window_dims()->size(); + } + + if (schema_params->scatter_dims_to_operand_dims()) { + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + schema_params->scatter_dims_to_operand_dims()->size() * + sizeof(int64_t), + schema_params->scatter_dims_to_operand_dims(), + params->scatter_dims_to_operand_dims, error_reporter, + "stablehlo_scatter")); + params->num_scatter_dims_to_operand_dims = + schema_params->scatter_dims_to_operand_dims()->size(); + } + + params->index_vector_dim = schema_params->index_vector_dim(); + params->unique_indices = schema_params->unique_indices(); + params->update_computation_subgraph_index = + schema_params->update_computation_subgraph_index(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better understand the ramifications of changing the legacy behavior. + } + *builtin_data = params.release(); + return kTfLiteOk; +}
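ParseStablehloScatter is one of the call sites that motivated templating FlatBufferIntVectorToArray earlier in this diff: the stablehlo attribute arrays are int64_t while legacy callers pass int32_t, and the element type is deduced from the vector argument. A reduced sketch of that deduction with plain arrays standing in for flatbuffers::Vector (CopyChecked is a hypothetical stand-in, not the real helper):

#include <cstddef>
#include <cstdint>
#include <cstring>

// Element type is deduced from the source pointer, so int32_t and int64_t
// call sites share one implementation, as with FlatBufferIntVectorToArray.
template <typename DataType>
bool CopyChecked(std::size_t max_size_of_buffer, const DataType* src,
                 std::size_t src_count, DataType* dst) {
  if (src_count > max_size_of_buffer / sizeof(DataType)) return false;
  std::memcpy(dst, src, src_count * sizeof(DataType));
  return true;
}

int main() {
  const int64_t update_window_dims[] = {0, 1};
  int64_t buffer[8];
  // DataType deduced as int64_t, mirroring the stablehlo attribute arrays.
  return CopyChecked(sizeof(buffer), update_window_dims, 2, buffer) ? 0 : 1;
}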
+ +TfLiteStatus ParseStablehloRngBitGenerator(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr<TfLiteStablehloRngBitGeneratorParams, + SafeBuiltinDataAllocator::BuiltinDataDeleter> + params = safe_allocator.Allocate<TfLiteStablehloRngBitGeneratorParams>(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const StablehloRngBitGeneratorOptions* schema_params = + op->builtin_options_2_as_StablehloRngBitGeneratorOptions(); + if (schema_params != nullptr) { + params->algorithm = ConvertRngAlgorithm(schema_params->algorithm()); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better understand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +} + +TfLiteStatus ParseStablehloGather(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + std::unique_ptr<TfLiteStablehloGatherParams, + SafeBuiltinDataAllocator::BuiltinDataDeleter> + params = safe_allocator.Allocate<TfLiteStablehloGatherParams>(); + TF_LITE_ENSURE(error_reporter, params != nullptr); + + const StablehloGatherOptions* schema_params = + op->builtin_options_2_as_StablehloGatherOptions(); + + if (schema_params != nullptr) { + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + /*max_size_of_buffer=*/schema_params->offset_dims()->size() * + sizeof(int64_t), + /*flat_vector=*/schema_params->offset_dims(), + /*buffer=*/params->offset_dims, /*error_reporter=*/error_reporter, + /*op_name=*/"stablehlo_gather")); + params->num_offset_dims = schema_params->offset_dims()->size(); + + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + schema_params->collapsed_slice_dims()->size() * sizeof(int64_t), + schema_params->collapsed_slice_dims(), params->collapsed_slice_dims, + error_reporter, "stablehlo_gather")); + params->num_collapsed_slice_dims = + schema_params->collapsed_slice_dims()->size(); + + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + schema_params->start_index_map()->size() * sizeof(int64_t), + schema_params->start_index_map(), params->start_index_map, + error_reporter, "stablehlo_gather")); + params->num_start_index_map = schema_params->start_index_map()->size(); + + params->index_vector_dim = schema_params->index_vector_dim(); + + TF_LITE_ENSURE_STATUS(FlatBufferIntVectorToArray( + schema_params->slice_sizes()->size() * sizeof(int64_t), + schema_params->slice_sizes(), params->slice_sizes, error_reporter, + "stablehlo_gather")); + params->num_slice_sizes = schema_params->slice_sizes()->size(); + + params->indices_are_sorted = schema_params->indices_are_sorted(); + } else { + // TODO(b/157480169): We should either return kTfLiteError or fill in some + // reasonable defaults in the params struct. We are not doing so until we + // better understand the ramifications of changing the legacy behavior. + } + + *builtin_data = params.release(); + return kTfLiteOk; +}
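Every parser in this block follows the same ownership handoff as ParseStablehloGather above: the params struct lives in a unique_ptr guard while its fields are filled, and only on success does release() pass the raw pointer out through builtin_data. A minimal sketch of the pattern under the assumption of a malloc/free allocator (the real SafeBuiltinDataAllocator wraps a BuiltinDataAllocator instead; DemoParams and ParseDemo are hypothetical):

#include <cstdint>
#include <cstdlib>
#include <memory>

struct DemoParams {  // hypothetical stand-in for a TfLite*Params struct
  int64_t index_vector_dim;
};

struct FreeDeleter {
  void operator()(void* p) const { std::free(p); }
};

bool ParseDemo(void** builtin_data) {
  std::unique_ptr<DemoParams, FreeDeleter> params(
      static_cast<DemoParams*>(std::malloc(sizeof(DemoParams))));
  if (params == nullptr) return false;  // allocation failed; nothing to free
  params->index_vector_dim = 1;         // populate fields while still guarded
  *builtin_data = params.release();     // success: ownership leaves the guard
  return true;
}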
+ +TfLiteStatus ParseStablehloPad(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate<TfLiteStablehloPadParams>(); + const StablehloPadOptions* schema_params = + op->builtin_options_2_as_StablehloPadOptions(); + + if (schema_params) { + auto LoadAttr = + [&error_reporter]( + int64_t* params_array, const size_t params_array_size_bytes, + const flatbuffers::Vector<int64_t>* const flatbuffer_vector, + const char* const attr_name) -> TfLiteStatus { + TfLiteStatus status = FlatBufferIntVectorToArray( + params_array_size_bytes, flatbuffer_vector, params_array, + error_reporter, "stablehlo.pad"); + if (status != kTfLiteOk) { + TF_LITE_REPORT_ERROR(error_reporter, "Check the '%s' attribute.", + attr_name); + } + return status; + }; + + TF_LITE_ENSURE_STATUS( + LoadAttr(params->edge_padding_low, sizeof(params->edge_padding_low), + schema_params->edge_padding_low(), "edge_padding_low")); + TF_LITE_ENSURE_STATUS( + LoadAttr(params->edge_padding_high, sizeof(params->edge_padding_high), + schema_params->edge_padding_high(), "edge_padding_high")); + TF_LITE_ENSURE_STATUS( + LoadAttr(params->interior_padding, sizeof(params->interior_padding), + schema_params->interior_padding(), "interior_padding")); + if (schema_params->edge_padding_low()->size() != + schema_params->edge_padding_high()->size() || + schema_params->edge_padding_low()->size() != + schema_params->interior_padding()->size()) { + TF_LITE_REPORT_ERROR(error_reporter, + "'stablehlo.pad' operation parameter array sizes " + "are not consistent."); + return kTfLiteError; + } + *builtin_data = params.release(); + return kTfLiteOk; + } + TF_LITE_REPORT_ERROR(error_reporter, + "Could not get 'stablehlo.pad' operation parameters."); + return kTfLiteError; +} + +TfLiteStatus ParseStablehloComposite(const Operator* op, + ErrorReporter* error_reporter, + BuiltinDataAllocator* allocator, + void** builtin_data) { + CheckParsePointerParams(op, error_reporter, allocator, builtin_data); + + SafeBuiltinDataAllocator safe_allocator(allocator); + auto params = safe_allocator.Allocate<TfLiteStablehloCompositeParams>(); + const StableHLOCompositeOptions* schema_params = + op->builtin_options_2_as_StableHLOCompositeOptions(); + if (schema_params) { + params->name = schema_params->name()->c_str(); + params->version = schema_params->version(); + params->subgraph_index = schema_params->decomposition_subgraph_index(); + params->attributes = schema_params->composite_attributes()->data(); + params->attributes_size = schema_params->composite_attributes()->size(); + *builtin_data = params.release(); + return kTfLiteOk; + } + TF_LITE_REPORT_ERROR( + error_reporter, + "Could not get 'stablehlo.composite' operation parameters."); + return kTfLiteError; +} + // We have this parse function instead of directly returning kTfLiteOk from the // switch-case in ParseOpData because this function is used as part of the // selective registration for the OpResolver implementation in micro. @@ -2034,7 +2450,7 @@ TfLiteStatus ParseShape(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior.
+ // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2075,7 +2491,7 @@ TfLiteStatus ParseSoftmax(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2108,7 +2524,7 @@ TfLiteStatus ParseSpaceToDepth(const Operator* op, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2132,7 +2548,7 @@ TfLiteStatus ParseSplit(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2156,7 +2572,7 @@ TfLiteStatus ParseSplitV(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2214,7 +2630,7 @@ TfLiteStatus ParseSqueeze(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2270,7 +2686,7 @@ TfLiteStatus ParseStridedSlice(const Operator* op, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2295,7 +2711,7 @@ TfLiteStatus ParseSub(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. } *builtin_data = params.release(); @@ -2322,7 +2738,7 @@ TfLiteStatus ParseSvdf(const Operator* op, ErrorReporter* error_reporter, } else { // TODO(b/157480169): We should either return kTfLiteError or fill in some // reasonable defaults in the params struct. We are not doing so until we - // better undertand the ramifications of changing the legacy behavior. + // better understand the ramifications of changing the legacy behavior. 
   }
 
   *builtin_data = params.release();
@@ -2365,10 +2781,13 @@ TfLiteStatus ParseTransposeConv(const Operator* op,
     params->activation =
         ConvertActivation(transpose_conv_params->fused_activation_function());
+
+    TF_LITE_ENSURE_STATUS(
+        ConvertTensorType(transpose_conv_params->quantized_bias_type(),
+                          &params->quantized_bias_type, error_reporter));
   } else {
     // TODO(b/157480169): We should either return kTfLiteError or fill in some
     // reasonable defaults in the params struct. We are not doing so until we
-    // better undertand the ramifications of changing the legacy behavior.
+    // better understand the ramifications of changing the legacy behavior.
   }
   *builtin_data = params.release();
   return kTfLiteOk;
@@ -2392,7 +2811,7 @@ TfLiteStatus ParseUnpack(const Operator* op, ErrorReporter* error_reporter,
   } else {
     // TODO(b/157480169): We should either return kTfLiteError or fill in some
     // reasonable defaults in the params struct. We are not doing so until we
-    // better undertand the ramifications of changing the legacy behavior.
+    // better understand the ramifications of changing the legacy behavior.
   }
 
   *builtin_data = params.release();
@@ -2423,7 +2842,7 @@ TfLiteStatus ParseVarHandle(const Operator* op, ErrorReporter* error_reporter,
   } else {
     // TODO(b/157480169): We should either return kTfLiteError or fill in some
     // reasonable defaults in the params struct. We are not doing so until we
-    // better undertand the ramifications of changing the legacy behavior.
+    // better understand the ramifications of changing the legacy behavior.
   }
 
   *builtin_data = params.release();
@@ -2448,7 +2867,7 @@ TfLiteStatus ParseWhile(const Operator* op, ErrorReporter* error_reporter,
   } else {
     // TODO(b/157480169): We should either return kTfLiteError or fill in some
     // reasonable defaults in the params struct. We are not doing so until we
-    // better undertand the ramifications of changing the legacy behavior.
+    // better understand the ramifications of changing the legacy behavior.
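// ---------------------------------------------------------------------------
// [Editor's aside - illustrative only, not part of the patch] The
// quantized_bias_type field parsed above for TRANSPOSE_CONV (and, elsewhere in
// this change, for CONV_2D and FULLY_CONNECTED) tells a kernel which storage
// type the quantized bias tensor uses. A hedged sketch of how a kernel might
// branch on it; BiasWidthBytes is a hypothetical helper name:

#include "tensorflow/lite/core/c/c_api_types.h"  // for TfLiteType

// Returns the bias element width in bytes, or 0 for "use the legacy default".
static inline int BiasWidthBytes(TfLiteType quantized_bias_type) {
  switch (quantized_bias_type) {
    case kTfLiteInt32:
      return 4;  // 32-bit bias, the common default for 8-bit quantization.
    case kTfLiteInt64:
      return 8;  // 64-bit bias, typically paired with 16x8 quantization.
    default:
      return 0;  // kTfLiteNoType: the model did not override the default.
  }
}
// ---------------------------------------------------------------------------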
   }
 
   *builtin_data = params.release();
diff --git a/tensorflow/lite/core/api/flatbuffer_conversions.h b/tensorflow/lite/core/api/flatbuffer_conversions.h
index 9ffe3971c14..c01e8875813 100644
--- a/tensorflow/lite/core/api/flatbuffer_conversions.h
+++ b/tensorflow/lite/core/api/flatbuffer_conversions.h
@@ -420,6 +420,36 @@ TfLiteStatus ParseRightShift(const Operator* op, ErrorReporter* error_reporter,
                              BuiltinDataAllocator* allocator,
                              void** builtin_data);
 
+TfLiteStatus ParseStablehloScatter(const Operator* op,
+                                   ErrorReporter* error_reporter,
+                                   BuiltinDataAllocator* allocator,
+                                   void** builtin_data);
+
+TfLiteStatus ParseStablehloRngBitGenerator(const Operator* op,
+                                           ErrorReporter* error_reporter,
+                                           BuiltinDataAllocator* allocator,
+                                           void** builtin_data);
+
+TfLiteStatus ParseStablehloGather(const Operator* op,
+                                  ErrorReporter* error_reporter,
+                                  BuiltinDataAllocator* allocator,
+                                  void** builtin_data);
+
+TfLiteStatus ParseStablehloReduceWindow(const Operator* op,
+                                        ErrorReporter* error_reporter,
+                                        BuiltinDataAllocator* allocator,
+                                        void** builtin_data);
+
+TfLiteStatus ParseStablehloPad(const Operator* op,
+                               ErrorReporter* error_reporter,
+                               BuiltinDataAllocator* allocator,
+                               void** builtin_data);
+
+TfLiteStatus ParseStablehloComposite(const Operator* op,
+                                     ErrorReporter* error_reporter,
+                                     BuiltinDataAllocator* allocator,
+                                     void** builtin_data);
+
 }  // namespace tflite
 
 #endif  // TENSORFLOW_LITE_CORE_API_FLATBUFFER_CONVERSIONS_H_
diff --git a/tensorflow/lite/core/api/op_resolver.cc b/tensorflow/lite/core/api/op_resolver.cc
deleted file mode 100644
index ce5ae4f406e..00000000000
--- a/tensorflow/lite/core/api/op_resolver.cc
+++ /dev/null
@@ -1,68 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/core/api/op_resolver.h"
-
-#include "flatbuffers/flatbuffers.h"  // from @flatbuffers
-#include "tensorflow/lite/core/api/error_reporter.h"
-#include "tensorflow/lite/core/c/common.h"
-#include "tensorflow/lite/schema/schema_utils.h"
-
-namespace tflite {
-
-TfLiteStatus GetRegistrationFromOpCode(
-    const OperatorCode* opcode, const OpResolver& op_resolver,
-    ErrorReporter* error_reporter, const TfLiteRegistration** registration) {
-  TfLiteStatus status = kTfLiteOk;
-  *registration = nullptr;
-  auto builtin_code = GetBuiltinCode(opcode);
-  int version = opcode->version();
-
-  if (builtin_code > BuiltinOperator_MAX) {
-    TF_LITE_REPORT_ERROR(
-        error_reporter,
-        "Op builtin_code out of range: %d. Are you using old TFLite binary "
-        "with newer model?",
-        builtin_code);
-    status = kTfLiteError;
-  } else if (builtin_code != BuiltinOperator_CUSTOM) {
-    *registration = op_resolver.FindOp(builtin_code, version);
-    if (*registration == nullptr) {
-      TF_LITE_REPORT_ERROR(
-          error_reporter,
-          "Didn't find op for builtin opcode '%s' version '%d'. "
-          "An older version of this builtin might be supported. "
-          "Are you using an old TFLite binary with a newer model?\n",
-          EnumNameBuiltinOperator(builtin_code), version);
-      status = kTfLiteError;
-    }
-  } else if (!opcode->custom_code()) {
-    TF_LITE_REPORT_ERROR(
-        error_reporter,
-        "Operator with CUSTOM builtin_code has no custom_code.\n");
-    status = kTfLiteError;
-  } else {
-    const char* name = opcode->custom_code()->c_str();
-    *registration = op_resolver.FindOp(name, version);
-    if (*registration == nullptr) {
-      // Do not report error for unresolved custom op, we do the final check
-      // while preparing ops.
-      status = kTfLiteError;
-    }
-  }
-  return status;
-}
-
-}  // namespace tflite
diff --git a/tensorflow/lite/core/api/op_resolver.h b/tensorflow/lite/core/api/op_resolver.h
deleted file mode 100644
index e8a4e32771a..00000000000
--- a/tensorflow/lite/core/api/op_resolver.h
+++ /dev/null
@@ -1,136 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-#ifndef TENSORFLOW_LITE_CORE_API_OP_RESOLVER_H_
-#define TENSORFLOW_LITE_CORE_API_OP_RESOLVER_H_
-
-#include <functional>
-#include <memory>
-#include <vector>
-
-#include "tensorflow/lite/core/api/error_reporter.h"
-#include "tensorflow/lite/core/c/common.h"
-#include "tensorflow/lite/schema/schema_generated.h"
-
-namespace tflite {
-
-/// Abstract interface that returns TfLiteRegistrations given op codes or custom
-/// op names. This is the mechanism that ops being referenced in the flatbuffer
-/// model are mapped to executable function pointers (TfLiteRegistrations).
-///
-/// The lifetime of the TfLiteRegistration object whose address is
-/// returned by FindOp must exceed the lifetime of any InterpreterBuilder or
-/// Interpreter created with this OpResolver.
-/// Likewise the lifetime of the TfLiteRegistrationExternal object referenced
-/// from the TfLiteRegistration object, if any, must exceed the lifetime of
-/// any InterpreterBuilder or Interpreter created with this OpResolver.
-class OpResolver {
- public:
-  /// Finds the op registration for a builtin operator by enum code.
-  virtual const TfLiteRegistration* FindOp(tflite::BuiltinOperator op,
-                                           int version) const = 0;
-  /// Finds the op registration of a custom operator by op name.
-  virtual const TfLiteRegistration* FindOp(const char* op,
-                                           int version) const = 0;
-
-  // Represents a sequence of delegates.
-  using TfLiteDelegatePtrVector =
-      std::vector<std::unique_ptr<TfLiteDelegate, void (*)(TfLiteDelegate*)>>;
-
-  // Returns optional delegates for resolving and handling ops in the flatbuffer
-  // model. This may be used in addition to the standard TfLiteRegistration
-  // lookup for graph resolution.
-  // WARNING: This API is deprecated, GetDelegateCreators is preferred.
-  virtual TfLiteDelegatePtrVector GetDelegates(int num_threads) const {
-    return {};
-  }
-
-  // Represents a function that creates a TfLite delegate instance.
-  using TfLiteDelegateCreator =
-      std::function<std::unique_ptr<TfLiteDelegate, void (*)(TfLiteDelegate*)>(
-          TfLiteContext* /*context*/)>;
-
-  // Represents a sequence of delegate creator functions.
-  using TfLiteDelegateCreators = std::vector<TfLiteDelegateCreator>;
-
-  // Returns a vector of delegate creators to create optional delegates for
-  // resolving and handling ops in the flatbuffer model. This may be used in
-  // addition to the standard TfLiteRegistration lookup for graph resolution.
-  //
-  // Note that this method is not used (will not be called) if you are using
-  // TF Lite in Google Play Services; the GetOpaqueDelegateCreators method
-  // (see below) is used for that case.
-  virtual TfLiteDelegateCreators GetDelegateCreators() const { return {}; }
-
-  // TODO(b/202712825): it would be nice if we could avoid the need for separate
-  // "opaque" types & methods for use only with TF Lite in Google Play Services.
-
-  // Represents an opaque delegate instance.
-  // WARNING: Experimental interface, subject to change.
-  using TfLiteOpaqueDelegatePtr =
-      std::unique_ptr<TfLiteOpaqueDelegate, void (*)(TfLiteOpaqueDelegate*)>;
-
-  // Represents a function that creates an opaque delegate instance.
-  // WARNING: Experimental interface, subject to change.
-  using TfLiteOpaqueDelegateCreator =
-      std::function<TfLiteOpaqueDelegatePtr(int /*num_threads*/)>;
-
-  // Represents a sequence of opaque delegate creator functions.
-  // WARNING: Experimental interface, subject to change.
-  using TfLiteOpaqueDelegateCreators = std::vector<TfLiteOpaqueDelegateCreator>;
-
-  // Returns a vector of opaque delegate creators to create optional opaque
-  // delegates for resolving and handling ops in the flatbuffer model. This may
-  // be used in addition to the standard TfLiteRegistration lookup for graph
-  // resolution.
-  //
-  // Note that this method will be called only if you are using TF Lite in
-  // Google Play Services; if you are using regular TF Lite, GetDelegateCreators
-  // (see above) is used instead.
-  //
-  // WARNING: Experimental interface, subject to change.
-  virtual TfLiteOpaqueDelegateCreators GetOpaqueDelegateCreators() const {
-    return {};
-  }
-
-  virtual ~OpResolver() {}
-
- private:
-  /// Returns true if this OpResolver may contain any "user defined" ops.
-  /// By "user defined" ops, we mean any op definitions other than those
-  /// contained in tflite::ops::builtin::BuiltinOpResolver.
-  ///
-  /// If this method returns true, it doesn't necessarily mean that the
-  /// OpResolver contains a user-defined op, just that the absence of
-  /// user-defined ops can't be guaranteed.
-  ///
-  /// Note that "user-defined" ops are not the same as "custom" ops;
-  /// BuiltinOpResolver may support certain "custom" ops, in addition to
-  /// "builtin" ops, and may not support all of the "builtin" op enum values.
-  virtual bool MayContainUserDefinedOps() const { return true; }
-
-  friend class OpResolverInternal;
-};
-
-// Handles the logic for converting between an OperatorCode structure extracted
-// from a flatbuffer and information about a registered operator
-// implementation.
-TfLiteStatus GetRegistrationFromOpCode(const OperatorCode* opcode,
-                                       const OpResolver& op_resolver,
-                                       ErrorReporter* error_reporter,
-                                       const TfLiteRegistration** registration);
-
-}  // namespace tflite
-
-#endif  // TENSORFLOW_LITE_CORE_API_OP_RESOLVER_H_
diff --git a/tensorflow/lite/core/c/builtin_op_data.h b/tensorflow/lite/core/c/builtin_op_data.h
index e9c6eb3488d..e1428e72307 100644
--- a/tensorflow/lite/core/c/builtin_op_data.h
+++ b/tensorflow/lite/core/c/builtin_op_data.h
@@ -21,6 +21,7 @@ limitations under the License.
 #define TENSORFLOW_LITE_CORE_C_BUILTIN_OP_DATA_H_
 
 #include <stdbool.h>
+#include <stddef.h>
 #include <stdint.h>
 
 #include "tensorflow/lite/core/c/common.h"
@@ -32,6 +33,10 @@ extern "C" {
 // TfLiteReshapeParams can't have dynamic data so we fix the maximum possible
 // number of dimensions.
 #define TFLITE_RESHAPE_PARAMS_MAX_DIMENSION_COUNT 8
+#define TFLITE_STABLEHLO_SCATTER_PARAMS_MAX_DIMENSION_COUNT 8
+#define TFLITE_STABLEHLO_GATHER_PARAMS_MAX_DIMENSION_COUNT 8
+#define TFLITE_STABLEHLO_REDUCE_WINDOW_PARAMS_MAX_DIMENSION_COUNT 8
+#define TFLITE_STABLEHLO_PAD_PARAMS_MAX_DIMENSION_COUNT 8
 
 // TODO(aselle): Consider using "if this then that" for testing.
 
@@ -90,6 +95,10 @@ typedef struct {
   // Note: Version 2 supports dilation values not equal to 1.
   int dilation_width_factor;
   int dilation_height_factor;
+
+  // Parameters for CONV_2D version 7 or above.
+  // Used to determine the default value for the quantized bias.
+  TfLiteType quantized_bias_type;
 } TfLiteConvParams;
 
 typedef struct {
@@ -193,6 +202,10 @@ typedef struct {
   // If set to true and the weights are quantized, then non constant inputs
   // are quantized at evaluation time with asymmetric quantization.
   bool asymmetric_quantize_inputs;
+
+  // Parameters for FullyConnected version 10 or above.
+  // Used to determine the default value for the quantized bias.
+  TfLiteType quantized_bias_type;
 } TfLiteFullyConnectedParams;
 
 typedef enum {
@@ -341,7 +354,7 @@ typedef struct {
   // These fields are only used in old models for backward compatibility.
   // In the current implementation, we use the 2nd input of the op as the shape,
   // and these fields are unused.
-  int shape[TFLITE_RESHAPE_PARAMS_MAX_DIMENSION_COUNT];
+  int32_t shape[TFLITE_RESHAPE_PARAMS_MAX_DIMENSION_COUNT];
   int num_dimensions;
 } TfLiteReshapeParams;
 
@@ -398,7 +411,7 @@ typedef struct {
 typedef struct {
   // TODO(ahentz): We can't have dynamic data in this struct, at least not yet.
   // For now we will fix the maximum possible number of dimensions.
-  int squeeze_dims[8];
+  int32_t squeeze_dims[8];
   int num_squeeze_dims;
 } TfLiteSqueezeParams;
 
@@ -430,6 +443,10 @@ typedef struct {
 
   // Parameters supported by version 4:
   TfLiteFusedActivation activation;
+
+  // Parameters for TransposeConv version 5 or above.
+  // Used to determine the default value for the quantized bias.
+  TfLiteType quantized_bias_type;
 } TfLiteTransposeConvParams;
 
 typedef struct {
@@ -535,6 +552,108 @@ typedef struct {
   bool approximate;
 } TfLiteGeluParams;
 
+typedef struct {
+  int64_t dimension;
+} TfLiteStablehloConcatenateParams;
+
+typedef struct {
+  // See the stablehlo spec for the explanation of the attributes:
+  // https://github.com/openxla/stablehlo/blob/main/docs/spec.md#scatter
+  bool indices_are_sorted;
+  int64_t
+      update_window_dims[TFLITE_STABLEHLO_SCATTER_PARAMS_MAX_DIMENSION_COUNT];
+  int num_update_window_dims;
+  int64_t
+      inserted_window_dims[TFLITE_STABLEHLO_SCATTER_PARAMS_MAX_DIMENSION_COUNT];
+  int num_inserted_window_dims;
+  int64_t scatter_dims_to_operand_dims
+      [TFLITE_STABLEHLO_SCATTER_PARAMS_MAX_DIMENSION_COUNT];
+  int num_scatter_dims_to_operand_dims;
+  int64_t index_vector_dim;
+  bool unique_indices;
+  int update_computation_subgraph_index;
+} TfLiteStablehloScatterParams;
+
+typedef enum {
+  kTfLiteRngAlgorithmUnknown = 0,
+  // An algorithm auto-selected by the system according to device type.
+ kTfLiteRngAlgorithmDefault, + // The Philox algorithm, as described in paper + // ['Parallel Random Numbers: As Easy as 1, 2, 3'] + // (https://www.thesalmons.org/john/random123/papers/random123sc11.pdf) + kTfLiteRngAlgorithmPhilox, + // The ThreeFry algorithm, as described in paper + // ['Parallel Random Numbers: As Easy as 1, 2, 3'] + // (https://www.thesalmons.org/john/random123/papers/random123sc11.pdf) + kTfLiteRngAlgorithmThreefry, +} TfLiteRngAlgorithm; + +typedef struct { + TfLiteRngAlgorithm algorithm; +} TfLiteStablehloRngBitGeneratorParams; + +typedef struct { + // See the stablehlo spec for the explanation of the attributes: + // https://github.com/openxla/stablehlo/blob/main/docs/spec.md#gather + int64_t offset_dims[TFLITE_STABLEHLO_GATHER_PARAMS_MAX_DIMENSION_COUNT]; + int num_offset_dims; + int64_t + collapsed_slice_dims[TFLITE_STABLEHLO_GATHER_PARAMS_MAX_DIMENSION_COUNT]; + int num_collapsed_slice_dims; + int64_t start_index_map[TFLITE_STABLEHLO_GATHER_PARAMS_MAX_DIMENSION_COUNT]; + int num_start_index_map; + int64_t index_vector_dim; + int64_t slice_sizes[TFLITE_STABLEHLO_GATHER_PARAMS_MAX_DIMENSION_COUNT]; + int num_slice_sizes; + bool indices_are_sorted; +} TfLiteStablehloGatherParams; + +typedef struct { + // See the stablehlo spec for the explanation of the attributes: + // https://github.com/openxla/stablehlo/blob/main/docs/spec.md#reduce_window + int64_t window_dimensions + [TFLITE_STABLEHLO_REDUCE_WINDOW_PARAMS_MAX_DIMENSION_COUNT]; + int64_t + window_strides[TFLITE_STABLEHLO_REDUCE_WINDOW_PARAMS_MAX_DIMENSION_COUNT]; + int64_t + base_dilations[TFLITE_STABLEHLO_REDUCE_WINDOW_PARAMS_MAX_DIMENSION_COUNT]; + int64_t window_dilations + [TFLITE_STABLEHLO_REDUCE_WINDOW_PARAMS_MAX_DIMENSION_COUNT]; + int64_t + padding[2 * TFLITE_STABLEHLO_REDUCE_WINDOW_PARAMS_MAX_DIMENSION_COUNT]; + int body_subgraph_index; +} TfLiteStablehloReduceWindowParams; + +enum TfLiteReduceWindowFunction { + TfLiteReduceWindowFunctionUnsupported, + TfLiteReduceWindowFunctionAdd, + TfLiteReduceWindowFunctionMul, + TfLiteReduceWindowFunctionMin, + TfLiteReduceWindowFunctionMax, + TfLiteReduceWindowFunctionAll, + TfLiteReduceWindowFunctionAny +}; + +typedef struct { + enum TfLiteReduceWindowFunction reduce_function; +} TfLiteReduceWindowParams; + +typedef struct { + // See the stablehlo spec for the explanation of the attributes: + // https://github.com/openxla/stablehlo/blob/main/docs/spec.md#pad + int64_t edge_padding_low[TFLITE_STABLEHLO_PAD_PARAMS_MAX_DIMENSION_COUNT]; + int64_t edge_padding_high[TFLITE_STABLEHLO_PAD_PARAMS_MAX_DIMENSION_COUNT]; + int64_t interior_padding[TFLITE_STABLEHLO_PAD_PARAMS_MAX_DIMENSION_COUNT]; +} TfLiteStablehloPadParams; + +typedef struct { + const char* name; + int32_t subgraph_index; + int32_t version; + const uint8_t* attributes; + size_t attributes_size; +} TfLiteStablehloCompositeParams; + #ifdef __cplusplus } // extern "C" #endif // __cplusplus diff --git a/tensorflow/lite/core/c/c_api_types.h b/tensorflow/lite/core/c/c_api_types.h index 3a6594dae43..32cefa839f4 100644 --- a/tensorflow/lite/core/c/c_api_types.h +++ b/tensorflow/lite/core/c/c_api_types.h @@ -12,16 +12,24 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
==============================================================================*/ +// WARNING: Users of TensorFlow Lite should not include this file directly, but +// should instead include "third_party/tensorflow/lite/c/c_api_types.h". +// Only the TensorFlow Lite implementation itself should include this file +// directly. /// This file declares types used by the pure C inference API defined in /// c_api.h, some of which are also used in the C++ and C kernel and interpreter /// APIs. - -// WARNING: Users of TensorFlow Lite should not include this file directly, -// but should instead include -// "third_party/tensorflow/lite/c/c_api_types.h". -// Only the TensorFlow Lite implementation itself should include this -// file directly. +/// +// clang-format off +// NOLINTBEGIN(whitespace/line_length) +/// \note Users of TensorFlow Lite should use +/// \code +/// #include "tensorflow/lite/c/c_api_types.h" +/// \endcode +/// to access the APIs documented on this page. +// NOLINTEND(whitespace/line_length) +// clang-format on // IWYU pragma: private, include "third_party/tensorflow/lite/c/c_api_types.h" @@ -34,9 +42,13 @@ limitations under the License. extern "C" { #endif -/** \addtogroup c_api_types tensorflow/lite/c/c_api_types.h +// clang-format off +// NOLINTBEGIN(whitespace/line_length) +/** \defgroup c_api_types lite/c/c_api_types.h * @{ */ +// NOLINTEND(whitespace/line_length) +// clang-format on // Define TFL_CAPI_EXPORT macro to export a function properly with a shared // library. @@ -121,14 +133,14 @@ typedef enum { kTfLiteUInt32 = 16, kTfLiteUInt16 = 17, kTfLiteInt4 = 18, + kTfLiteBFloat16 = 19, } TfLiteType; -/// Legacy. Will be deprecated in favor of TfLiteAffineQuantization. +/// Legacy. Will be deprecated in favor of `TfLiteAffineQuantization`. /// If per-layer quantization is specified this field will still be populated in -/// addition to TfLiteAffineQuantization. +/// addition to `TfLiteAffineQuantization`. /// Parameters for asymmetric quantization. Quantized values can be converted -/// back to float using: -/// real_value = scale * (quantized_value - zero_point) +/// back to float using: `real_value = scale * (quantized_value - zero_point)` typedef struct TfLiteQuantizationParams { float scale; int32_t zero_point; @@ -156,6 +168,7 @@ typedef struct TfLiteDelegate TfLiteDelegate; /// This is an abstract type that is intended to have the same /// role as TfLiteDelegate, but without exposing the implementation /// details of how delegates are implemented. +/// /// WARNING: This is an experimental type and subject to change. typedef struct TfLiteOpaqueDelegateStruct TfLiteOpaqueDelegateStruct; @@ -163,6 +176,7 @@ typedef struct TfLiteOpaqueDelegateStruct TfLiteOpaqueDelegateStruct; /// TfLiteDelegate; allows delegation of nodes to alternative backends. /// For TF Lite in Play Services, this is an opaque type, /// but for regular TF Lite, this is just a typedef for TfLiteDelegate. +/// /// WARNING: This is an experimental type and subject to change. 
 #if TFLITE_WITH_STABLE_ABI || TFLITE_USE_OPAQUE_DELEGATE
 typedef TfLiteOpaqueDelegateStruct TfLiteOpaqueDelegate;
diff --git a/tensorflow/lite/core/c/common.cc b/tensorflow/lite/core/c/common.cc
index 367f1752261..7afecdbe885 100644
--- a/tensorflow/lite/core/c/common.cc
+++ b/tensorflow/lite/core/c/common.cc
@@ -370,6 +370,8 @@ const char* TfLiteTypeGetName(TfLiteType type) {
       return "STRING";
     case kTfLiteFloat16:
       return "FLOAT16";
+    case kTfLiteBFloat16:
+      return "BFLOAT16";
     case kTfLiteFloat64:
       return "FLOAT64";
     case kTfLiteResource:
@@ -384,40 +386,128 @@ const char* TfLiteTypeGetName(TfLiteType type) {
 
 TfLiteDelegate TfLiteDelegateCreate() { return TfLiteDelegate{}; }
 
-#ifndef TF_LITE_STATIC_MEMORY
-TfLiteOpaqueDelegate* TfLiteOpaqueDelegateCreate(
-    const TfLiteOpaqueDelegateBuilder* opaque_delegate_builder) {
-  if (!opaque_delegate_builder) return nullptr;
-
-  TfLiteDelegate* result = new TfLiteDelegate{};
-  result->opaque_delegate_builder = new TfLiteOpaqueDelegateBuilder{};
-  *(result->opaque_delegate_builder) = *opaque_delegate_builder;
-
-  return reinterpret_cast<TfLiteOpaqueDelegate*>(result);
+// Returns a tensor data allocation strategy.
+TfLiteAllocationStrategy TfLiteTensorGetAllocationStrategy(
+    const TfLiteTensor* const t) {
+  switch (t->allocation_type) {
+    case kTfLiteMemNone:
+      return kTfLiteAllocationStrategyNone;
+    case kTfLiteMmapRo:
+      return kTfLiteAllocationStrategyMMap;
+    case kTfLiteArenaRw:
+      return kTfLiteAllocationStrategyArena;
+    case kTfLiteArenaRwPersistent:
+      return kTfLiteAllocationStrategyArena;
+    case kTfLiteDynamic:
+      return kTfLiteAllocationStrategyMalloc;
+    case kTfLitePersistentRo:
+      return kTfLiteAllocationStrategyUnknown;
+    case kTfLiteCustom:
+      return kTfLiteAllocationStrategyUnknown;
+    case kTfLiteVariantObject:
+      return kTfLiteAllocationStrategyNew;
+  }
+  return kTfLiteAllocationStrategyUnknown;
 }
 
-void TfLiteOpaqueDelegateDelete(TfLiteOpaqueDelegate* opaque_delegate) {
-  if (!opaque_delegate) return;
-
-  const TfLiteDelegate* tflite_delegate =
-      reinterpret_cast<const TfLiteDelegate*>(opaque_delegate);
-  delete tflite_delegate->opaque_delegate_builder;
-  delete tflite_delegate;
+// Returns how stable a tensor data buffer address is across runs.
+TfLiteRunStability TfLiteTensorGetBufferAddressStability(
+    const TfLiteTensor* const t) {
+  switch (t->allocation_type) {
+    case kTfLiteMemNone:
+      return kTfLiteRunStabilityAcrossRuns;
+    case kTfLiteMmapRo:
+      return kTfLiteRunStabilityAcrossRuns;
+    case kTfLiteArenaRw:
+      return kTfLiteRunStabilityUnstable;
+    case kTfLiteArenaRwPersistent:
+      return kTfLiteRunStabilityUnstable;
+    case kTfLiteDynamic:
+      return kTfLiteRunStabilitySingleRun;
+    case kTfLitePersistentRo:
+      return kTfLiteRunStabilitySingleRun;
+    case kTfLiteCustom:
+      return kTfLiteRunStabilityUnknown;
+    case kTfLiteVariantObject:
+      return kTfLiteRunStabilityAcrossRuns;
+  }
+  return kTfLiteRunStabilityUnknown;
 }
-#endif  // TF_LITE_STATIC_MEMORY
 
-void* TfLiteOpaqueDelegateGetData(const TfLiteOpaqueDelegate* delegate) {
-  if (!delegate) return nullptr;
-
-  // The following cast is safe only because this code is part of the
-  // TF Lite runtime implementation.  Apps using TF Lite should not rely on
-  // 'TfLiteOpaqueDelegate' and 'TfLiteDelegate' being equivalent.
-  const auto* tflite_delegate =
-      reinterpret_cast<const TfLiteDelegate*>(delegate);
+// Returns how stable a tensor's data values are across runs.
+TfLiteRunStability TfLiteTensorGetDataStability(const TfLiteTensor* const t) {
+  switch (t->allocation_type) {
+    case kTfLiteMemNone:
+      return kTfLiteRunStabilityAcrossRuns;
+    case kTfLiteMmapRo:
+      return kTfLiteRunStabilityAcrossRuns;
+    case kTfLiteArenaRw:
+      return kTfLiteRunStabilitySingleRun;
+    case kTfLiteArenaRwPersistent:
+      return kTfLiteRunStabilityAcrossRuns;
+    case kTfLiteDynamic:
+      return kTfLiteRunStabilitySingleRun;
+    case kTfLitePersistentRo:
+      return kTfLiteRunStabilitySingleRun;
+    case kTfLiteCustom:
+      return kTfLiteRunStabilityUnknown;
+    case kTfLiteVariantObject:
+      return kTfLiteRunStabilitySingleRun;
+  }
+  return kTfLiteRunStabilityUnknown;
+}
 
-  if (!tflite_delegate->opaque_delegate_builder) return tflite_delegate->data_;
+// Returns the operation step when the data of a tensor is populated.
+//
+// Some operations can precompute their results before the evaluation step. This
+// makes the data available earlier for subsequent operations.
+TfLiteRunStep TfLiteTensorGetDataKnownStep(const TfLiteTensor* t) {
+  switch (t->allocation_type) {
+    case kTfLiteMemNone:
+      return kTfLiteRunStepInit;
+    case kTfLiteMmapRo:
+      return kTfLiteRunStepInit;
+    case kTfLiteArenaRw:
+      return kTfLiteRunStepEval;
+    case kTfLiteArenaRwPersistent:
+      return kTfLiteRunStepEval;
+    case kTfLiteDynamic:
+      return kTfLiteRunStepEval;
+    case kTfLitePersistentRo:
+      return kTfLiteRunStepPrepare;
+    case kTfLiteCustom:
+      return kTfLiteRunStepUnknown;
+    case kTfLiteVariantObject:
+      return kTfLiteRunStepEval;
+  }
+  return kTfLiteRunStepUnknown;
+}
 
-  return tflite_delegate->opaque_delegate_builder->data;
+// Returns the operation step when the shape of a tensor is computed.
+//
+// Some operations can precompute the shape of their results before the
+// evaluation step. This makes the shape available earlier for subsequent
+// operations.
+TfLiteRunStep TfLiteTensorGetShapeKnownStep(const TfLiteTensor* t) {
+  switch (t->allocation_type) {
+    case kTfLiteMemNone:
+      return kTfLiteRunStepInit;
+    case kTfLiteMmapRo:
+      return kTfLiteRunStepInit;
+    case kTfLiteArenaRw:
+      return kTfLiteRunStepPrepare;
+    case kTfLiteArenaRwPersistent:
+      return kTfLiteRunStepPrepare;
+    case kTfLiteDynamic:
+      return kTfLiteRunStepEval;
+    case kTfLitePersistentRo:
+      return kTfLiteRunStepPrepare;
+    case kTfLiteCustom:
+      return kTfLiteRunStepUnknown;
+    case kTfLiteVariantObject:
+      return kTfLiteRunStepEval;
+  }
+  return kTfLiteRunStepUnknown;
 }
 
 }  // extern "C"
diff --git a/tensorflow/lite/core/c/common.h b/tensorflow/lite/core/c/common.h
index 19a74a7c90d..96f19f12336 100644
--- a/tensorflow/lite/core/c/common.h
+++ b/tensorflow/lite/core/c/common.h
@@ -12,32 +12,41 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
+// WARNING: Users of TensorFlow Lite should not include this file directly, but
+// should instead include "third_party/tensorflow/lite/c/common.h".
+// Only the TensorFlow Lite implementation itself should include this file
+// directly.
+
+/// This file defines common C types and APIs for implementing operations,
+/// delegates and other constructs in TensorFlow Lite. The actual operations and
+/// delegates can be defined using C++, but the interface between the
+/// interpreter and the operations is C.
+/// +/// Summary of abstractions: +/// * `TF_LITE_ENSURE` - self-sufficient error checking +/// * `TfLiteStatus` - status reporting +/// * `TfLiteIntArray` - stores tensor shapes (dims), +/// * `TfLiteContext` - allows an op to access the tensors +/// * `TfLiteTensor` - tensor (a multidimensional array) +/// * `TfLiteNode` - a single node or operation +/// * `TfLiteRegistration` - the implementation of a conceptual operation. +/// * `TfLiteDelegate` - allows delegation of nodes to alternative backends. +/// +/// Some abstractions in this file are created and managed by Interpreter. +/// +/// NOTE: The order of values in these structs are "semi-ABI stable". New values +/// should be added only to the end of structs and never reordered. +/// +// clang-format off +// NOLINTBEGIN(whitespace/line_length) +/// \note Users of TensorFlow Lite should use +/// \code +/// #include "tensorflow/lite/c/common.h" +/// \endcode +/// to access the APIs documented on this page. +// NOLINTEND(whitespace/line_length) +// clang-format on -// This file defines common C types and APIs for implementing operations, -// delegates and other constructs in TensorFlow Lite. The actual operations and -// delegates can be defined using C++, but the interface between the interpreter -// and the operations are C. -// -// Summary of abstractions -// TF_LITE_ENSURE - Self-sufficient error checking -// TfLiteStatus - Status reporting -// TfLiteIntArray - stores tensor shapes (dims), -// TfLiteContext - allows an op to access the tensors -// TfLiteTensor - tensor (a multidimensional array) -// TfLiteNode - a single node or operation -// TfLiteRegistration - the implementation of a conceptual operation. -// TfLiteDelegate - allows delegation of nodes to alternative backends. -// -// Some abstractions in this file are created and managed by Interpreter. -// -// NOTE: The order of values in these structs are "semi-ABI stable". New values -// should be added only to the end of structs and never reordered. - -/// WARNING: Users of TensorFlow Lite should not include this file directly, -/// but should instead include -/// "third_party/tensorflow/lite/c/common.h". -/// Only the TensorFlow Lite implementation itself should include this -/// file directly. // IWYU pragma: private, include "third_party/tensorflow/lite/c/common.h" #ifndef TENSORFLOW_LITE_CORE_C_COMMON_H_ @@ -54,15 +63,23 @@ limitations under the License. extern "C" { #endif // __cplusplus -// The list of external context types known to TF Lite. This list exists solely -// to avoid conflicts and to ensure ops can share the external contexts they -// need. Access to the external contexts is controlled by one of the -// corresponding support files. +// clang-format off +// NOLINTBEGIN(whitespace/line_length) +/** \defgroup common lite/c/common.h + * @{ + */ +// NOLINTEND(whitespace/line_length) +// clang-format on + +/// The list of external context types known to TF Lite. This list exists solely +/// to avoid conflicts and to ensure ops can share the external contexts they +/// need. Access to the external contexts is controlled by one of the +/// corresponding support files. typedef enum TfLiteExternalContextType { - kTfLiteEigenContext = 0, // include eigen_support.h to use. - kTfLiteGemmLowpContext = 1, // include gemm_support.h to use. - kTfLiteEdgeTpuContext = 2, // Placeholder for Edge TPU support. - kTfLiteCpuBackendContext = 3, // include cpu_backend_context.h to use. + kTfLiteEigenContext = 0, /// include eigen_support.h to use. 
+ kTfLiteGemmLowpContext = 1, /// include gemm_support.h to use. + kTfLiteEdgeTpuContext = 2, /// Placeholder for Edge TPU support. + kTfLiteCpuBackendContext = 3, /// include cpu_backend_context.h to use. kTfLiteMaxExternalContexts = 4 } TfLiteExternalContextType; @@ -73,11 +90,11 @@ struct TfLiteDelegate; struct TfLiteRegistration; struct TfLiteOpaqueDelegateBuilder; -// An external context is a collection of information unrelated to the TF Lite -// framework, but useful to a subset of the ops. TF Lite knows very little -// about the actual contexts, but it keeps a list of them, and is able to -// refresh them if configurations like the number of recommended threads -// change. +/// An external context is a collection of information unrelated to the TF Lite +/// framework, but useful to a subset of the ops. TF Lite knows very little +/// about the actual contexts, but it keeps a list of them, and is able to +/// refresh them if configurations like the number of recommended threads +/// change. typedef struct TfLiteExternalContext { TfLiteExternalContextType type; TfLiteStatus (*Refresh)(struct TfLiteContext* context); @@ -85,8 +102,8 @@ typedef struct TfLiteExternalContext { #define kTfLiteOptionalTensor (-1) -// Fixed size list of integers. Used for dimensions and inputs/outputs tensor -// indices +/// Fixed size list of integers. Used for dimensions and inputs/outputs tensor +/// indices typedef struct TfLiteIntArray { int size; @@ -105,33 +122,33 @@ typedef struct TfLiteIntArray { #endif } TfLiteIntArray; -// Given the size (number of elements) in a TfLiteIntArray, calculate its size -// in bytes. +/// Given the size (number of elements) in a TfLiteIntArray, calculate its size +/// in bytes. size_t TfLiteIntArrayGetSizeInBytes(int size); #ifndef TF_LITE_STATIC_MEMORY -// Create a array of a given `size` (uninitialized entries). -// This returns a pointer, that you must free using TfLiteIntArrayFree(). +/// Create a array of a given `size` (uninitialized entries). +/// This returns a pointer, that you must free using TfLiteIntArrayFree(). TfLiteIntArray* TfLiteIntArrayCreate(int size); #endif -// Check if two intarrays are equal. Returns 1 if they are equal, 0 otherwise. +/// Check if two intarrays are equal. Returns 1 if they are equal, 0 otherwise. int TfLiteIntArrayEqual(const TfLiteIntArray* a, const TfLiteIntArray* b); -// Check if an intarray equals an array. Returns 1 if equals, 0 otherwise. +/// Check if an intarray equals an array. Returns 1 if equals, 0 otherwise. int TfLiteIntArrayEqualsArray(const TfLiteIntArray* a, int b_size, const int b_data[]); #ifndef TF_LITE_STATIC_MEMORY -// Create a copy of an array passed as `src`. -// You are expected to free memory with TfLiteIntArrayFree +/// Create a copy of an array passed as `src`. +/// You are expected to free memory with TfLiteIntArrayFree TfLiteIntArray* TfLiteIntArrayCopy(const TfLiteIntArray* src); -// Free memory of array `a`. +/// Free memory of array `a`. void TfLiteIntArrayFree(TfLiteIntArray* a); #endif // TF_LITE_STATIC_MEMORY -// Fixed size list of floats. Used for per-channel quantization. +/// Fixed size list of floats. Used for per-channel quantization. typedef struct TfLiteFloatArray { int size; #if defined(_MSC_VER) @@ -149,20 +166,20 @@ typedef struct TfLiteFloatArray { #endif } TfLiteFloatArray; -// Given the size (number of elements) in a TfLiteFloatArray, calculate its size -// in bytes. +/// Given the size (number of elements) in a TfLiteFloatArray, calculate its +/// size in bytes. 
int TfLiteFloatArrayGetSizeInBytes(int size); #ifndef TF_LITE_STATIC_MEMORY -// Create a array of a given `size` (uninitialized entries). -// This returns a pointer, that you must free using TfLiteFloatArrayFree(). +/// Create a array of a given `size` (uninitialized entries). +/// This returns a pointer, that you must free using TfLiteFloatArrayFree(). TfLiteFloatArray* TfLiteFloatArrayCreate(int size); -// Create a copy of an array passed as `src`. -// You are expected to free memory with TfLiteFloatArrayFree. +/// Create a copy of an array passed as `src`. +/// You are expected to free memory with TfLiteFloatArrayFree. TfLiteFloatArray* TfLiteFloatArrayCopy(const TfLiteFloatArray* src); -// Free memory of array `a`. +/// Free memory of array `a`. void TfLiteFloatArrayFree(TfLiteFloatArray* a); #endif // TF_LITE_STATIC_MEMORY @@ -191,18 +208,18 @@ void TfLiteFloatArrayFree(TfLiteFloatArray* a); #define TF_LITE_MAYBE_KERNEL_LOG(context, ...) ARGS_UNUSED(__VA_ARGS__) #endif // TF_LITE_STRIP_ERROR_STRINGS -// Check whether value is true, and if not return kTfLiteError from -// the current function (and report the error string msg). -#define TF_LITE_ENSURE_MSG(context, value, msg) \ - do { \ - if (!(value)) { \ - TF_LITE_KERNEL_LOG((context), __FILE__ " " msg); \ - return kTfLiteError; \ - } \ +/// Check whether value is true, and if not return kTfLiteError from +/// the current function (and report the error string msg). +#define TF_LITE_ENSURE_MSG(context, value, ...) \ + do { \ + if (!(value)) { \ + TF_LITE_KERNEL_LOG((context), __FILE__ " " __VA_ARGS__); \ + return kTfLiteError; \ + } \ } while (0) -// Check whether the value `a` is true, and if not return kTfLiteError from -// the current function, while also reporting the location of the error. +/// Check whether the value `a` is true, and if not return kTfLiteError from +/// the current function, while also reporting the location of the error. #define TF_LITE_ENSURE(context, a) \ do { \ if (!(a)) { \ @@ -220,11 +237,12 @@ void TfLiteFloatArrayFree(TfLiteFloatArray* a); } \ } while (0) -// Check whether the value `a == b` is true, and if not return kTfLiteError from -// the current function, while also reporting the location of the error. -// `a` and `b` may be evaluated more than once, so no side effects or -// extremely expensive computations should be done. -// NOTE: Use TF_LITE_ENSURE_TYPES_EQ if comparing TfLiteTypes. +/// Check whether the value `a == b` is true, and if not return kTfLiteError +/// from the current function, while also reporting the location of the error. +/// `a` and `b` may be evaluated more than once, so no side effects or +/// extremely expensive computations should be done. +/// +/// NOTE: Use TF_LITE_ENSURE_TYPES_EQ if comparing TfLiteTypes. #define TF_LITE_ENSURE_EQ(context, a, b) \ do { \ if ((a) != (b)) { \ @@ -263,61 +281,69 @@ void TfLiteFloatArrayFree(TfLiteFloatArray* a); } \ } while (0) -// Single-precision complex data type compatible with the C99 definition. +/// Single-precision complex data type compatible with the C99 definition. typedef struct TfLiteComplex64 { - float re, im; // real and imaginary parts, respectively. + float re, im; /// real and imaginary parts, respectively. } TfLiteComplex64; -// Double-precision complex data type compatible with the C99 definition. +/// Double-precision complex data type compatible with the C99 definition. typedef struct TfLiteComplex128 { - double re, im; // real and imaginary parts, respectively. 
+ double re, im; /// real and imaginary parts, respectively. } TfLiteComplex128; -// Half precision data type compatible with the C99 definition. +/// Half precision data type compatible with the C99 definition. typedef struct TfLiteFloat16 { uint16_t data; } TfLiteFloat16; -// Return the name of a given type, for error reporting purposes. +/// bfloat16 data type compatible with the Google Brain definition. +/// https://cloud.google.com/tpu/docs/bfloat16. +/// This provides 1 bit of sign, 8 bits of exponent, and 7 bits of mantissa. +typedef struct TfLiteBFloat16 { + uint16_t data; +} TfLiteBFloat16; + +/// Return the name of a given type, for error reporting purposes. const char* TfLiteTypeGetName(TfLiteType type); -// SupportedQuantizationTypes. +/// SupportedQuantizationTypes. typedef enum TfLiteQuantizationType { - // No quantization. + /// No quantization. kTfLiteNoQuantization = 0, - // Affine quantization (with support for per-channel quantization). - // Corresponds to TfLiteAffineQuantization. + /// Affine quantization (with support for per-channel quantization). + /// Corresponds to TfLiteAffineQuantization. kTfLiteAffineQuantization = 1, } TfLiteQuantizationType; -// Structure specifying the quantization used by the tensor, if-any. +/// Structure specifying the quantization used by the tensor, if-any. typedef struct TfLiteQuantization { - // The type of quantization held by params. + /// The type of quantization held by params. TfLiteQuantizationType type; - // Holds an optional reference to a quantization param structure. The actual - // type depends on the value of the `type` field (see the comment there for - // the values and corresponding types). + /// Holds an optional reference to a quantization param structure. The actual + /// type depends on the value of the `type` field (see the comment there for + /// the values and corresponding types). void* params; } TfLiteQuantization; -// Parameters for asymmetric quantization across a dimension (i.e per output -// channel quantization). -// quantized_dimension specifies which dimension the scales and zero_points -// correspond to. -// For a particular value in quantized_dimension, quantized values can be -// converted back to float using: -// real_value = scale * (quantized_value - zero_point) +/// Parameters for asymmetric quantization across a dimension (i.e per output +/// channel quantization). +/// quantized_dimension specifies which dimension the scales and zero_points +/// correspond to. +/// For a particular value in quantized_dimension, quantized values can be +/// converted back to float using: +/// `real_value = scale * (quantized_value - zero_point)` typedef struct TfLiteAffineQuantization { TfLiteFloatArray* scale; TfLiteIntArray* zero_point; int32_t quantized_dimension; } TfLiteAffineQuantization; -/* A union of pointers that points to memory for a given tensor. */ +/// A union of pointers that points to memory for a given tensor. +/// +/// Do not access these members directly, if possible, use +/// `GetTensorData(tensor)` instead, otherwise only access `.data`, as +/// other members are deprecated. typedef union TfLitePtrUnion { - /* Do not access these members directly, if possible, use - * GetTensorData(tensor) instead, otherwise only access .data, as other - * members are deprecated. */ int32_t* i32; uint32_t* u32; int64_t* i64; @@ -334,24 +360,26 @@ typedef union TfLitePtrUnion { TfLiteComplex64* c64; TfLiteComplex128* c128; int8_t* int8; - /* Only use this member. */ + /// Only use this member. 
void* data; } TfLitePtrUnion; -// Memory allocation strategies. -// * kTfLiteMmapRo: Read-only memory-mapped data, or data externally allocated. -// * kTfLiteArenaRw: Arena allocated with no guarantees about persistence, -// and available during eval. -// * kTfLiteArenaRwPersistent: Arena allocated but persistent across eval, and -// only available during eval. -// * kTfLiteDynamic: Allocated during eval, or for string tensors. -// * kTfLitePersistentRo: Allocated and populated during prepare. This is -// useful for tensors that can be computed during prepare and treated -// as constant inputs for downstream ops (also in prepare). -// * kTfLiteCustom: Custom memory allocation provided by the user. See -// TfLiteCustomAllocation below. -// * kTfLiteVariantObject: Allocation is an arbitrary type-erased C++ object. -// Allocation and deallocation are done through `new` and `delete`. +/// Memory allocation strategies. +/// * `kTfLiteMmapRo`: Read-only memory-mapped data, or data externally +/// allocated. +/// * `kTfLiteArenaRw`: Arena allocated with no guarantees about persistence, +/// and available during eval. +/// * `kTfLiteArenaRwPersistent`: Arena allocated but persistent across eval, +/// and only available during eval. +/// * `kTfLiteDynamic`: Allocated during eval, or for string tensors. +/// * `kTfLitePersistentRo`: Allocated and populated during prepare. This is +/// useful for tensors that can be computed during prepare and treated +/// as constant inputs for downstream ops (also in prepare). +/// * `kTfLiteCustom`: Custom memory allocation provided by the user. See +/// TfLiteCustomAllocation below. +/// * `kTfLiteVariantObject`: Allocation is an arbitrary type-erased C++ +/// object. +/// Allocation and deallocation are done through `new` and `delete`. typedef enum TfLiteAllocationType { kTfLiteMemNone = 0, kTfLiteMmapRo, @@ -363,20 +391,51 @@ typedef enum TfLiteAllocationType { kTfLiteVariantObject, } TfLiteAllocationType; -// The delegates should use zero or positive integers to represent handles. -// -1 is reserved from unallocated status. +/// Memory allocation strategies. +/// +/// TfLiteAllocationType values have been overloaded to mean more than their +/// original intent. This enum should only be used to document the allocation +/// strategy used by a tensor for it data. +typedef enum TfLiteAllocationStrategy { + kTfLiteAllocationStrategyUnknown, + kTfLiteAllocationStrategyNone, /// No data is allocated. + kTfLiteAllocationStrategyMMap, /// Data is mmaped. + kTfLiteAllocationStrategyArena, /// Handled by the arena. + kTfLiteAllocationStrategyMalloc, /// Uses `malloc`/`free`. + kTfLiteAllocationStrategyNew /// Uses `new[]`/`delete[]`. +} TfLiteAllocationStrategy; + +/// Describes how stable a tensor attribute is with regards to an interpreter +/// runs. +typedef enum TfLiteRunStability { + kTfLiteRunStabilityUnknown, + kTfLiteRunStabilityUnstable, /// May change at any time. + kTfLiteRunStabilitySingleRun, /// Will stay the same for one run. + kTfLiteRunStabilityAcrossRuns /// Will stay the same across all runs. +} TfLiteRunStability; + +/// Describes the steps of a TFLite operation life cycle. +typedef enum TfLiteRunStep { + kTfLiteRunStepUnknown, + kTfLiteRunStepInit, + kTfLiteRunStepPrepare, + kTfLiteRunStepEval +} TfLiteRunStep; + +/// The delegates should use zero or positive integers to represent handles. +/// -1 is reserved from unallocated status. 
typedef int TfLiteBufferHandle; enum { kTfLiteNullBufferHandle = -1, }; -// Storage format of each dimension in a sparse tensor. +/// Storage format of each dimension in a sparse tensor. typedef enum TfLiteDimensionType { kTfLiteDimDense = 0, kTfLiteDimSparseCSR, } TfLiteDimensionType; -// Metadata to encode each dimension in a sparse tensor. +/// Metadata to encode each dimension in a sparse tensor. typedef struct TfLiteDimensionMetadata { TfLiteDimensionType format; int dense_size; @@ -384,8 +443,8 @@ typedef struct TfLiteDimensionMetadata { TfLiteIntArray* array_indices; } TfLiteDimensionMetadata; -// Parameters used to encode a sparse tensor. For detailed explanation of each -// field please refer to lite/schema/schema.fbs. +/// Parameters used to encode a sparse tensor. For detailed explanation of each +/// field please refer to lite/schema/schema.fbs. typedef struct TfLiteSparsity { TfLiteIntArray* traversal_order; TfLiteIntArray* block_map; @@ -393,133 +452,141 @@ typedef struct TfLiteSparsity { int dim_metadata_size; } TfLiteSparsity; -// Defines a custom memory allocation not owned by the runtime. -// `data` should be aligned to kDefaultTensorAlignment defined in -// lite/util.h. (Currently 64 bytes) -// NOTE: See Interpreter.SetCustomAllocationForTensor for details on usage. +/// Defines a custom memory allocation not owned by the runtime. +/// `data` should be aligned to kDefaultTensorAlignment defined in +/// lite/util.h. (Currently 64 bytes) +/// NOTE: See `Interpreter::SetCustomAllocationForTensor` for details on usage. typedef struct TfLiteCustomAllocation { void* data; size_t bytes; } TfLiteCustomAllocation; -// The flags used in `Interpreter::SetCustomAllocationForTensor`. -// Note that this is a bitmask, so the values should be 1, 2, 4, 8, ...etc. +/// The flags used in `Interpreter::SetCustomAllocationForTensor`. +/// Note that this is a bitmask, so the values should be 1, 2, 4, 8, ...etc. typedef enum TfLiteCustomAllocationFlags { kTfLiteCustomAllocationFlagsNone = 0, - // Skips checking whether allocation.data points to an aligned buffer as - // expected by the TFLite runtime. - // NOTE: Setting this flag can cause crashes when calling Invoke(). - // Use with caution. + /// Skips checking whether allocation.data points to an aligned buffer as + /// expected by the TFLite runtime. + /// NOTE: Setting this flag can cause crashes when calling Invoke(). + /// Use with caution. kTfLiteCustomAllocationFlagsSkipAlignCheck = 1, } TfLiteCustomAllocationFlags; -// A tensor in the interpreter system which is a wrapper around a buffer of -// data including a dimensionality (or NULL if not currently defined). +enum { kTfLiteNoBufferIdentifier = SIZE_MAX }; + +/// A tensor in the interpreter system which is a wrapper around a buffer of +/// data including a dimensionality (or NULL if not currently defined). #ifndef TF_LITE_STATIC_MEMORY typedef struct TfLiteTensor { - // The data type specification for data stored in `data`. This affects - // what member of `data` union should be used. + /// The data type specification for data stored in `data`. This affects + /// what member of `data` union should be used. TfLiteType type; - // A union of data pointers. The appropriate type should be used for a typed - // tensor based on `type`. + /// A union of data pointers. The appropriate type should be used for a typed + /// tensor based on `type`. TfLitePtrUnion data; - // A pointer to a structure representing the dimensionality interpretation - // that the buffer should have. 
NOTE: the product of elements of `dims` - // and the element datatype size should be equal to `bytes` below. + /// A pointer to a structure representing the dimensionality interpretation + /// that the buffer should have. NOTE: the product of elements of `dims` + /// and the element datatype size should be equal to `bytes` below. TfLiteIntArray* dims; - // Quantization information. + /// Quantization information. TfLiteQuantizationParams params; - // How memory is mapped - // kTfLiteMmapRo: Memory mapped read only. - // i.e. weights - // kTfLiteArenaRw: Arena allocated read write memory - // (i.e. temporaries, outputs). + /// How memory is mapped + /// kTfLiteMmapRo: Memory mapped read only. + /// i.e. weights + /// kTfLiteArenaRw: Arena allocated read write memory + /// (i.e. temporaries, outputs). TfLiteAllocationType allocation_type; - // The number of bytes required to store the data of this Tensor. I.e. - // (bytes of each element) * dims[0] * ... * dims[n-1]. For example, if - // type is kTfLiteFloat32 and dims = {3, 2} then - // bytes = sizeof(float) * 3 * 2 = 4 * 3 * 2 = 24. + /// The number of bytes required to store the data of this Tensor. I.e. + /// (bytes of each element) * dims[0] * ... * dims[n-1]. For example, if + /// type is kTfLiteFloat32 and dims = {3, 2} then + /// bytes = sizeof(float) * 3 * 2 = 4 * 3 * 2 = 24. size_t bytes; - // An opaque pointer to a tflite::MMapAllocation + /// An opaque pointer to a tflite::MMapAllocation const void* allocation; - // Null-terminated name of this tensor. + /// Null-terminated name of this tensor. const char* name; - // The delegate which knows how to handle `buffer_handle`. - // WARNING: This is an experimental interface that is subject to change. + /// The delegate which knows how to handle `buffer_handle`. + /// + /// WARNING: This is an experimental interface that is subject to change. struct TfLiteDelegate* delegate; - // An integer buffer handle that can be handled by `delegate`. - // The value is valid only when delegate is not null. - // WARNING: This is an experimental interface that is subject to change. + /// An integer buffer handle that can be handled by `delegate`. + /// The value is valid only when delegate is not null. + /// + /// WARNING: This is an experimental interface that is subject to change. TfLiteBufferHandle buffer_handle; - // If the delegate uses its own buffer (e.g. GPU memory), the delegate is - // responsible to set data_is_stale to true. - // `delegate->CopyFromBufferHandle` can be called to copy the data from - // delegate buffer. - // WARNING: This is an // experimental interface that is subject to change. + /// If the delegate uses its own buffer (e.g. GPU memory), the delegate is + /// responsible to set data_is_stale to true. + /// `delegate->CopyFromBufferHandle` can be called to copy the data from + /// delegate buffer. + /// + /// WARNING: This is an experimental interface that is subject to change. bool data_is_stale; - // True if the tensor is a variable. + /// True if the tensor is a variable. bool is_variable; - // Quantization information. Replaces params field above. + /// Quantization information. Replaces params field above. TfLiteQuantization quantization; - // Parameters used to encode a sparse tensor. - // This is optional. The field is NULL if a tensor is dense. - // WARNING: This is an experimental interface that is subject to change. + /// Parameters used to encode a sparse tensor. + /// This is optional. The field is NULL if a tensor is dense. 
+ /// + /// WARNING: This is an experimental interface that is subject to change. TfLiteSparsity* sparsity; - // Optional. Encodes shapes with unknown dimensions with -1. This field is - // only populated when unknown dimensions exist in a read-write tensor (i.e. - // an input or output tensor). (e.g. `dims` contains [1, 1, 1, 3] and - // `dims_signature` contains [1, -1, -1, 3]). If no unknown dimensions exist - // then `dims_signature` is either null, or set to an empty array. Note that - // this field only exists when TF_LITE_STATIC_MEMORY is not defined. + /// Optional. Encodes shapes with unknown dimensions with -1. This field is + /// only populated when unknown dimensions exist in a read-write tensor (i.e. + /// an input or output tensor). (e.g. `dims` contains [1, 1, 1, 3] and + /// `dims_signature` contains [1, -1, -1, 3]). If no unknown dimensions exist + /// then `dims_signature` is either null, or set to an empty array. Note that + /// this field only exists when TF_LITE_STATIC_MEMORY is not defined. const TfLiteIntArray* dims_signature; } TfLiteTensor; -// A structure representing an instance of a node. -// This structure only exhibits the inputs, outputs, user defined data and some -// node properties (like statefulness), not other features like the type. +/// A structure representing an instance of a node. +/// This structure only exhibits the inputs, outputs, user defined data and some +/// node properties (like statefulness), not other features like the type. typedef struct TfLiteNode { - // Inputs to this node expressed as indices into the simulator's tensors. + /// Inputs to this node expressed as indices into the simulator's tensors. TfLiteIntArray* inputs; - // Outputs to this node expressed as indices into the simulator's tensors. + /// Outputs to this node expressed as indices into the simulator's tensors. TfLiteIntArray* outputs; - // intermediate tensors to this node expressed as indices into the simulator's - // tensors. + /// intermediate tensors to this node expressed as indices into the + /// simulator's tensors. TfLiteIntArray* intermediates; - // Temporary tensors uses during the computations. This usually contains no - // tensors, but ops are allowed to change that if they need scratch space of - // any sort. + /// Temporary tensors uses during the computations. This usually contains no + /// tensors, but ops are allowed to change that if they need scratch space of + /// any sort. TfLiteIntArray* temporaries; - // Opaque data provided by the node implementer through `Registration.init`. + /// Opaque data provided by the node implementer through `Registration.init`. void* user_data; - // Opaque data provided to the node if the node is a builtin. This is usually - // a structure defined in builtin_op_data.h + /// Opaque data provided to the node if the node is a builtin. This is usually + /// a structure defined in builtin_op_data.h void* builtin_data; - // Custom initial data. This is the opaque data provided in the flatbuffer. - // WARNING: This is an experimental interface that is subject to change. + /// Custom initial data. This is the opaque data provided in the flatbuffer. + /// + /// WARNING: This is an experimental interface that is subject to change. const void* custom_initial_data; int custom_initial_data_size; - // The pointer to the delegate. This is non-null only when the node is - // created by calling `interpreter.ModifyGraphWithDelegate`. - // WARNING: This is an experimental interface that is subject to change. 
+ /// The pointer to the delegate. This is non-null only when the node is + /// created by calling `interpreter.ModifyGraphWithDelegate`. + /// + /// WARNING: This is an experimental interface that is subject to change. struct TfLiteDelegate* delegate; - // Whether this op might have side effect (e.g. stateful op). + /// Whether this op might have side effect (e.g. stateful op). bool might_have_side_effect; } TfLiteNode; #else // defined(TF_LITE_STATIC_MEMORY)? @@ -602,90 +669,89 @@ typedef struct TfLiteNode { void* builtin_data; // Custom initial data. This is the opaque data provided in the flatbuffer. + // // WARNING: This is an experimental interface that is subject to change. const void* custom_initial_data; int custom_initial_data_size; } TfLiteNode; #endif // TF_LITE_STATIC_MEMORY -// Light-weight tensor struct for TF Micro runtime. Provides the minimal amount -// of information required for a kernel to run during TfLiteRegistration::Eval. +/// Light-weight tensor struct for TF Micro runtime. Provides the minimal amount +/// of information required for a kernel to run during TfLiteRegistration::Eval. // TODO(b/160955687): Move this field into TF_LITE_STATIC_MEMORY when TFLM // builds with this flag by default internally. typedef struct TfLiteEvalTensor { - // A union of data pointers. The appropriate type should be used for a typed - // tensor based on `type`. + /// A union of data pointers. The appropriate type should be used for a typed + /// tensor based on `type`. TfLitePtrUnion data; - // A pointer to a structure representing the dimensionality interpretation - // that the buffer should have. + /// A pointer to a structure representing the dimensionality interpretation + /// that the buffer should have. TfLiteIntArray* dims; - // The data type specification for data stored in `data`. This affects - // what member of `data` union should be used. + /// The data type specification for data stored in `data`. This affects + /// what member of `data` union should be used. TfLiteType type; } TfLiteEvalTensor; #ifndef TF_LITE_STATIC_MEMORY -// Free data memory of tensor `t`. +/// Free data memory of tensor `t`. void TfLiteTensorDataFree(TfLiteTensor* t); -// Free quantization data. +/// Free quantization data. void TfLiteQuantizationFree(TfLiteQuantization* quantization); -// Free sparsity parameters. +/// Free sparsity parameters. void TfLiteSparsityFree(TfLiteSparsity* sparsity); -// Free memory of tensor `t`. +/// Free memory of tensor `t`. void TfLiteTensorFree(TfLiteTensor* t); -// Set all of a tensor's fields (and free any previously allocated data). +/// Set all of a tensor's fields (and free any previously allocated data). void TfLiteTensorReset(TfLiteType type, const char* name, TfLiteIntArray* dims, TfLiteQuantizationParams quantization, char* buffer, size_t size, TfLiteAllocationType allocation_type, const void* allocation, bool is_variable, TfLiteTensor* tensor); -// Copies the contents of 'src' in 'dst'. -// Function does nothing if either 'src' or 'dst' is passed as nullptr and -// return kTfLiteOk. -// Returns kTfLiteError if 'src' and 'dst' doesn't have matching data size. -// Note function copies contents, so it won't create new data pointer -// or change allocation type. -// All Tensor related properties will be copied from 'src' to 'dst' like -// quantization, sparsity, ... +/// Copies the contents of `src` in `dst`. +/// Function does nothing if either `src` or `dst` is passed as nullptr and +/// return `kTfLiteOk`. 
+/// Returns `kTfLiteError` if `src` and `dst` don't have matching data size.
+/// Note that the function copies contents, so it won't create a new data
+/// pointer or change the allocation type.
+/// All tensor-related properties, like quantization and sparsity, will be
+/// copied from `src` to `dst`.
 TfLiteStatus TfLiteTensorCopy(const TfLiteTensor* src, TfLiteTensor* dst);

-// Change the size of the memory block owned by `tensor` to `num_bytes`.
-// Tensors with allocation types other than `kTfLiteDynamic` will be ignored and
-// a kTfLiteOk will be returned.
-// `tensor`'s internal data buffer will be assigned a pointer
-// which can safely be passed to free or realloc if `num_bytes` is zero.
-// If `preserve_data` is true, tensor data will be unchanged in the range from
-// the start of the region up to the minimum of the old and new sizes. In the
-// case of NULL tensor, or an error allocating new memory, returns
-// `kTfLiteError`.
+/// Change the size of the memory block owned by `tensor` to `num_bytes`.
+/// Tensors with allocation types other than `kTfLiteDynamic` will be ignored
+/// and `kTfLiteOk` will be returned. `tensor`'s internal data buffer will be
+/// assigned a pointer which can safely be passed to free or realloc if
+/// `num_bytes` is zero. If `preserve_data` is true, tensor data will be
+/// unchanged in the range from the start of the region up to the minimum of
+/// the old and new sizes. In the case of a NULL tensor, or an error allocating
+/// new memory, returns `kTfLiteError`.
 TfLiteStatus TfLiteTensorResizeMaybeCopy(size_t num_bytes, TfLiteTensor* tensor,
                                          bool preserve_data);

-// Change the size of the memory block owned by `tensor` to `num_bytes`.
-// Tensors with allocation types other than kTfLiteDynamic will be ignored and
-// a kTfLiteOk will be returned.
-// `tensor`'s internal data buffer will be assigned a pointer
-// which can safely be passed to free or realloc if `num_bytes` is zero.
-// Tensor data will be unchanged in the range from the start of the region up to
-// the minimum of the old and new sizes. In the case
-// of NULL tensor, or an error allocating new memory, returns `kTfLiteError`.
+/// Change the size of the memory block owned by `tensor` to `num_bytes`.
+/// Tensors with allocation types other than `kTfLiteDynamic` will be ignored
+/// and `kTfLiteOk` will be returned. `tensor`'s internal data buffer will be
+/// assigned a pointer which can safely be passed to free or realloc if
+/// `num_bytes` is zero. Tensor data will be unchanged in the range from the
+/// start of the region up to the minimum of the old and new sizes. In the case
+/// of a NULL tensor, or an error allocating new memory, returns `kTfLiteError`.
 TfLiteStatus TfLiteTensorRealloc(size_t num_bytes, TfLiteTensor* tensor);
 #endif  // TF_LITE_STATIC_MEMORY

-// WARNING: This is an experimental interface that is subject to change.
-//
-// Currently, TfLiteDelegateParams has to be allocated in a way that it's
-// trivially destructable. It will be stored as `builtin_data` field in
-// `TfLiteNode` of the delegate node.
-//
-// See also the `CreateDelegateParams` function in `interpreter.cc` details.
+/// WARNING: This is an experimental interface that is subject to change.
+///
+/// Currently, `TfLiteDelegateParams` has to be allocated in a way that it's
+/// trivially destructible. It will be stored in the `builtin_data` field of
+/// the delegate node's `TfLiteNode`.
+///
+/// See also the `CreateDelegateParams` function in `interpreter.cc` for
+/// details.
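A minimal usage sketch of the copy/resize helpers documented in the hunk above. The function name and the assumption that `dst` is a `kTfLiteDynamic` tensor with metadata matching `src` are hypothetical; this is not part of the patch itself:

#include "tensorflow/lite/c/common.h"

/* Sketch only: grow `dst` to `src`'s size, then copy `src` into it. */
TfLiteStatus CloneTensorData(const TfLiteTensor* src, TfLiteTensor* dst) {
  /* For non-kTfLiteDynamic tensors this is a documented no-op returning
   * kTfLiteOk, so a success status alone is not a size guarantee. */
  TfLiteStatus status =
      TfLiteTensorResizeMaybeCopy(src->bytes, dst, /*preserve_data=*/false);
  if (status != kTfLiteOk) return status;
  /* Copies contents and tensor properties (quantization, sparsity, ...);
   * it does not create a new data pointer or change the allocation type. */
  return TfLiteTensorCopy(src, dst);
}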
 typedef struct TfLiteDelegateParams {
   struct TfLiteDelegate* delegate;
   TfLiteIntArray* nodes_to_replace;
@@ -693,14 +759,14 @@ typedef struct TfLiteDelegateParams {
   TfLiteIntArray* output_tensors;
 } TfLiteDelegateParams;

-// WARNING: This is an experimental interface that is subject to change.
-//
-// Currently, TfLiteOpaqueDelegateParams has to be allocated in a way that it's
-// trivially destructable. It will be stored as `builtin_data` field in
-// `TfLiteNode` of the delegate node.
-//
-// See also the `CreateOpaqueDelegateParams` function in `subgraph.cc`
-// details.
+/// WARNING: This is an experimental interface that is subject to change.
+///
+/// Currently, `TfLiteOpaqueDelegateParams` has to be allocated in a way that
+/// it's trivially destructible. It will be stored in the `builtin_data` field
+/// of the delegate node's `TfLiteNode`.
+///
+/// See also the `CreateOpaqueDelegateParams` function in `subgraph.cc` for
+/// details.
 typedef struct TfLiteOpaqueDelegateParams {
   TfLiteOpaqueDelegate* delegate;
   void* delegate_data;
@@ -709,371 +775,430 @@ typedef struct TfLiteOpaqueDelegateParams {
   TfLiteIntArray* output_tensors;
 } TfLiteOpaqueDelegateParams;

+/// `TfLiteContext` allows an op to access the tensors.
+///
+/// `TfLiteContext` is a struct that is created by the TF Lite runtime
+/// and passed to the "methods" (C function pointers) in the
+/// `TfLiteRegistration` struct that are used to define custom ops and custom
+/// delegate kernels. It contains information and methods (C function pointers)
+/// that can be called by the code implementing a custom op or a custom
+/// delegate kernel. These methods provide access to the context in which that
+/// custom op or custom delegate kernel occurs, such as access to the input and
+/// output tensors for that op, as well as methods for allocating memory
+/// buffers and intermediate tensors, etc.
+///
+/// See also `TfLiteOpaqueContext`, which is a more ABI-stable equivalent.
 typedef struct TfLiteContext {
-  // Number of tensors in the context.
+  /// Number of tensors in the context.
   size_t tensors_size;

-  // The execution plan contains a list of the node indices in execution
-  // order. execution_plan->size is the current number of nodes. And,
-  // execution_plan->data[0] is the first node that needs to be run.
-  // TfLiteDelegates can traverse the current execution plan by iterating
-  // through each member of this array and using GetNodeAndRegistration() to
-  // access details about a node. i.e.
-  //
-  // TfLiteIntArray* execution_plan;
-  // TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &execution_plan));
-  // for (int exec_index = 0; exec_index < execution_plan->size; exec_index++) {
-  //    int node_index = execution_plan->data[exec_index];
-  //    TfLiteNode* node;
-  //    TfLiteRegistration* reg;
-  //    context->GetNodeAndRegistration(context, node_index, &node, &reg);
-  // }
-  // Note: the memory pointed by `*execution_plan` is OWNED by TfLite runtime.
-  // Future calls to GetExecutionPlan invalidates earlier outputs. The following
-  // code snippet shows the issue of such an invocation pattern. After calling
-  // CheckNode, subsequent access to `plan_1st` is undefined.
-  //
-  // void CheckNode(const TfLiteNode* node) {
-  // ...
-  //   TfLiteIntArray* plan_2nd;
-  //   TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &plan_2nd));
-  //   ...
-  // }
-  //
-  // TfLiteIntArray* plan_1st;
-  // TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &plan_1st));
-  // for (int exec_index = 0; exec_index < plan_1st->size; exec_index++) {
-  //    int node_index = plan_1st->data[exec_index];
-  //    TfLiteNode* node;
-  //    TfLiteRegistration* reg;
-  //    context->GetNodeAndRegistration(context, node_index, &node, &reg);
-  //    CheckNode(node);
-  // }
-  //
-  // WARNING: This is an experimental interface that is subject to change.
+  /// The execution plan contains a list of the node indices in execution
+  /// order. execution_plan->size is the current number of nodes. And,
+  /// execution_plan->data[0] is the first node that needs to be run.
+  /// TfLiteDelegates can traverse the current execution plan by iterating
+  /// through each member of this array and using GetNodeAndRegistration() to
+  /// access details about a node. i.e.
+  ///
+  ///     TfLiteIntArray* execution_plan;
+  ///     TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context,
+  ///                                                     &execution_plan));
+  ///     for (int exec_index = 0; exec_index < execution_plan->size;
+  ///          exec_index++) {
+  ///       int node_index = execution_plan->data[exec_index];
+  ///       TfLiteNode* node;
+  ///       TfLiteRegistration* reg;
+  ///       context->GetNodeAndRegistration(context, node_index, &node, &reg);
+  ///     }
+  ///
+  /// Note: the memory pointed to by `*execution_plan` is OWNED by the TfLite
+  /// runtime. Future calls to GetExecutionPlan invalidate earlier outputs. The
+  /// following code snippet shows the issue of such an invocation pattern.
+  /// After calling CheckNode, subsequent access to `plan_1st` is undefined.
+  ///
+  ///     void CheckNode(const TfLiteNode* node) {
+  ///       ...
+  ///       TfLiteIntArray* plan_2nd;
+  ///       TF_LITE_ENSURE_STATUS(
+  ///           context->GetExecutionPlan(context, &plan_2nd)
+  ///       );
+  ///       ...
+  ///     }
+  ///
+  ///     TfLiteIntArray* plan_1st;
+  ///     TF_LITE_ENSURE_STATUS(context->GetExecutionPlan(context, &plan_1st));
+  ///     for (int exec_index = 0; exec_index < plan_1st->size; exec_index++) {
+  ///       int node_index = plan_1st->data[exec_index];
+  ///       TfLiteNode* node;
+  ///       TfLiteRegistration* reg;
+  ///       context->GetNodeAndRegistration(context, node_index, &node, &reg);
+  ///       CheckNode(node);
+  ///     }
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*GetExecutionPlan)(struct TfLiteContext* context,
                                    TfLiteIntArray** execution_plan);

-  // An array of tensors in the interpreter context (of length `tensors_size`)
+  /// An array of tensors in the interpreter context (of length `tensors_size`)
   TfLiteTensor* tensors;

-  // opaque full context ptr (an opaque c++ data structure)
+  /// opaque full context ptr (an opaque c++ data structure)
   void* impl_;

-  // Request memory pointer be resized. Updates dimensions on the tensor.
-  // NOTE: ResizeTensor takes ownership of newSize.
+  /// Request memory pointer be resized. Updates dimensions on the tensor.
+  ///
+  /// NOTE: ResizeTensor takes ownership of newSize.
   TfLiteStatus (*ResizeTensor)(struct TfLiteContext*, TfLiteTensor* tensor,
                                TfLiteIntArray* new_size);
-  // Request that an error be reported with format string msg.
+  /// Request that an error be reported with format string msg.
   void (*ReportError)(struct TfLiteContext*, const char* msg, ...);

-  // Add `tensors_to_add` tensors, preserving pre-existing Tensor entries. If
-  // non-null, the value pointed to by `first_new_tensor_index` will be set to
-  // the index of the first new tensor.
+  /// Add `tensors_to_add` tensors, preserving pre-existing Tensor entries. If
+  /// non-null, the value pointed to by `first_new_tensor_index` will be set
+  /// to the index of the first new tensor.
   TfLiteStatus (*AddTensors)(struct TfLiteContext*, int tensors_to_add,
                              int* first_new_tensor_index);

-  // Get a Tensor node by node_index.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Get a Tensor node by node_index.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*GetNodeAndRegistration)(
       struct TfLiteContext*, int node_index, TfLiteNode** node,
       struct TfLiteRegistration** registration);

-  // Replace ops with one or more stub delegate operations. This function
-  // does not take ownership of `nodes_to_replace`.
+  /// Replace ops with one or more stub delegate operations. This function
+  /// does not take ownership of `nodes_to_replace`.
   TfLiteStatus (*ReplaceNodeSubsetsWithDelegateKernels)(
       struct TfLiteContext*, struct TfLiteRegistration registration,
       const TfLiteIntArray* nodes_to_replace, struct TfLiteDelegate* delegate);

-  // Number of threads that are recommended to subsystems like gemmlowp and
-  // eigen.
+  /// Number of threads that are recommended to subsystems like gemmlowp and
+  /// eigen.
   int recommended_num_threads;

-  // Access external contexts by type.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Access external contexts by type.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteExternalContext* (*GetExternalContext)(struct TfLiteContext*,
                                                TfLiteExternalContextType);

-  // Set the value of a external context. Does not take ownership of the
-  // pointer.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Set the value of an external context. Does not take ownership of the
+  /// pointer.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   void (*SetExternalContext)(struct TfLiteContext*, TfLiteExternalContextType,
                              TfLiteExternalContext*);

-  // Flag for allowing float16 precision for FP32 calculation.
-  // default: false.
-  // WARNING: This is an experimental API and subject to change.
+  /// Flag for allowing float16 precision for FP32 calculation.
+  /// default: false.
+  ///
+  /// WARNING: This is an experimental API and subject to change.
   bool allow_fp32_relax_to_fp16;

-  // Pointer to the op-level profiler, if set; nullptr otherwise.
+  /// Pointer to the op-level profiler, if set; nullptr otherwise.
   void* profiler;

-  // Allocate persistent buffer which has the same life time as the interpreter.
-  // Returns nullptr on failure.
-  // The memory is allocated from heap for TFL, and from tail in TFLM.
-  // This method is only available in Init or Prepare stage.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Allocate a persistent buffer which has the same lifetime as the
+  /// interpreter. Returns `nullptr` on failure. The memory is allocated from
+  /// heap for TFL, and from tail in TFLM. This method is only available in
+  /// the `Init` or `Prepare` stage.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   void* (*AllocatePersistentBuffer)(struct TfLiteContext* ctx, size_t bytes);

-  // Allocate a buffer which will be deallocated right after invoke phase.
-  // The memory is allocated from heap in TFL, and from volatile arena in TFLM.
-  // This method is only available in invoke stage.
-  // NOTE: If possible use RequestScratchBufferInArena method to avoid memory
-  // allocation during inference time.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Allocate a buffer which will be deallocated right after the invoke phase.
+  /// The memory is allocated from heap in TFL, and from volatile arena in
+  /// TFLM. This method is only available in the invoke stage.
+  ///
+  /// NOTE: If possible use the `RequestScratchBufferInArena` method to avoid
+  /// memory allocation during inference time.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*AllocateBufferForEval)(struct TfLiteContext* ctx, size_t bytes,
                                         void** ptr);

-  // Request a scratch buffer in the arena through static memory planning.
-  // This method is only available in Prepare stage and the buffer is allocated
-  // by the interpreter between Prepare and Eval stage. In Eval stage,
-  // GetScratchBuffer API can be used to fetch the address.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Request a scratch buffer in the arena through static memory planning.
+  /// This method is only available in the `Prepare` stage and the buffer is
+  /// allocated by the interpreter between the Prepare and Eval stages. In the
+  /// `Eval` stage, the `GetScratchBuffer` API can be used to fetch the
+  /// address.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*RequestScratchBufferInArena)(struct TfLiteContext* ctx,
                                               size_t bytes, int* buffer_idx);

-  // Get the scratch buffer pointer.
-  // This method is only available in Eval stage.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Get the scratch buffer pointer.
+  /// This method is only available in the Eval stage.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   void* (*GetScratchBuffer)(struct TfLiteContext* ctx, int buffer_idx);

-  // Resize the memory pointer of the `tensor`. This method behaves the same as
-  // `ResizeTensor`, except that it makes a copy of the shape array internally
-  // so the shape array could be deallocated right afterwards.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Resize the memory pointer of the `tensor`. This method behaves the same
+  /// as `ResizeTensor`, except that it makes a copy of the shape array
+  /// internally so the shape array could be deallocated right afterwards.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*ResizeTensorExplicit)(struct TfLiteContext* ctx,
                                        TfLiteTensor* tensor, int dims,
                                        const int* shape);

-  // This method provides a preview of post-delegation partitioning. Each
-  // TfLiteDelegateParams in the referenced array corresponds to one instance of
-  // the delegate kernel.
-  // Example usage:
-  //
-  // TfLiteIntArray* nodes_to_replace = ...;
-  // TfLiteDelegateParams* params_array;
-  // int num_partitions = 0;
-  // TF_LITE_ENSURE_STATUS(context->PreviewDelegatePartitioning(
-  //    context, delegate, nodes_to_replace, &params_array, &num_partitions));
-  // for (int idx = 0; idx < num_partitions; idx++) {
-  //    const auto& partition_params = params_array[idx];
-  //    ...
-  // }
-  //
-  // NOTE: The context owns the memory referenced by partition_params_array. It
-  // will be cleared with another call to PreviewDelegatePartitioning, or after
-  // TfLiteDelegateParams::Prepare returns.
-  //
-  // WARNING: This is an experimental interface that is subject to change.
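The persistent/scratch buffer contract above is easiest to see as a Prepare/Eval pair. A sketch follows; the op and type names, the `1024`-byte size, and the assumption that `node->user_data` was set up in `init` are all hypothetical, not part of this patch:

#include "tensorflow/lite/c/common.h"

/* Hypothetical per-op state; in practice this would be allocated by the
 * op's `init` (e.g. via AllocatePersistentBuffer) and kept in
 * node->user_data. */
typedef struct {
  int scratch_index;
} ExampleOpData;

TfLiteStatus ExamplePrepare(TfLiteContext* context, TfLiteNode* node) {
  ExampleOpData* data = (ExampleOpData*)node->user_data;
  /* Legal only in the Prepare stage; the interpreter plans the arena
   * between Prepare and Eval. 1024 stands in for a size derived from the
   * op's tensor shapes. */
  return context->RequestScratchBufferInArena(context, 1024,
                                              &data->scratch_index);
}

TfLiteStatus ExampleEval(TfLiteContext* context, TfLiteNode* node) {
  ExampleOpData* data = (ExampleOpData*)node->user_data;
  /* Legal only in the Eval stage. */
  int8_t* scratch =
      (int8_t*)context->GetScratchBuffer(context, data->scratch_index);
  if (scratch == NULL) return kTfLiteError;
  /* ... use `scratch` as temporary working memory ... */
  return kTfLiteOk;
}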
+  /// This method provides a preview of post-delegation partitioning. Each
+  /// TfLiteDelegateParams in the referenced array corresponds to one instance
+  /// of the delegate kernel. Example usage:
+  ///
+  ///     TfLiteIntArray* nodes_to_replace = ...;
+  ///     TfLiteDelegateParams* params_array;
+  ///     int num_partitions = 0;
+  ///     TF_LITE_ENSURE_STATUS(context->PreviewDelegatePartitioning(
+  ///         context, delegate, nodes_to_replace, &params_array,
+  ///         &num_partitions));
+  ///     for (int idx = 0; idx < num_partitions; idx++) {
+  ///       const auto& partition_params = params_array[idx];
+  ///       ...
+  ///     }
+  ///
+  /// NOTE: The context owns the memory referenced by partition_params_array.
+  /// It will be cleared with another call to PreviewDelegatePartitioning, or
+  /// after TfLiteDelegateParams::Prepare returns.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*PreviewDelegatePartitioning)(
       struct TfLiteContext* context, const TfLiteIntArray* nodes_to_replace,
       TfLiteDelegateParams** partition_params_array, int* num_partitions);

-  // Returns a TfLiteTensor struct for a given index.
-  // WARNING: This is an experimental interface that is subject to change.
-  // WARNING: This method may not be available on all platforms.
+  /// Returns a TfLiteTensor struct for a given index.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
+  ///
+  /// WARNING: This method may not be available on all platforms.
   TfLiteTensor* (*GetTensor)(const struct TfLiteContext* context,
                              int tensor_idx);

-  // Returns a TfLiteEvalTensor struct for a given index.
-  // WARNING: This is an experimental interface that is subject to change.
-  // WARNING: This method may not be available on all platforms.
+  /// Returns a TfLiteEvalTensor struct for a given index.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
+  ///
+  /// WARNING: This method may not be available on all platforms.
   TfLiteEvalTensor* (*GetEvalTensor)(const struct TfLiteContext* context,
                                      int tensor_idx);

-  // Retrieves named metadata buffer from the TFLite model.
-  // Returns kTfLiteOk if metadata is successfully obtained from the flatbuffer
-  // Model: that is, there exists a `metadata` entry with given `name` string.
-  // (see TFLite's schema.fbs).
-  // The corresponding `buffer` information is populated in `ptr` & `bytes`.
-  // The data from `ptr` is valid for the lifetime of the Interpreter.
-  //
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Retrieves a named metadata buffer from the TFLite model.
+  /// Returns `kTfLiteOk` if metadata is successfully obtained from the
+  /// flatbuffer model: that is, there exists a `metadata` entry with the
+  /// given `name` string (see TFLite's schema.fbs).
+  /// The corresponding `buffer` information is populated in `ptr` & `bytes`.
+  /// The data from `ptr` is valid for the lifetime of the Interpreter.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*GetModelMetadata)(const struct TfLiteContext* context,
                                    const char* name, const char** ptr,
                                    size_t* bytes);

-  // Retrieves the corresponding TfLiteContext of a subgraph that the given
-  // subgraph_index points to and switches to the delegate context for that
-  // subgraph. If an invalid subgraph index is given, returns kTfLiteError.
-  // NOTE: This function is expected to be paired with ReleaseSubgraphContext()
-  // once the delegate preparation is done and/or the delegate context functions
-  // are no longer needed.
-  //
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Retrieves the corresponding TfLiteContext of a subgraph that the given
+  /// subgraph_index points to and switches to the delegate context for that
+  /// subgraph. If an invalid subgraph index is given, returns kTfLiteError.
+  ///
+  /// NOTE: This function is expected to be paired with
+  /// ReleaseSubgraphContext() once the delegate preparation is done and/or the
+  /// delegate context functions are no longer needed.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*AcquireSubgraphContext)(
       struct TfLiteContext* context, int subgraph_index,
       struct TfLiteContext** acquired_context);
-  // Releases the subgraph context by switching back to the TFLite kernel
-  // context for the subgraph that the given subgraph_index points to.
-  // NOTE: This function is expected to be used after AcquireSubgraphContext()
-  // once the delegate preparation is done and/or the delegate context functions
-  // are no longer needed.
-  //
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Releases the subgraph context by switching back to the TFLite kernel
+  /// context for the subgraph that the given subgraph_index points to.
+  ///
+  /// NOTE: This function is expected to be used after AcquireSubgraphContext()
+  /// once the delegate preparation is done and/or the delegate context
+  /// functions are no longer needed.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   TfLiteStatus (*ReleaseSubgraphContext)(struct TfLiteContext* context,
                                          int subgraph_index);
 } TfLiteContext;

-// `TfLiteRegistrationExternal` is an external version of `TfLiteRegistration`
-// for C API which doesn't use internal types (such as `TfLiteContext`) but only
-// uses stable API types (such as `TfLiteOpaqueContext`). The purpose of each
-// field is the exactly the same as with `TfLiteRegistration`.
-typedef struct TfLiteRegistrationExternal TfLiteRegistrationExternal;
-
-// The valid values of the `inplace_operator` field in `TfLiteRegistration`.
-// This allow an op to signal to the runtime that the same data pointer
-// may be passed as an input and output without impacting the result.
-// This does not mean that the memory can safely be reused, it is up to the
-// runtime to determine this, e.g. if another op consumes the same input or not
-// or if an input tensor has sufficient memory allocated to store the output
-// data.
-//
-// Setting these flags authorizes the runtime to set the data pointers of an
-// input and output tensor to the same value. In such cases, the memory required
-// by the output must be less than or equal to that required by the shared
-// input, never greater. If kTfLiteInplaceOpDataUnmodified is set, then the
-// runtime can share the same input tensor with multiple operator's outputs,
-// provided that kTfLiteInplaceOpDataUnmodified is set for all of them.
-// Otherwise, if an input tensor is consumed by multiple operators, it may only
-// be shared with the operator which is the last to consume it.
-//
-// Note that this is a bitmask, so the values should be 1, 2, 4, 8, ...etc.
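Since the acquire/release notes above insist on pairing, a brief sketch of the intended call pattern may help. The function name and the assumption that `subgraph_index` is valid are hypothetical:

#include "tensorflow/lite/c/common.h"

/* Sketch only: briefly switch to a subgraph's delegate context, then
 * switch back. */
TfLiteStatus InspectSubgraph(TfLiteContext* context, int subgraph_index) {
  struct TfLiteContext* subgraph_context = NULL;
  TfLiteStatus status = context->AcquireSubgraphContext(
      context, subgraph_index, &subgraph_context);
  /* An invalid subgraph index is reported as kTfLiteError. */
  if (status != kTfLiteOk) return status;
  /* ... inspect subgraph_context->tensors_size / subgraph_context->tensors
   * while the delegate context is active ... */
  /* Pair every successful acquire with a release. */
  return context->ReleaseSubgraphContext(context, subgraph_index);
}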
+/// `TfLiteOperator` is an external version of `TfLiteRegistration`
+/// for C API which doesn't use internal types (such as `TfLiteContext`) but
+/// only uses stable API types (such as `TfLiteOpaqueContext`). The purpose of
+/// each field is exactly the same as with `TfLiteRegistration`.
+typedef struct TfLiteOperator TfLiteOperator;
+
+#ifndef DOXYGEN_SKIP
+// For backwards compatibility.
+// Deprecated. Use TfLiteOperator instead.
+typedef TfLiteOperator TfLiteRegistrationExternal;
+#endif
+
+/// The valid values of the `inplace_operator` field in `TfLiteRegistration`.
+/// This allows an op to signal to the runtime that the same data pointer
+/// may be passed as an input and output without impacting the result.
+/// This does not mean that the memory can safely be reused, it is up to the
+/// runtime to determine this, e.g. if another op consumes the same input or
+/// not or if an input tensor has sufficient memory allocated to store the
+/// output data.
+///
+/// Setting these flags authorizes the runtime to set the data pointers of an
+/// input and output tensor to the same value. In such cases, the memory
+/// required by the output must be less than or equal to that required by the
+/// shared input, never greater. If kTfLiteInplaceOpDataUnmodified is set, then
+/// the runtime can share the same input tensor with multiple operators'
+/// outputs, provided that kTfLiteInplaceOpDataUnmodified is set for all of
+/// them. Otherwise, if an input tensor is consumed by multiple operators, it
+/// may only be shared with the operator which is the last to consume it.
+///
+/// Note that this is a bitmask, so the values should be 1, 2, 4, 8, ...etc.
 typedef enum {
-  // The default value. This indicates that the same data pointer cannot safely
-  // be passed as an op's input and output.
+  /// The default value. This indicates that the same data pointer cannot
+  /// safely be passed as an op's input and output.
   kTfLiteInplaceOpNone = 0,
-  // This indicates that an op's first output's data is identical to its first
-  // input's data, for example Reshape.
+  /// This indicates that an op's first output's data is identical to its first
+  /// input's data, for example Reshape.
   kTfLiteInplaceOpDataUnmodified = 1,
-  // Setting kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput means
-  // that InputN may be shared with OutputN instead of with the first output.
-  // This flag requires one or more of kTfLiteInplaceOpInputNShared to be set.
+  /// Setting kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput means
+  /// that InputN may be shared with OutputN instead of with the first output.
+  /// This flag requires one or more of kTfLiteInplaceOpInputNShared to be set.
   kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput = 2,
-  // kTfLiteInplaceOpInputNShared indicates that it is safe for an op to share
-  // InputN's data pointer with an output tensor. If
-  // kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput is set then
-  // kTfLiteInplaceOpInputNShared indicates that InputN may be shared
-  // with OutputN, otherwise kTfLiteInplaceOpInputNShared indicates that InputN
-  // may be shared with the first output.
-  //
-  // Indicates that an op's first input may be shared with the first output
-  // tensor. kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput has
-  // no impact on the behavior allowed by this flag.
+  /// kTfLiteInplaceOpInputNShared indicates that it is safe for an op to
+  /// share InputN's data pointer with an output tensor. If
+  /// kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput is set then
+  /// kTfLiteInplaceOpInputNShared indicates that InputN may be shared
+  /// with OutputN, otherwise kTfLiteInplaceOpInputNShared indicates that
+  /// InputN may be shared with the first output.
+  ///
+  /// Indicates that an op's first input may be shared with the first output
+  /// tensor. kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput has
+  /// no impact on the behavior allowed by this flag.
   kTfLiteInplaceOpInput0Shared = 4,
-  // Indicates that an op's second input may be shared with the first output
-  // if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput is not set
-  // or second output if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput
-  // is set.
+  /// Indicates that an op's second input may be shared with the first output
+  /// if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput is not set
+  /// or second output if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput
+  /// is set.
   kTfLiteInplaceOpInput1Shared = 8,
-  // Indicates that an op's third input may be shared with the first output
-  // if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput is not set
-  // or third output if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput is
-  // set.
+  /// Indicates that an op's third input may be shared with the first output
+  /// if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput is not set
+  /// or third output if kTfLiteInplaceInputCanBeSharedWithCorrespondingOutput
+  /// is set.
   kTfLiteInplaceOpInput2Shared = 16,
-  // Placeholder to ensure that enum can hold 64 bit values to accommodate
-  // future fields.
+  /// Placeholder to ensure that enum can hold 64 bit values to accommodate
+  /// future fields.
   kTfLiteInplaceOpMaxValue = UINT64_MAX,
 } TfLiteInPlaceOp;

-// The number of shareable inputs supported.
+/// The number of shareable inputs supported.
 static const int kTfLiteMaxSharableOpInputs = 3;

+/// `TfLiteRegistration` defines the implementation of an operation
+/// (a built-in op, custom op, or custom delegate kernel).
+///
+/// It is a struct containing "methods" (C function pointers) that will be
+/// invoked by the TF Lite runtime to evaluate instances of the operation.
+///
+/// See also `TfLiteOperator` which is a more ABI-stable equivalent.
 typedef struct TfLiteRegistration {
-  // Initializes the op from serialized data.
-  // Called only *once* for the lifetime of the op, so any one-time allocations
-  // should be made here (unless they depend on tensor sizes).
-  //
-  // If a built-in op:
-  //   `buffer` is the op's params data (TfLiteLSTMParams*).
-  //   `length` is zero.
-  // If custom op:
-  //   `buffer` is the op's `custom_options`.
-  //   `length` is the size of the buffer.
-  //
-  // Returns a type-punned (i.e. void*) opaque data (e.g. a primitive pointer
-  // or an instance of a struct).
-  //
-  // The returned pointer will be stored with the node in the `user_data` field,
-  // accessible within prepare and invoke functions below.
-  // NOTE: if the data is already in the desired format, simply implement this
-  // function to return `nullptr` and implement the free function to be a no-op.
+  /// Initializes the op from serialized data.
+  /// Called only *once* for the lifetime of the op, so any one-time
+  /// allocations should be made here (unless they depend on tensor sizes).
+  ///
+  /// * If a built-in op:
+  ///   * `buffer` is the op's params data (TfLiteLSTMParams*).
+  ///   * `length` is zero.
+  /// * If a custom op:
+  ///   * `buffer` is the op's `custom_options`.
+  ///   * `length` is the size of the buffer.
+  ///
+  /// Returns a type-punned (i.e. void*) opaque data (e.g. a primitive pointer
+  /// or an instance of a struct).
+  ///
+  /// The returned pointer will be stored with the node in the `user_data`
+  /// field, accessible within prepare and invoke functions below.
+  ///
+  /// NOTE: if the data is already in the desired format, simply implement this
+  /// function to return `nullptr` and implement the free function to be a
+  /// no-op.
   void* (*init)(TfLiteContext* context, const char* buffer, size_t length);

-  // The pointer `buffer` is the data previously returned by an init invocation.
+  /// The pointer `buffer` is the data previously returned by an init
+  /// invocation.
   void (*free)(TfLiteContext* context, void* buffer);

-  // prepare is called when the inputs this node depends on have been resized.
-  // context->ResizeTensor() can be called to request output tensors to be
-  // resized.
-  // Can be called multiple times for the lifetime of the op.
-  //
-  // Returns kTfLiteOk on success.
+  /// prepare is called when the inputs this node depends on have been resized.
+  /// `context->ResizeTensor()` can be called to request output tensors to be
+  /// resized.
+  /// Can be called multiple times for the lifetime of the op.
+  ///
+  /// Returns `kTfLiteOk` on success.
   TfLiteStatus (*prepare)(TfLiteContext* context, TfLiteNode* node);

-  // Execute the node (should read node->inputs and output to node->outputs).
-  // Returns kTfLiteOk on success.
+  /// Execute the node (should read `node->inputs` and output to
+  /// `node->outputs`).
+  ///
+  /// Returns `kTfLiteOk` on success.
   TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node);

-  // profiling_string is called during summarization of profiling information
-  // in order to group executions together. Providing a value here will cause a
-  // given op to appear multiple times is the profiling report. This is
-  // particularly useful for custom ops that can perform significantly
-  // different calculations depending on their `user-data`.
+  /// `profiling_string` is called during summarization of profiling
+  /// information in order to group executions together. Providing a value
+  /// here will cause a given op to appear multiple times in the profiling
+  /// report. This is particularly useful for custom ops that can perform
+  /// significantly different calculations depending on their `user_data`.
   const char* (*profiling_string)(const TfLiteContext* context,
                                   const TfLiteNode* node);

-  // Builtin codes. If this kernel refers to a builtin this is the code
-  // of the builtin. This is so we can do marshaling to other frameworks like
-  // NN API.
-  // Note: It is the responsibility of the registration binder to set this
-  // properly.
+  /// Builtin codes. If this kernel refers to a builtin this is the code
+  /// of the builtin. This is so we can do marshaling to other frameworks like
+  /// NN API.
+  ///
+  /// Note: It is the responsibility of the registration binder to set this
+  /// properly.
   int32_t builtin_code;

-  // Custom op name. If the op is a builtin, this will be null.
-  // Note: It is the responsibility of the registration binder to set this
-  // properly.
-  // WARNING: This is an experimental interface that is subject to change.
+  /// Custom op name. If the op is a builtin, this will be `null`.
+  ///
+  /// Note: It is the responsibility of the registration binder to set this
+  /// properly.
+  ///
+  /// WARNING: This is an experimental interface that is subject to change.
   const char* custom_name;

-  // The version of the op.
-  // Note: It is the responsibility of the registration binder to set this
-  // properly.
+  /// The version of the op.
+  /// Note: It is the responsibility of the registration binder to set this
+  /// properly.
   int version;

-  // The external version of `TfLiteRegistration`. Since we can't use internal
-  // types (such as `TfLiteContext`) for C API to maintain ABI stability.
-  // C API user will provide `TfLiteRegistrationExternal` to implement custom
-  // ops. We keep it inside of `TfLiteRegistration` and use it to route
-  // callbacks properly.
-  TfLiteRegistrationExternal* registration_external;
-
-  // Retrieves asynchronous kernel.
-  //
-  // If the `async_kernel` field is nullptr, it means the operation described by
-  // this TfLiteRegistration object does not support asynchronous execution.
-  // Otherwise, the function that the field points to should only be called for
-  // delegate kernel nodes, i.e. `node` should be a delegate kernel node created
-  // by applying a delegate.
-  // If the function returns nullptr, that means that the underlying delegate
-  // does not support asynchronous execution for this `node`.
+  /// The external (i.e. ABI-stable) version of `TfLiteRegistration`.
+  /// Since we can't use internal types (such as `TfLiteContext`) in the C API,
+  /// in order to maintain ABI stability, C API users will provide
+  /// `TfLiteOperator` to implement custom ops. We keep it inside
+  /// `TfLiteRegistration` and use it to route callbacks properly.
+  TfLiteOperator* registration_external;
+
+  /// Retrieves asynchronous kernel.
+  ///
+  /// If the `async_kernel` field is nullptr, it means the operation described
+  /// by this TfLiteRegistration object does not support asynchronous
+  /// execution. Otherwise, the function that the field points to should only
+  /// be called for delegate kernel nodes, i.e. `node` should be a delegate
+  /// kernel node created by applying a delegate. If the function returns
+  /// nullptr, that means that the underlying delegate does not support
+  /// asynchronous execution for this `node`.
   struct TfLiteAsyncKernel* (*async_kernel)(TfLiteContext* context,
                                             TfLiteNode* node);

-  // Indicates if an operator's output may safely overwrite its inputs.
-  // See the comments in `TfLiteInPlaceOp`.
+  /// Indicates if an operator's output may safely overwrite its inputs.
+  /// See the comments in `TfLiteInPlaceOp`.
   uint64_t inplace_operator;
 } TfLiteRegistration;

 /// \private
-// Old version of `TfLiteRegistration` to maintain binary backward
-// compatibility.
-// The legacy registration type must be a POD struct type whose field types must
-// be a prefix of the field types in TfLiteRegistration, and offset of the first
-// field in TfLiteRegistration that is not present in the legacy registration
-// type must be greater than or equal to the size of the legacy registration
-// type.
-// WARNING: This structure is deprecated / not an official part of the
-// API. It should be only used for binary backward compatibility.
+/// Old version of `TfLiteRegistration` to maintain binary backward
+/// compatibility.
+/// The legacy registration type must be a POD struct type whose field types
+/// must be a prefix of the field types in TfLiteRegistration, and offset of
+/// the first field in TfLiteRegistration that is not present in the legacy
+/// registration type must be greater than or equal to the size of the legacy
+/// registration type.
+///
+/// WARNING: This structure is deprecated / not an official part of the
+/// API. It should only be used for binary backward compatibility.
 typedef struct TfLiteRegistration_V3 {
   void* (*init)(TfLiteContext* context, const char* buffer, size_t length);
   void (*free)(TfLiteContext* context, void* buffer);
@@ -1084,21 +1209,22 @@ typedef struct TfLiteRegistration_V3 {
   int32_t builtin_code;
   const char* custom_name;
   int version;
-  TfLiteRegistrationExternal* registration_external;
+  TfLiteOperator* registration_external;
   struct TfLiteAsyncKernel* (*async_kernel)(TfLiteContext* context,
                                             TfLiteNode* node);
 } TfLiteRegistration_V3;

 /// \private
-// Old version of `TfLiteRegistration` to maintain binary backward
-// compatibility.
-// The legacy registration type must be a POD struct type whose field types must
-// be a prefix of the field types in TfLiteRegistration, and offset of the first
-// field in TfLiteRegistration that is not present in the legacy registration
-// type must be greater than or equal to the size of the legacy registration
-// type.
-// WARNING: This structure is deprecated / not an official part of the
-// API. It should be only used for binary backward compatibility.
+/// Old version of `TfLiteRegistration` to maintain binary backward
+/// compatibility.
+/// The legacy registration type must be a POD struct type whose field types
+/// must be a prefix of the field types in TfLiteRegistration, and offset of
+/// the first field in TfLiteRegistration that is not present in the legacy
+/// registration type must be greater than or equal to the size of the legacy
+/// registration type.
+///
+/// WARNING: This structure is deprecated / not an official part of the
+/// API. It should only be used for binary backward compatibility.
 typedef struct TfLiteRegistration_V2 {
   void* (*init)(TfLiteContext* context, const char* buffer, size_t length);
   void (*free)(TfLiteContext* context, void* buffer);
@@ -1109,19 +1235,20 @@ typedef struct TfLiteRegistration_V2 {
   int32_t builtin_code;
   const char* custom_name;
   int version;
-  TfLiteRegistrationExternal* registration_external;
+  TfLiteOperator* registration_external;
 } TfLiteRegistration_V2;

 /// \private
-// Old version of `TfLiteRegistration` to maintain binary backward
-// compatibility.
-// The legacy registration type must be a POD struct type whose field types must
-// be a prefix of the field types in TfLiteRegistration, and offset of the first
-// field in TfLiteRegistration that is not present in the legacy registration
-// type must be greater than or equal to the size of the legacy registration
-// type.
-// WARNING: This structure is deprecated / not an official part of the
-// API. It should be only used for binary backward compatibility.
+/// Old version of `TfLiteRegistration` to maintain binary backward
+/// compatibility.
+/// The legacy registration type must be a POD struct type whose field types
+/// must be a prefix of the field types in TfLiteRegistration, and offset of
+/// the first field in TfLiteRegistration that is not present in the legacy
+/// registration type must be greater than or equal to the size of the legacy
+/// registration type.
+///
+/// WARNING: This structure is deprecated / not an official part of the
+/// API. It should only be used for binary backward compatibility.
 typedef struct TfLiteRegistration_V1 {
   void* (*init)(TfLiteContext* context, const char* buffer, size_t length);
   void (*free)(TfLiteContext* context, void* buffer);
@@ -1134,184 +1261,202 @@ typedef struct TfLiteRegistration_V1 {
   int version;
 } TfLiteRegistration_V1;

-// The flags used in `TfLiteDelegate`. Note that this is a bitmask, so the
-// values should be 1, 2, 4, 8, ...etc.
+/// The flags used in `TfLiteDelegate`. Note that this is a bitmask, so the
+/// values should be 1, 2, 4, 8, ...etc.
 typedef enum TfLiteDelegateFlags {
   kTfLiteDelegateFlagsNone = 0,
-  // The flag is set if the delegate can handle dynamic sized tensors.
-  // For example, the output shape of a `Resize` op with non-constant shape
-  // can only be inferred when the op is invoked.
-  // In this case, the Delegate is responsible for calling
-  // `SetTensorToDynamic` to mark the tensor as a dynamic tensor, and calling
-  // `ResizeTensor` when invoking the op.
-  //
-  // If the delegate isn't capable to handle dynamic tensors, this flag need
-  // to be set to false.
+  /// The flag is set if the delegate can handle dynamic sized tensors.
+  /// For example, the output shape of a `Resize` op with non-constant shape
+  /// can only be inferred when the op is invoked.
+  /// In this case, the Delegate is responsible for calling
+  /// `SetTensorToDynamic` to mark the tensor as a dynamic tensor, and calling
+  /// `ResizeTensor` when invoking the op.
+  ///
+  /// If the delegate isn't capable of handling dynamic tensors, this flag
+  /// needs to be set to false.
   kTfLiteDelegateFlagsAllowDynamicTensors = 1,
-  // This flag can be used by delegates (that allow dynamic tensors) to ensure
-  // applicable tensor shapes are automatically propagated in the case of tensor
-  // resizing.
-  // This means that non-dynamic (allocation_type != kTfLiteDynamic) I/O tensors
-  // of a delegate kernel will have correct shapes before its Prepare() method
-  // is called. The runtime leverages TFLite builtin ops in the original
-  // execution plan to propagate shapes.
-  //
-  // A few points to note:
-  // 1. This requires kTfLiteDelegateFlagsAllowDynamicTensors. If that flag is
-  // false, this one is redundant since the delegate kernels are re-initialized
-  // every time tensors are resized.
-  // 2. Enabling this flag adds some overhead to AllocateTensors(), since extra
-  // work is required to prepare the original execution plan.
-  // 3. This flag requires that the original execution plan only have ops with
-  // valid registrations (and not 'dummy' custom ops like with Flex).
-  // WARNING: This feature is experimental and subject to change.
+  /// This flag can be used by delegates (that allow dynamic tensors) to ensure
+  /// applicable tensor shapes are automatically propagated in the case of
+  /// tensor resizing. This means that non-dynamic (allocation_type !=
+  /// kTfLiteDynamic) I/O tensors of a delegate kernel will have correct shapes
+  /// before its Prepare() method is called. The runtime leverages TFLite
+  /// builtin ops in the original execution plan to propagate shapes.
+  ///
+  /// A few points to note:
+  /// 1. This requires kTfLiteDelegateFlagsAllowDynamicTensors. If that flag is
+  /// false, this one is redundant since the delegate kernels are
+  /// re-initialized every time tensors are resized.
+  /// 2. Enabling this flag adds some overhead to AllocateTensors(), since
+  /// extra work is required to prepare the original execution plan.
+  /// 3. This flag requires that the original execution plan only have ops
+  /// with valid registrations (and not 'dummy' custom ops like with Flex).
+  ///
+  /// WARNING: This feature is experimental and subject to change.
   kTfLiteDelegateFlagsRequirePropagatedShapes = 2,
-  // This flag can be used by delegates to request per-operator profiling. If a
-  // node is a delegate node, this flag will be checked before profiling. If
-  // set, then the node will not be profiled. The delegate will then add per
-  // operator information using Profiler::EventType::OPERATOR_INVOKE_EVENT and
-  // the results will appear in the operator-wise Profiling section and not in
-  // the Delegate internal section.
+  /// This flag can be used by delegates to request per-operator profiling. If
+  /// a node is a delegate node, this flag will be checked before profiling. If
+  /// set, then the node will not be profiled. The delegate will then add per
+  /// operator information using `Profiler::EventType::OPERATOR_INVOKE_EVENT`
+  /// and the results will appear in the operator-wise Profiling section and
+  /// not in the Delegate internal section.
   kTfLiteDelegateFlagsPerOperatorProfiling = 4
 } TfLiteDelegateFlags;

-// WARNING: This is an experimental interface that is subject to change.
+/// WARNING: This is an experimental interface that is subject to change.
 typedef struct TfLiteDelegate {
-  // Data that delegate needs to identify itself. This data is owned by the
-  // delegate. The delegate is owned in the user code, so the delegate is
-  // responsible for deallocating this when it is destroyed.
+  /// Data that delegate needs to identify itself. This data is owned by the
+  /// delegate. The delegate is owned in the user code, so the delegate is
+  /// responsible for deallocating this when it is destroyed.
   void* data_;

-  // Invoked by ModifyGraphWithDelegate. This prepare is called, giving the
-  // delegate a view of the current graph through TfLiteContext*. It typically
-  // will look at the nodes and call ReplaceNodeSubsetsWithDelegateKernels()
-  // to ask the TensorFlow lite runtime to create macro-nodes to represent
-  // delegated subgraphs of the original graph.
+  /// Invoked by `ModifyGraphWithDelegate`. This prepare is called, giving the
+  /// delegate a view of the current graph through `TfLiteContext*`. It
+  /// typically will look at the nodes and call
+  /// `ReplaceNodeSubsetsWithDelegateKernels()` to ask the TensorFlow lite
+  /// runtime to create macro-nodes to represent delegated subgraphs of the
+  /// original graph.
   TfLiteStatus (*Prepare)(TfLiteContext* context,
                           struct TfLiteDelegate* delegate);

-  // Copy the data from delegate buffer handle into raw memory of the given
-  // 'tensor'. Note that the delegate is allowed to allocate the raw bytes as
-  // long as it follows the rules for kTfLiteDynamic tensors, in which case this
-  // cannot be null.
+  /// Copy the data from delegate buffer handle into raw memory of the given
+  /// `tensor`. Note that the delegate is allowed to allocate the raw bytes as
+  /// long as it follows the rules for `kTfLiteDynamic` tensors, in which case
+  /// this cannot be null.
   TfLiteStatus (*CopyFromBufferHandle)(TfLiteContext* context,
                                        struct TfLiteDelegate* delegate,
                                        TfLiteBufferHandle buffer_handle,
                                        TfLiteTensor* tensor);

-  // Copy the data from raw memory of the given 'tensor' to delegate buffer
-  // handle. This can be null if the delegate doesn't use its own buffer.
+  /// Copy the data from raw memory of the given `tensor` to delegate buffer
+  /// handle. This can be null if the delegate doesn't use its own buffer.
   TfLiteStatus (*CopyToBufferHandle)(TfLiteContext* context,
                                      struct TfLiteDelegate* delegate,
                                      TfLiteBufferHandle buffer_handle,
                                      TfLiteTensor* tensor);

-  // Free the Delegate Buffer Handle. Note: This only frees the handle, but
-  // this doesn't release the underlying resource (e.g. textures). The
-  // resources are either owned by application layer or the delegate.
-  // This can be null if the delegate doesn't use its own buffer.
+  /// Free the Delegate Buffer Handle. Note: This only frees the handle, but
+  /// this doesn't release the underlying resource (e.g. textures). The
+  /// resources are either owned by the application layer or the delegate.
+  /// This can be null if the delegate doesn't use its own buffer.
   void (*FreeBufferHandle)(TfLiteContext* context,
                            struct TfLiteDelegate* delegate,
                            TfLiteBufferHandle* handle);

-  // Bitmask flags. See the comments in `TfLiteDelegateFlags`.
+  /// Bitmask flags. See the comments in `TfLiteDelegateFlags`.
   int64_t flags;

-  // The opaque delegate builder associated with this object. If set then the
-  // TF Lite runtime will give precedence to this field. E.g. instead of
-  // invoking 'Prepare' via the function pointer inside the 'TfLiteDelegate'
-  // object, the runtime will first check if the corresponding function
-  // pointer inside 'opaque_delegate_builder' is set and if so invoke that.
-  //
-  // If this field is non-null, then the 'Prepare' field (of the
-  // 'TfLiteDelegate') should be null.
+  /// The opaque delegate builder associated with this object. If set then the
+  /// TF Lite runtime will give precedence to this field. E.g. instead of
+  /// invoking `Prepare` via the function pointer inside the `TfLiteDelegate`
+  /// object, the runtime will first check if the corresponding function
+  /// pointer inside `opaque_delegate_builder` is set and if so invoke that.
+  ///
+  /// If this field is non-null, then the `Prepare` field (of the
+  /// `TfLiteDelegate`) should be null.
   struct TfLiteOpaqueDelegateBuilder* opaque_delegate_builder;
 } TfLiteDelegate;

-// Build a 'null' delegate, with all the fields properly set to their default
-// values.
+/// Build a `null` delegate, with all the fields properly set to their default
+/// values.
 TfLiteDelegate TfLiteDelegateCreate(void);

-// `TfLiteOpaqueDelegateBuilder` is used for constructing
-// `TfLiteOpaqueDelegate`, see `TfLiteOpaqueDelegateCreate` below. Note:
-// This struct is not ABI stable.
-//
-// For forward source compatibility `TfLiteOpaqueDelegateBuilder` objects should
-// be brace-initialized, so that all fields (including any that might be added
-// in the future) get zero-initialized. The purpose of each field is exactly
-// the same as with `TfLiteDelegate`.
-//
-// WARNING: This is an experimental interface that is subject to change.
+/// `TfLiteOpaqueDelegateBuilder` is used for constructing
+/// `TfLiteOpaqueDelegate`, see `TfLiteOpaqueDelegateCreate` in c_api_opaque.h.
+///
+/// NOTE: This struct is not ABI stable.
+///
+/// For forward source compatibility `TfLiteOpaqueDelegateBuilder` objects
+/// should be brace-initialized, so that all fields (including any that might
+/// be added in the future) get zero-initialized. The purpose of each field is
+/// exactly the same as with `TfLiteDelegate`.
+///
+/// NOTE: This type is part of the TensorFlow Lite Extension APIs.
+/// We reserve the right to make changes to this API in future releases,
+/// potentially including non-backwards-compatible changes, on a different
+/// schedule than for the other TensorFlow Lite APIs. See
+/// https://www.tensorflow.org/guide/versions#separate_version_number_for_tensorflow_lite_extension_apis.
 typedef struct TfLiteOpaqueDelegateBuilder {
-  // Data that delegate needs to identify itself. This data is owned by the
-  // delegate. The delegate is owned in the user code, so the delegate is
-  // responsible for deallocating this when it is destroyed.
+  /// Data that delegate needs to identify itself. This data is owned by the
+  /// delegate. The delegate is owned in the user code, so the delegate is
+  /// responsible for deallocating this when it is destroyed.
   void* data;
-  // Invoked by ModifyGraphWithDelegate. This prepare is called, giving the
-  // delegate a view of the current graph through TfLiteContext*. It typically
-  // will look at the nodes and call ReplaceNodeSubsetsWithDelegateKernels()
-  // to ask the TensorFlow lite runtime to create macro-nodes to represent
-  // delegated subgraphs of the original graph.
+  /// Invoked by ModifyGraphWithDelegate. This prepare is called, giving the
+  /// delegate a view of the current graph through `TfLiteContext*`. It
+  /// typically will look at the nodes and call
+  /// `ReplaceNodeSubsetsWithDelegateKernels()` to ask the TensorFlow lite
+  /// runtime to create macro-nodes to represent delegated subgraphs of the
+  /// original graph.
   TfLiteStatus (*Prepare)(TfLiteOpaqueContext* context,  // NOLINT
                           TfLiteOpaqueDelegate* delegate, void* data);
-  // Copies the data from delegate buffer handle into raw memory of the given
-  // 'tensor'. Note that the delegate is allowed to allocate the raw bytes as
-  // long as it follows the rules for kTfLiteDynamic tensors, in which case this
-  // cannot be null.
+  /// Copies the data from delegate buffer handle into raw memory of the given
+  /// `tensor`. Note that the delegate is allowed to allocate the raw bytes as
+  /// long as it follows the rules for kTfLiteDynamic tensors, in which case
+  /// this cannot be null.
   TfLiteStatus (*CopyFromBufferHandle)(  // NOLINT
       TfLiteOpaqueContext* context, TfLiteOpaqueDelegate* delegate, void* data,
       TfLiteBufferHandle buffer_handle, TfLiteOpaqueTensor* tensor);
-  // Copies the data from raw memory of the given 'tensor' to delegate buffer
-  // handle. This can be null if the delegate doesn't use its own buffer.
+  /// Copies the data from raw memory of the given `tensor` to delegate buffer
+  /// handle. This can be null if the delegate doesn't use its own buffer.
   TfLiteStatus (*CopyToBufferHandle)(  // NOLINT
      TfLiteOpaqueContext* context, TfLiteOpaqueDelegate* delegate, void* data,
      TfLiteBufferHandle buffer_handle, TfLiteOpaqueTensor* tensor);
-  // Frees the Delegate Buffer Handle. Note: This only frees the handle, but
-  // this doesn't release the underlying resource (e.g. textures). The
-  // resources are either owned by application layer or the delegate.
-  // This can be null if the delegate doesn't use its own buffer.
+  /// Frees the Delegate Buffer Handle. Note: This only frees the handle, but
+  /// this doesn't release the underlying resource (e.g. textures). The
+  /// resources are either owned by the application layer or the delegate.
+  /// This can be null if the delegate doesn't use its own buffer.
   void (*FreeBufferHandle)(TfLiteOpaqueContext* context,  // NOLINT
                            TfLiteOpaqueDelegate* delegate, void* data,
                            TfLiteBufferHandle* handle);
-  // Bitmask flags. See the comments in `TfLiteDelegateFlags`.
+  /// Bitmask flags. See the comments in `TfLiteDelegateFlags`.
   int64_t flags;
 } TfLiteOpaqueDelegateBuilder;

 #ifndef TF_LITE_STATIC_MEMORY
-// Creates an opaque delegate and returns its address. The opaque delegate will
-// behave according to the provided 'opaque_delegate_builder'. The lifetime of
-// the objects pointed to by any of the fields within the
-// 'opaque_delegate_builder' must outlive the returned
-// 'TfLiteOpaqueDelegate' and any 'TfLiteInterpreter',
-// 'TfLiteInterpreterOptions', 'tflite::Interpreter', or
-// 'tflite::InterpreterBuilder' that the delegate is added to. The returned
-// address should be passed to 'TfLiteOpaqueDelegateDelete' for deletion. If
-// 'opaque_delegate_builder' is a null pointer, then a null pointer will be
-// returned.
+// See c_api_opaque.h.
+// This declaration in common.h is only for backwards compatibility.
+// NOTE: This function is part of the TensorFlow Lite Extension APIs, see above.
 TfLiteOpaqueDelegate* TfLiteOpaqueDelegateCreate(
     const TfLiteOpaqueDelegateBuilder* opaque_delegate_builder);

-// Deletes the provided opaque 'delegate'. This function has no effect if the
-// 'delegate' is a null pointer.
+// See c_api_opaque.h.
+// This declaration in common.h is only for backwards compatibility.
+// NOTE: This function is part of the TensorFlow Lite Extension APIs, see above.
 void TfLiteOpaqueDelegateDelete(TfLiteOpaqueDelegate* delegate);
 #endif  // TF_LITE_STATIC_MEMORY

-// Returns a pointer to the data associated with the provided opaque 'delegate'.
-//
-// A null pointer will be returned when:
-// - The 'delegate' is null.
-// - The 'data' field of the 'TfLiteOpaqueDelegateBuilder' used to construct the
-//   'delegate' was null.
-// - Or in case of any other error.
-// - The 'delegate' has been constructed via a 'TfLiteOpaqueDelegateBuilder',
-//   but the 'data' field of the 'TfLiteOpaqueDelegateBuilder' is null.
-//
-// The data_ field of 'delegate' will be returned if the
-// 'opaque_delegate_builder' field is null.
+// See c_api_opaque.h.
+// This declaration in common.h is only for backwards compatibility.
+// NOTE: This function is part of the TensorFlow Lite Extension APIs, see above.
 void* TfLiteOpaqueDelegateGetData(const TfLiteOpaqueDelegate* delegate);

+/// Returns a tensor's data allocation strategy.
+TfLiteAllocationStrategy TfLiteTensorGetAllocationStrategy(
+    const TfLiteTensor* t);
+
+/// Returns how stable a tensor's data buffer address is across runs.
+TfLiteRunStability TfLiteTensorGetBufferAddressStability(
+    const TfLiteTensor* t);
+
+/// Returns how stable a tensor's data values are across runs.
+TfLiteRunStability TfLiteTensorGetDataStability(const TfLiteTensor* t);
+
+/// Returns the operation step when the data of a tensor is populated.
+///
+/// Some operations can precompute their results before the evaluation step.
+/// This makes the data available earlier for subsequent operations.
+TfLiteRunStep TfLiteTensorGetDataKnownStep(const TfLiteTensor* t);
+
+/// Returns the operation step when the shape of a tensor is computed.
+///
+/// Some operations can precompute the shape of their results before the
+/// evaluation step. This makes the shape available earlier for subsequent
+/// operations.
 
 #ifndef TF_LITE_STATIC_MEMORY
-// Creates an opaque delegate and returns its address. The opaque delegate will
-// behave according to the provided 'opaque_delegate_builder'. The lifetime of
-// the objects pointed to by any of the fields within the
-// 'opaque_delegate_builder' must outlive the returned
-// 'TfLiteOpaqueDelegate' and any 'TfLiteInterpreter',
-// 'TfLiteInterpreterOptions', 'tflite::Interpreter', or
-// 'tflite::InterpreterBuilder' that the delegate is added to. The returned
-// address should be passed to 'TfLiteOpaqueDelegateDelete' for deletion. If
-// 'opaque_delegate_builder' is a null pointer, then a null pointer will be
-// returned.
+// See c_api_opaque.h.
+// This declaration in common.h is only for backwards compatibility.
+// NOTE: This function is part of the TensorFlow Lite Extension APIs, see above.
 TfLiteOpaqueDelegate* TfLiteOpaqueDelegateCreate(
     const TfLiteOpaqueDelegateBuilder* opaque_delegate_builder);
 
-// Deletes the provided opaque 'delegate'. This function has no effect if the
-// 'delegate' is a null pointer.
+// See c_api_opaque.h.
+// This declaration in common.h is only for backwards compatibility.
+// NOTE: This function is part of the TensorFlow Lite Extension APIs, see above.
 void TfLiteOpaqueDelegateDelete(TfLiteOpaqueDelegate* delegate);
 #endif  // TF_LITE_STATIC_MEMORY
 
-// Returns a pointer to the data associated with the provided opaque 'delegate'.
-//
-// A null pointer will be returned when:
-// - The 'delegate' is null.
-// - The 'data' field of the 'TfLiteOpaqueDelegateBuilder' used to construct the
-//   'delegate' was null.
-// - Or in case of any other error.
-// - The 'delegate' has been constructed via a 'TfLiteOpaqueDelegateBuilder',
-//   but the 'data' field of the 'TfLiteOpaqueDelegateBuilder' is null.
-//
-// The data_ field of 'delegate' will be returned if the
-// 'opaque_delegate_builder' field is null.
+// See c_api_opaque.h.
+// This declaration in common.h is only for backwards compatibility.
+// NOTE: This function is part of the TensorFlow Lite Extension APIs, see above.
 void* TfLiteOpaqueDelegateGetData(const TfLiteOpaqueDelegate* delegate);
 
+/// Returns a tensor's data allocation strategy.
+TfLiteAllocationStrategy TfLiteTensorGetAllocationStrategy(
+    const TfLiteTensor* t);
+
+/// Returns how stable a tensor's data buffer address is across runs.
+TfLiteRunStability TfLiteTensorGetBufferAddressStability(const TfLiteTensor* t);
+
+/// Returns how stable a tensor's data values are across runs.
+TfLiteRunStability TfLiteTensorGetDataStability(const TfLiteTensor* t);
+
+/// Returns the operation step when the data of a tensor is populated.
+///
+/// Some operations can precompute their results before the evaluation step.
+/// This makes the data available earlier for subsequent operations.
+TfLiteRunStep TfLiteTensorGetDataKnownStep(const TfLiteTensor* t);
+
+/// Returns the operation step when the shape of a tensor is computed.
+///
+/// Some operations can precompute the shape of their results before the
+/// evaluation step. This makes the shape available earlier for subsequent
+/// operations.
+TfLiteRunStep TfLiteTensorGetShapeKnownStep(const TfLiteTensor* t);
+
+/** @} */
+// Ends `\addtogroup`; it's important for the doc generator that this doesn't
+// include the CC code below.
+
 #ifdef __cplusplus
 }  // extern "C"
diff --git a/tensorflow/lite/core/macros.h b/tensorflow/lite/core/macros.h
index d329ded7450..9eab6be877d 100644
--- a/tensorflow/lite/core/macros.h
+++ b/tensorflow/lite/core/macros.h
@@ -73,6 +73,8 @@ inline constexpr char tflite_metadata_buffer_location[] = "buffer_location";
 // field for minimum runtime version, string
 inline constexpr char tflite_metadata_min_runtime_version[] =
     "min_runtime_version";
+// the StableHLO op version supported by the TFLite runtime, string
+inline constexpr char tflite_supported_stablehlo_version[] = "1.0.0";
 #endif
 
 #endif  // TENSORFLOW_LITE_CORE_MACROS_H_
diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.c b/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.c
index 6ce4c7c7964..ea45d1b7049 100644
--- a/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.c
+++ b/tensorflow/lite/experimental/microfrontend/lib/filterbank_io.c
@@ -17,7 +17,7 @@ limitations under the License.
 static void PrintArray(FILE* fp, const char* name, const int16_t* values,
                        size_t size) {
   fprintf(fp, "static int16_t filterbank_%s[] = {", name);
-  int i;
+  size_t i;
   for (i = 0; i < size; ++i) {
     fprintf(fp, "%d", values[i]);
     if (i < size - 1) {
diff --git a/tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c b/tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c
index f18ebf54750..430fc6e328e 100644
--- a/tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c
+++ b/tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c
@@ -28,7 +28,7 @@ void FilterbankFillConfigWithDefaults(struct FilterbankConfig* config) {
   config->output_scale_shift = 7;
 }
 
-static float FreqToMel(float freq) { return 1127.0 * log1p(freq / 700.0); }
+static float FreqToMel(float freq) { return 1127.0f * log1pf(freq / 700.0f); }
 
 static void CalculateCenterFrequencies(const int num_channels,
                                        const float lower_frequency_limit,
@@ -49,8 +49,8 @@ static void CalculateCenterFrequencies(const int num_channels,
 static void QuantizeFilterbankWeights(const float float_weight, int16_t* weight,
                                       int16_t* unweight) {
-  *weight = floor(float_weight * (1 << kFilterbankBits) + 0.5);
-  *unweight = floor((1.0 - float_weight) * (1 << kFilterbankBits) + 0.5);
+  *weight = floorf(float_weight * (1 << kFilterbankBits) + 0.5f);
+  *unweight = floorf((1.0f - float_weight) * (1 << kFilterbankBits) + 0.5f);
 }
 
 int FilterbankPopulateState(const struct FilterbankConfig* config,
@@ -95,8 +95,8 @@ int FilterbankPopulateState(const struct FilterbankConfig* config,
                               config->upper_band_limit, center_mel_freqs);
 
   // Always exclude DC.
-  const float hz_per_sbin = 0.5 * sample_rate / ((float)spectrum_size - 1);
-  state->start_index = 1.5 + config->lower_band_limit / hz_per_sbin;
+  const float hz_per_sbin = 0.5f * sample_rate / ((float)spectrum_size - 1);
+  state->start_index = 1.5f + config->lower_band_limit / hz_per_sbin;
   state->end_index = 0;  // Initialized to zero here, but actually set below.
// For each channel, we need to figure out what frequencies belong to it, and diff --git a/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c b/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c index 861778c77a1..60868f81b8c 100644 --- a/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c +++ b/tensorflow/lite/experimental/microfrontend/lib/frontend_main.c @@ -56,7 +56,7 @@ int main(int argc, char** argv) { audio_file_size -= num_samples_read; if (output.values != NULL) { - int i; + size_t i; for (i = 0; i < output.size; ++i) { printf("%d ", output.values[i]); } diff --git a/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.c b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.c index 19c32b32759..8a5d7a418b5 100644 --- a/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.c +++ b/tensorflow/lite/experimental/microfrontend/lib/noise_reduction_io.c @@ -16,7 +16,7 @@ limitations under the License. void NoiseReductionWriteMemmapPreamble( FILE* fp, const struct NoiseReductionState* state) { - fprintf(fp, "static uint32_t noise_reduction_estimate[%zu];\n", + fprintf(fp, "static uint32_t noise_reduction_estimate[%d];\n", state->num_channels); fprintf(fp, "\n"); } diff --git a/tensorflow/lite/experimental/microfrontend/lib/window_io.c b/tensorflow/lite/experimental/microfrontend/lib/window_io.c index d12cac2c853..5a7b0477725 100644 --- a/tensorflow/lite/experimental/microfrontend/lib/window_io.c +++ b/tensorflow/lite/experimental/microfrontend/lib/window_io.c @@ -16,7 +16,7 @@ limitations under the License. void WindowWriteMemmapPreamble(FILE* fp, const struct WindowState* state) { fprintf(fp, "static int16_t window_coefficients[] = {\n"); - int i; + size_t i; for (i = 0; i < state->size; ++i) { fprintf(fp, "%d", state->coefficients[i]); if (i < state->size - 1) { diff --git a/tensorflow/lite/experimental/microfrontend/lib/window_util.c b/tensorflow/lite/experimental/microfrontend/lib/window_util.c index eee6e7b56ef..6fdffdc409d 100644 --- a/tensorflow/lite/experimental/microfrontend/lib/window_util.c +++ b/tensorflow/lite/experimental/microfrontend/lib/window_util.c @@ -41,13 +41,13 @@ int WindowPopulateState(const struct WindowConfig* config, } // Populate the window values. - const float arg = M_PI * 2.0 / ((float)state->size); - int i; + const float arg = (float)M_PI * 2.0f / ((float)state->size); + size_t i; for (i = 0; i < state->size; ++i) { - float float_value = 0.5 - (0.5 * cos(arg * (i + 0.5))); + float float_value = 0.5f - (0.5f * cosf(arg * (i + 0.5f))); // Scale it to fixed point and round it. state->coefficients[i] = - floor(float_value * (1 << kFrontendWindowBits) + 0.5); + floorf(float_value * (1 << kFrontendWindowBits) + 0.5f); } state->input_used = 0; diff --git a/tensorflow/lite/kernels/BUILD b/tensorflow/lite/kernels/BUILD index 6e1efcea01f..4b48db1ee83 100644 --- a/tensorflow/lite/kernels/BUILD +++ b/tensorflow/lite/kernels/BUILD @@ -14,7 +14,7 @@ cc_library( "op_macros.h", ], copts = tflite_copts(), - deps = ["//tensorflow/lite/micro:debug_log"], + deps = ["//tensorflow/lite/micro:micro_log"], ) cc_library( diff --git a/tensorflow/lite/kernels/internal/common.cc b/tensorflow/lite/kernels/internal/common.cc index 1654ab84f0d..fabb0208b7d 100644 --- a/tensorflow/lite/kernels/internal/common.cc +++ b/tensorflow/lite/kernels/internal/common.cc @@ -17,6 +17,53 @@ limitations under the License. 
 namespace tflite {
 
+// Single-rounding MultiplyByQuantizedMultiplier
+#if TFLITE_SINGLE_ROUNDING
+int32_t MultiplyByQuantizedMultiplier(int32_t x, int32_t quantized_multiplier,
+                                      int shift) {
+  TFLITE_DCHECK(quantized_multiplier >= 0);
+  TFLITE_DCHECK(shift >= -31 && shift <= 30);
+
+  const int64_t total_shift = 31 - shift;
+  const int64_t round = static_cast<int64_t>(1) << (total_shift - 1);
+  int64_t result = x * static_cast<int64_t>(quantized_multiplier) + round;
+  result = result >> total_shift;
+
+  TFLITE_DCHECK(result >= std::numeric_limits<int32_t>::min() &&
+                result <= std::numeric_limits<int32_t>::max());
+  return static_cast<int32_t>(result);
+}
+
+int32_t MultiplyByQuantizedMultiplier(int64_t x, int32_t quantized_multiplier,
+                                      int shift) {
+  // Inputs:
+  // - quantized_multiplier has fixed point at bit 31
+  // - shift is -31 to +7 (negative for right shift)
+  //
+  // Assumptions: The following input ranges are assumed
+  // - quantize_scale>=0  (the usual range is (1<<30) to (1<<31)-1)
+  // - scaling is chosen so final scaled result fits in int32_t
+  // - input x is in the range -(1<<47) <= x < (1<<47)
+  TFLITE_DCHECK(quantized_multiplier >= 0);
+  TFLITE_DCHECK(shift >= -31 && shift < 8);
+  TFLITE_DCHECK(x >= -(static_cast<int64_t>(1) << 47) &&
+                x < (static_cast<int64_t>(1) << 47));
+
+  const int32_t reduced_multiplier =
+      (quantized_multiplier < 0x7FFF0000)
+          ? ((quantized_multiplier + (1 << 15)) >> 16)
+          : 0x7FFF;
+  const int64_t total_shift = 15 - shift;
+  const int64_t round = static_cast<int64_t>(1) << (total_shift - 1);
+  int64_t result = x * static_cast<int64_t>(reduced_multiplier) + round;
+  result = result >> total_shift;
+
+  TFLITE_DCHECK(result >= std::numeric_limits<int32_t>::min() &&
+                result <= std::numeric_limits<int32_t>::max());
+  return static_cast<int32_t>(result);
+}
+// Double-rounding MultiplyByQuantizedMultiplier
+#else
 int32_t MultiplyByQuantizedMultiplier(int32_t x, int32_t quantized_multiplier,
                                       int shift) {
   using gemmlowp::RoundingDivideByPOT;
@@ -51,5 +98,6 @@ int32_t MultiplyByQuantizedMultiplier(int64_t x, int32_t quantized_multiplier,
   int32_t result = x >> total_shift;
   return result;
 }
+#endif  // TFLITE_SINGLE_ROUNDING
 
 }  // namespace tflite
diff --git a/tensorflow/lite/kernels/internal/common.h b/tensorflow/lite/kernels/internal/common.h
index 05184df663b..9761a8cc07a 100644
--- a/tensorflow/lite/kernels/internal/common.h
+++ b/tensorflow/lite/kernels/internal/common.h
@@ -16,6 +16,10 @@ limitations under the License.
 #define TENSORFLOW_LITE_KERNELS_INTERNAL_COMMON_H_
 
 #include
+#include <algorithm>
+#include <cstddef>
+
+#include "tensorflow/lite/kernels/internal/runtime_shape.h"
 
 #ifndef ALLOW_SLOW_GENERIC_DEPTHWISECONV_FALLBACK
 #ifdef GEMMLOWP_ALLOW_SLOW_SCALAR_FALLBACK
 #define ALLOW_SLOW_GENERIC_DEPTHWISECONV_FALLBACK
@@ -35,6 +39,117 @@ namespace tflite {
 
 constexpr int kReverseShift = -1;
 
+// Reduces and compresses dimensions so that broadcast handling becomes more
+// efficient. Returns true if the output shape is broadcastable, i.e. it does
+// not contain any degenerate (zero-sized) dimension; false otherwise.
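+//
+// For example, broadcasting shapes [8, 1, 2] and [8, 4, 2] compresses to a
+// three-dimensional problem: output shape {2, 4, 8} (fastest-varying extent
+// first), input1 strides {1, 0, 2} and input2 strides {1, 2, 8}; the zero
+// stride marks the dimension along which input1 is broadcast.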
+template <int MAX_DIM = 6>
+bool ReduceDimensionsForBroadcast(const RuntimeShape& input1_shape,
+                                  const RuntimeShape& input2_shape,
+                                  size_t* compressed_input1_stride,
+                                  size_t* compressed_input2_stride,
+                                  size_t* compressed_output_shape) {
+  size_t num_compressed_dims = 0;
+  size_t compressed_input1_shape[MAX_DIM];
+  size_t compressed_input2_shape[MAX_DIM];
+  std::fill(compressed_input1_shape, compressed_input1_shape + MAX_DIM, 1);
+  std::fill(compressed_input2_shape, compressed_input2_shape + MAX_DIM, 1);
+  std::fill(compressed_output_shape, compressed_output_shape + MAX_DIM, 1);
+  bool broadcast_input1 = false;
+  bool broadcast_input2 = false;
+  bool first_nonunit = true;
+  const size_t num_input1_dims = input1_shape.DimensionsCount();
+  const size_t num_input2_dims = input2_shape.DimensionsCount();
+  const int32_t* input1_dims = input1_shape.DimsData();
+  const int32_t* input2_dims = input2_shape.DimsData();
+  const size_t num_common_dims = std::min(num_input1_dims, num_input2_dims);
+  for (size_t i = 1; i <= num_common_dims; i++) {
+    const size_t input1_dim = input1_dims[num_input1_dims - i];
+    const size_t input2_dim = input2_dims[num_input2_dims - i];
+    if (input1_dim == 0 || input2_dim == 0) {
+      return false;
+    }
+    if (input1_dim == 1 && input2_dim == 1) {
+      continue;
+    }
+    assert(!broadcast_input1 || !broadcast_input2);
+
+    if (input1_dim == 1) {
+      if (!broadcast_input1) {
+        broadcast_input1 = true;
+        broadcast_input2 = false;
+        num_compressed_dims++;
+      }
+      compressed_input2_shape[num_compressed_dims - 1] *= input2_dim;
+      compressed_output_shape[num_compressed_dims - 1] *= input2_dim;
+    } else if (input2_dim == 1) {
+      if (!broadcast_input2) {
+        broadcast_input1 = false;
+        broadcast_input2 = true;
+        num_compressed_dims++;
+      }
+      compressed_input1_shape[num_compressed_dims - 1] *= input1_dim;
+      compressed_output_shape[num_compressed_dims - 1] *= input1_dim;
+    } else {
+      TFLITE_DCHECK(input1_dim == input2_dim);
+      if (broadcast_input1 || broadcast_input2 || first_nonunit) {
+        broadcast_input1 = false;
+        broadcast_input2 = false;
+        num_compressed_dims++;
+      }
+      compressed_input1_shape[num_compressed_dims - 1] *= input1_dim;
+      compressed_input2_shape[num_compressed_dims - 1] *= input1_dim;
+      compressed_output_shape[num_compressed_dims - 1] *= input1_dim;
+    }
+    first_nonunit = false;
+  }
+  if (num_input1_dims > num_input2_dims) {
+    if (!broadcast_input2) {
+      num_compressed_dims++;
+    }
+    for (size_t i = 0; i < num_input1_dims - num_input2_dims; i++) {
+      const size_t input1_dim = input1_dims[i];
+      if (input1_dim == 0) {
+        return false;
+      }
+      compressed_input1_shape[num_compressed_dims - 1] *= input1_dim;
+      compressed_output_shape[num_compressed_dims - 1] *= input1_dim;
+    }
+  } else if (num_input2_dims > num_input1_dims) {
+    if (!broadcast_input1) {
+      num_compressed_dims++;
+    }
+    for (size_t i = 0; i < num_input2_dims - num_input1_dims; i++) {
+      const size_t input2_dim = input2_dims[i];
+      if (input2_dim == 0) {
+        return false;
+      }
+      compressed_input2_shape[num_compressed_dims - 1] *= input2_dim;
+      compressed_output_shape[num_compressed_dims - 1] *= input2_dim;
+    }
+  }
+  num_compressed_dims = (num_compressed_dims > 1) ? num_compressed_dims : 1;
+
+  int input1_stride = 1;
+  int input2_stride = 1;
+  for (int i = 0; i < MAX_DIM; ++i) {
+    compressed_input1_stride[i] = input1_stride;
+    input1_stride *= compressed_input1_shape[i];
+    compressed_input2_stride[i] = input2_stride;
+    input2_stride *= compressed_input2_shape[i];
+  }
+  for (int i = 0; i < MAX_DIM; ++i) {
+    if (compressed_input1_shape[i] != compressed_input2_shape[i]) {
+      if (compressed_input1_shape[i] == 1) {
+        compressed_input1_stride[i] = 0;
+      } else {
+        TFLITE_DCHECK_EQ(compressed_input2_shape[i], 1);
+        compressed_input2_stride[i] = 0;
+      }
+    }
+  }
+  return true;
+}
+
 inline void GetActivationMinMax(FusedActivationFunctionType ac,
                                 float* output_activation_min,
                                 float* output_activation_max) {
@@ -142,24 +257,14 @@ inline void BiasAndClamp(float clamp_min, float clamp_max, int bias_size,
 #endif
 }
 
-// Single-rounding MultiplyByQuantizedMultiplier
-#if TFLITE_SINGLE_ROUNDING
-inline int32_t MultiplyByQuantizedMultiplier(int32_t x,
-                                             int32_t quantized_multiplier,
-                                             int shift) {
-  TFLITE_DCHECK(quantized_multiplier >= 0);
-  TFLITE_DCHECK(shift >= -31 && shift <= 30);
-
-  const int64_t total_shift = 31 - shift;
-  const int64_t round = static_cast<int64_t>(1) << (total_shift - 1);
-  int64_t result = x * static_cast<int64_t>(quantized_multiplier) + round;
-  result = result >> total_shift;
+TFLITE_NOINLINE int32_t MultiplyByQuantizedMultiplier(
+    int32_t x, int32_t quantized_multiplier, int shift);
 
-  TFLITE_DCHECK(result >= std::numeric_limits<int32_t>::min() &&
-                result <= std::numeric_limits<int32_t>::max());
-  return static_cast<int32_t>(result);
-}
+TFLITE_NOINLINE int32_t MultiplyByQuantizedMultiplier(
+    int64_t x, int32_t quantized_multiplier, int shift);
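+// For example, with quantized_multiplier == (1 << 30), i.e. 0.5 in Q0.31
+// fixed point, and shift == 0, MultiplyByQuantizedMultiplier(7, 1 << 30, 0)
+// evaluates to 4 (round(7 * 0.5)) under either rounding scheme.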
+// Single-rounding MultiplyByQuantizedMultiplier
+#if TFLITE_SINGLE_ROUNDING
 inline int32_t MultiplyByQuantizedMultiplierSmallerThanOneExp(
     int32_t x, int32_t quantized_multiplier, int shift) {
   TFLITE_DCHECK_LE(shift, 0);
@@ -172,36 +277,6 @@ inline int32_t MultiplyByQuantizedMultiplierGreaterThanOne(
   return MultiplyByQuantizedMultiplier(x, quantized_multiplier, shift);
 }
 
-inline int32_t MultiplyByQuantizedMultiplier(int64_t x,
-                                             int32_t quantized_multiplier,
-                                             int shift) {
-  // Inputs:
-  // - quantized_multiplier has fixed point at bit 31
-  // - shift is -31 to +7 (negative for right shift)
-  //
-  // Assumptions: The following input ranges are assumed
-  // - quantize_scale>=0  (the usual range is (1<<30) to (1>>31)-1)
-  // - scaling is chosen so final scaled result fits in int32_t
-  // - input x is in the range -(1<<47) <= x < (1<<47)
-  TFLITE_DCHECK(quantized_multiplier >= 0);
-  TFLITE_DCHECK(shift >= -31 && shift < 8);
-  TFLITE_DCHECK(x >= -(static_cast<int64_t>(1) << 47) &&
-                x < (static_cast<int64_t>(1) << 47));
-
-  const int32_t reduced_multiplier =
-      (quantized_multiplier < 0x7FFF0000)
-          ? ((quantized_multiplier + (1 << 15)) >> 16)
-          : 0x7FFF;
-  const int64_t total_shift = 15 - shift;
-  const int64_t round = static_cast<int64_t>(1) << (total_shift - 1);
-  int64_t result = x * static_cast<int64_t>(reduced_multiplier) + round;
-  result = result >> total_shift;
-
-  TFLITE_DCHECK(result >= std::numeric_limits<int32_t>::min() &&
-                result <= std::numeric_limits<int32_t>::max());
-  return static_cast<int32_t>(result);
-}
-
 #ifdef USE_NEON
 inline int32x4x4_t MultiplyByQuantizedMultiplier4Rows(
     int32x4x4_t input_val, int32_t quantized_multiplier, int shift) {
@@ -251,12 +326,6 @@ inline int32_t MultiplyByQuantizedMultiplierGreaterThanOne(
       quantized_multiplier);
 }
 
-TFLITE_NOINLINE int32_t MultiplyByQuantizedMultiplier(
-    int32_t x, int32_t quantized_multiplier, int shift);
-
-TFLITE_NOINLINE int32_t MultiplyByQuantizedMultiplier(
-    int64_t x, int32_t quantized_multiplier, int shift);
-
 #ifdef USE_NEON
 // Round uses ARM's rounding shift right.
 inline int32x4x4_t MultiplyByQuantizedMultiplier4Rows(
diff --git a/tensorflow/lite/kernels/internal/cppmath.h b/tensorflow/lite/kernels/internal/cppmath.h
index c97cc31d248..67ab4610731 100644
--- a/tensorflow/lite/kernels/internal/cppmath.h
+++ b/tensorflow/lite/kernels/internal/cppmath.h
@@ -32,8 +32,8 @@ namespace tflite {
     return TF_LITE_GLOBAL_STD_PREFIX::std_name(x); \
   }
 
-DECLARE_STD_GLOBAL_SWITCH1(TfLiteRound, round);
-DECLARE_STD_GLOBAL_SWITCH1(TfLiteExpm1, expm1);
+DECLARE_STD_GLOBAL_SWITCH1(TfLiteRound, round)
+DECLARE_STD_GLOBAL_SWITCH1(TfLiteExpm1, expm1)
 
 }  // namespace tflite
diff --git a/tensorflow/lite/kernels/internal/portable_tensor.h b/tensorflow/lite/kernels/internal/portable_tensor.h
index 1eee6217d15..a9f9551a4ce 100644
--- a/tensorflow/lite/kernels/internal/portable_tensor.h
+++ b/tensorflow/lite/kernels/internal/portable_tensor.h
@@ -15,6 +15,7 @@ limitations under the License.
 #ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_H_
 #define TENSORFLOW_LITE_KERNELS_INTERNAL_PORTABLE_TENSOR_H_
 
+#include <cstddef>
 #include
 
 #include "tensorflow/lite/core/c/common.h"
@@ -50,6 +51,26 @@ class VectorOfTensors {
       all_shape_ptr_.push_back(&all_shape_[i]);
     }
   }
+
+  explicit VectorOfTensors(const std::vector<TfLiteTensor*>& tensors) {
+    int num_tensors = tensors.size();
+
+    all_data_.reserve(num_tensors);
+    all_shape_.reserve(num_tensors);
+    all_shape_ptr_.reserve(num_tensors);
+
+    for (auto* t : tensors) {
+      all_data_.push_back(GetTensorData<T>(t));
+      all_shape_.push_back(GetTensorShape(t));
+    }
+
+    // Taking the pointer from inside a std::vector is only OK if the vector is
+    // never modified, so we populate all_shape_ in the previous loop and then
+    // we are free to grab iterators here.
+    for (int i = 0; i < num_tensors; ++i) {
+      all_shape_ptr_.push_back(&all_shape_[i]);
+    }
+  }
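+
+  // For example (t0 and t1 being hypothetical tensors owned elsewhere):
+  //   std::vector<TfLiteTensor*> tensors = {&t0, &t1};
+  //   VectorOfTensors<float> v(tensors);
+  //   float* const* data = v.data();  // data[1] is t1's float buffer.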
   // Return a pointer to the data pointers of all tensors in the list. For
   // example:
   //   float* const* f = v.data();
@@ -62,6 +83,8 @@ class VectorOfTensors {
   //   dims[1] are the dimensions of the second tensor in the list.
   const RuntimeShape* const* shapes() const { return all_shape_ptr_.data(); }
 
+  size_t size() const { return all_data_.size(); }
+
  private:
  std::vector<T*> all_data_;
  std::vector<RuntimeShape> all_shape_;
  std::vector<RuntimeShape*> all_shape_ptr_;
diff --git a/tensorflow/lite/kernels/internal/portable_tensor_utils.h b/tensorflow/lite/kernels/internal/portable_tensor_utils.h
index c28892c134e..d37fe6e4c89 100644
--- a/tensorflow/lite/kernels/internal/portable_tensor_utils.h
+++ b/tensorflow/lite/kernels/internal/portable_tensor_utils.h
@@ -241,7 +241,8 @@ void SparseMatrixBatchVectorMultiplyAccumulate1x16(
     const int32_t* __restrict__ indices, int m_rows, int m_cols,
     const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector,
     int n_batch, const int32_t input_offset, const int32_t output_multiplier,
-    const int32_t output_shift, const int32_t output_offset,
+    int32_t output_shift, const int32_t* per_channel_scale,
+    const int32_t* per_channel_shift, int32_t output_offset,
     const int32_t output_activation_min, const int32_t output_activation_max,
     int8_t* __restrict__ result);
 
@@ -259,7 +260,7 @@ void SparseMatrixBatchVectorMultiplyAccumulate(
     const int8_t* __restrict__ matrix, const uint8_t* __restrict__ ledger,
     const int m_rows, const int m_cols, const int8_t* __restrict__ vectors,
     const float* __restrict__ scaling_factors, int n_batch,
-    float* __restrict__ result);
+    float* __restrict__ result, const float* per_channel_scale = nullptr);
 
 // Same as the above 8, 8, 8 integer matmul except for the presence of zero
 // point and non-accumulative.
diff --git a/tensorflow/lite/kernels/internal/reference/add.h b/tensorflow/lite/kernels/internal/reference/add.h
index b89a57b9689..5b520bd1e13 100644
--- a/tensorflow/lite/kernels/internal/reference/add.h
+++ b/tensorflow/lite/kernels/internal/reference/add.h
@@ -16,10 +16,13 @@ limitations under the License.
 #define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_ADD_H_
 
 #include
+#include <cstddef>
+#include <cstdint>
 #include
 
 #include "fixedpoint/fixedpoint.h"
 #include "tensorflow/lite/kernels/internal/common.h"
+#include "tensorflow/lite/kernels/internal/compatibility.h"
 
 namespace tflite {
 
@@ -194,21 +197,135 @@ inline void Add(const ArithmeticParams& params,
   }
 }
 
+template <typename T>
+inline void AddBroadcast(const T* input_data, const T* broadcast_data,
+                         T* output_data, size_t size, T activation_min,
+                         T activation_max) {
+  for (size_t c = 0; c < size; ++c) {
+    output_data[c] = ActivationFunctionWithMinMax(
+        input_data[c] + broadcast_data[0], activation_min, activation_max);
+  }
+}
+
+template <>
+inline void AddBroadcast(const int32_t* input_data,
+                         const int32_t* broadcast_data,
+                         int32_t* output_data, size_t size,
+                         int32_t activation_min,
+                         int32_t activation_max) {
+  size_t c = 0;
+#ifdef USE_NEON
+  const int32x4_t vmax = vdupq_n_s32(activation_max);
+  const int32x4_t vmin = vdupq_n_s32(activation_min);
+  const int32x4_t vb = vdupq_n_s32(broadcast_data[0]);
+  for (; c + 4 <= size; c += 4) {
+    const int32x4_t va = vld1q_s32(&input_data[c]);
+    int32x4_t vres = vaddq_s32(va, vb);
+    vres = vmaxq_s32(vmin, vres);
+    vres = vminq_s32(vmax, vres);
+    vst1q_s32(&output_data[c], vres);
+  }
+#endif
+  for (; c < size; ++c) {
+    output_data[c] = ActivationFunctionWithMinMax(
+        input_data[c] + broadcast_data[0], activation_min, activation_max);
+  }
+}
+
+template <typename T>
+void AddElementwise(const T* input1_data, const T* input2_data, T* output_data,
+                    size_t size, T activation_min, T activation_max) {
+  for (size_t c = 0; c < size; ++c) {
+    output_data[c] = ActivationFunctionWithMinMax(
+        input1_data[c] + input2_data[c], activation_min, activation_max);
+  }
+}
+
+template <>
+inline void AddElementwise(const int32_t* input1_data,
+                           const int32_t* input2_data,
+                           int32_t* output_data, size_t size,
+                           int32_t activation_min,
+                           int32_t activation_max) {
+  size_t c = 0;
+#ifdef USE_NEON
+  const int32x4_t vmax = vdupq_n_s32(activation_max);
+  const int32x4_t vmin = vdupq_n_s32(activation_min);
+  for (; c + 4 <= size; c += 4) {
+    const int32x4_t va = vld1q_s32(&input1_data[c]);
+    const int32x4_t vb = vld1q_s32(&input2_data[c]);
+    int32x4_t vres = vaddq_s32(va, vb);
+    vres = vmaxq_s32(vmin, vres);
+    vres = vminq_s32(vmax, vres);
+    vst1q_s32(&output_data[c], vres);
+  }
+#endif
+  for (; c < size; ++c) {
+    output_data[c] = ActivationFunctionWithMinMax(
+        input1_data[c] + input2_data[c], activation_min, activation_max);
+  }
+}
+
+template <typename T>
+inline void BroadcastAddRecursiveDimensions(
+    int dimension, size_t* input1_offset_p, size_t* input2_offset_p,
+    size_t* output_offset, size_t* compressed_input1_stride,
+    size_t* compressed_input2_stride, size_t* compressed_output_shape,
+    T activation_min, T activation_max, const T* input1_data,
+    const T* input2_data, T* output_data) {
+  if (dimension > 0) {
+    for (size_t c = 0; c < compressed_output_shape[dimension]; ++c) {
+      size_t input1_offset_c = *input1_offset_p;
+      size_t input2_offset_c = *input2_offset_p;
+      BroadcastAddRecursiveDimensions(
+          dimension - 1, &input1_offset_c, &input2_offset_c, output_offset,
+          compressed_input1_stride, compressed_input2_stride,
+          compressed_output_shape, activation_min, activation_max, input1_data,
+          input2_data, output_data);
+      *input1_offset_p += compressed_input1_stride[dimension];
+      *input2_offset_p += compressed_input2_stride[dimension];
+    }
+  } else {
+    TFLITE_DCHECK(dimension == 0);
+    bool input1_is_broadcast = compressed_input1_stride[dimension] == 0;
+    bool input2_is_broadcast = compressed_input2_stride[dimension] == 0;
+    TFLITE_DCHECK(!(input1_is_broadcast && input2_is_broadcast));
+    const T* input1_data_ptr = input1_data + *input1_offset_p;
+    const T* input2_data_ptr = input2_data + *input2_offset_p;
+    T* output_data_ptr = output_data + *output_offset;
+    if (input1_is_broadcast) {
+      // input1 is broadcast.
+      AddBroadcast(input2_data_ptr, input1_data_ptr, output_data_ptr,
+                   compressed_output_shape[dimension], activation_min,
+                   activation_max);
+      *input2_offset_p += compressed_output_shape[dimension];
+    } else if (input2_is_broadcast) {
+      // input2 is broadcast.
+      AddBroadcast(input1_data_ptr, input2_data_ptr, output_data_ptr,
+                   compressed_output_shape[dimension], activation_min,
+                   activation_max);
+      *input1_offset_p += compressed_output_shape[dimension];
+    } else {
+      // Add element-wise.
+      AddElementwise(input1_data_ptr, input2_data_ptr, output_data_ptr,
+                     compressed_output_shape[dimension], activation_min,
+                     activation_max);
+      *input1_offset_p += compressed_output_shape[dimension];
+      *input2_offset_p += compressed_output_shape[dimension];
+    }
+    *output_offset += compressed_output_shape[dimension];
+  }
+}
+
 template <typename T, bool dummy = false>
 inline typename std::enable_if<!is_small_integer<T>::value || dummy,
                                void>::type
 BroadcastAdd6DSlow(const ArithmeticParams& params,
                    const RuntimeShape& input1_shape, const T* input1_data,
                    const RuntimeShape& input2_shape, const T* input2_data,
                    const RuntimeShape& output_shape, T* output_data) {
-  NdArrayDesc<6> desc1;
-  NdArrayDesc<6> desc2;
-  NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1,
-                                      &desc2);
-  const RuntimeShape extended_output_shape =
-      RuntimeShape::ExtendedShape(6, output_shape);
-
+  constexpr int kMaxBroadcastDim = 6;
   T activation_min, activation_max;
   GetActivationParams(params, &activation_min, &activation_max);
 
@@ -223,64 +340,74 @@ BroadcastAdd6DSlow(const ArithmeticParams& params,
   // We name our variables by their Tensorflow convention, but generate C code
   // nesting loops such that the innermost loop has the smallest stride for the
   // best cache behavior.
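+  // The traversal below recurses over the dimensions compressed by
+  // ReduceDimensionsForBroadcast: each level advances the per-input strides,
+  // and the innermost level hands a contiguous run of elements to
+  // AddBroadcast or AddElementwise.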
-  size_t input1_offset_a = 0;
-  size_t input2_offset_a = 0;
-  size_t output_offset_a = 0;
-  for (int a = 0; a < extended_output_shape.Dims(0); ++a) {
-    size_t input1_offset_d = input1_offset_a;
-    size_t input2_offset_d = input2_offset_a;
-    size_t output_offset_d = output_offset_a;
-    for (int d = 0; d < extended_output_shape.Dims(1); ++d) {
-      size_t input1_offset_b = input1_offset_d;
-      size_t input2_offset_b = input2_offset_d;
-      size_t output_offset_b = output_offset_d;
-      for (int b = 0; b < extended_output_shape.Dims(2); ++b) {
-        size_t input1_offset_y = input1_offset_b;
-        size_t input2_offset_y = input2_offset_b;
-        size_t output_offset_y = output_offset_b;
-        for (int y = 0; y < extended_output_shape.Dims(3); ++y) {
-          size_t input1_offset_x = input1_offset_y;
-          size_t input2_offset_x = input2_offset_y;
-          size_t output_offset_x = output_offset_y;
-          for (int x = 0; x < extended_output_shape.Dims(4); ++x) {
-            size_t input1_offset_c = input1_offset_x;
-            size_t input2_offset_c = input2_offset_x;
-            size_t output_offset_c = output_offset_x;
-            for (int c = 0; c < extended_output_shape.Dims(5); ++c) {
-              output_data[output_offset_c] = ActivationFunctionWithMinMax(
-                  input1_data[input1_offset_c] + input2_data[input2_offset_c],
-                  activation_min, activation_max);
-              input1_offset_c += desc1.strides[5];
-              input2_offset_c += desc2.strides[5];
-              ++output_offset_c;
-            }
-            input1_offset_x += desc1.strides[4];
-            input2_offset_x += desc2.strides[4];
-            output_offset_x += extended_output_shape.Dims(5);
-          }
-          input1_offset_y += desc1.strides[3];
-          input2_offset_y += desc2.strides[3];
-          output_offset_y +=
-              extended_output_shape.Dims(4) * extended_output_shape.Dims(5);
-        }
-        input1_offset_b += desc1.strides[2];
-        input2_offset_b += desc2.strides[2];
-        output_offset_b += extended_output_shape.Dims(3) *
-                           extended_output_shape.Dims(4) *
-                           extended_output_shape.Dims(5);
-      }
-      input1_offset_d += desc1.strides[1];
-      input2_offset_d += desc2.strides[1];
-      output_offset_d +=
-          extended_output_shape.Dims(2) * extended_output_shape.Dims(3) *
-          extended_output_shape.Dims(4) * extended_output_shape.Dims(5);
+  size_t compressed_input1_stride[kMaxBroadcastDim];
+  size_t compressed_input2_stride[kMaxBroadcastDim];
+  size_t compressed_output_shape[kMaxBroadcastDim];
+  bool broadcastable_shape = ReduceDimensionsForBroadcast<kMaxBroadcastDim>(
+      input1_shape, input2_shape, compressed_input1_stride,
+      compressed_input2_stride, compressed_output_shape);
+  // Skip broadcasting for degenerate shapes.
+  if (!broadcastable_shape) {
+    return;
+  }
+
+  size_t input1_offset = 0;
+  size_t input2_offset = 0;
+  size_t output_offset = 0;
+  BroadcastAddRecursiveDimensions(
+      kMaxBroadcastDim - 1, &input1_offset, &input2_offset, &output_offset,
+      compressed_input1_stride, compressed_input2_stride,
+      compressed_output_shape, activation_min, activation_max, input1_data,
+      input2_data, output_data);
+}
+
+// This function is used for 8-bit as well as for 16-bit, but the accumulator
+// is 32-bit for both cases. The overflow does not happen due to the
+// choice of the shift (20 or 15, accordingly - see add.cc for more comments).
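+//
+// Sketch of the per-element arithmetic (int8 case, left_shift == 20): each
+// input is offset by its zero point, shifted up by 1 << 20, and rescaled to
+// a common scale with MultiplyByQuantizedMultiplierSmallerThanOneExp; the
+// rescaled values are summed, the sum is rescaled to the output scale, the
+// output zero point is added, and the result is clamped to the activation
+// range.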
+template <typename T>
+inline void BroadcastAddRecursiveDimensions(
+    const ArithmeticParams& params, int dimension, size_t* input1_offset_p,
+    size_t* input2_offset_p, size_t* output_offset,
+    size_t* compressed_input1_stride, size_t* compressed_input2_stride,
+    size_t* compressed_output_shape, const T* input1_data, const T* input2_data,
+    T* output_data) {
+  for (size_t c = 0; c < compressed_output_shape[dimension]; ++c) {
+    if (dimension > 0) {
+      size_t input1_offset_c = *input1_offset_p;
+      size_t input2_offset_c = *input2_offset_p;
+      BroadcastAddRecursiveDimensions(
+          params, dimension - 1, &input1_offset_c, &input2_offset_c,
+          output_offset, compressed_input1_stride, compressed_input2_stride,
+          compressed_output_shape, input1_data, input2_data, output_data);
+    } else {
+      TFLITE_DCHECK(dimension == 0);
+      const int32_t input1_val =
+          params.input1_offset + input1_data[*input1_offset_p];
+      const int32_t input2_val =
+          params.input2_offset + input2_data[*input2_offset_p];
+      const int32_t shifted_input1_val = input1_val * (1 << params.left_shift);
+      const int32_t shifted_input2_val = input2_val * (1 << params.left_shift);
+      const int32_t scaled_input1_val =
+          MultiplyByQuantizedMultiplierSmallerThanOneExp(
+              shifted_input1_val, params.input1_multiplier,
+              params.input1_shift);
+      const int32_t scaled_input2_val =
+          MultiplyByQuantizedMultiplierSmallerThanOneExp(
+              shifted_input2_val, params.input2_multiplier,
+              params.input2_shift);
+      const int32_t raw_sum = scaled_input1_val + scaled_input2_val;
+      const int32_t raw_output =
+          MultiplyByQuantizedMultiplierSmallerThanOneExp(
+              raw_sum, params.output_multiplier, params.output_shift) +
+          params.output_offset;
+      const int32_t clamped_output =
+          std::min(params.quantized_activation_max,
+                   std::max(params.quantized_activation_min, raw_output));
+      output_data[*output_offset] = static_cast<T>(clamped_output);
+      ++(*output_offset);
     }
-    input1_offset_a += desc1.strides[0];
-    input2_offset_a += desc2.strides[0];
-    output_offset_a +=
-        extended_output_shape.Dims(1) * extended_output_shape.Dims(2) *
-        extended_output_shape.Dims(3) * extended_output_shape.Dims(4) *
-        extended_output_shape.Dims(5);
+    *input1_offset_p += compressed_input1_stride[dimension];
+    *input2_offset_p += compressed_input2_stride[dimension];
   }
 }
 
@@ -293,12 +420,7 @@ BroadcastAdd6DSlow(const ArithmeticParams& params,
                    const RuntimeShape& input1_shape, const T* input1_data,
                    const RuntimeShape& input2_shape, const T* input2_data,
                    const RuntimeShape& output_shape, T* output_data) {
-  NdArrayDesc<6> desc1;
-  NdArrayDesc<6> desc2;
-  NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1,
-                                      &desc2);
-  const RuntimeShape extended_output_shape =
-      RuntimeShape::ExtendedShape(6, output_shape);
+  constexpr int kMaxBroadcastDim = 6;
 
   // In Tensorflow, the dimensions are canonically named (batch_number, row,
   // col, channel), with extents (batches, height, width, depth), with the
@@ -311,87 +433,24 @@ BroadcastAdd6DSlow(const ArithmeticParams& params,
   // We name our variables by their Tensorflow convention, but generate C code
   // nesting loops such that the innermost loop has the smallest stride for the
   // best cache behavior.
-  size_t input1_offset_a = 0;
-  size_t input2_offset_a = 0;
-  size_t output_offset_a = 0;
-  for (int a = 0; a < extended_output_shape.Dims(0); ++a) {
-    size_t input1_offset_d = input1_offset_a;
-    size_t input2_offset_d = input2_offset_a;
-    size_t output_offset_d = output_offset_a;
-    for (int d = 0; d < extended_output_shape.Dims(1); ++d) {
-      size_t input1_offset_b = input1_offset_d;
-      size_t input2_offset_b = input2_offset_d;
-      size_t output_offset_b = output_offset_d;
-      for (int b = 0; b < extended_output_shape.Dims(2); ++b) {
-        size_t input1_offset_y = input1_offset_b;
-        size_t input2_offset_y = input2_offset_b;
-        size_t output_offset_y = output_offset_b;
-        for (int y = 0; y < extended_output_shape.Dims(3); ++y) {
-          size_t input1_offset_x = input1_offset_y;
-          size_t input2_offset_x = input2_offset_y;
-          size_t output_offset_x = output_offset_y;
-          for (int x = 0; x < extended_output_shape.Dims(4); ++x) {
-            size_t input1_offset_c = input1_offset_x;
-            size_t input2_offset_c = input2_offset_x;
-            size_t output_offset_c = output_offset_x;
-            for (int c = 0; c < extended_output_shape.Dims(5); ++c) {
-              const int32_t input1_val =
-                  params.input1_offset + input1_data[input1_offset_c];
-              const int32_t input2_val =
-                  params.input2_offset + input2_data[input2_offset_c];
-              const int32_t shifted_input1_val =
-                  input1_val * (1 << params.left_shift);
-              const int32_t shifted_input2_val =
-                  input2_val * (1 << params.left_shift);
-              const int32_t scaled_input1_val =
-                  MultiplyByQuantizedMultiplierSmallerThanOneExp(
-                      shifted_input1_val, params.input1_multiplier,
-                      params.input1_shift);
-              const int32_t scaled_input2_val =
-                  MultiplyByQuantizedMultiplierSmallerThanOneExp(
-                      shifted_input2_val, params.input2_multiplier,
-                      params.input2_shift);
-              const int32_t raw_sum = scaled_input1_val + scaled_input2_val;
-              const int32_t raw_output =
-                  MultiplyByQuantizedMultiplierSmallerThanOneExp(
-                      raw_sum, params.output_multiplier, params.output_shift) +
-                  params.output_offset;
-              const int32_t clamped_output = std::min(
-                  params.quantized_activation_max,
-                  std::max(params.quantized_activation_min, raw_output));
-              output_data[output_offset_c] = static_cast<T>(clamped_output);
-              input1_offset_c += desc1.strides[5];
-              input2_offset_c += desc2.strides[5];
-              ++output_offset_c;
-            }
-            input1_offset_x += desc1.strides[4];
-            input2_offset_x += desc2.strides[4];
-            output_offset_x += extended_output_shape.Dims(5);
-          }
-          input1_offset_y += desc1.strides[3];
-          input2_offset_y += desc2.strides[3];
-          output_offset_y +=
-              extended_output_shape.Dims(4) * extended_output_shape.Dims(5);
-        }
-        input1_offset_b += desc1.strides[2];
-        input2_offset_b += desc2.strides[2];
-        output_offset_b += extended_output_shape.Dims(3) *
-                           extended_output_shape.Dims(4) *
-                           extended_output_shape.Dims(5);
-      }
-      input1_offset_d += desc1.strides[1];
-      input2_offset_d += desc2.strides[1];
-      output_offset_d +=
-          extended_output_shape.Dims(2) * extended_output_shape.Dims(3) *
-          extended_output_shape.Dims(4) * extended_output_shape.Dims(5);
-    }
-    input1_offset_a += desc1.strides[0];
-    input2_offset_a += desc2.strides[0];
-    output_offset_a +=
-        extended_output_shape.Dims(1) * extended_output_shape.Dims(2) *
-        extended_output_shape.Dims(3) * extended_output_shape.Dims(4) *
-        extended_output_shape.Dims(5);
+  size_t compressed_input1_stride[kMaxBroadcastDim];
+  size_t compressed_input2_stride[kMaxBroadcastDim];
+  size_t compressed_output_shape[kMaxBroadcastDim];
+  bool broadcastable_shape = ReduceDimensionsForBroadcast<kMaxBroadcastDim>(
+      input1_shape, input2_shape, compressed_input1_stride,
+      compressed_input2_stride, compressed_output_shape);
+  // Skip broadcasting for degenerate shapes.
+  if (!broadcastable_shape) {
+    return;
+  }
+
+  size_t input1_offset = 0;
+  size_t input2_offset = 0;
+  size_t output_offset = 0;
+  BroadcastAddRecursiveDimensions(
+      params, kMaxBroadcastDim - 1, &input1_offset, &input2_offset,
+      &output_offset, compressed_input1_stride, compressed_input2_stride,
+      compressed_output_shape, input1_data, input2_data, output_data);
 }
 
 template
diff --git a/tensorflow/lite/kernels/internal/reference/comparisons.h b/tensorflow/lite/kernels/internal/reference/comparisons.h
index 35583195551..366b378c825 100644
--- a/tensorflow/lite/kernels/internal/reference/comparisons.h
+++ b/tensorflow/lite/kernels/internal/reference/comparisons.h
@@ -257,12 +257,12 @@ inline void BroadcastComparison4DSlowWithScaling(
         op_params, input1_shape, input1_data, input2_shape, input2_data,      \
         output_shape, output_data);                                           \
   }
-TFLITE_COMPARISON_OP(Equal);
-TFLITE_COMPARISON_OP(NotEqual);
-TFLITE_COMPARISON_OP(Greater);
-TFLITE_COMPARISON_OP(GreaterEqual);
-TFLITE_COMPARISON_OP(Less);
-TFLITE_COMPARISON_OP(LessEqual);
+TFLITE_COMPARISON_OP(Equal)
+TFLITE_COMPARISON_OP(NotEqual)
+TFLITE_COMPARISON_OP(Greater)
+TFLITE_COMPARISON_OP(GreaterEqual)
+TFLITE_COMPARISON_OP(Less)
+TFLITE_COMPARISON_OP(LessEqual)
 #undef TFLITE_COMPARISON_OP
 
 }  // namespace reference_ops
diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/add.h b/tensorflow/lite/kernels/internal/reference/integer_ops/add.h
index 579964dc30d..c2a0e0f082c 100644
--- a/tensorflow/lite/kernels/internal/reference/integer_ops/add.h
+++ b/tensorflow/lite/kernels/internal/reference/integer_ops/add.h
@@ -16,6 +16,7 @@ limitations under the License.
 #define TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_ADD_H_
 
 #include
+#include <cstddef>
 #include "tensorflow/lite/kernels/internal/common.h"
@@ -35,7 +36,30 @@ inline void CheckArithmeticParams(const ArithmeticParams& params) {
   TFLITE_DCHECK_LE(-params.input2_offset, std::numeric_limits<int8_t>::max());
 }
 
-// TODO(b/270589088): move to a more appropriate file (b/270589088#comment2)
+// TODO: b/270589088 - move to a more appropriate file (b/270589088#comment2)
+template <typename T>
+void BroadcastInput1(int size, const ArithmeticParams& params,
+                     const T* input1_data, const T* input2_data, T* output_data,
+                     void (*check_arithmetic_params)(const ArithmeticParams&),
+                     T (*binary_func)(T, T, const ArithmeticParams&)) {
+  CheckArithmeticParams(params);
+  for (int i = 0; i < size; ++i) {
+    output_data[i] = binary_func(input1_data[0], input2_data[i], params);
+  }
+}
+
+template <typename T>
+void BroadcastInput2(int size, const ArithmeticParams& params,
+                     const T* input1_data, const T* input2_data, T* output_data,
+                     void (*check_arithmetic_params)(const ArithmeticParams&),
+                     T (*binary_func)(T, T, const ArithmeticParams&)) {
+  CheckArithmeticParams(params);
+  for (int i = 0; i < size; ++i) {
+    output_data[i] = binary_func(input1_data[i], input2_data[0], params);
+  }
+}
+
+// TODO: b/270589088 - move to a more appropriate file (b/270589088#comment2)
 template <typename T>
 void ElementWise(int size, const ArithmeticParams& params, const T* input1_data,
                  const T* input2_data, T* output_data,
@@ -46,7 +70,60 @@ void ElementWise(int size, const ArithmeticParams& params, const T* input1_data,
     output_data[i] = binary_func(input1_data[i], input2_data[i], params);
   }
 }
-// TODO(b/270589088): move to a more appropriate file. (b/270589088#comment2)
+// TODO: b/270589088 - move to a more appropriate file. (b/270589088#comment2)
+template <typename T>
+inline void BroadcastAddRecursiveDimensions(
+    const ArithmeticParams& params, int dimension, size_t* input1_offset_p,
+    size_t* input2_offset_p, size_t* output_offset,
+    size_t* compressed_input1_stride, size_t* compressed_input2_stride,
+    size_t* compressed_output_shape, const T* input1_data, const T* input2_data,
+    T* output_data, void (*check_arithmetic_params)(const ArithmeticParams&),
+    T (*binary_func)(T, T, const ArithmeticParams&)) {
+  if (dimension > 0) {
+    for (size_t c = 0; c < compressed_output_shape[dimension]; ++c) {
+      size_t input1_offset_c = *input1_offset_p;
+      size_t input2_offset_c = *input2_offset_p;
+      BroadcastAddRecursiveDimensions(
+          params, dimension - 1, &input1_offset_c, &input2_offset_c,
+          output_offset, compressed_input1_stride, compressed_input2_stride,
+          compressed_output_shape, input1_data, input2_data, output_data,
+          check_arithmetic_params, binary_func);
+      *input1_offset_p += compressed_input1_stride[dimension];
+      *input2_offset_p += compressed_input2_stride[dimension];
+    }
+  } else {
+    TFLITE_DCHECK(dimension == 0);
+    bool input1_is_broadcast = compressed_input1_stride[dimension] == 0;
+    bool input2_is_broadcast = compressed_input2_stride[dimension] == 0;
+    TFLITE_DCHECK(!(input1_is_broadcast && input2_is_broadcast));
+    const T* input1_data_ptr = input1_data + *input1_offset_p;
+    const T* input2_data_ptr = input2_data + *input2_offset_p;
+    T* output_data_ptr = output_data + *output_offset;
+    if (input1_is_broadcast) {
+      // input1 is broadcast.
+      BroadcastInput1(compressed_output_shape[dimension], params,
+                      input1_data_ptr, input2_data_ptr, output_data_ptr,
+                      check_arithmetic_params, binary_func);
+      *input2_offset_p += compressed_output_shape[dimension];
+    } else if (input2_is_broadcast) {
+      // input2 is broadcast.
+      BroadcastInput2(compressed_output_shape[dimension], params,
+                      input1_data_ptr, input2_data_ptr, output_data_ptr,
+                      check_arithmetic_params, binary_func);
+      *input1_offset_p += compressed_output_shape[dimension];
+    } else {
+      // Add element-wise.
+      ElementWise(compressed_output_shape[dimension], params, input1_data_ptr,
+                  input2_data_ptr, output_data_ptr, check_arithmetic_params,
+                  binary_func);
+      *input1_offset_p += compressed_output_shape[dimension];
+      *input2_offset_p += compressed_output_shape[dimension];
+    }
+    *output_offset += compressed_output_shape[dimension];
+  }
+}
+
+// TODO: b/270589088 - move to a more appropriate file. (b/270589088#comment2)
 template <typename T>
 void BroadcastBinaryFunction6DSlow(
     const ArithmeticParams& params, const RuntimeShape& input1_shape,
     const T* input1_data, const RuntimeShape& input2_shape,
     const T* input2_data, const RuntimeShape& output_shape, T* output_data,
     void (*check_arithmetic_params)(const ArithmeticParams&),
     T (*binary_func)(T, T, const ArithmeticParams&)) {
-  NdArrayDesc<6> desc1;
-  NdArrayDesc<6> desc2;
-  NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1,
-                                      &desc2);
-  const RuntimeShape extended_output_shape =
-      RuntimeShape::ExtendedShape(6, output_shape);
+  constexpr int kMaxBroadcastDim = 6;
 
   // In Tensorflow, the dimensions are canonically named (batch_number, row,
   // col, channel), with extents (batches, height, width, depth), with the
@@ -72,65 +144,25 @@ void BroadcastBinaryFunction6DSlow(
   // We name our variables by their Tensorflow convention, but generate C code
   // nesting loops such that the innermost loop has the smallest stride for the
   // best cache behavior.
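+  // The element-wise kernel arrives as a plain function pointer
+  // (binary_func), so the same traversal can serve add, sub, or any other
+  // binary op with a (T, T, const ArithmeticParams&) -> T signature.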
-  size_t input1_offset_a = 0;
-  size_t input2_offset_a = 0;
-  size_t output_offset_a = 0;
-  for (int a = 0; a < extended_output_shape.Dims(0); ++a) {
-    size_t input1_offset_d = input1_offset_a;
-    size_t input2_offset_d = input2_offset_a;
-    size_t output_offset_d = output_offset_a;
-    for (int d = 0; d < extended_output_shape.Dims(1); ++d) {
-      size_t input1_offset_b = input1_offset_d;
-      size_t input2_offset_b = input2_offset_d;
-      size_t output_offset_b = output_offset_d;
-      for (int b = 0; b < extended_output_shape.Dims(2); ++b) {
-        size_t input1_offset_y = input1_offset_b;
-        size_t input2_offset_y = input2_offset_b;
-        size_t output_offset_y = output_offset_b;
-        for (int y = 0; y < extended_output_shape.Dims(3); ++y) {
-          size_t input1_offset_x = input1_offset_y;
-          size_t input2_offset_x = input2_offset_y;
-          size_t output_offset_x = output_offset_y;
-          for (int x = 0; x < extended_output_shape.Dims(4); ++x) {
-            size_t input1_offset_c = input1_offset_x;
-            size_t input2_offset_c = input2_offset_x;
-            size_t output_offset_c = output_offset_x;
-            for (int c = 0; c < extended_output_shape.Dims(5); ++c) {
-              output_data[output_offset_c] =
-                  binary_func(input1_data[input1_offset_c],
-                              input2_data[input2_offset_c], params);
-              input1_offset_c += desc1.strides[5];
-              input2_offset_c += desc2.strides[5];
-              ++output_offset_c;
-            }
-            input1_offset_x += desc1.strides[4];
-            input2_offset_x += desc2.strides[4];
-            output_offset_x += extended_output_shape.Dims(5);
-          }
-          input1_offset_y += desc1.strides[3];
-          input2_offset_y += desc2.strides[3];
-          output_offset_y +=
-              extended_output_shape.Dims(4) * extended_output_shape.Dims(5);
-        }
-        input1_offset_b += desc1.strides[2];
-        input2_offset_b += desc2.strides[2];
-        output_offset_b += extended_output_shape.Dims(3) *
-                           extended_output_shape.Dims(4) *
-                           extended_output_shape.Dims(5);
-      }
-      input1_offset_d += desc1.strides[1];
-      input2_offset_d += desc2.strides[1];
-      output_offset_d +=
-          extended_output_shape.Dims(2) * extended_output_shape.Dims(3) *
-          extended_output_shape.Dims(4) * extended_output_shape.Dims(5);
-    }
-    input1_offset_a += desc1.strides[0];
-    input2_offset_a += desc2.strides[0];
-    output_offset_a +=
-        extended_output_shape.Dims(1) * extended_output_shape.Dims(2) *
-        extended_output_shape.Dims(3) * extended_output_shape.Dims(4) *
-        extended_output_shape.Dims(5);
+  size_t compressed_input1_stride[kMaxBroadcastDim];
+  size_t compressed_input2_stride[kMaxBroadcastDim];
+  size_t compressed_output_shape[kMaxBroadcastDim];
+  bool broadcastable_shape = ReduceDimensionsForBroadcast<kMaxBroadcastDim>(
+      input1_shape, input2_shape, compressed_input1_stride,
+      compressed_input2_stride, compressed_output_shape);
+  // Skip broadcasting for degenerate shapes.
+  if (!broadcastable_shape) {
+    return;
+  }
+
+  size_t input1_offset = 0;
+  size_t input2_offset = 0;
+  size_t output_offset = 0;
+  BroadcastAddRecursiveDimensions(
+      params, kMaxBroadcastDim - 1, &input1_offset, &input2_offset,
+      &output_offset, compressed_input1_stride, compressed_input2_stride,
+      compressed_output_shape, input1_data, input2_data, output_data,
+      check_arithmetic_params, binary_func);
 }
 
 template
diff --git a/tensorflow/lite/kernels/internal/reference/integer_ops/mul.h b/tensorflow/lite/kernels/internal/reference/integer_ops/mul.h
index 05066184c14..a57056d5b17 100644
--- a/tensorflow/lite/kernels/internal/reference/integer_ops/mul.h
+++ b/tensorflow/lite/kernels/internal/reference/integer_ops/mul.h
@@ -24,6 +24,9 @@ limitations under the License.
 namespace tflite {
 namespace reference_integer_ops {
 
+// Maximum dimension supported by the broadcast mul operation.
+constexpr int kMaxMulBroadcastDim = 6;
+
 template <typename InputType, typename OutputType>
 void MulElementwise(int size, const ArithmeticParams& params,
                     const InputType* input1_data, const InputType* input2_data,
@@ -88,46 +91,104 @@ inline void Mul(const ArithmeticParams& params,
 }
 
 template <typename T>
-inline void BroadcastMul4DSlow(
+inline void BroadcastMul6DSlow(
     const ArithmeticParams& params, const RuntimeShape& input1_shape,
     const T* input1_data, const RuntimeShape& input2_shape,
     const T* input2_data, const RuntimeShape& output_shape, T* output_data) {
-  ruy::profiler::ScopeLabel label("BroadcastMul4DSlow");
+  ruy::profiler::ScopeLabel label("BroadcastMul6DSlow");
 
-  NdArrayDesc<4> desc1;
-  NdArrayDesc<4> desc2;
+  NdArrayDesc<kMaxMulBroadcastDim> desc1;
+  NdArrayDesc<kMaxMulBroadcastDim> desc2;
   // The input shapes are extended as part of NdArrayDesc initialization.
   NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1,
                                       &desc2);
   const RuntimeShape extended_output_shape =
-      RuntimeShape::ExtendedShape(4, output_shape);
-
-  for (int b = 0; b < extended_output_shape.Dims(0); ++b) {
-    for (int y = 0; y < extended_output_shape.Dims(1); ++y) {
-      for (int x = 0; x < extended_output_shape.Dims(2); ++x) {
-        for (int c = 0; c < extended_output_shape.Dims(3); ++c) {
-          const int32_t input1_val =
-              params.input1_offset +
-              input1_data[SubscriptToIndex(desc1, b, y, x, c)];
-          const int32_t input2_val =
-              params.input2_offset +
-              input2_data[SubscriptToIndex(desc2, b, y, x, c)];
-          const int32_t unclamped_result =
-              params.output_offset +
-              MultiplyByQuantizedMultiplier(input1_val * input2_val,
-                                            params.output_multiplier,
-                                            params.output_shift);
-          const int32_t clamped_output = std::min(
-              params.quantized_activation_max,
-              std::max(params.quantized_activation_min, unclamped_result));
-          output_data[Offset(extended_output_shape, b, y, x, c)] =
-              static_cast<T>(clamped_output);
+      RuntimeShape::ExtendedShape(kMaxMulBroadcastDim, output_shape);
+  // Cache output shape dimensions.
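+  // (Copying the extents into a plain array avoids re-reading them through
+  // RuntimeShape accessors on every loop iteration; ExtendedShape guarantees
+  // exactly kMaxMulBroadcastDim dimensions, so the memcpy is fully defined.)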
+  int32_t extended_output_shape_dims[kMaxMulBroadcastDim];
+  std::memcpy(extended_output_shape_dims, extended_output_shape.DimsData(),
+              sizeof(extended_output_shape_dims));
+
+  size_t input1_offset_a = 0;
+  size_t input2_offset_a = 0;
+  size_t output_offset_a = 0;
+  for (int a = 0; a < extended_output_shape_dims[0]; ++a) {
+    size_t input1_offset_d = input1_offset_a;
+    size_t input2_offset_d = input2_offset_a;
+    size_t output_offset_d = output_offset_a;
+    for (int d = 0; d < extended_output_shape_dims[1]; ++d) {
+      size_t input1_offset_b = input1_offset_d;
+      size_t input2_offset_b = input2_offset_d;
+      size_t output_offset_b = output_offset_d;
+      for (int b = 0; b < extended_output_shape_dims[2]; ++b) {
+        size_t input1_offset_y = input1_offset_b;
+        size_t input2_offset_y = input2_offset_b;
+        size_t output_offset_y = output_offset_b;
+        for (int y = 0; y < extended_output_shape_dims[3]; ++y) {
+          size_t input1_offset_x = input1_offset_y;
+          size_t input2_offset_x = input2_offset_y;
+          size_t output_offset_x = output_offset_y;
+          for (int x = 0; x < extended_output_shape_dims[4]; ++x) {
+            size_t input1_offset_c = input1_offset_x;
+            size_t input2_offset_c = input2_offset_x;
+            size_t output_offset_c = output_offset_x;
+            for (int c = 0; c < extended_output_shape_dims[5]; ++c) {
+              const int32_t input1_val =
+                  params.input1_offset + input1_data[input1_offset_c];
+              const int32_t input2_val =
+                  params.input2_offset + input2_data[input2_offset_c];
+              const int32_t unclamped_result =
+                  params.output_offset +
+                  MultiplyByQuantizedMultiplier(input1_val * input2_val,
+                                                params.output_multiplier,
+                                                params.output_shift);
+              const int32_t clamped_output = std::min(
+                  params.quantized_activation_max,
+                  std::max(params.quantized_activation_min, unclamped_result));
+              output_data[output_offset_c] = static_cast<T>(clamped_output);
+              input1_offset_c += desc1.strides[5];
+              input2_offset_c += desc2.strides[5];
+              ++output_offset_c;
+            }
+            input1_offset_x += desc1.strides[4];
+            input2_offset_x += desc2.strides[4];
+            output_offset_x += extended_output_shape_dims[5];
+          }
+          input1_offset_y += desc1.strides[3];
+          input2_offset_y += desc2.strides[3];
+          output_offset_y +=
+              extended_output_shape_dims[4] * extended_output_shape_dims[5];
         }
+        input1_offset_b += desc1.strides[2];
+        input2_offset_b += desc2.strides[2];
+        output_offset_b += extended_output_shape_dims[3] *
+                           extended_output_shape_dims[4] *
+                           extended_output_shape_dims[5];
       }
+      input1_offset_d += desc1.strides[1];
+      input2_offset_d += desc2.strides[1];
+      output_offset_d +=
+          extended_output_shape_dims[2] * extended_output_shape_dims[3] *
+          extended_output_shape_dims[4] * extended_output_shape_dims[5];
     }
+    input1_offset_a += desc1.strides[0];
+    input2_offset_a += desc2.strides[0];
+    output_offset_a +=
+        extended_output_shape_dims[1] * extended_output_shape_dims[2] *
+        extended_output_shape_dims[3] * extended_output_shape_dims[4] *
+        extended_output_shape_dims[5];
   }
 }
 
+template <typename T>
+inline void BroadcastMul4DSlow(
+    const ArithmeticParams& params, const RuntimeShape& input1_shape,
+    const T* input1_data, const RuntimeShape& input2_shape,
+    const T* input2_data, const RuntimeShape& output_shape, T* output_data) {
+  BroadcastMul6DSlow(params, input1_shape, input1_data, input2_shape,
+                     input2_data, output_shape, output_data);
+}
+
 }  // namespace reference_integer_ops
 }  // namespace tflite
 
 #endif  // TENSORFLOW_LITE_KERNELS_INTERNAL_REFERENCE_INTEGER_OPS_MUL_H_
diff --git a/tensorflow/lite/kernels/internal/reference/mul.h b/tensorflow/lite/kernels/internal/reference/mul.h
index 2767fef26f6..fca74a32c12 100644
--- a/tensorflow/lite/kernels/internal/reference/mul.h
+++ b/tensorflow/lite/kernels/internal/reference/mul.h
@@ -24,6 +24,9 @@
 namespace tflite {
 namespace reference_ops {
 
+// Maximum dimension supported by the broadcast mul operation.
+constexpr int kMaxMulBroadcastDim = 6;
+
 // Element-wise mul that can often be used for inner loop of broadcast Mul as
 // well as the non-broadcast Mul.
 inline void MulElementwise(int size, const ArithmeticParams& params,
@@ -88,128 +91,174 @@ inline void Mul(const ArithmeticParams& params,
   MulElementwise(flat_size, params, input1_data, input2_data, output_data);
 }
 
-inline void BroadcastMul4DSlow(const ArithmeticParams& params,
+template <typename T, typename F>
+void BroadcastMulRecursiveDimensions(
+    const ArithmeticParams& params, int dimension, const T* input1_data,
+    const T* input2_data, T* output_data, size_t* input1_offset_p,
+    size_t* input2_offset_p, size_t* output_offset,
+    const NdArrayDesc<kMaxMulBroadcastDim>& desc1,
+    const NdArrayDesc<kMaxMulBroadcastDim>& desc2,
+    const int32_t extended_output_shape_dims[kMaxMulBroadcastDim],
+    F binary_func) {
+  if (dimension == kMaxMulBroadcastDim - 1) {
+    for (int c = 0; c < extended_output_shape_dims[dimension]; ++c) {
+      const T input1_val = input1_data[*input1_offset_p];
+      const T input2_val = input2_data[*input2_offset_p];
+      output_data[*output_offset] = binary_func(params, input1_val, input2_val);
+      *input1_offset_p += desc1.strides[dimension];
+      *input2_offset_p += desc2.strides[dimension];
+      ++(*output_offset);
+    }
+  } else {
+    for (int a = 0; a < extended_output_shape_dims[dimension]; ++a) {
+      size_t input1_offset_c = *input1_offset_p;
+      size_t input2_offset_c = *input2_offset_p;
+      BroadcastMulRecursiveDimensions(
+          params, dimension + 1, input1_data, input2_data, output_data,
+          &input1_offset_c, &input2_offset_c, output_offset, desc1, desc2,
+          extended_output_shape_dims, binary_func);
+      *input1_offset_p += desc1.strides[dimension];
+      *input2_offset_p += desc2.strides[dimension];
+    }
+  }
+}
+
+inline void BroadcastMul6DSlow(const ArithmeticParams& params,
                                const RuntimeShape& input1_shape,
                                const uint8_t* input1_data,
                                const RuntimeShape& input2_shape,
                                const uint8_t* input2_data,
                                const RuntimeShape& output_shape,
                                uint8_t* output_data) {
-  NdArrayDesc<4> desc1;
-  NdArrayDesc<4> desc2;
+  NdArrayDesc<kMaxMulBroadcastDim> desc1;
+  NdArrayDesc<kMaxMulBroadcastDim> desc2;
   NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1,
                                       &desc2);
   const RuntimeShape extended_output_shape =
-      RuntimeShape::ExtendedShape(4, output_shape);
-
-  for (int b = 0; b < extended_output_shape.Dims(0); ++b) {
-    for (int y = 0; y < extended_output_shape.Dims(1); ++y) {
-      for (int x = 0; x < extended_output_shape.Dims(2); ++x) {
-        for (int c = 0; c < extended_output_shape.Dims(3); ++c) {
-          const int32_t input1_val =
-              params.input1_offset +
-              input1_data[SubscriptToIndex(desc1, b, y, x, c)];
-          const int32_t input2_val =
-              params.input2_offset +
-              input2_data[SubscriptToIndex(desc2, b, y, x, c)];
-          const int32_t unclamped_result =
-              params.output_offset +
-              MultiplyByQuantizedMultiplier(input1_val * input2_val,
-                                            params.output_multiplier,
-                                            params.output_shift);
-          const int32_t clamped_output = std::min(
-              params.quantized_activation_max,
-              std::max(params.quantized_activation_min, unclamped_result));
-          output_data[Offset(extended_output_shape, b, y, x, c)] =
-              static_cast<uint8_t>(clamped_output);
-        }
-      }
-    }
-  }
+      RuntimeShape::ExtendedShape(kMaxMulBroadcastDim, output_shape);
+  // Cache output shape dimensions.
+  int32_t extended_output_shape_dims[kMaxMulBroadcastDim];
+  std::memcpy(extended_output_shape_dims, extended_output_shape.DimsData(),
+              sizeof(extended_output_shape_dims));
+
+  size_t input1_offset = 0;
+  size_t input2_offset = 0;
+  size_t output_offset = 0;
+  BroadcastMulRecursiveDimensions(
+      params, 0, input1_data, input2_data, output_data, &input1_offset,
+      &input2_offset, &output_offset, desc1, desc2, extended_output_shape_dims,
+      [](const ArithmeticParams& params, const uint8_t input1_val,
+         const uint8_t input2_val) {
+        const int32_t offsetted_input1_val = params.input1_offset + input1_val;
+        const int32_t offsetted_input2_val = params.input2_offset + input2_val;
+        const int32_t unclamped_result =
+            params.output_offset +
+            MultiplyByQuantizedMultiplier(
+                offsetted_input1_val * offsetted_input2_val,
+                params.output_multiplier, params.output_shift);
+        const int32_t clamped_output = std::min(
+            params.quantized_activation_max,
+            std::max(params.quantized_activation_min, unclamped_result));
+        return static_cast<uint8_t>(clamped_output);
+      });
 }
 
 template <typename T, bool enable_for_short_integers = false>
 inline typename std::enable_if<
     !is_small_integer<T>::value || enable_for_short_integers, void>::type
-BroadcastMul4DSlow(const ArithmeticParams& params,
+BroadcastMul6DSlow(const ArithmeticParams& params,
                    const RuntimeShape& unextended_input1_shape,
                    const T* input1_data,
                    const RuntimeShape& unextended_input2_shape,
                    const T* input2_data,
                    const RuntimeShape& unextended_output_shape,
                    T* output_data) {
-  T output_activation_min;
-  T output_activation_max;
-  GetActivationParams(params, &output_activation_min, &output_activation_max);
-
-  TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4);
-  TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 4);
-  TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4);
-  const RuntimeShape output_shape =
-      RuntimeShape::ExtendedShape(4, unextended_output_shape);
-
-  NdArrayDesc<4> desc1;
-  NdArrayDesc<4> desc2;
+  TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 6);
+  TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 6);
+  TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 6);
+  NdArrayDesc<kMaxMulBroadcastDim> desc1;
+  NdArrayDesc<kMaxMulBroadcastDim> desc2;
   NdArrayDescsForElementwiseBroadcast(unextended_input1_shape,
                                       unextended_input2_shape, &desc1, &desc2);
+  const RuntimeShape extended_output_shape =
+      RuntimeShape::ExtendedShape(kMaxMulBroadcastDim, unextended_output_shape);
+  // Cache output shape dimensions.
+  int32_t extended_output_shape_dims[kMaxMulBroadcastDim];
+  std::memcpy(extended_output_shape_dims, extended_output_shape.DimsData(),
+              sizeof(extended_output_shape_dims));
 
   // In Tensorflow, the dimensions are canonically named (batch_number, row,
   // col, channel), with extents (batches, height, width, depth), with the
-  // trailing dimension changing most rapidly (channels has the smallest stride,
-  // typically 1 element).
+  // trailing dimension changing most rapidly (channels has the smallest
+  // stride, typically 1 element).
 
   // In generated C code, we store arrays with the dimensions reversed. The
   // first dimension has smallest stride.
 
   // We name our variables by their Tensorflow convention, but generate C code
-  // nesting loops such that the innermost loop has the smallest stride for the
-  // best cache behavior.
+  // nesting loops such that the innermost loop has the smallest stride for
+  // the best cache behavior.
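+  // Note: the lambda below re-derives the activation bounds per element;
+  // GetActivationParams only reads two fields, so compilers can be expected
+  // to hoist it out of the innermost loop.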
- for (int b = 0; b < output_shape.Dims(0); ++b) { - for (int y = 0; y < output_shape.Dims(1); ++y) { - for (int x = 0; x < output_shape.Dims(2); ++x) { - for (int c = 0; c < output_shape.Dims(3); ++c) { - output_data[Offset(output_shape, b, y, x, c)] = - ActivationFunctionWithMinMax( - input1_data[SubscriptToIndex(desc1, b, y, x, c)] * - input2_data[SubscriptToIndex(desc2, b, y, x, c)], - output_activation_min, output_activation_max); - } - } - } - } + // nesting loops such that the innermost loop has the smallest stride for + // the best cache behavior. + size_t input1_offset = 0; + size_t input2_offset = 0; + size_t output_offset = 0; + BroadcastMulRecursiveDimensions( + params, 0, input1_data, input2_data, output_data, &input1_offset, + &input2_offset, &output_offset, desc1, desc2, extended_output_shape_dims, + [](const ArithmeticParams& params, const T input1_val, + const T input2_val) { + T output_activation_min; + T output_activation_max; + GetActivationParams(params, &output_activation_min, + &output_activation_max); + return ActivationFunctionWithMinMax(input1_val * input2_val, + output_activation_min, + output_activation_max); + }); } -inline void BroadcastMul4DSlow(const ArithmeticParams& params, +inline void BroadcastMul6DSlow(const ArithmeticParams& params, const RuntimeShape& unextended_input1_shape, const std::complex* input1_data, const RuntimeShape& unextended_input2_shape, const std::complex* input2_data, const RuntimeShape& unextended_output_shape, std::complex* output_data) { - TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 4); - TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 4); - TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 4); - const RuntimeShape output_shape = - RuntimeShape::ExtendedShape(4, unextended_output_shape); - - NdArrayDesc<4> desc1; - NdArrayDesc<4> desc2; + TFLITE_DCHECK_LE(unextended_input1_shape.DimensionsCount(), 6); + TFLITE_DCHECK_LE(unextended_input2_shape.DimensionsCount(), 6); + TFLITE_DCHECK_LE(unextended_output_shape.DimensionsCount(), 6); + + NdArrayDesc desc1; + NdArrayDesc desc2; NdArrayDescsForElementwiseBroadcast(unextended_input1_shape, unextended_input2_shape, &desc1, &desc2); + const RuntimeShape extended_output_shape = + RuntimeShape::ExtendedShape(kMaxMulBroadcastDim, unextended_output_shape); + // Cache output shape dimensions. 
+ int32_t extended_output_shape_dims[kMaxMulBroadcastDim]; + std::memcpy(extended_output_shape_dims, extended_output_shape.DimsData(), + sizeof(extended_output_shape_dims)); - for (int b = 0; b < output_shape.Dims(0); ++b) { - for (int y = 0; y < output_shape.Dims(1); ++y) { - for (int x = 0; x < output_shape.Dims(2); ++x) { - for (int c = 0; c < output_shape.Dims(3); ++c) { - output_data[Offset(output_shape, b, y, x, c)] = - input1_data[SubscriptToIndex(desc1, b, y, x, c)] * - input2_data[SubscriptToIndex(desc2, b, y, x, c)]; - } - } - } - } + size_t input1_offset = 0; + size_t input2_offset = 0; + size_t output_offset = 0; + BroadcastMulRecursiveDimensions( + params, 0, input1_data, input2_data, output_data, &input1_offset, + &input2_offset, &output_offset, desc1, desc2, extended_output_shape_dims, + [](const ArithmeticParams& params, const std::complex input1_val, + const std::complex input2_val) { + return input1_val * input2_val; + }); +} + +template +inline void BroadcastMul4DSlow( + const ArithmeticParams& params, const RuntimeShape& input1_shape, + const T* input1_data, const RuntimeShape& input2_shape, + const T* input2_data, const RuntimeShape& output_shape, T* output_data) { + return BroadcastMul6DSlow(params, input1_shape, input1_data, input2_shape, + input2_data, output_shape, output_data); } } // namespace reference_ops diff --git a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc index d386203e460..7d40df42b33 100644 --- a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc +++ b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.cc @@ -157,7 +157,7 @@ void PortableMatrixBatchVectorMultiplyAccumulate( *result += dotprod * batch_scaling_factor; ++result; } // for row - } // for batch + } // for batch } void PortableMatrixBatchVectorMultiplyAccumulate( @@ -200,7 +200,7 @@ void PortableMatrixBatchVectorMultiplyAccumulate( *result += dotprod * scale; ++result; } // for row - } // for batch + } // for batch } void PortableSparseMatrixBatchVectorMultiplyAccumulate1x4( @@ -232,7 +232,8 @@ void PortableSparseMatrixBatchVectorMultiplyAccumulate1x16( const int32_t* __restrict__ indices, int m_rows, int m_cols, const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector, int n_batch, const int32_t input_offset, const int32_t output_multiplier, - const int32_t output_shift, const int32_t output_offset, + const int32_t output_shift, const int32_t* per_channel_scale, + const int32_t* per_channel_shift, const int32_t output_offset, const int32_t output_activation_min, const int32_t output_activation_max, int8_t* __restrict__ result) { const int kBlockSize = 16; @@ -252,8 +253,10 @@ void PortableSparseMatrixBatchVectorMultiplyAccumulate1x16( } } const int32_t bias_value = bias_vector != nullptr ? bias_vector[row] : 0; - dot_prod = MultiplyByQuantizedMultiplier(dot_prod + bias_value, - output_multiplier, output_shift); + dot_prod = MultiplyByQuantizedMultiplier( + dot_prod + bias_value, + per_channel_scale ? per_channel_scale[row] : output_multiplier, + per_channel_shift ? 
per_channel_shift[row] : output_shift); dot_prod += output_offset; result[batch * m_rows + row] = static_cast(ActivationFunctionWithMinMax( @@ -294,7 +297,8 @@ void PortableSparseMatrixBatchVectorMultiplyAccumulate( void PortableSparseMatrixBatchVectorMultiplyAccumulate( const int8_t* __restrict__ matrix, const uint8_t* ledger, const int m_rows, const int m_cols, const int8_t* __restrict__ vectors, - const float* scaling_factors, int n_batch, float* __restrict__ result) { + const float* scaling_factors, int n_batch, float* __restrict__ result, + const float* per_channel_scale) { static const int kBlockSize = 16; TFLITE_DCHECK_EQ( // NOLINT m_cols % kBlockSize, 0); @@ -318,10 +322,14 @@ void PortableSparseMatrixBatchVectorMultiplyAccumulate( for (int c = 0; c < kBlockSize; c++) { dotprod += (*row_ptr++) * (*vector_block_ptr++); } // for block - } // for num_nonzero_blocks - result[batch * m_rows + row] += dotprod * batch_scaling_factor; + } // for num_nonzero_blocks + float scaling_factor = batch_scaling_factor; + if (per_channel_scale) { + scaling_factor *= per_channel_scale[row]; + } + result[batch * m_rows + row] += dotprod * scaling_factor; } // for row - } // for batch + } // for batch } template diff --git a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h index 0416db093a4..7c623f71007 100644 --- a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h +++ b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils.h @@ -116,23 +116,26 @@ void SparseMatrixBatchVectorMultiplyAccumulate1x16( const int32_t* __restrict__ indices, int m_rows, int m_cols, const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector, int n_batch, const int32_t input_offset, const int32_t output_multiplier, - const int32_t output_shift, const int32_t output_offset, + const int32_t output_shift, const int32_t* per_channel_scale, + const int32_t* per_channel_shift, const int32_t output_offset, const int32_t output_activation_min, const int32_t output_activation_max, int8_t* __restrict__ result) { PortableSparseMatrixBatchVectorMultiplyAccumulate1x16( matrix, segments, indices, m_rows, m_cols, vector, bias_vector, n_batch, - input_offset, output_multiplier, output_shift, output_offset, - output_activation_min, output_activation_max, result); + input_offset, output_multiplier, output_shift, per_channel_scale, + per_channel_shift, output_offset, output_activation_min, + output_activation_max, result); } void SparseMatrixBatchVectorMultiplyAccumulate( const int8_t* __restrict__ matrix, const uint8_t* ledger, const int m_rows, const int m_cols, const int8_t* __restrict__ vectors, - const float* scaling_factors, int n_batch, float* __restrict__ result) { + const float* scaling_factors, int n_batch, float* __restrict__ result, + const float* per_channel_scale) { PortableSparseMatrixBatchVectorMultiplyAccumulate( - matrix, ledger, m_rows, m_cols, vectors, scaling_factors, n_batch, - result); + matrix, ledger, m_rows, m_cols, vectors, scaling_factors, n_batch, result, + per_channel_scale); } void MatrixBatchVectorMultiplyAccumulate( diff --git a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h index 6c404d5eccc..11765ec7379 100644 --- a/tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h +++ b/tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h @@ -92,14 +92,16 @@ 
void PortableSparseMatrixBatchVectorMultiplyAccumulate1x16( const int32_t* __restrict__ indices, int m_rows, int m_cols, const int8_t* __restrict__ vector, const int32_t* __restrict__ bias_vector, int n_batch, const int32_t input_offset, const int32_t output_multiplier, - const int32_t output_shift, const int32_t output_offset, + int32_t output_shift, const int32_t* per_channel_scale, + const int32_t* per_channel_shift, int32_t output_offset, const int32_t output_activation_min, const int32_t output_activation_max, int8_t* __restrict__ result); void PortableSparseMatrixBatchVectorMultiplyAccumulate( const int8_t* __restrict__ matrix, const uint8_t* ledger, const int m_rows, const int m_cols, const int8_t* __restrict__ vectors, - const float* scaling_factors, int n_batch, float* __restrict__ result); + const float* scaling_factors, int n_batch, float* __restrict__ result, + const float* per_channel_scale); // Dot product of two vectors. float PortableVectorVectorDotProduct(const float* vector1, const float* vector2, diff --git a/tensorflow/lite/kernels/internal/reference/softmax.h b/tensorflow/lite/kernels/internal/reference/softmax.h index c09a7eae813..2930217b61f 100644 --- a/tensorflow/lite/kernels/internal/reference/softmax.h +++ b/tensorflow/lite/kernels/internal/reference/softmax.h @@ -115,6 +115,9 @@ inline void Softmax(const SoftmaxParams& params, FixedPoint0 shifted_scale = FixedPoint0::FromRaw(GetReciprocal( sum_of_exps.raw(), kAccumulationIntegerBits, &num_bits_over_unit)); + const int exponent = num_bits_over_unit + 31 - (sizeof(OutputT) * 8); + TFLITE_CHECK(0 <= exponent && exponent <= 31); + for (int c = 0; c < depth; ++c) { int32_t input_diff = static_cast(input_data[i * depth + c]) - max_in_row; @@ -127,8 +130,7 @@ inline void Softmax(const SoftmaxParams& params, FixedPoint0 exp_in_0 = exp_on_negative_values(scaled_diff_f8); int32_t unsat_output = gemmlowp::RoundingDivideByPOT( - (shifted_scale * exp_in_0).raw(), - num_bits_over_unit + 31 - (sizeof(OutputT) * 8)); + (shifted_scale * exp_in_0).raw(), exponent); const int32_t shifted_output = unsat_output + diff --git a/tensorflow/lite/kernels/internal/reference/sub.h b/tensorflow/lite/kernels/internal/reference/sub.h index d0ebc95ada0..1a74aebeafc 100644 --- a/tensorflow/lite/kernels/internal/reference/sub.h +++ b/tensorflow/lite/kernels/internal/reference/sub.h @@ -18,6 +18,7 @@ limitations under the License. 
#include #include +#include #include #include "ruy/profiler/instrumentation.h" // from @ruy @@ -29,100 +30,179 @@ namespace tflite { namespace reference_ops { -inline void SubNonBroadcast(const ArithmeticParams& params, - const RuntimeShape& input1_shape, - const float* input1_data, - const RuntimeShape& input2_shape, - const float* input2_data, - const RuntimeShape& output_shape, - float* output_data) { - const int flat_size = - MatchingElementsSize(input1_shape, input2_shape, output_shape); - for (int i = 0; i < flat_size; ++i) { - output_data[i] = ActivationFunctionWithMinMax( - input1_data[i] - input2_data[i], params.float_activation_min, - params.float_activation_max); +template +struct SubImpl { + template + static void BroadcastInput1(const ArithmeticParams& params, + const T* input1_data, const T* input2_data, + T* output_data, size_t size, F binary_func) { + for (size_t c = 0; c < size; ++c) { + output_data[c] = binary_func(input1_data[0], input2_data[c], params); + } } -} -inline void SubNonBroadcast(const ArithmeticParams& params, - const RuntimeShape& input1_shape, - const int32_t* input1_data, - const RuntimeShape& input2_shape, - const int32_t* input2_data, - const RuntimeShape& output_shape, - int32_t* output_data) { - const int flat_size = - MatchingElementsSize(input1_shape, input2_shape, output_shape); - for (int i = 0; i < flat_size; ++i) { - output_data[i] = ActivationFunctionWithMinMax( - input1_data[i] - input2_data[i], params.quantized_activation_min, - params.quantized_activation_max); + template + static void BroadcastInput2(const ArithmeticParams& params, + const T* input1_data, const T* input2_data, + T* output_data, size_t size, F binary_func) { + for (size_t c = 0; c < size; ++c) { + output_data[c] = binary_func(input1_data[c], input2_data[0], params); + } } -} -// TODO(b/151345304): We can implement BroadcastSub on buffers of arbitrary -// dimensionality if the runtime code does a single loop over one dimension -// that handles broadcasting as the base case. The code generator would then -// generate max(D1, D2) nested for loops. 
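The three `SubImpl` entry points cover the only patterns the innermost compressed dimension can take: input1 held fixed, input2 held fixed, or both advancing. Hoisting that choice out of the inner loop is what makes the NEON specialization below possible. A rough standalone sketch of the idea (the `Inner` helper and values are hypothetical, and the `ArithmeticParams` argument is dropped for brevity):

```
#include <cassert>
#include <cstddef>

// Simplified stand-in for the three inner loops: a "fixed" (broadcast)
// input always reads element 0 while the other input advances.
template <typename T, typename F>
void Inner(const T* in1, bool in1_fixed, const T* in2, bool in2_fixed,
           T* out, size_t size, F binary_func) {
  for (size_t c = 0; c < size; ++c) {
    out[c] = binary_func(in1[in1_fixed ? 0 : c], in2[in2_fixed ? 0 : c]);
  }
}

int main() {
  const int a[4] = {10, 20, 30, 40};
  const int b[1] = {1};
  int out[4];
  // Corresponds to the BroadcastInput2 case: input2 is a scalar here.
  Inner(a, false, b, true, out, 4, [](int x, int y) { return x - y; });
  assert(out[0] == 9 && out[3] == 39);
  return 0;
}
```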
-template -inline void BroadcastSubSlow(const ArithmeticParams& params, - const RuntimeShape& input1_shape, - const float* input1_data, - const RuntimeShape& input2_shape, - const float* input2_data, - const RuntimeShape& output_shape, - float* output_data) { - ruy::profiler::ScopeLabel label("BroadcastSubSlow/float"); - TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); - NdArrayDesc desc1; - NdArrayDesc desc2; - NdArrayDesc output_desc; - NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, - &desc2); - CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); + template + static void ElementWise(const ArithmeticParams& params, const T* input1_data, + const T* input2_data, T* output_data, size_t size, + F binary_func) { + for (size_t c = 0; c < size; ++c) { + output_data[c] = binary_func(input1_data[c], input2_data[c], params); + } + } +}; + +template <> +struct SubImpl { + template + static void BroadcastInput1(const ArithmeticParams& params, + const int32_t* input1_data, + const int32_t* input2_data, int32_t* output_data, + size_t size, F binary_func) { + size_t c = 0; + int32_t activation_min, activation_max; + GetActivationParams(params, &activation_min, &activation_max); +#ifdef USE_NEON + const int32x4_t vmax = vdupq_n_s32(activation_max); + const int32x4_t vmin = vdupq_n_s32(activation_min); + const int32x4_t va = vdupq_n_s32(input1_data[0]); + for (; c + 4 <= size; c += 4) { + const int32x4_t vb = vld1q_s32(&input2_data[c]); + int32x4_t vres = vsubq_s32(va, vb); + vres = vmaxq_s32(vmin, vres); + vres = vminq_s32(vmax, vres); + vst1q_s32(&output_data[c], vres); + } +#endif + for (; c < size; ++c) { + output_data[c] = binary_func(input1_data[0], input2_data[c], params); + } + } - // In Tensorflow, the dimensions are canonically named (batch_number, row, - // col, channel), with extents (batches, height, width, depth), with the - // trailing dimension changing most rapidly (channels has the smallest stride, - // typically 1 element). - // - // In generated C code, we store arrays with the dimensions reversed. The - // first dimension has smallest stride. - // - // We name our variables by their Tensorflow convention, but generate C code - // nesting loops such that the innermost loop has the smallest stride for the - // best cache behavior. 
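The recurring layout comment is easiest to see with a concrete offset computation. A small sketch (hypothetical shape values) showing that in the flattened row-major buffer the trailing channel dimension has stride 1, which is why the innermost loop iterates channels:

```
#include <cassert>

// Row-major NHWC flattening: the trailing (channel) dimension has stride 1,
// so iterating channels in the innermost loop touches adjacent memory.
int Offset4D(const int dims[4], int b, int y, int x, int c) {
  return ((b * dims[1] + y) * dims[2] + x) * dims[3] + c;
}

int main() {
  const int dims[4] = {2, 4, 4, 3};  // batches, height, width, depth
  assert(Offset4D(dims, 0, 0, 0, 0) == 0);
  assert(Offset4D(dims, 0, 0, 0, 1) == 1);   // next channel: stride 1
  assert(Offset4D(dims, 0, 0, 1, 0) == 3);   // next column: stride = depth
  assert(Offset4D(dims, 1, 0, 0, 0) == 48);  // next batch: stride = 4*4*3
  return 0;
}
```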
- auto sub_func = [&](int indexes[N]) { - output_data[SubscriptToIndex(output_desc, indexes)] = - ActivationFunctionWithMinMax( - input1_data[SubscriptToIndex(desc1, indexes)] - - input2_data[SubscriptToIndex(desc2, indexes)], - params.float_activation_min, params.float_activation_max); - }; - NDOpsHelper(output_desc, sub_func); + template + static void BroadcastInput2(const ArithmeticParams& params, + const int32_t* input1_data, + const int32_t* input2_data, int32_t* output_data, + size_t size, F binary_func) { + size_t c = 0; + int32_t activation_min, activation_max; + GetActivationParams(params, &activation_min, &activation_max); +#ifdef USE_NEON + const int32x4_t vmax = vdupq_n_s32(activation_max); + const int32x4_t vmin = vdupq_n_s32(activation_min); + const int32x4_t vb = vdupq_n_s32(input2_data[0]); + for (; c + 4 <= size; c += 4) { + const int32x4_t va = vld1q_s32(&input1_data[c]); + int32x4_t vres = vsubq_s32(va, vb); + vres = vmaxq_s32(vmin, vres); + vres = vminq_s32(vmax, vres); + vst1q_s32(&output_data[c], vres); + } +#endif + for (; c < size; ++c) { + output_data[c] = binary_func(input1_data[c], input2_data[0], params); + } + } + + template + static void ElementWise(const ArithmeticParams& params, + const int32_t* input1_data, + const int32_t* input2_data, int32_t* output_data, + size_t size, F binary_func) { + size_t c = 0; + int32_t activation_min, activation_max; + GetActivationParams(params, &activation_min, &activation_max); +#ifdef USE_NEON + int32x4_t vmax = vdupq_n_s32(activation_max); + int32x4_t vmin = vdupq_n_s32(activation_min); + for (; c + 4 <= size; c += 4) { + const int32x4_t va = vld1q_s32(&input1_data[c]); + const int32x4_t vb = vld1q_s32(&input2_data[c]); + int32x4_t vres = vsubq_s32(va, vb); + vres = vmaxq_s32(vmin, vres); + vres = vminq_s32(vmax, vres); + vst1q_s32(&output_data[c], vres); + } +#endif + for (; c < size; ++c) { + output_data[c] = binary_func(input1_data[c], input2_data[c], params); + } + } +}; + +template +inline void BroadcastSubRecursiveDimensions( + int dimension, const ArithmeticParams& params, const T* input1_data, + const T* input2_data, T* output_data, size_t* input1_offset_p, + size_t* input2_offset_p, size_t* output_offset, + size_t* compressed_input1_stride, size_t* compressed_input2_stride, + size_t* compressed_output_shape, F binary_func) { + if (dimension > 0) { + for (size_t c = 0; c < compressed_output_shape[dimension]; ++c) { + size_t input1_offset_c = *input1_offset_p; + size_t input2_offset_c = *input2_offset_p; + BroadcastSubRecursiveDimensions( + dimension - 1, params, input1_data, input2_data, output_data, + &input1_offset_c, &input2_offset_c, output_offset, + compressed_input1_stride, compressed_input2_stride, + compressed_output_shape, binary_func); + *input1_offset_p += compressed_input1_stride[dimension]; + *input2_offset_p += compressed_input2_stride[dimension]; + } + } else { + TFLITE_DCHECK(dimension == 0); + bool input1_is_broadcast = compressed_input1_stride[dimension] == 0; + bool input2_is_broadcast = compressed_input2_stride[dimension] == 0; + TFLITE_DCHECK(!(input1_is_broadcast && input2_is_broadcast)); + const T* input1_data_ptr = input1_data + *input1_offset_p; + const T* input2_data_ptr = input2_data + *input2_offset_p; + T* output_data_ptr = output_data + *output_offset; + if (input1_is_broadcast) { + // input1 is broadcast. 
+      SubImpl<T>::BroadcastInput1(
+          params, input1_data_ptr, input2_data_ptr, output_data_ptr,
+          compressed_output_shape[dimension], binary_func);
+      *input2_offset_p += compressed_output_shape[dimension];
+    } else if (input2_is_broadcast) {
+      // input2 is broadcast.
+      SubImpl<T>::BroadcastInput2(
+          params, input1_data_ptr, input2_data_ptr, output_data_ptr,
+          compressed_output_shape[dimension], binary_func);
+      *input1_offset_p += compressed_output_shape[dimension];
+    } else {
+      // Subtract element-wise.
+      SubImpl<T>::ElementWise(params, input1_data_ptr, input2_data_ptr,
+                              output_data_ptr,
+                              compressed_output_shape[dimension], binary_func);
+      *input1_offset_p += compressed_output_shape[dimension];
+      *input2_offset_p += compressed_output_shape[dimension];
+    }
+    *output_offset += compressed_output_shape[dimension];
+  }
 }
 
-template <int N = 5>
-inline void BroadcastSubSlow(const ArithmeticParams& params,
-                             const RuntimeShape& input1_shape,
-                             const int32_t* input1_data,
-                             const RuntimeShape& input2_shape,
-                             const int32_t* input2_data,
-                             const RuntimeShape& output_shape,
-                             int32_t* output_data) {
-  ruy::profiler::ScopeLabel label("BroadcastSubSlow/int32_t");
-  TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N);
-  TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N);
-  TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N);
-  NdArrayDesc<N> desc1;
-  NdArrayDesc<N> desc2;
-  NdArrayDesc<N> output_desc;
-  NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1,
-                                      &desc2);
-  CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc);
+// TODO: b/296510380 - we may be able to factor this out to common.h for all
+// binary arithmetic ops (add, sub, mul).
+template <typename T, typename F>
+inline void BroadcastSubCommon(const ArithmeticParams& params,
+                               const RuntimeShape& input1_shape,
+                               const T* input1_data,
+                               const RuntimeShape& input2_shape,
+                               const T* input2_data,
+                               const RuntimeShape& output_shape, T* output_data,
+                               F binary_func) {
+  constexpr int kMaxBroadcastDim = 6;
+  TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), kMaxBroadcastDim);
+  TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), kMaxBroadcastDim);
+  TFLITE_DCHECK_LE(output_shape.DimensionsCount(), kMaxBroadcastDim);
 
   // In Tensorflow, the dimensions are canonically named (batch_number, row,
   // col, channel), with extents (batches, height, width, depth), with the
@@ -135,33 +215,6 @@ inline void BroadcastSubSlow(const ArithmeticParams& params,
   // We name our variables by their Tensorflow convention, but generate C code
   // nesting loops such that the innermost loop has the smallest stride for the
   // best cache behavior.
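As the TODO above suggests, `BroadcastSubCommon` isolates the traversal from the scalar kernel: the op-specific math arrives as `binary_func`. A hypothetical sketch of how another binary op could reuse the same shape; the `BroadcastBinaryOpCommon` stand-in shows only the element-wise case, not the real stride walk, and `BroadcastSquaredDifference` is an invented example, not part of this change:

```
#include <cstddef>

// Stand-in for the shared traversal; the real BroadcastSubCommon walks
// compressed strides instead of assuming element-wise shapes.
template <typename T, typename F>
void BroadcastBinaryOpCommon(const T* in1, const T* in2, T* out, size_t size,
                             F binary_func) {
  for (size_t i = 0; i < size; ++i) {
    out[i] = binary_func(in1[i], in2[i]);
  }
}

// Hypothetical reuse: squared difference expressed as just another
// binary_func plugged into the common traversal.
template <typename T>
void BroadcastSquaredDifference(const T* in1, const T* in2, T* out,
                                size_t size) {
  BroadcastBinaryOpCommon(in1, in2, out, size, [](T a, T b) {
    const T diff = a - b;
    return diff * diff;
  });
}
```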
- auto sub_func = [&](int indexes[N]) { - output_data[SubscriptToIndex(output_desc, indexes)] = - ActivationFunctionWithMinMax( - input1_data[SubscriptToIndex(desc1, indexes)] - - input2_data[SubscriptToIndex(desc2, indexes)], - params.quantized_activation_min, params.quantized_activation_max); - }; - NDOpsHelper(output_desc, sub_func); -} - -template -void BroadcastSubSlow(const ArithmeticParams& params, - const RuntimeShape& input1_shape, - const int64_t* input1_data, - const RuntimeShape& input2_shape, - const int64_t* input2_data, - const RuntimeShape& output_shape, int64_t* output_data) { - ruy::profiler::ScopeLabel label("BroadcastSubSlow/int64_t"); - TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); - NdArrayDesc desc1; - NdArrayDesc desc2; - NdArrayDesc output_desc; - NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, - &desc2); - CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); // In Tensorflow, the dimensions are canonically named (batch_number, row, // col, channel), with extents (batches, height, width, depth), with the @@ -174,54 +227,48 @@ void BroadcastSubSlow(const ArithmeticParams& params, // We name our variables by their Tensorflow convention, but generate C code // nesting loops such that the innermost loop has the smallest stride for the // best cache behavior. - auto sub_func = [&](int indexes[N]) { - output_data[SubscriptToIndex(output_desc, indexes)] = - ActivationFunctionWithMinMax( - input1_data[SubscriptToIndex(desc1, indexes)] - - input2_data[SubscriptToIndex(desc2, indexes)], - params.int64_activation_min, params.int64_activation_max); - }; - NDOpsHelper(output_desc, sub_func); + + size_t compressed_input1_stride[kMaxBroadcastDim]; + size_t compressed_input2_stride[kMaxBroadcastDim]; + size_t compressed_output_shape[kMaxBroadcastDim]; + bool broadcastable_shape = ReduceDimensionsForBroadcast( + input1_shape, input2_shape, compressed_input1_stride, + compressed_input2_stride, compressed_output_shape); + // Skip broadcasting for degenerate shapes. + if (!broadcastable_shape) { + return; + } + + size_t input1_offset = 0; + size_t input2_offset = 0; + size_t output_offset = 0; + BroadcastSubRecursiveDimensions( + kMaxBroadcastDim - 1, params, input1_data, input2_data, output_data, + &input1_offset, &input2_offset, &output_offset, compressed_input1_stride, + compressed_input2_stride, compressed_output_shape, binary_func); } -template +// TODO(b/151345304): We can implement BroadcastSub on buffers of arbitrary +// dimensionality if the runtime code does a single loop over one dimension +// that handles broadcasting as the base case. The code generator would then +// generate max(D1, D2) nested for loops. 
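`ReduceDimensionsForBroadcast` collapses adjacent dimensions that share the same broadcast pattern, so a fully element-wise pair of shapes degenerates to a single flat loop. A hand-worked illustration of the effect (hypothetical shapes; the helper's exact output format is summarized, not reproduced), before the `BroadcastSubSlow` wrapper that follows:

```
#include <cassert>

int main() {
  // [2, 3, 4] - [2, 3, 4]: no broadcasting anywhere, so all dimensions can
  // merge and the op becomes one flat loop over 24 elements.
  assert(2 * 3 * 4 == 24);

  // [2, 1, 4] - [2, 3, 4]: input1 broadcasts over the middle dimension, so
  // the broadcast boundary prevents merging there. With row-major strides
  // and a stride of 0 on the broadcast dimension, input1's strides are
  // {4, 0, 1} against the [2, 3, 4] output.
  const int in1_strides[3] = {4, 0, 1};
  // Output element (b=1, y=2, x=3) then reads input1 element
  // 1*4 + 2*0 + 3*1 = 7: the y index is ignored, i.e. broadcast.
  assert(1 * in1_strides[0] + 2 * in1_strides[1] + 3 * in1_strides[2] == 7);
  return 0;
}
```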
+template void BroadcastSubSlow(const ArithmeticParams& params, const RuntimeShape& input1_shape, const T* input1_data, const RuntimeShape& input2_shape, const T* input2_data, const RuntimeShape& output_shape, T* output_data) { - ruy::profiler::ScopeLabel label("BroadcastSubSlow/templated"); - TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); - NdArrayDesc desc1; - NdArrayDesc desc2; - NdArrayDesc output_desc; - NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, - &desc2); - CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); - - // In Tensorflow, the dimensions are canonically named (batch_number, row, - // col, channel), with extents (batches, height, width, depth), with the - // trailing dimension changing most rapidly (channels has the smallest stride, - // typically 1 element). - // - // In generated C code, we store arrays with the dimensions reversed. The - // first dimension has smallest stride. - // - // We name our variables by their Tensorflow convention, but generate C code - // nesting loops such that the innermost loop has the smallest stride for the - // best cache behavior. - auto sub_func = [&](int indexes[N]) { - output_data[SubscriptToIndex(output_desc, indexes)] = - ActivationFunctionWithMinMax( - input1_data[SubscriptToIndex(desc1, indexes)] - - input2_data[SubscriptToIndex(desc2, indexes)], - params.quantized_activation_min, params.quantized_activation_max); - }; - NDOpsHelper(output_desc, sub_func); + ruy::profiler::ScopeLabel label("BroadcastSubSlow/T"); + BroadcastSubCommon( + params, input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, + [](T input1_val, T input2_val, const ArithmeticParams& params) { + T activation_min, activation_max; + GetActivationParams(params, &activation_min, &activation_max); + return ActivationFunctionWithMinMax(input1_val - input2_val, + activation_min, activation_max); + }); } -template inline void BroadcastSub16POTSlow(const ArithmeticParams& params, const RuntimeShape& input1_shape, const int16_t* input1_data, @@ -230,42 +277,24 @@ inline void BroadcastSub16POTSlow(const ArithmeticParams& params, const RuntimeShape& output_shape, int16_t* output_data) { ruy::profiler::ScopeLabel label("BroadcastSub16POTSlow/int16_t"); - NdArrayDesc desc1; - NdArrayDesc desc2; - NdArrayDesc output_desc; - NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, - &desc2); - CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); - - // In Tensorflow, the dimensions are canonically named (batch_number, row, - // col, channel), with extents (batches, height, width, depth), with the - // trailing dimension changing most rapidly (channels has the smallest stride, - // typically 1 element). - // - // In generated C code, we store arrays with the dimensions reversed. The - // first dimension has smallest stride. - // - // We name our variables by their Tensorflow convention, but generate C code - // nesting loops such that the innermost loop has the smallest stride for the - // best cache behavior. 
- auto sub_func = [&](int indexes[N]) { - const int32_t input1_val = input1_data[SubscriptToIndex(desc1, indexes)]; - const int32_t input2_val = input2_data[SubscriptToIndex(desc2, indexes)]; - const int32_t scaled_input1_val = - gemmlowp::RoundingDivideByPOT(input1_val, -params.input1_shift); - const int32_t scaled_input2_val = - gemmlowp::RoundingDivideByPOT(input2_val, -params.input2_shift); - const int32_t raw_output = scaled_input1_val - scaled_input2_val; - const int32_t clamped_output = - std::min(params.quantized_activation_max, - std::max(params.quantized_activation_min, raw_output)); - output_data[SubscriptToIndex(output_desc, indexes)] = - static_cast(clamped_output); - }; - NDOpsHelper(output_desc, sub_func); + BroadcastSubCommon( + params, input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, + [](int16_t input1_val, int16_t input2_val, + const ArithmeticParams& params) { + const int32_t scaled_input1_val = + gemmlowp::RoundingDivideByPOT(input1_val, -params.input1_shift); + const int32_t scaled_input2_val = + gemmlowp::RoundingDivideByPOT(input2_val, -params.input2_shift); + const int32_t raw_output = scaled_input1_val - scaled_input2_val; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + return static_cast(clamped_output); + }); } -template +template void BroadcastQuantSubSlow(const ArithmeticParams& params, const RuntimeShape& input1_shape, const T* input1_data, @@ -273,52 +302,32 @@ void BroadcastQuantSubSlow(const ArithmeticParams& params, const T* input2_data, const RuntimeShape& output_shape, T* output_data) { ruy::profiler::ScopeLabel label("BroadcastQuantSubSlow/T"); - TFLITE_DCHECK_LE(input1_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(input2_shape.DimensionsCount(), N); - TFLITE_DCHECK_LE(output_shape.DimensionsCount(), N); - NdArrayDesc desc1; - NdArrayDesc desc2; - NdArrayDesc output_desc; - NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, - &desc2); - CopyDimsToDesc(RuntimeShape::ExtendedShape(N, output_shape), &output_desc); - - // In Tensorflow, the dimensions are canonically named (batch_number, row, - // col, channel), with extents (batches, height, width, depth), with the - // trailing dimension changing most rapidly (channels has the smallest stride, - // typically 1 element). - // - // In generated C code, we store arrays with the dimensions reversed. The - // first dimension has smallest stride. - // - // We name our variables by their Tensorflow convention, but generate C code - // nesting loops such that the innermost loop has the smallest stride for the - // best cache behavior. 
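The 16-bit power-of-two path above rescales each input with `gemmlowp::RoundingDivideByPOT` before subtracting. A simplified scalar equivalent (for positive exponents only; the real helper also handles exponent 0 and saturating SIMD types) that rounds half away from zero:

```
#include <cassert>
#include <cstdint>

// Simplified sketch: divide by 2^exponent, rounding half away from zero.
int32_t RoundingDivideByPOTSketch(int32_t x, int exponent) {
  const int32_t half = int32_t{1} << (exponent - 1);
  return x >= 0 ? (x + half) >> exponent : (x + half - 1) >> exponent;
}

int main() {
  assert(RoundingDivideByPOTSketch(5, 1) == 3);    // 2.5 rounds away to 3
  assert(RoundingDivideByPOTSketch(-5, 1) == -3);  // -2.5 rounds away to -3
  assert(RoundingDivideByPOTSketch(4, 2) == 1);    // exact
  return 0;
}
```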
- auto sub_func = [&](int indexes[N]) { - const int32_t input1_val = - params.input1_offset + input1_data[SubscriptToIndex(desc1, indexes)]; - const int32_t input2_val = - params.input2_offset + input2_data[SubscriptToIndex(desc2, indexes)]; - const int32_t shifted_input1_val = input1_val * (1 << params.left_shift); - const int32_t shifted_input2_val = input2_val * (1 << params.left_shift); - const int32_t scaled_input1_val = - MultiplyByQuantizedMultiplierSmallerThanOneExp( - shifted_input1_val, params.input1_multiplier, params.input1_shift); - const int32_t scaled_input2_val = - MultiplyByQuantizedMultiplierSmallerThanOneExp( - shifted_input2_val, params.input2_multiplier, params.input2_shift); - const int32_t raw_sub = scaled_input1_val - scaled_input2_val; - const int32_t raw_output = - MultiplyByQuantizedMultiplierSmallerThanOneExp( - raw_sub, params.output_multiplier, params.output_shift) + - params.output_offset; - const int32_t clamped_output = - std::min(params.quantized_activation_max, - std::max(params.quantized_activation_min, raw_output)); - output_data[SubscriptToIndex(output_desc, indexes)] = - static_cast(clamped_output); - }; - NDOpsHelper(output_desc, sub_func); + BroadcastSubCommon( + params, input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, + [](T input1_val, T input2_val, const ArithmeticParams& params) { + const int32_t shifted_input1_val = + (params.input1_offset + input1_val) * (1 << params.left_shift); + const int32_t shifted_input2_val = + (params.input2_offset + input2_val) * (1 << params.left_shift); + const int32_t scaled_input1_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input1_val, params.input1_multiplier, + params.input1_shift); + const int32_t scaled_input2_val = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + shifted_input2_val, params.input2_multiplier, + params.input2_shift); + const int32_t raw_sub = scaled_input1_val - scaled_input2_val; + const int32_t raw_output = + MultiplyByQuantizedMultiplierSmallerThanOneExp( + raw_sub, params.output_multiplier, params.output_shift) + + params.output_offset; + const int32_t clamped_output = + std::min(params.quantized_activation_max, + std::max(params.quantized_activation_min, raw_output)); + return static_cast(clamped_output); + }); } // Element-wise add that can often be used for inner loop of broadcast add as @@ -405,35 +414,12 @@ void Sub(const ArithmeticParams& params, const RuntimeShape& input1_shape, const T* input1_data, const RuntimeShape& input2_shape, const T* input2_data, const RuntimeShape& output_shape, T* output_data) { - NdArrayDesc<4> desc1; - NdArrayDesc<4> desc2; - NdArrayDescsForElementwiseBroadcast(input1_shape, input2_shape, &desc1, - &desc2); - const RuntimeShape extended_output_shape = - RuntimeShape::ExtendedShape(4, output_shape); - - // In Tensorflow, the dimensions are canonically named (batch_number, row, - // col, channel), with extents (batches, height, width, depth), with the - // trailing dimension changing most rapidly (channels has the smallest stride, - // typically 1 element). - // - // In generated C code, we store arrays with the dimensions reversed. The - // first dimension has smallest stride. - // - // We name our variables by their Tensorflow convention, but generate C code - // nesting loops such that the innermost loop has the smallest stride for the - // best cache behavior. 
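In real-number terms, the quantized lambda above computes `scale1*(q1 - zp1) - scale2*(q2 - zp2)` and requantizes the result; the `left_shift` and the two input multipliers exist only to perform that scaling in integer arithmetic. A float-reference sketch (hypothetical scales and zero points) of the value the fixed-point pipeline approximates:

```
#include <algorithm>
#include <cassert>
#include <cmath>
#include <cstdint>

// Float reference for one quantized subtraction. The fixed-point code above
// computes the same value using integer multipliers and shifts instead.
int8_t QuantizedSubReference(int8_t q1, float scale1, int32_t zp1,  //
                             int8_t q2, float scale2, int32_t zp2,  //
                             float out_scale, int32_t out_zp) {
  const float real1 = scale1 * (q1 - zp1);
  const float real2 = scale2 * (q2 - zp2);
  const float real_out = real1 - real2;
  const int32_t q_out =
      static_cast<int32_t>(std::round(real_out / out_scale)) + out_zp;
  return static_cast<int8_t>(std::min<int32_t>(127, std::max<int32_t>(-128, q_out)));
}

int main() {
  // Hypothetical parameters: both inputs and the output use scale 0.5.
  // real1 = 5.0, real2 = 2.0, so real_out = 3.0 -> quantized 6.
  assert(QuantizedSubReference(10, 0.5f, 0, 4, 0.5f, 0, 0.5f, 0) == 6);
  return 0;
}
```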
- for (int b = 0; b < extended_output_shape.Dims(0); ++b) { - for (int y = 0; y < extended_output_shape.Dims(1); ++y) { - for (int x = 0; x < extended_output_shape.Dims(2); ++x) { - for (int c = 0; c < extended_output_shape.Dims(3); ++c) { - output_data[Offset(extended_output_shape, b, y, x, c)] = - input1_data[SubscriptToIndex(desc1, b, y, x, c)] - - input2_data[SubscriptToIndex(desc2, b, y, x, c)]; - } - } - } - } + BroadcastSubCommon( + params, input1_shape, input1_data, input2_shape, input2_data, + output_shape, output_data, + [](T input1_val, T input2_val, const ArithmeticParams& params) { + return input1_val - input2_val; + }); } inline void SetActivationMinMax(const ArithmeticParams& params, diff --git a/tensorflow/lite/kernels/internal/reference/transpose_conv.h b/tensorflow/lite/kernels/internal/reference/transpose_conv.h index 8a51e0fa5e9..744ed0f826b 100644 --- a/tensorflow/lite/kernels/internal/reference/transpose_conv.h +++ b/tensorflow/lite/kernels/internal/reference/transpose_conv.h @@ -219,6 +219,103 @@ inline void TransposeConv( } } +inline void HybridTransposeConv( + const ConvParams& params, float* scaling_factors_ptr, + const RuntimeShape& input_shape, const int8_t* input_data, + const RuntimeShape& filter_shape, const int8_t* filter_data, + const RuntimeShape& bias_shape, const float* bias_data, + const RuntimeShape& output_shape, float* output_data, + const float* per_channel_scale, int32_t* input_offset) { + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = params.padding_values.width; + const int pad_height = params.padding_values.height; + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const float output_activation_min = params.float_activation_min; + const float output_activation_max = params.float_activation_max; + if (bias_data) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + // Although transpose convolution simplifies to convolution with transposed + // weights for strides of 1, non-unitary striding complicates matters. To + // keep this reference implementation as clear as possible, we use a + // "scatter" access pattern, where we loop through all the input elements, + // computing their influence on the output, rather than looping through the + // output elements in the typical "gather" access pattern of a conv. We + // therefore must initialize the output array to zero. + const int num_elements = output_shape.FlatSize(); + for (int i = 0; i < num_elements; i++) { + output_data[i] = 0.0f; + } + + // Loop through input elements one at a time. 
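Before the loop nest, a small standalone illustration (hypothetical stride and pad values) of the scatter indexing it performs: each input column influences a filter-width window of output columns, the reverse of a conv's gather:

```
#include <cassert>

int main() {
  const int stride_width = 2, pad_width = 1, filter_width = 3;
  const int in_x = 3;
  const int out_x_origin = in_x * stride_width - pad_width;  // 3*2 - 1 = 5
  // This input column scatters contributions into out_x = 5, 6, 7 (each
  // clipped against the output bounds, as the kernel does below).
  assert(out_x_origin == 5);
  assert(out_x_origin + filter_width - 1 == 7);
  return 0;
}
```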
+ for (int batch = 0; batch < batches; ++batch) { + const float scaling_factor = scaling_factors_ptr[batch]; + for (int in_y = 0; in_y < input_height; ++in_y) { + for (int in_x = 0; in_x < input_width; ++in_x) { + for (int in_channel = 0; in_channel < input_depth; ++in_channel) { + // Loop through the output elements it will influence + const int out_x_origin = (in_x * stride_width) - pad_width; + const int out_y_origin = (in_y * stride_height) - pad_height; + for (int filter_y = 0; filter_y < filter_height; ++filter_y) { + for (int filter_x = 0; filter_x < filter_width; ++filter_x) { + for (int out_channel = 0; out_channel < output_depth; + ++out_channel) { + // Compute output element location + const int out_x = out_x_origin + filter_x; + const int out_y = out_y_origin + filter_y; + // We cannot accumulate out of bounds + if ((out_x >= 0) && (out_x < output_width) && (out_y >= 0) && + (out_y < output_height)) { + int32_t input_value = input_data[Offset( + input_shape, batch, in_y, in_x, in_channel)]; + int32_t filter_value = + filter_data[Offset(filter_shape, out_channel, filter_y, + filter_x, in_channel)]; + int32_t acc = + (input_value - input_offset[batch]) * filter_value; + output_data[Offset(output_shape, batch, out_y, out_x, + out_channel)] += + acc * per_channel_scale[out_channel] * scaling_factor; + } + } + } + } + } + } + } + } + + for (int batch = 0; batch < batches; ++batch) { + for (int out_y = 0; out_y < output_height; ++out_y) { + for (int out_x = 0; out_x < output_width; ++out_x) { + for (int out_channel = 0; out_channel < output_depth; ++out_channel) { + float acc = output_data[Offset(output_shape, batch, out_y, out_x, + out_channel)]; + if (bias_data) acc += bias_data[out_channel]; + + output_data[Offset(output_shape, batch, out_y, out_x, out_channel)] = + ActivationFunctionWithMinMax(acc, output_activation_min, + output_activation_max); + } + } + } + } +} + } // namespace reference_ops } // namespace tflite diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.cc b/tensorflow/lite/kernels/internal/runtime_shape.cc similarity index 68% rename from tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.cc rename to tensorflow/lite/kernels/internal/runtime_shape.cc index e2cf661c014..dd12278605e 100644 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.cc +++ b/tensorflow/lite/kernels/internal/runtime_shape.cc @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -13,11 +13,11 @@ See the License for the specific language governing permissions and limitations under the License. 
==============================================================================*/ -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h" +#include "tensorflow/lite/kernels/internal/runtime_shape.h" -const char* kCategoryLabels[kCategoryCount] = { - "silence", - "unknown", - "yes", - "no", -}; +namespace tflite { + +// Defining a constexpr static class member is necessary in C++11 +constexpr int tflite::RuntimeShape::kMaxSmallSize; + +} // namespace tflite diff --git a/tensorflow/lite/kernels/internal/runtime_shape.h b/tensorflow/lite/kernels/internal/runtime_shape.h index 0e4df2c36dc..bc786bdb081 100644 --- a/tensorflow/lite/kernels/internal/runtime_shape.h +++ b/tensorflow/lite/kernels/internal/runtime_shape.h @@ -15,6 +15,8 @@ limitations under the License. #ifndef TENSORFLOW_LITE_KERNELS_INTERNAL_RUNTIME_SHAPE_H_ #define TENSORFLOW_LITE_KERNELS_INTERNAL_RUNTIME_SHAPE_H_ +#include + #include "tensorflow/lite/kernels/internal/compatibility.h" namespace tflite { diff --git a/tensorflow/lite/kernels/internal/types.h b/tensorflow/lite/kernels/internal/types.h index b775ca815d9..f2cc1603c65 100644 --- a/tensorflow/lite/kernels/internal/types.h +++ b/tensorflow/lite/kernels/internal/types.h @@ -157,7 +157,8 @@ struct PerChannelQuantizationParams { }; // Gets next index to iterate through a multidimensional array. -inline bool NextIndex(const int num_dims, const int* dims, int* current) { +template +inline bool NextIndex(const int num_dims, const int* dims, IndexType* current) { if (num_dims == 0) { return false; } @@ -165,7 +166,7 @@ inline bool NextIndex(const int num_dims, const int* dims, int* current) { TFLITE_DCHECK(current != nullptr); int carry = 1; for (int idx = num_dims - 1; idx >= 0; --idx) { - int current_val = current[idx] + carry; + IndexType current_val = current[idx] + carry; TFLITE_DCHECK_GE(dims[idx], current_val); if (dims[idx] == current_val) { current[idx] = 0; diff --git a/tensorflow/lite/kernels/kernel_util.cc b/tensorflow/lite/kernels/kernel_util.cc index 58fd99f8fb1..39f7bc7da53 100644 --- a/tensorflow/lite/kernels/kernel_util.cc +++ b/tensorflow/lite/kernels/kernel_util.cc @@ -572,12 +572,11 @@ int TfLiteTypeGetSize(TfLiteType type) { bool IsMobilePlatform() { #if defined(ANDROID) || defined(__ANDROID__) return true; -#elif defined(__APPLE__) -#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE +#elif defined(__APPLE__) && (TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE) return true; -#endif -#endif +#else return false; +#endif } bool HasUnspecifiedDimension(const TfLiteTensor* tensor) { diff --git a/tensorflow/lite/kernels/kernel_util.h b/tensorflow/lite/kernels/kernel_util.h index 24061ab249b..e318118fb64 100644 --- a/tensorflow/lite/kernels/kernel_util.h +++ b/tensorflow/lite/kernels/kernel_util.h @@ -24,6 +24,9 @@ limitations under the License. #include "tensorflow/lite/core/c/builtin_op_data.h" #include "tensorflow/lite/core/c/common.h" +#ifndef NDEBUG +#include "tensorflow/lite/kernels/op_macros.h" +#endif namespace tflite { @@ -165,24 +168,31 @@ inline int NumIntermediates(const TfLiteNode* node) { } #endif // TF_LITE_STATIC_MEMORY -inline int64_t NumElements(const TfLiteIntArray* dims) { +inline int64_t NumElements(const int* dims, int num_dims) { int64_t count = 1; - for (int i = 0; i < dims->size; ++i) { - count *= dims->data[i]; + for (int i = 0; i < num_dims; ++i) { +#ifndef NDEBUG + if (count <= 0) { + break; + } + // Check that number of elements can fit in 32 bit int. 
Most of tflite
+    // assumes the result of `NumElements` is < MAX_INT and static or implicit
+    // casts to `int32_t` without any checks. It is more meaningful to check
+    // that the result fits into 32 bits than to check for standard overflow
+    // on the 64-bit type.
+    TF_LITE_ASSERT(dims[i] < std::numeric_limits<int32_t>::max() / count);
+#endif
+    count *= dims[i];
   }
   return count;
 }
 
-inline int64_t NumElements(const TfLiteTensor* t) {
-  return NumElements(t->dims);
+inline int64_t NumElements(const TfLiteIntArray* dims) {
+  return NumElements(dims->data, dims->size);
 }
 
-inline int64_t NumElements(const int* dims, int num_dims) {
-  int64_t count = 1;
-  for (int i = 0; i < num_dims; ++i) {
-    count *= dims[i];
-  }
-  return count;
+inline int64_t NumElements(const TfLiteTensor* t) {
+  return NumElements(t->dims);
 }
 
 // Determines whether tensor is constant.
diff --git a/tensorflow/lite/kernels/op_macros.h b/tensorflow/lite/kernels/op_macros.h
index 4255d253511..9c4e2fd0fd5 100644
--- a/tensorflow/lite/kernels/op_macros.h
+++ b/tensorflow/lite/kernels/op_macros.h
@@ -1,4 +1,4 @@
-/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
@@ -15,14 +15,14 @@ limitations under the License.
 #ifndef TENSORFLOW_LITE_KERNELS_OP_MACROS_H_
 #define TENSORFLOW_LITE_KERNELS_OP_MACROS_H_
 
-#include "tensorflow/lite/micro/debug_log.h"
+#include "tensorflow/lite/micro/micro_log.h"
 
 #if !defined(TF_LITE_MCU_DEBUG_LOG)
 #include <cstdlib>
 #define TFLITE_ABORT abort()
 #else
 inline void AbortImpl() {
-  DebugLog("HALTED\n");
+  MicroPrintf("HALTED");
   while (1) {
   }
 }
@@ -35,4 +35,15 @@ inline void AbortImpl() {
 #define TFLITE_ASSERT_FALSE TFLITE_ABORT
 #endif
 
+#define TF_LITE_FATAL(msg)    \
+  do {                        \
+    MicroPrintf("%s", (msg)); \
+    TFLITE_ABORT;             \
+  } while (0)
+
+#define TF_LITE_ASSERT(x)        \
+  do {                           \
+    if (!(x)) TF_LITE_FATAL(#x); \
+  } while (0)
+
 #endif  // TENSORFLOW_LITE_KERNELS_OP_MACROS_H_
diff --git a/tensorflow/lite/micro/BUILD b/tensorflow/lite/micro/BUILD
index 72c23f34521..60ace250631 100644
--- a/tensorflow/lite/micro/BUILD
+++ b/tensorflow/lite/micro/BUILD
@@ -39,8 +39,8 @@ cc_library(
     deps = [
         ":memory_helpers",
         ":micro_allocator",
-        ":micro_context",
-        ":micro_graph",
+        ":micro_interpreter_context",
+        ":micro_interpreter_graph",
         ":micro_profiler_interface",
         ":op_resolvers",
         "//tensorflow/lite:type_to_tflitetype",
@@ -62,10 +62,28 @@ cc_library(
         "micro_context.h",
     ],
     copts = micro_copts(),
+    deps = [
+        ":micro_common",
+        ":micro_graph",
+        ":micro_log",
+        "//tensorflow/lite/c:common",
+    ],
+)
+
+cc_library(
+    name = "micro_interpreter_context",
+    srcs = [
+        "micro_interpreter_context.cc",
+    ],
+    hdrs = [
+        "micro_interpreter_context.h",
+    ],
+    copts = micro_copts(),
     deps = [
         ":memory_helpers",
         ":micro_allocator",
-        ":micro_graph",
+        ":micro_context",
+        ":micro_interpreter_graph",
         ":micro_log",
         ":micro_profiler_interface",
         "//tensorflow/lite/c:common",
@@ -94,22 +112,36 @@ cc_library(
     copts = micro_copts(),
     deps = [
         ":memory_helpers",
-        ":micro_allocator",
+        ":micro_arena_constants",
         ":micro_context",
         ":micro_log",
         ":mock_micro_graph",
         "//tensorflow/lite/c:common",
+        "//tensorflow/lite/micro/arena_allocator:simple_memory_allocator",
     ],
 )
 
 cc_library(
     name = "micro_graph",
-    srcs = ["micro_graph.cc"],
     hdrs = ["micro_graph.h"],
+    copts = micro_copts(),
+    deps = [
+        ":micro_common",
+        ":micro_resource_variable",
+
"//tensorflow/lite/kernels/internal:compatibility", + ], +) + +cc_library( + name = "micro_interpreter_graph", + srcs = ["micro_interpreter_graph.cc"], + hdrs = ["micro_interpreter_graph.h"], + copts = micro_copts(), deps = [ ":memory_helpers", ":micro_allocator", ":micro_common", + ":micro_graph", ":micro_log", ":micro_profiler", ":micro_resource_variable", @@ -124,6 +156,7 @@ cc_library( name = "mock_micro_graph", srcs = ["mock_micro_graph.cc"], hdrs = ["mock_micro_graph.h"], + copts = micro_copts(), deps = [ ":micro_allocator", ":micro_graph", @@ -158,6 +191,7 @@ cc_library( "//tensorflow/lite/micro/arena_allocator:persistent_arena_buffer_allocator", "//tensorflow/lite/micro/arena_allocator:simple_memory_allocator", "//tensorflow/lite/micro/memory_planner:greedy_memory_planner", + "//tensorflow/lite/micro/memory_planner:linear_memory_planner", "//tensorflow/lite/micro/memory_planner:micro_memory_planner", "//tensorflow/lite/micro/tflite_bridge:flatbuffer_conversions_bridge", "//tensorflow/lite/schema:schema_fbs", @@ -179,6 +213,7 @@ cc_library( name = "flatbuffer_utils", srcs = ["flatbuffer_utils.cc"], hdrs = ["flatbuffer_utils.h"], + copts = micro_copts(), deps = [ "//tensorflow/lite/c:common", "//tensorflow/lite/schema:schema_fbs", @@ -190,6 +225,7 @@ cc_library( name = "memory_helpers", srcs = ["memory_helpers.cc"], hdrs = ["memory_helpers.h"], + copts = micro_copts(), deps = [ "//tensorflow/lite/c:common", "//tensorflow/lite/kernels/internal:reference", @@ -269,7 +305,6 @@ cc_library( copts = micro_copts(), deps = [ ":debug_log", - ":micro_string", ], ) @@ -291,17 +326,6 @@ cc_library( ], ) -cc_library( - name = "micro_string", - srcs = [ - "micro_string.cc", - ], - hdrs = [ - "micro_string.h", - ], - copts = micro_copts(), -) - cc_library( name = "micro_time", srcs = [ @@ -396,6 +420,7 @@ cc_test( deps = [ ":micro_log", ":system_setup", + "//tensorflow/lite/micro/testing:micro_test", ], ) @@ -412,13 +437,14 @@ cc_test( ) cc_test( - name = "micro_context_test", + name = "micro_interpreter_context_test", srcs = [ - "micro_context_test.cc", + "micro_interpreter_context_test.cc", ], deps = [ ":micro_allocator", - ":micro_context", + ":micro_interpreter_context", + ":micro_interpreter_graph", ":test_helpers", "//tensorflow/lite/micro/testing:micro_test", ], @@ -432,6 +458,7 @@ cc_test( deps = [ ":fake_micro_context", ":micro_allocator", + ":mock_micro_graph", ":test_helpers", "//tensorflow/lite/micro/testing:micro_test", ], @@ -549,17 +576,6 @@ cc_test( ], ) -cc_test( - name = "micro_string_test", - srcs = [ - "micro_string_test.cc", - ], - deps = [ - ":micro_string", - "//tensorflow/lite/micro/testing:micro_test", - ], -) - cc_test( name = "micro_time_test", srcs = [ diff --git a/tensorflow/lite/micro/arc_emsdp/debug_log.cc b/tensorflow/lite/micro/arc_emsdp/debug_log.cc index 1b4d641e5e9..a61e3c21d4d 100644 --- a/tensorflow/lite/micro/arc_emsdp/debug_log.cc +++ b/tensorflow/lite/micro/arc_emsdp/debug_log.cc @@ -1,4 +1,4 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -19,6 +19,12 @@ limitations under the License. #include #include +#ifndef TF_LITE_STRIP_ERROR_STRINGS +#include "eyalroz_printf/src/printf/printf.h" +#endif + +namespace { + // Print to debug console by default. 
One can define next to extend destinations
 // set: EMSDP_LOG_TO_MEMORY
 // : fill .debug_log memory region (data section) with passed chars.
@@ -89,9 +95,7 @@ void LogToMem(const char* s) {
   debug_log_mem[cursor] = '^';
 }
 
-extern "C" void DebugLog(const char* s) {
-#ifndef TF_LITE_STRIP_ERROR_STRINGS
-
+void LogDebugString(const char* s) {
 #if defined EMSDP_LOG_TO_UART
   DbgUartSendStr(s);
 #endif
@@ -106,6 +110,24 @@ extern "C" void DebugLog(const char* s) {
 #warning "EMSDP_LOG_TO_HOST is defined. Ensure hostlib is linked."
   fprintf(stderr, "%s", s);
 #endif
+}
+
+}  // namespace
 
-#endif  // TF_LITE_STRIP_ERROR_STRINGS
+extern "C" void DebugLog(const char* format, va_list args) {
+#ifndef TF_LITE_STRIP_ERROR_STRINGS
+  constexpr int kMaxLogLen = 256;
+  char log_buffer[kMaxLogLen];
+
+  vsnprintf_(log_buffer, kMaxLogLen, format, args);
+  LogDebugString(log_buffer);
+#endif
 }
+
+#ifndef TF_LITE_STRIP_ERROR_STRINGS
+// Only called from MicroVsnprintf (micro_log.h)
+extern "C" int DebugVsnprintf(char* buffer, size_t buf_size, const char* format,
+                              va_list vlist) {
+  return vsnprintf_(buffer, buf_size, format, vlist);
+}
+#endif
diff --git a/tensorflow/lite/micro/benchmarks/BUILD b/tensorflow/lite/micro/benchmarks/BUILD
index 808f6acbd18..d0f67b6d60e 100644
--- a/tensorflow/lite/micro/benchmarks/BUILD
+++ b/tensorflow/lite/micro/benchmarks/BUILD
@@ -65,9 +65,7 @@ cc_library(
     hdrs = [
         "//tensorflow/lite/micro/models:generated_keyword_scrambled_8bit_model_hdr",
     ],
-    visibility = [
-        ":micro_top_level",
-    ],
+    visibility = ["//visibility:private"],
 )
 
 cc_binary(
diff --git a/tensorflow/lite/micro/benchmarks/README.md b/tensorflow/lite/micro/benchmarks/README.md
index 1031a586de8..1ac5d251d61 100644
--- a/tensorflow/lite/micro/benchmarks/README.md
+++ b/tensorflow/lite/micro/benchmarks/README.md
@@ -70,29 +70,20 @@ Refer to flashing instructions in the [Person Detection Example](https://github.
 For more info about the Corstone-300 software see:
 [tensorflow/lite/micro/cortex_m_corstone_300/README.md](../cortex_m_corstone_300/README.md).
 
-Disclaimer: Executing the benchmark test on the Corstone-300 software will
-provide a general metric of instructions executed. The estimates are not cycle
-accurate, however it aligns to instruction per cycle, and is a consistent
-environment. This means it can detect if code changes changed performance.
+Disclaimer: The FVP cannot be used to measure CPU performance.
+The results are not reliable, not even for relative measurements.
+The FVP may, however, be used for performance measurements when running on the NPU, in which case only the NPU PMU numbers can be used. The NPU model is cycle accurate to within approximately +-10%.
 
-The person detection benchmark can also run with Ethos-U enabled, as the
-downloaded model will be optimized for Ethos-U. For more info see:
+As an example, the downloaded person detection model will be optimized for Ethos-U. For more info see:
 [tensorflow/lite/micro/kernels/ethos_u/README.md](../kernels/ethos_u/README.md).
+Since it only makes sense to measure performance on the NPU, only the person detection benchmark should be run, and only with Ethos-U enabled.
+See also the network tester example, where the person detection model is used in the same way when Ethos-U is enabled:
+[tensorflow/lite/micro/examples/network_tester/README.md](../examples/network_tester/README.md).
-To run the keyword benchmark on FVP:
-
-```
-make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 run_keyword_benchmark
-```
-
-To run the person detection benchmark on FVP:
-
-```
-make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 run_person_detection_benchmark
-```
+The person detection model is not an optimal model for Ethos-U since it is quite small. Also note that only the NPU PMU cycles are logged even though the CPU is setting up the Ethos-U driver in each iteration.
 
 To run the person detection benchmark on FVP with Ethos-U:
 
 ```
-make -j -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 run_person_detection_benchmark
+make -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 run_person_detection_benchmark
 ```
diff --git a/tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc b/tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc
index e21789bbc50..29d30eed766 100644
--- a/tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc
+++ b/tensorflow/lite/micro/benchmarks/person_detection_benchmark.cc
@@ -1,4 +1,4 @@
-/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2024 The TensorFlow Authors. All Rights Reserved.
 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
@@ -37,7 +37,11 @@ limitations under the License.
 
 namespace tflite {
 
+#ifdef ETHOS_U
+using PersonDetectionOpResolver = MicroMutableOpResolver<1>;
+#else
 using PersonDetectionOpResolver = MicroMutableOpResolver<6>;
+#endif
 using PersonDetectionBenchmarkRunner = MicroBenchmarkRunner<int8_t>;
 
 // Create an area of memory to use for input, output, and intermediate arrays.
@@ -57,12 +61,16 @@ PersonDetectionBenchmarkRunner* CreateBenchmarkRunner(MicroProfiler* profiler) {
   // PersonDetectionBenchmarkRunner object.
   PersonDetectionOpResolver* op_resolver =
       new (op_resolver_buffer) PersonDetectionOpResolver();
+#ifdef ETHOS_U
+  op_resolver->AddEthosU();
+#else
   op_resolver->AddFullyConnected(tflite::Register_FULLY_CONNECTED_INT8());
   op_resolver->AddConv2D(tflite::Register_CONV_2D_INT8REF());
   op_resolver->AddDepthwiseConv2D();
   op_resolver->AddSoftmax();
   op_resolver->AddAveragePool2D(tflite::Register_AVERAGE_POOL_2D_INT8());
   op_resolver->AddReshape();
+#endif
   return new (benchmark_runner_buffer)
       PersonDetectionBenchmarkRunner(g_person_detect_model_data, op_resolver,
                                      tensor_arena, kTensorArenaSize, profiler);
diff --git a/tensorflow/lite/micro/bluepill/debug_log.cc b/tensorflow/lite/micro/bluepill/debug_log.cc
index 3fd2d52c8d8..720410b9b3f 100644
--- a/tensorflow/lite/micro/bluepill/debug_log.cc
+++ b/tensorflow/lite/micro/bluepill/debug_log.cc
@@ -1,4 +1,4 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
@@ -15,9 +15,16 @@ limitations under the License.
#include "tensorflow/lite/micro/debug_log.h" +#ifndef TF_LITE_STRIP_ERROR_STRINGS +#include "eyalroz_printf/src/printf/printf.h" +#endif + +namespace { + +#ifndef TF_LITE_STRIP_ERROR_STRINGS // For Arm Cortex-M devices, calling SYS_WRITE0 will output the zero-terminated // string pointed to by R1 to any debug console that's attached to the system. -extern "C" void DebugLog(const char* s) { +void SysWriteDebugConsole(const char* s) { asm("mov r0, #0x04\n" // SYS_WRITE0 "mov r1, %[str]\n" "bkpt #0xAB\n" @@ -25,3 +32,24 @@ extern "C" void DebugLog(const char* s) { : [str] "r"(s) : "r0", "r1"); } +#endif // TF_LITE_STRIP_ERROR_STRINGS + +} // namespace + +extern "C" void DebugLog(const char* format, va_list args) { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + constexpr int kMaxLogLen = 256; + char log_buffer[kMaxLogLen]; + + vsnprintf_(log_buffer, kMaxLogLen, format, args); + SysWriteDebugConsole(log_buffer); +#endif // TF_LITE_STRIP_ERROR_STRINGS +} + +#ifndef TF_LITE_STRIP_ERROR_STRINGS +// Only called from MicroVsnprintf (micro_log.h) +extern "C" int DebugVsnprintf(char* buffer, size_t buf_size, const char* format, + va_list vlist) { + return vsnprintf_(buffer, buf_size, format, vlist); +} +#endif diff --git a/tensorflow/lite/micro/build_def.bzl b/tensorflow/lite/micro/build_def.bzl index b9ecf77a3cd..82768f14e52 100644 --- a/tensorflow/lite/micro/build_def.bzl +++ b/tensorflow/lite/micro/build_def.bzl @@ -1,6 +1,7 @@ def micro_copts(): return [ "-Wall", + "-Wno-unused-parameter", "-Wnon-virtual-dtor", "-DFLATBUFFERS_LOCALE_INDEPENDENT=0", ] diff --git a/tensorflow/lite/micro/chre/debug_log.cc b/tensorflow/lite/micro/chre/debug_log.cc index 23bb82eb7b6..99f7c673c29 100644 --- a/tensorflow/lite/micro/chre/debug_log.cc +++ b/tensorflow/lite/micro/chre/debug_log.cc @@ -1,4 +1,4 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -17,6 +17,24 @@ limitations under the License. #include -extern "C" void DebugLog(const char* s) { - chreLog(CHRE_LOG_DEBUG, "[TFL_MICRO] %s", s); +#ifndef TF_LITE_STRIP_ERROR_STRINGS +#include "eyalroz_printf/src/printf/printf.h" +#endif + +extern "C" void DebugLog(const char* format, va_list args) { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + constexpr int kMaxLogLen = 256; + char log_buffer[kMaxLogLen]; + + vsnprintf_(log_buffer, kMaxLogLen, format, args); + chreLog(CHRE_LOG_DEBUG, "[TFL_MICRO] %s", log_buffer); +#endif +} + +#ifndef TF_LITE_STRIP_ERROR_STRINGS +// Only called from MicroVsnprintf (micro_log.h) +extern "C" int DebugVsnprintf(char* buffer, size_t buf_size, const char* format, + va_list vlist) { + return vsnprintf_(buffer, buf_size, format, vlist); } +#endif diff --git a/tensorflow/lite/micro/cortex_m_corstone_300/README.md b/tensorflow/lite/micro/cortex_m_corstone_300/README.md index 94935ac6bbe..663f63647de 100644 --- a/tensorflow/lite/micro/cortex_m_corstone_300/README.md +++ b/tensorflow/lite/micro/cortex_m_corstone_300/README.md @@ -37,12 +37,12 @@ compatible. This means one could run code compiled for e.g. a Cortex-M7. 
Some examples: ``` -make -j -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test -make -j -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test -make -j -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test -make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test -make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_kernel_fully_connected_test -make -j -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m7+fp test_kernel_fully_connected_test -make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m3 test_kernel_fully_connected_test -make -j -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 BUILD_TYPE=release_with_logs TOOLCHAIN=armclang test_network_tester_test +make -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_network_tester_test +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 test_kernel_fully_connected_test +make -f tensorflow/lite/micro/tools/make/Makefile OPTIMIZED_KERNEL_DIR=cmsis_nn TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m7+fp test_kernel_fully_connected_test +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m3 test_kernel_fully_connected_test +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m55 BUILD_TYPE=release_with_logs TOOLCHAIN=armclang test_network_tester_test ``` diff --git a/tensorflow/lite/micro/cortex_m_corstone_300/system_setup.cc b/tensorflow/lite/micro/cortex_m_corstone_300/system_setup.cc index 95a11b2e527..64733401cca 100644 --- a/tensorflow/lite/micro/cortex_m_corstone_300/system_setup.cc +++ b/tensorflow/lite/micro/cortex_m_corstone_300/system_setup.cc @@ -1,4 +1,4 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -14,32 +14,89 @@ limitations under the License. ==============================================================================*/ #ifdef ETHOS_U +#include + +#include + #include "ethosu_driver.h" +#include "pmu_ethosu.h" #endif // This is set in micro/tools/make/targets/cortex_m_corstone_300_makefile.inc. -// It is needed for the calls to NVIC_SetVector()/NVIC_EnableIR() and for the -// DWT and PMU counters. 
+// It is needed for the calls to NVIC_SetVector()/NVIC_EnableIRQ(). #include CMSIS_DEVICE_ARM_CORTEX_M_XX_HEADER_FILE #include "tensorflow/lite/micro/micro_log.h" #include "tensorflow/lite/micro/micro_time.h" #include "tensorflow/lite/micro/system_setup.h" +#ifdef ETHOS_U + +bool npuPmuCycleCounterIsSet; +uint64_t npuPmuCycleCounter; + +extern "C" { +void ethosu_inference_begin(struct ethosu_driver* drv, void* userArg) { + // Enable PMU + ETHOSU_PMU_Enable(drv); + + // Enable cycle counter + ETHOSU_PMU_PMCCNTR_CFG_Set_Stop_Event(drv, ETHOSU_PMU_NPU_IDLE); + ETHOSU_PMU_PMCCNTR_CFG_Set_Start_Event(drv, ETHOSU_PMU_NPU_ACTIVE); + ETHOSU_PMU_CNTR_Enable(drv, ETHOSU_PMU_CCNT_Msk); + ETHOSU_PMU_CYCCNT_Reset(drv); + + // Reset all counters + ETHOSU_PMU_EVCNTR_ALL_Reset(drv); +} + +void ethosu_inference_end(struct ethosu_driver* drv, void* userArg) { + // Save cycle counter + npuPmuCycleCounter += ETHOSU_PMU_Get_CCNTR(drv); + npuPmuCycleCounterIsSet = true; + + // Disable PMU + ETHOSU_PMU_Disable(drv); +} +} +#endif + namespace tflite { namespace { +#ifdef ETHOS_U +constexpr uint32_t kClocksPerSecond = 200e6; +#else constexpr uint32_t kClocksPerSecond = 25e6; +#endif } // namespace uint32_t ticks_per_second() { return kClocksPerSecond; } uint32_t GetCurrentTimeTicks() { -#if (!defined(TF_LITE_STRIP_ERROR_STRINGS) && !defined(ARMCM0)) +#if (!defined(TF_LITE_STRIP_ERROR_STRINGS)) +#ifdef ETHOS_U + uint32_t ticks = static_cast<uint32_t>(npuPmuCycleCounter); + + // Note cycle counter will be reset here for next iteration + if (npuPmuCycleCounterIsSet) { + npuPmuCycleCounter = 0; + npuPmuCycleCounterIsSet = false; + } + + return ticks; +#else + +#if defined(ARMCM0) + return 0; +#else #ifdef ARMCM55 return ARM_PMU_Get_CCNTR(); #else return DWT->CYCCNT; +#endif +#endif + #endif #else return 0; @@ -88,14 +145,17 @@ void InitializeTarget() { #ifdef ETHOS_U constexpr int ethosu_base_address = 0x48102000; constexpr int ethosu_irq = 56; + constexpr int ethosu_irq_priority = 5; // Initialize Ethos-U NPU driver. if (ethosu_init(&ethosu0_driver, reinterpret_cast<void*>(ethosu_base_address), ethosu0_scratch, ETHOSU_FAST_MEMORY_SIZE, 1, 1)) { MicroPrintf("Failed to initialize Ethos-U driver"); + return; } NVIC_SetVector(static_cast<IRQn_Type>(ethosu_irq), (uint32_t)&ethosuIrqHandler0); + NVIC_SetPriority(static_cast<IRQn_Type>(ethosu_irq), ethosu_irq_priority); NVIC_EnableIRQ(static_cast<IRQn_Type>(ethosu_irq)); #endif } diff --git a/tensorflow/lite/micro/cortex_m_generic/debug_log.cc b/tensorflow/lite/micro/cortex_m_generic/debug_log.cc index bc79d439170..b7182a5fb10 100644 --- a/tensorflow/lite/micro/cortex_m_generic/debug_log.cc +++ b/tensorflow/lite/micro/cortex_m_generic/debug_log.cc @@ -1,4 +1,4 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -16,28 +16,52 @@ limitations under the License. // Implementation for the DebugLog() function that prints to the debug logger on // an generic Cortex-M device.
+#include "tensorflow/lite/micro/debug_log.h" + #ifdef __cplusplus extern "C" { #endif // __cplusplus -#include "tensorflow/lite/micro/debug_log.h" - #include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h" +#ifndef TF_LITE_STRIP_ERROR_STRINGS +#include +#endif + static DebugLogCallback debug_log_callback = nullptr; +namespace { + +void InvokeDebugLogCallback(const char* s) { + if (debug_log_callback != nullptr) { + debug_log_callback(s); + } +} + +} // namespace + void RegisterDebugLogCallback(void (*cb)(const char* s)) { debug_log_callback = cb; } -void DebugLog(const char* s) { +void DebugLog(const char* format, va_list args) { #ifndef TF_LITE_STRIP_ERROR_STRINGS - if (debug_log_callback != nullptr) { - debug_log_callback(s); - } + constexpr int kMaxLogLen = 256; + char log_buffer[kMaxLogLen]; + + vsnprintf(log_buffer, kMaxLogLen, format, args); + InvokeDebugLogCallback(log_buffer); #endif } +#ifndef TF_LITE_STRIP_ERROR_STRINGS +// Only called from MicroVsnprintf (micro_log.h) +int DebugVsnprintf(char* buffer, size_t buf_size, const char* format, + va_list vlist) { + return vsnprintf(buffer, buf_size, format, vlist); +} +#endif + #ifdef __cplusplus } // extern "C" #endif // __cplusplus diff --git a/tensorflow/lite/micro/debug_log.cc b/tensorflow/lite/micro/debug_log.cc index 46ca253a6d5..98231968b62 100644 --- a/tensorflow/lite/micro/debug_log.cc +++ b/tensorflow/lite/micro/debug_log.cc @@ -1,4 +1,4 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -18,21 +18,17 @@ limitations under the License. // the only function that's absolutely required to be available on a target // device, since it's used for communicating test results back to the host so // that we can verify the implementation is working correctly. -// It's designed to be as easy as possible to supply an implementation though. -// On platforms that have a POSIX stack or C library, it can be written as a -// single call to `fprintf(stderr, "%s", s)` to output a string to the error -// stream of the console, but if there's no OS or C library available, there's -// almost always an equivalent way to write out a string to some serial -// interface that can be used instead. For example on Arm M-series MCUs, calling -// the `bkpt #0xAB` assembler instruction will output the string in r1 to -// whatever debug serial connection is available. If you're running mbed, you -// can do the same by creating `Serial pc(USBTX, USBRX)` and then calling -// `pc.printf("%s", s)`. -// To add an equivalent function for your own platform, create your own -// implementation file, and place it in a subfolder with named after the OS -// you're targeting. For example, see the Cortex M bare metal version in -// tensorflow/lite/micro/bluepill/debug_log.cc or the mbed one on -// tensorflow/lite/micro/mbed/debug_log.cc. +// This function should support standard C/C++ stdio style formatting +// operations. It's designed to be as easy as possible to supply an +// implementation though. On platforms that have a POSIX stack or C library, it +// can be written as a single call to `vfprintf(stderr, format, args)` to output +// a string to the error stream of the console, but if there's no OS or C +// library available, there's almost always an equivalent way to write out a +// string to some serial interface that can be used instead. 
To add an +// equivalent function for your own platform, create your own implementation +// file, and place it in a subfolder named after the OS you're targeting. +// For example, see the Cortex M bare metal version in the +// tensorflow/lite/micro/bluepill/debug_log.cc file. #include "tensorflow/lite/micro/debug_log.h" @@ -40,11 +36,19 @@ limitations under the License. #include <cstdio> #endif -extern "C" void DebugLog(const char* s) { +extern "C" void DebugLog(const char* format, va_list args) { #ifndef TF_LITE_STRIP_ERROR_STRINGS // Reusing TF_LITE_STRIP_ERROR_STRINGS to disable DebugLog completely to get // maximum reduction in binary size. This is because we have DebugLog calls // via TF_LITE_CHECK that are not stubbed out by TF_LITE_REPORT_ERROR. - fprintf(stderr, "%s", s); + vfprintf(stderr, format, args); #endif } + +#ifndef TF_LITE_STRIP_ERROR_STRINGS +// Only called from MicroVsnprintf (micro_log.h) +extern "C" int DebugVsnprintf(char* buffer, size_t buf_size, const char* format, + va_list vlist) { + return vsnprintf(buffer, buf_size, format, vlist); +} +#endif diff --git a/tensorflow/lite/micro/debug_log.h b/tensorflow/lite/micro/debug_log.h index c2840d0f4b5..6e2e69ed67f 100644 --- a/tensorflow/lite/micro/debug_log.h +++ b/tensorflow/lite/micro/debug_log.h @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -15,14 +15,25 @@ limitations under the License. #ifndef TENSORFLOW_LITE_MICRO_DEBUG_LOG_H_ #define TENSORFLOW_LITE_MICRO_DEBUG_LOG_H_ +#ifdef __cplusplus +#include <cstdarg> +#include <cstddef> +#else +#include <stdarg.h> +#include <stddef.h> +#endif // __cplusplus + #ifdef __cplusplus extern "C" { #endif // __cplusplus -// This function should be implemented by each target platform, and provide a +// These functions should be implemented by each target platform, and provide a // way for strings to be output to some text stream. For more information, see -// tensorflow/lite/micro/debug_log.cc. -void DebugLog(const char* s); +// the tensorflow/lite/micro/debug_log.cc file. These functions should support +// standard C/C++ stdio style formatting operations. +void DebugLog(const char* format, va_list args); +int DebugVsnprintf(char* buffer, size_t buf_size, const char* format, + va_list vlist); #ifdef __cplusplus } // extern "C" diff --git a/tensorflow/lite/micro/docs/optimized_kernel_implementations.md b/tensorflow/lite/micro/docs/optimized_kernel_implementations.md index 4a5c81abc76..8eefb55069f 100644 --- a/tensorflow/lite/micro/docs/optimized_kernel_implementations.md +++ b/tensorflow/lite/micro/docs/optimized_kernel_implementations.md @@ -169,6 +169,12 @@ support: * Build a static libtensorflow-microlite.a using the TFLM makefile with: `make -f tensorflow/lite/micro/tools/make/Makefile TARGET=<target> OPTIMIZED_KERNEL_DIR=<optimized_kernel_dir> microlite` + * Optionally build for size or speed. Translated to a valid make command, it will be one of these two: + `make -f tensorflow/lite/micro/tools/make/Makefile TARGET=<target> + OPTIMIZED_KERNEL_DIR=<optimized_kernel_dir> OPTIMIZE_KERNELS_FOR=KERNELS_OPTIMIZED_FOR_SIZE microlite` + `make -f tensorflow/lite/micro/tools/make/Makefile TARGET=<target> + OPTIMIZED_KERNEL_DIR=<optimized_kernel_dir> OPTIMIZE_KERNELS_FOR=KERNELS_OPTIMIZED_FOR_SPEED microlite` + Check the relevant README for the given optimization library, if applicable. * Use the static library and any TFLM headers as part of the overall application (with its own build system).
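For the cortex_m_generic port shown above, the application supplies the output sink at runtime via `RegisterDebugLogCallback()` from debug_log_callback.h, and the callback receives the already-formatted message. A minimal application-side sketch, assuming a hypothetical board-support routine `uart_send()`:

```
// Application-side sketch for the cortex_m_generic debug-log port.
// uart_send() is an assumed board-support routine, not part of TFLM.
#include "tensorflow/lite/micro/cortex_m_generic/debug_log_callback.h"

extern "C" void uart_send(const char* s);  // Hypothetical UART transmit.

static void TflmLogHandler(const char* s) {
  // Receives the fully formatted, null-terminated log message.
  uart_send(s);
}

void SetupTflmLogging() { RegisterDebugLogCallback(TflmLogHandler); }
```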
diff --git a/tensorflow/lite/micro/examples/dtln/Makefile.inc b/tensorflow/lite/micro/examples/dtln/Makefile.inc new file mode 100644 index 00000000000..e88cf093d90 --- /dev/null +++ b/tensorflow/lite/micro/examples/dtln/Makefile.inc @@ -0,0 +1,30 @@ + +DTLN_TEST_SRCS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/dtln/dtln_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/dtln/dtln_inout_data.cc + +DTLN_TEST_HDRS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/dtln/dtln_inout_data.h + +DTLN_GENERATOR_INPUTS := \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/dtln/dtln_noise_suppression.tflite + +DTLN_GENERATED_SRCS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/dtln/dtln_noise_suppression_model_data.cc + +DTLN_GENERATED_HDRS := \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/dtln/dtln_noise_suppression_model_data.h + +#Find any platform - specific rules for this example. +include $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/dtln/*/Makefile.inc) + +# TODO(b/161489252): Disabling warnings for this example until we have a better +# way to build third_party code with a reduced list of CFLAGS. +CCFLAGS := $(filter-out $(CC_WARNINGS),$(CCFLAGS)) + +# Tests loading and running a dtln model. +# TODO(b/319712246): Re-enable tests once the dtln_test works on HiFi Mini & VP6 +ifneq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifimini vision_p6)) + $(eval $(call microlite_test,dtln_test,\ + $(DTLN_TEST_SRCS),$(DTLN_TEST_HDRS),$(DTLN_GENERATOR_INPUTS))) +endif diff --git a/tensorflow/lite/micro/examples/dtln/README.md b/tensorflow/lite/micro/examples/dtln/README.md new file mode 100644 index 00000000000..eb96b0c6fdc --- /dev/null +++ b/tensorflow/lite/micro/examples/dtln/README.md @@ -0,0 +1,23 @@ +# DTLN example +The DTLN example is a demonstration of the DTLN network running on a HiFi DSP for noise suppression in speech. +It uses feature_data as input and provides noise-suppressed speech as output. +It is based on the paper (https://github.com/breizhn/DTLN). +While the paper presents 2 parts, one for noise suppression and the other for speech enhancement, +the example presented here follows the noise suppression part only. +The model was re-trained by Cadence using the DNS challenge data (https://github.com/microsoft/DNS-Challenge) +and the noise suppression part was 8-bit quantized. +This example is not to be used to evaluate network quality or the quality of noise suppression, but only as a demonstration, as stated above. + +## Run the tests on a development machine + +``` +make -f tensorflow/lite/micro/tools/make/Makefile third_party_downloads +make -f tensorflow/lite/micro/tools/make/Makefile test_dtln_test +``` + +You should see a series of files get compiled, followed by some logging output +from a test, which should conclude with `~~~ALL TESTS PASSED~~~`. If you see +this, it means that a small program has been built and run that loads a trained +TensorFlow model, runs it with feature data, and gets the expected +outputs. This particular test runs with feature data as input +and validates the output against a golden reference output. diff --git a/tensorflow/lite/micro/examples/dtln/dtln_inout_data.cc b/tensorflow/lite/micro/examples/dtln/dtln_inout_data.cc new file mode 100644 index 00000000000..a4652a73849 --- /dev/null +++ b/tensorflow/lite/micro/examples/dtln/dtln_inout_data.cc @@ -0,0 +1,60 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+ +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" + +int8_t feature_data[] = { + -127, -126, -115, -82, -90, -113, -15, 13, -87, -105, -77, -106, + -113, -81, -90, -123, -113, -112, -124, -120, -114, -123, -123, -112, + -115, -125, -120, -120, -126, -126, -126, -127, -128, -127, -128, -128, + -127, -127, -128, -126, -125, -126, -127, -127, -127, -128, -126, -127, + -128, -127, -127, -128, -127, -126, -127, -128, -127, -127, -127, -124, + -124, -127, -126, -126, -127, -126, -124, -125, -128, -126, -125, -127, + -126, -126, -127, -127, -126, -126, -127, -126, -126, -127, -126, -125, + -127, -126, -123, -124, -126, -126, -126, -128, -127, -127, -127, -128, + -127, -127, -128, -128, -128, -128, -128, -127, -128, -128, -127, -128, + -128, -127, -127, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -127, -127, -127, -127, -127, + -127, -127, -127, -128, -127, -127, -127, -127, -126, -127, -127, -127, + -127, -127, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, + -128, -127, -128, -128, -127, -127, -128, -128, -128, -128, -128, -128, + -128, -128, -128, -128, -128}; + +int8_t golden_ref[] = { + 119, 82, 72, 116, -5, 10, -2, 0, -2, 36, 116, 125, 123, 124, 126, + 124, 115, 116, 124, 126, 121, 124, 121, 104, 104, 113, 113, 110, 115, 101, + 96, 119, 120, 117, 118, 114, 104, 110, 118, 119, 111, 114, 119, 117, 114, + 110, 117, 112, 115, 120, 119, 118, 119, 116, 117, 120, 121, 121, 121, 119, + 117, 120, 121, 120, 118, 115, 114, 114, 117, 119, 113, 108, 108, 111, 112, + 114, 114, 116, 115, 112, 110, 113, 113, 110, 107, 98, 102, 101, 101, 103, + 92, 98, 101, 102, 102, 101, 104, 102, 101, 101, 100, 102, 98, 104, 100, + 99, 92, 96, 87, 97, 96, 96, 96, 95, 92, 98, 95, 90, 85, 82, + 87, 82, 82, 89, 90, 83, 86, 85, 80, 86, 87, 91, 89, 87, 87, + 85, 82, 74, 80, 80, 72, 79, 74, 79, 82, 83, 77, 85, 71, 76, + 72, 76, 76, 77, 56, 74, 74, 69, 69, 69, 65, 56, 60, 67, 71, + 69, 74, 67, 71, 65, 77, 76, 79, 67, 72, 61, 60, 67, 69, 71, + 77, 63, 63, 60, 63, 71, 80, 80, 74, 76, 67, 74, 63, 67, 69, + 72, 77, 71, 72, 82, 65, 49, 67, 58, 71, 65, 63, 69, 61, 77, + 63, 65, 65, 69, 69, 65, 72, 77, 80, 60, 79, 77, 71, 67, 79, + 69, 67, 65, 74, 69, 71, 67, 76, 77, 77, 77, 83, 67, 65, 79, + 77, 60, 71, 86, 86, 63, 74, 63, 63, 63, 69, 79, 63, 52, 85, + 87, 86}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc 
b/tensorflow/lite/micro/examples/dtln/dtln_inout_data.h similarity index 70% rename from tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc rename to tensorflow/lite/micro/examples/dtln/dtln_inout_data.h index 47d12baf707..c5fde46a085 100644 --- a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc +++ b/tensorflow/lite/micro/examples/dtln/dtln_inout_data.h @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -13,11 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" +#include "tensorflow/lite/c/common.h" -const char* kCategoryLabels[kCategoryCount] = { - "silence", - "unknown", - "yes", - "no", -}; +extern int8_t feature_data[]; +extern int8_t golden_ref[]; diff --git a/tensorflow/lite/micro/examples/dtln/dtln_noise_suppression.tflite b/tensorflow/lite/micro/examples/dtln/dtln_noise_suppression.tflite new file mode 100644 index 00000000000..143ef8dce9d Binary files /dev/null and b/tensorflow/lite/micro/examples/dtln/dtln_noise_suppression.tflite differ diff --git a/tensorflow/lite/micro/examples/dtln/dtln_test.cc b/tensorflow/lite/micro/examples/dtln/dtln_test.cc new file mode 100644 index 00000000000..10771954e1f --- /dev/null +++ b/tensorflow/lite/micro/examples/dtln/dtln_test.cc @@ -0,0 +1,100 @@ +/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/examples/dtln/dtln_inout_data.h" +#include "tensorflow/lite/micro/examples/dtln/dtln_noise_suppression_model_data.h" +#include "tensorflow/lite/micro/micro_interpreter.h" +#include "tensorflow/lite/micro/micro_log.h" +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/testing/micro_test.h" +#include "tensorflow/lite/schema/schema_generated.h" + +TF_LITE_MICRO_TESTS_BEGIN + +MicroPrintf( + "\nThis example demonstrates LSTM layers on HiFi DSP, NOT for evaluating " + "noise suppression quality.\n"); +TF_LITE_MICRO_TEST(TestInvoke) { + // Map the model into a usable data structure. This doesn't involve any + // copying or parsing, it's a very lightweight operation. + const tflite::Model* model = + ::tflite::GetModel(g_dtln_noise_suppression_model_data); + if (model->version() != TFLITE_SCHEMA_VERSION) { + MicroPrintf( + "Model provided is schema version %d not equal " + "to supported version %d.\n", + model->version(), TFLITE_SCHEMA_VERSION); + } + + // Pull in only the operation implementations we need. 
+ // This relies on a complete list of all the ops needed by this graph. + + tflite::MicroMutableOpResolver<3> micro_op_resolver; + micro_op_resolver.AddUnidirectionalSequenceLSTM(); + micro_op_resolver.AddFullyConnected(); + micro_op_resolver.AddLogistic(); + + // Create an area of memory to use for input, output, and intermediate arrays. + constexpr int tensor_arena_size = 16 * 1024; + alignas(16) uint8_t tensor_arena[tensor_arena_size]; + + // Build an interpreter to run the model with. + tflite::MicroInterpreter interpreter(model, micro_op_resolver, tensor_arena, + tensor_arena_size); + interpreter.AllocateTensors(); + + // Get information about the memory area to use for the model's input. + TfLiteTensor* input = interpreter.input(0); + + // Make sure the input has the properties we expect. + TF_LITE_MICRO_EXPECT(input != nullptr); + TF_LITE_MICRO_EXPECT_EQ(3, input->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(257, input->dims->data[2]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, input->type); + + // Copy a spectrogram created from a noisy.wav audio file, + // into the memory area used for the input. + for (size_t i = 0; i < input->bytes; ++i) { + input->data.int8[i] = feature_data[i]; + } + + // Run the model on this input and make sure it succeeds. + TfLiteStatus invoke_status = interpreter.Invoke(); + if (invoke_status != kTfLiteOk) { + MicroPrintf("Invoke failed\n"); + } + TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); + + // Get the output from the model, and make sure it's the expected size and + // type. + TfLiteTensor* output = interpreter.output(0); + TF_LITE_MICRO_EXPECT_EQ(3, output->dims->size); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); + TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[1]); + TF_LITE_MICRO_EXPECT_EQ(257, output->dims->data[2]); + TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); + + int output_size = + output->dims->data[0] * output->dims->data[1] * output->dims->data[2]; + for (int i = 0; i < output_size; i++) + TF_LITE_MICRO_EXPECT_EQ(output->data.int8[i], golden_ref[i]); + + MicroPrintf("Ran successfully\n"); +} + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/hello_world/BUILD b/tensorflow/lite/micro/examples/hello_world/BUILD index f2b41b37518..988b7dd6b62 100644 --- a/tensorflow/lite/micro/examples/hello_world/BUILD +++ b/tensorflow/lite/micro/examples/hello_world/BUILD @@ -1,5 +1,6 @@ # Description: # TensorFlow Lite for Microcontrollers "hello world" example. 
+load("@rules_python//python:defs.bzl", "py_binary") load("@tflm_pip_deps//:requirements.bzl", "requirement") load( "//tensorflow/lite/micro:build_def.bzl", @@ -53,7 +54,7 @@ py_binary( "@absl_py//absl/flags", "@absl_py//absl/logging", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), "//python/tflite_micro:runtime", ], ) @@ -78,6 +79,6 @@ py_binary( srcs_version = "PY3", deps = [ requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) diff --git a/tensorflow/lite/micro/examples/hello_world/models/BUILD b/tensorflow/lite/micro/examples/hello_world/models/BUILD index 4f025b03678..4c9441b86fe 100644 --- a/tensorflow/lite/micro/examples/hello_world/models/BUILD +++ b/tensorflow/lite/micro/examples/hello_world/models/BUILD @@ -9,7 +9,10 @@ exports_files( "hello_world_float.tflite", "hello_world_int8.tflite", ], - visibility = ["//tensorflow/lite/micro/examples/hello_world:__subpackages__"], + visibility = [ + "//codegen/examples/hello_world:__subpackages__", + "//tensorflow/lite/micro/examples/hello_world:__subpackages__", + ], ) generate_cc_arrays( diff --git a/tensorflow/lite/micro/examples/hello_world/quantization/BUILD b/tensorflow/lite/micro/examples/hello_world/quantization/BUILD index ecba31638a2..9a204f24ba8 100644 --- a/tensorflow/lite/micro/examples/hello_world/quantization/BUILD +++ b/tensorflow/lite/micro/examples/hello_world/quantization/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_binary") load("@tflm_pip_deps//:requirements.bzl", "requirement") py_binary( @@ -11,7 +12,7 @@ py_binary( "@absl_py//absl/flags", "@absl_py//absl/logging", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), "//python/tflite_micro:runtime", ], ) diff --git a/tensorflow/lite/micro/examples/micro_speech/BUILD b/tensorflow/lite/micro/examples/micro_speech/BUILD index 71741f334f7..10d1c1052ec 100644 --- a/tensorflow/lite/micro/examples/micro_speech/BUILD +++ b/tensorflow/lite/micro/examples/micro_speech/BUILD @@ -1,5 +1,7 @@ # Description: # TensorFlow Lite microcontroller example. 
+load("@rules_python//python:defs.bzl", "py_binary", "py_test") +load("@tflm_pip_deps//:requirements.bzl", "requirement") load("//tensorflow/lite/micro:build_def.bzl", "generate_cc_arrays") package( @@ -9,16 +11,6 @@ package( licenses = ["notice"], ) -cc_library( - name = "simple_model_settings", - srcs = [ - "simple_features/simple_model_settings.cc", - ], - hdrs = [ - "simple_features/simple_model_settings.h", - ], -) - generate_cc_arrays( name = "generated_yes_1000ms_wav_cc", src = "testdata/yes_1000ms.wav", @@ -43,6 +35,30 @@ generate_cc_arrays( out = "testdata/no_1000ms_audio_data.h", ) +generate_cc_arrays( + name = "generated_noise_1000ms_wav_cc", + src = "testdata/noise_1000ms.wav", + out = "testdata/noise_1000ms_audio_data.cc", +) + +generate_cc_arrays( + name = "generated_noise_1000ms_wav_hdr", + src = "testdata/noise_1000ms.wav", + out = "testdata/noise_1000ms_audio_data.h", +) + +generate_cc_arrays( + name = "generated_silence_1000ms_wav_cc", + src = "testdata/silence_1000ms.wav", + out = "testdata/silence_1000ms_audio_data.cc", +) + +generate_cc_arrays( + name = "generated_silence_1000ms_wav_hdr", + src = "testdata/silence_1000ms.wav", + out = "testdata/silence_1000ms_audio_data.h", +) + generate_cc_arrays( name = "generated_yes_30ms_wav_cc", src = "testdata/yes_30ms.wav", @@ -69,14 +85,26 @@ generate_cc_arrays( generate_cc_arrays( name = "generated_micro_speech_model_cc", - src = "micro_speech.tflite", - out = "micro_speech_model_data.cc", + src = "models/micro_speech_quantized.tflite", + out = "models/micro_speech_quantized_model_data.cc", ) generate_cc_arrays( name = "generated_micro_speech_model_hdr", - src = "micro_speech.tflite", - out = "micro_speech_model_data.h", + src = "models/micro_speech_quantized.tflite", + out = "models/micro_speech_quantized_model_data.h", +) + +generate_cc_arrays( + name = "generated_audio_preprocessor_model_cc", + src = "models/audio_preprocessor_int8.tflite", + out = "models/audio_preprocessor_int8_model_data.cc", +) + +generate_cc_arrays( + name = "generated_audio_preprocessor_model_hdr", + src = "models/audio_preprocessor_int8.tflite", + out = "models/audio_preprocessor_int8_model_data.h", ) cc_library( @@ -90,35 +118,17 @@ cc_library( ) cc_library( - name = "simple_features_test_data", + name = "audio_preprocessor_model_data", srcs = [ - "simple_features/no_simple_features_data.cc", - "simple_features/yes_simple_features_data.cc", + ":generated_audio_preprocessor_model_cc", ], hdrs = [ - "simple_features/no_simple_features_data.h", - "simple_features/yes_simple_features_data.h", - ], -) - -cc_test( - name = "micro_speech_test", - srcs = [ - "micro_speech_test.cc", - ], - deps = [ - ":micro_speech_model_data", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro:op_resolvers", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_test_data", - "//tensorflow/lite/micro/testing:micro_test", - "//tensorflow/lite/schema:schema_fbs", + ":generated_audio_preprocessor_model_hdr", ], ) cc_library( - name = "audio_sample_test_data", + name = "audio_sample_test_data_30ms", srcs = [ ":generated_no_30ms_wav_cc", ":generated_yes_30ms_wav_cc", @@ -130,324 +140,128 @@ cc_library( ) cc_library( - name = "audio_large_sample_test_data", + name = "audio_sample_test_data_1000ms", srcs = [ ":generated_no_1000ms_wav_cc", + ":generated_noise_1000ms_wav_cc", + ":generated_silence_1000ms_wav_cc", ":generated_yes_1000ms_wav_cc", ], hdrs = [ ":generated_no_1000ms_wav_hdr", + 
":generated_noise_1000ms_wav_hdr", + ":generated_silence_1000ms_wav_hdr", ":generated_yes_1000ms_wav_hdr", ], ) cc_library( - name = "simple_features_generator_test_data", - srcs = [ - "simple_features/no_power_spectrum_data.cc", - "simple_features/yes_power_spectrum_data.cc", - ], - hdrs = [ - "simple_features/no_power_spectrum_data.h", - "simple_features/yes_power_spectrum_data.h", - ], -) - -cc_library( - name = "simple_features_generator_reference", - srcs = [ - "simple_features/simple_features_generator.cc", - ], + name = "micro_model_settings", hdrs = [ - "simple_features/simple_features_generator.h", - ], - deps = [ - ":simple_model_settings", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_log", + "micro_model_settings.h", ], ) cc_test( - name = "simple_features_generator_reference_test", - srcs = [ - "simple_features/simple_features_generator_test.cc", - ], - deps = [ - ":audio_sample_test_data", - ":simple_features_generator_reference", - ":simple_features_generator_test_data", - ":simple_model_settings", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro/testing:micro_test", - ], -) - -cc_library( - name = "simple_features_generator_fixed", - srcs = [ - "simple_features/fixed_point/simple_features_generator.cc", - ], - hdrs = [ - "simple_features/simple_features_generator.h", - ], - deps = [ - ":simple_model_settings", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_log", - ], -) - -cc_test( - name = "simple_features_generator_fixed_test", + name = "micro_speech_test", srcs = [ - "simple_features/simple_features_generator_test.cc", + "micro_speech_test.cc", ], deps = [ - ":audio_sample_test_data", - ":simple_features_generator_fixed", - ":simple_features_generator_test_data", - ":simple_model_settings", - "//tensorflow/lite/c:common", + ":audio_preprocessor_model_data", + ":audio_sample_test_data_1000ms", + ":audio_sample_test_data_30ms", + ":micro_model_settings", + ":micro_speech_model_data", "//tensorflow/lite/micro:micro_framework", "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:op_resolvers", "//tensorflow/lite/micro/testing:micro_test", + "//tensorflow/lite/schema:schema_fbs", ], ) -cc_library( - name = "audio_provider", - srcs = [ - "audio_provider.cc", - ], - hdrs = [ - "audio_provider.h", - ], - deps = [ - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - ], -) - -cc_library( - name = "audio_provider_mock", - srcs = [ - "audio_provider_mock.cc", - ], - hdrs = [ - "audio_provider.h", - ], - deps = [ - ":audio_large_sample_test_data", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - ], -) - -cc_test( - name = "audio_provider_test", - srcs = [ - "audio_provider_test.cc", - ], - deps = [ - ":audio_provider", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - "//tensorflow/lite/micro/testing:micro_test", - ], +filegroup( + name = "samples_30ms", + srcs = glob(["testdata/*_30ms.wav"]), ) -cc_test( - name = "audio_provider_mock_test", - srcs = [ - "audio_provider_mock_test.cc", - ], - deps = [ - ":audio_large_sample_test_data", - ":audio_provider_mock", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - 
"//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - "//tensorflow/lite/micro/testing:micro_test", - ], +filegroup( + name = "samples_1000ms", + srcs = glob(["testdata/*_1000ms.wav"]), ) -cc_library( - name = "feature_provider", - srcs = [ - "feature_provider.cc", - ], - hdrs = [ - "feature_provider.h", - ], - deps = [ - ":audio_provider", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_generator", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - ], +filegroup( + name = "models_tflite", + srcs = glob(["models/*.tflite"]), ) -cc_test( - name = "feature_provider_test", - srcs = [ - "feature_provider_test.cc", +py_binary( + name = "audio_preprocessor", + srcs = ["audio_preprocessor.py"], + data = [ + ":samples_30ms", ], + python_version = "PY3", + srcs_version = "PY3", deps = [ - ":audio_provider", - ":feature_provider", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - "//tensorflow/lite/micro/testing:micro_test", + "@absl_py//absl:app", + "@absl_py//absl/flags", + requirement("numpy"), + requirement("tensorflow"), + "//python/tflite_micro:runtime", + "//python/tflite_micro/signal:ops", + "//python/tflite_micro/signal/utils:util", ], ) -cc_library( - name = "feature_provider_mock", - srcs = [ - "feature_provider.cc", - ], - hdrs = [ - "feature_provider.h", +py_binary( + name = "evaluate", + srcs = ["evaluate.py"], + data = [ + ":models_tflite", ], + python_version = "PY3", + srcs_version = "PY3", deps = [ - ":audio_provider_mock", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_generator", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", + ":audio_preprocessor", ], ) -cc_test( - name = "feature_provider_mock_test", - size = "small", - srcs = [ - "feature_provider_mock_test.cc", +py_test( + name = "evaluate_test", + srcs = ["evaluate_test.py"], + data = [ + ":models_tflite", + ":samples_1000ms", ], + main = "evaluate_test.py", + python_version = "PY3", tags = [ - "noasan", # TODO(b/179930607): Fix with asan. + "noasan", + "nomsan", # Python doesn't like these symbols + "noubsan", ], deps = [ - ":feature_provider_mock", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_features_test_data", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - "//tensorflow/lite/micro/testing:micro_test", + ":evaluate", ], ) -cc_library( - name = "recognize_commands", - srcs = [ - "recognize_commands.cc", - ], - hdrs = [ - "recognize_commands.h", - ], - deps = [ - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - ], -) - -cc_test( - name = "recognize_commands_test", - srcs = [ - "recognize_commands_test.cc", +py_test( + name = "audio_preprocessor_test", + srcs = ["audio_preprocessor_test.py"], + data = [ + ":models_tflite", + ":samples_30ms", ], + main = "audio_preprocessor_test.py", + python_version = "PY3", tags = [ - "no_oss", # TODO(122853023): Resolve issues and re-enable. 
- ], - deps = [ - ":recognize_commands", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro:test_helpers", - "//tensorflow/lite/micro/testing:micro_test", - ], -) - -cc_library( - name = "command_responder", - srcs = [ - "command_responder.cc", - ], - hdrs = [ - "command_responder.h", - ], - deps = [ - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_log", - ], -) - -cc_test( - name = "command_responder_test", - srcs = [ - "command_responder_test.cc", - ], - deps = [ - ":command_responder", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro/testing:micro_test", - ], -) - -cc_binary( - name = "micro_speech", - srcs = [ - "main.cc", - "main_functions.cc", - "main_functions.h", + "noasan", + "nomsan", # Python doesn't like these symbols + "noubsan", ], deps = [ - ":audio_provider", - ":command_responder", - ":feature_provider", - ":micro_speech_model_data", - ":recognize_commands", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro:op_resolvers", - "//tensorflow/lite/micro:system_setup", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - "//tensorflow/lite/schema:schema_fbs", + ":audio_preprocessor", ], ) - -cc_binary( - name = "micro_speech_mock", - srcs = [ - "main.cc", - "main_functions.cc", - "main_functions.h", - ], - deps = [ - ":audio_provider_mock", - ":command_responder", - ":feature_provider", - ":micro_speech_model_data", - ":recognize_commands", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro:op_resolvers", - "//tensorflow/lite/micro:system_setup", - "//tensorflow/lite/micro/examples/micro_speech/micro_features:micro_model_settings", - "//tensorflow/lite/schema:schema_fbs", - ], -) - -sh_test( - name = "micro_speech_binary_mock_test", - srcs = ["micro_speech_binary_mock_test.sh"], - data = [":micro_speech_mock"], -) diff --git a/tensorflow/lite/micro/examples/micro_speech/Makefile.inc b/tensorflow/lite/micro/examples/micro_speech/Makefile.inc index d2ceab5e370..a1b5b565cf5 100644 --- a/tensorflow/lite/micro/examples/micro_speech/Makefile.inc +++ b/tensorflow/lite/micro/examples/micro_speech/Makefile.inc @@ -1,299 +1,64 @@ -INCLUDES += \ - -I$(MAKEFILE_DIR)/downloads/kissfft - -KISSFFT_LIB_SRCS := - -KISSFFT_LIB_HDRS := \ -$(MAKEFILE_DIR)/downloads/kissfft/COPYING \ -$(MAKEFILE_DIR)/downloads/kissfft/kiss_fft.c \ -$(MAKEFILE_DIR)/downloads/kissfft/kiss_fft.h \ -$(MAKEFILE_DIR)/downloads/kissfft/_kiss_fft_guts.h \ -$(MAKEFILE_DIR)/downloads/kissfft/tools/kiss_fftr.c \ -$(MAKEFILE_DIR)/downloads/kissfft/tools/kiss_fftr.h - MICRO_SPEECH_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc MICRO_SPEECH_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ - -SIMPLE_FEATURES_GENERATOR_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc 
\ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.cc - -SIMPLE_FEATURES_GENERATOR_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h - -MICRO_FEATURES_LIB_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft_util.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank_util.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend_util.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_lut.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale_util.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window.c \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window_util.c \ -$(KISSFFT_LIB_SRCS) - -MICRO_FEATURES_LIB_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/bits.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/fft_util.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/filterbank_util.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/frontend_util.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/kiss_fft_common.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/kiss_fft_int16.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_lut.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/log_scale_util.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/noise_reduction_util.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/pcan_gain_control_util.h \ 
-$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/experimental/microfrontend/lib/window_util.h \ -$(KISSFFT_LIB_HDRS) - -MICRO_FEATURES_GENERATOR_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ -$(MICRO_FEATURES_LIB_SRCS) - -MICRO_FEATURES_GENERATOR_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ -$(MICRO_FEATURES_LIB_HDRS) - -MICRO_FEATURES_GENERATOR_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.cc \ -$(MICRO_FEATURES_GENERATOR_SRCS) - -MICRO_FEATURES_GENERATOR_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h \ -$(MICRO_FEATURES_GENERATOR_HDRS) - -AUDIO_PROVIDER_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.cc - -AUDIO_PROVIDER_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ - -AUDIO_PROVIDER_MOCK_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc - -AUDIO_PROVIDER_MOCK_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ - -FEATURE_PROVIDER_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ -$(MICRO_FEATURES_GENERATOR_SRCS) - -FEATURE_PROVIDER_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ -$(MICRO_FEATURES_GENERATOR_HDRS) - -FEATURE_PROVIDER_MOCK_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ 
-$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc \ -$(MICRO_FEATURES_GENERATOR_SRCS) - -FEATURE_PROVIDER_MOCK_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ -$(MICRO_FEATURES_GENERATOR_HDRS) - -RECOGNIZE_COMMANDS_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc - -RECOGNIZE_COMMANDS_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.h - -COMMAND_RESPONDER_TEST_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.cc +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_model_settings.h \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/micro_test.h -COMMAND_RESPONDER_TEST_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.h +MICRO_SPEECH_SRCS := $(MICRO_SPEECH_TEST_SRCS) -MICRO_SPEECH_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.cc \ -$(MICRO_FEATURES_GENERATOR_SRCS) - -MICRO_SPEECH_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.h \ -$(MICRO_FEATURES_GENERATOR_HDRS) - -MICRO_SPEECH_MOCK_SRCS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.cc \ 
-$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.cc \ -$(MICRO_FEATURES_GENERATOR_SRCS) - -MICRO_SPEECH_MOCK_HDRS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/audio_provider.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/feature_provider.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/recognize_commands.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/command_responder.h \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/main_functions.h \ -$(MICRO_FEATURES_GENERATOR_HDRS) +MICRO_SPEECH_HDRS := $(MICRO_SPEECH_TEST_HDRS) MICRO_SPEECH_GENERATOR_INPUTS := \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/models/micro_speech_quantized.tflite \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_int8.tflite \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms.wav \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms.wav \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms.wav \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/silence_1000ms.wav \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/noise_1000ms.wav \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms.wav \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms.wav MICRO_SPEECH_GENERATED_SRCS := \ -$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/models/micro_speech_quantized_model_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_int8_model_data.cc \ $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.cc \ -$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.cc \ $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/silence_1000ms_audio_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/noise_1000ms_audio_data.cc \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.cc \ $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.cc MICRO_SPEECH_GENERATED_HDRS := \ 
-$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/models/micro_speech_quantized_model_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_int8_model_data.h \ $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.h \ -$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h \ $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/silence_1000ms_audio_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/noise_1000ms_audio_data.h \ +$(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h \ $(GENERATED_SRCS_DIR)$(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.h -#Find any platform - specific rules for this example. -include $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/*/Makefile.inc) - # TODO(b/161489252): Disabling warnings for this example until we have a better # way to build third_party code with a reduced list of CFLAGS. +# Xtensa hifi5/hifi4 xa_nnlib CCFLAGS := $(filter-out $(CC_WARNINGS),$(CCFLAGS)) -# Test the code for feature generation. -ifneq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifi5 hifi3z)) - $(eval $(call microlite_test,micro_features_generator_test,\ - $(MICRO_FEATURES_GENERATOR_TEST_SRCS),$(MICRO_FEATURES_GENERATOR_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) -endif - # Tests loading and running a speech model. $(eval $(call microlite_test,micro_speech_test,\ $(MICRO_SPEECH_TEST_SRCS),$(MICRO_SPEECH_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) -# TODO(b/268568089): This test is taking very long time to finish; causing the -# CI to run for a long time to finish. -ifneq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifimini hifi5 hifi3z)) - # Test the code for feature generation. - $(eval $(call microlite_test,simple_features_generator_test,\ - $(SIMPLE_FEATURES_GENERATOR_TEST_SRCS),$(SIMPLE_FEATURES_GENERATOR_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) -endif - -# Tests the audio provider module. -$(eval $(call microlite_test,audio_provider_test,\ -$(AUDIO_PROVIDER_TEST_SRCS),$(AUDIO_PROVIDER_TEST_HDRS))) - -# Tests the audio provider mock module. -$(eval $(call microlite_test,audio_provider_mock_test,\ -$(AUDIO_PROVIDER_MOCK_TEST_SRCS),$(AUDIO_PROVIDER_MOCK_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) - -# Tests the feature provider module. -ifneq ($(TARGET_ARCH), hifi3z) - $(eval $(call microlite_test,feature_provider_test,\ - $(FEATURE_PROVIDER_TEST_SRCS),$(FEATURE_PROVIDER_TEST_HDRS))) -endif - -# Tests the feature provider module using the mock audio provider. -ifneq ($(TARGET_ARCH), hifi3z) - $(eval $(call microlite_test,feature_provider_mock_test,\ - $(FEATURE_PROVIDER_MOCK_TEST_SRCS),$(FEATURE_PROVIDER_MOCK_TEST_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) -endif - -# Tests the command recognizer module. -$(eval $(call microlite_test,recognize_commands_test,\ -$(RECOGNIZE_COMMANDS_TEST_SRCS),$(RECOGNIZE_COMMANDS_TEST_HDRS))) - -# Tests responding to a command. 
-$(eval $(call microlite_test,command_responder_test,\ -$(COMMAND_RESPONDER_TEST_SRCS),$(COMMAND_RESPONDER_TEST_HDRS))) - -# Builds a standalone speech command recognizer binary. +# Builds a standalone binary. $(eval $(call microlite_test,micro_speech,\ $(MICRO_SPEECH_SRCS),$(MICRO_SPEECH_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) -# Builds a standalone speech command recognizer binary using fake audio input. -$(eval $(call microlite_test,micro_speech_mock,\ -$(MICRO_SPEECH_MOCK_SRCS),$(MICRO_SPEECH_MOCK_HDRS),$(MICRO_SPEECH_GENERATOR_INPUTS))) - # Add sources and headers generated from $(MICRO_SPEECH_GENERATOR_INPUTS). MICRO_SPEECH_SRCS += $(MICRO_SPEECH_GENERATED_SRCS) MICRO_SPEECH_HDRS += $(MICRO_SPEECH_GENERATED_HDRS) -MICRO_SPEECH_MOCK_SRCS += $(MICRO_SPEECH_GENERATED_SRCS) -MICRO_SPEECH_MOCK_HDRS += $(MICRO_SPEECH_GENERATED_HDRS) - list_micro_speech_example_sources: @echo $(MICRO_SPEECH_SRCS) list_micro_speech_example_headers: @echo $(MICRO_SPEECH_HDRS) - -list_micro_speech_mock_example_sources: - @echo $(MICRO_SPEECH_MOCK_SRCS) - -list_micro_speech_mock_example_headers: - @echo $(MICRO_SPEECH_MOCK_HDRS) diff --git a/tensorflow/lite/micro/examples/micro_speech/README.md b/tensorflow/lite/micro/examples/micro_speech/README.md index 8a4aa77d20a..84aaec13f1a 100644 --- a/tensorflow/lite/micro/examples/micro_speech/README.md +++ b/tensorflow/lite/micro/examples/micro_speech/README.md @@ -2,321 +2,275 @@ # Micro Speech Example -This example shows how to run a 20 kB model that can recognize 2 keywords, -"yes" and "no", from speech data. +This example shows how to run inference using TensorFlow Lite Micro (TFLM) +on two models for wake-word recognition. +The first model is an audio preprocessor that generates spectrogram data +from raw audio samples. +The second is the Micro Speech model, a less than 20 kB model +that can recognize 2 keywords, "yes" and "no", from speech data. +The Micro Speech model takes the spectrogram data as input and produces +category probabilities. -The application listens to its surroundings with a microphone and indicates -when it has detected a word by lighting an LED or displaying data on a -screen, depending on the capabilities of the device. - -![Animation on Arduino](images/animation_on_arduino.gif) - -The code has a small footprint (for example, around 22 kilobytes on a Cortex -M3) and only uses about 10 kilobytes of RAM for working memory, so it's able to -run on systems like an STM32F103 with only 20 kilobytes of total SRAM and 64 -kilobytes of Flash. 
## Table of contents -- [Deploy to STM32F746](#deploy-to-STM32F746) -- [Deploy to NXP FRDM K66F](#deploy-to-nxp-frdm-k66f) -- [Deploy to CEVA BX1/SP500](#deploy-to-ceva-bx1) -- [Run on macOS](#run-on-macos) -- [Run the tests on a development machine](#run-the-tests-on-a-development-machine) +- [Audio Preprocessor](#audio-preprocessor) +- [Micro Speech Model Architecture](#micro-speech-model-architecture) +- [Run the C++ tests on a development machine](#run-the-c-tests-on-a-development-machine) +- [Run the evaluate.py script on a development machine](#run-the-evaluatepy-script-on-a-development-machine) +- [Run the evaluate_test.py script on a development machine](#run-the-evaluate_testpy-script-on-a-development-machine) +- [Converting models or audio samples to C++](#converting-models-or-audio-samples-to-c) - [Train your own model](#train-your-own-model) -## Deploy to STM32F746 - -The following instructions will help you build and deploy the example to the -[STM32F7 discovery kit](https://os.mbed.com/platforms/ST-Discovery-F746NG/) -using [ARM Mbed](https://github.com/ARMmbed/mbed-cli). - -Before we begin, you'll need the following: - -- STM32F7 discovery kit board -- Mini-USB cable -- ARM Mbed CLI ([installation instructions](https://os.mbed.com/docs/mbed-os/v6.9/quick-start/build-with-mbed-cli.html). Check it out for MacOS Catalina - [mbed-cli is broken on MacOS Catalina #930](https://github.com/ARMmbed/mbed-cli/issues/930#issuecomment-660550734)) -- Python 3 and pip3 - -Since Mbed requires a special folder structure for projects, we'll first run a -command to generate a subfolder containing the required source files in this -structure: - -``` -make -f tensorflow/lite/micro/tools/make/Makefile TARGET=disco_f746ng OPTIMIZED_KERNEL_DIR=cmsis_nn generate_micro_speech_mbed_project +## Audio Preprocessor + +The Audio Preprocessor model converts raw audio samples into a spectrographic feature. +Audio samples are input to the model in windowed frames, each window overlapping +the previous. When sufficient features have been accumulated, those features can +be provided as input to the Micro Speech model. + +This model provides a replication of the legacy preprocessing used during training +of the Micro Speech model. For additional information on audio preprocessing during training, +please refer to the [training README](train/README.md#preprocessing-speech-input) documentation. + +Audio Preprocessing models providing `int8` and `float32` output, ready for use +with the Micro Speech model, are provided in the [models](models/) directory. +These models expect the audio input to conform to: +* 30ms window frame +* 20ms window stride +* 16KHz sample rate +* 16-bit signed PCM data +* single channel (mono) + +### Model Architecture + +This model consists primarily of [Signal Library](https://github.com/tensorflow/tflite-micro/blob/main/python/tflite_micro/signal) operations. +The library is a set of Python methods, and bindings to `C++` library code. +To allow for use with the `TFLM MicroInterpreter`, a set of [Signal Library kernels](https://github.com/tensorflow/tflite-micro/blob/main/signal/micro/kernels) +is also provided. + +The [audio_preprocessor.py](audio_preprocessor.py) script provides a complete example +of how to use the `Signal Library` within your own Python application. This script +has support for TensorFlow eager-execution mode, graph-execution mode, and +`TFLM MicroInterpreter` inference operations. 
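+
+As a quick illustration of direct `Signal Library` use (a minimal sketch that
+mirrors the windowing step of the model shown below, using the same import
+paths and default parameters as [audio_preprocessor.py](audio_preprocessor.py);
+the zero-filled frame is only a placeholder input):
+
+```python
+import tensorflow as tf
+from tflite_micro.python.tflite_micro.signal.ops import window_op
+
+# One 30ms window frame of 16-bit samples at a 16KHz sample rate: 480 samples.
+# A zero-filled placeholder stands in for real audio here.
+audio_frame = tf.zeros(shape=(1, 480), dtype=tf.int16)
+
+# Apply Hann window smoothing, with the default 12 window scaling bits.
+weights = tf.constant(window_op.hann_window_weights(480, 12))
+smoothed = window_op.window(audio_frame, weights, 12)
+```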
+
+![model architecture](images/audio_preprocessor_int8.png)
+
+*This image was derived from visualizing the 'models/audio_preprocessor_int8.tflite' file in
+[Netron](https://github.com/lutzroeder/netron)*
+
+The steps performed by the model are outlined as follows:
+1) Audio frame input with shape `(1, 480)`
+1) Apply `Hann Window` smoothing using `SignalWindow`
+1) Reshape tensor to match the input of `SignalFftAutoScale`
+1) Rescale tensor data using `SignalFftAutoScale` and calculate one of the input
+parameters to `SignalFilterBankSquareRoot`
+1) Compute FFT using `SignalRfft`
+1) Compute power spectrum using `SignalEnergy`. The tensor data is only updated
+for elements between `[start_index, end_index)`.
+1) The `Cast`, `StridedSlice`, and `Concatenation` operations are used to fill
+the tensor data with zeros, for elements outside of `[start_index, end_index)`
+1) Compress the power spectrum tensor data into just 40 channels (frequency bands)
+using `SignalFilterBank`
+1) Scale down the tensor data using `SignalFilterBankSquareRoot`
+1) Apply noise reduction using `SignalFilterBankSpectralSubtraction`
+1) Apply gain control using `SignalPCAN`
+1) Scale down the tensor data using `SignalFilterBankLog`
+1) The remaining operations perform additional legacy down-scaling and convert
+the tensor data to `int8`
+1) Model output has shape `(40,)`
+
+### The `FeatureParams` Python Class
+
+The `FeatureParams` class is located within the [audio_preprocessor.py](audio_preprocessor.py#L260)
+script. This class allows for custom configuration of the `AudioPreprocessor` class.
+Parameters such as sample rate, window size, window stride, number of output channels,
+and many more can be configured. The parameters to be changed must be set during
+class instantiation, and are frozen thereafter. The defaults for `FeatureParams`
+match those of the legacy audio preprocessing used during Micro Speech model training.
+
+### The `AudioPreprocessor` Python Class
+
+The `AudioPreprocessor` class in the [audio_preprocessor.py](audio_preprocessor.py#L338)
+script provides easy-to-use convenience methods for creating
+and using an audio preprocessing model. This class is configured through use of
+a `FeatureParams` object, allowing some flexibility in how the audio preprocessing
+model works.
+
+A short summary of the available methods and properties:
+* `load_samples`: load audio samples from a `WAV` format file and prepare
+the samples for use by other `AudioPreprocessor` methods
+* `samples`: tensor containing previously loaded audio samples
+* `params`: the `FeatureParams` object the class was instantiated with
+* `generate_feature`: generate a single feature using TensorFlow eager-execution
+* `generate_feature_using_graph`: generate a single feature using TensorFlow graph-execution
+* `generate_feature_using_tflm`: generate a single feature using the `TFLM MicroInterpreter`
+* `reset_tflm`: reset the internal state of the `TFLM MicroInterpreter` and the
+`Signal Library` operations
+* `generate_tflite_file`: create a `.tflite` format file for the preprocessor model
+
+### Run the audio_preprocessor.py script on a development machine
+
+The [audio_preprocessor.py](audio_preprocessor.py#L532) script generates a `.tflite`
+file for the preprocessing model, ready for use with the Micro Speech model.
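+
+As a minimal usage sketch (based on [audio_preprocessor_test.py](audio_preprocessor_test.py);
+it assumes the Bazel-built `tflite_micro` Python package is importable and that
+paths are relative to the repository root), the classes described above can also
+be driven directly from Python:
+
+```python
+from pathlib import Path
+from tflite_micro.tensorflow.lite.micro.examples.micro_speech import audio_preprocessor
+
+# int8 output matches the input type of the quantized Micro Speech model.
+params = audio_preprocessor.FeatureParams(use_float_output=False)
+pp = audio_preprocessor.AudioPreprocessor(params=params)
+
+# Load a 30ms, 16-bit PCM, mono sample and generate one feature of shape (40,).
+pp.load_samples(Path('tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms.wav'))
+feature = pp.generate_feature_using_tflm(pp.samples)
+
+# Write audio_preprocessor_int8.tflite to the system temp directory.
+print(pp.generate_tflite_file())
+```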
+
+To generate a `.tflite` model file with `int8` output:
+```bash
+bazel build tensorflow/lite/micro/examples/micro_speech:audio_preprocessor
+bazel-bin/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor --output_type=int8
```

-Running the make command will result in the creation of a new folder:
-
-```
-gen/disco_f746ng_cortex-m4_default/prj/micro_speech/mbed
+To generate a `.tflite` model file with `float32` output:
+```bash
+bazel build tensorflow/lite/micro/examples/micro_speech:audio_preprocessor
+bazel-bin/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor --output_type=float32
```

-This folder contains all of the example's dependencies structured in the correct
-way for Mbed to be able to build it.
-
-Change into the directory and run the following commands.
+### Run the audio_preprocessor_test.py script on a development machine

-First, tell Mbed that the current directory is the root of an Mbed project:
+The [audio_preprocessor_test.py](audio_preprocessor_test.py) script performs
+several tests to ensure correct inference operations occur across all execution modes.
+The tests are:
+* cross-check inference results between eager, graph, and `TFLM MicroInterpreter`
+execution modes
+* check the `yes` and `no` 30ms samples in the [testdata](testdata/) directory for
+correct generation of the feature tensor
+* compare the preprocessor `int8` model against the same model in the [models](models/) directory
+* compare the preprocessor `float32` model against the same model in the [models](models/) directory

-```
-mbed config root .
+```bash
+bazel build tensorflow/lite/micro/examples/micro_speech:audio_preprocessor_test
+bazel-bin/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor_test
```

-Next, tell Mbed to download the dependencies and prepare to build:
+## Micro Speech Model Architecture

-```
-mbed deploy
-```
+This is a simple model composed of a Convolutional 2D layer, a Fully Connected
+Layer or a MatMul Layer (output: logits), and a Softmax layer
+(output: probabilities), as shown below. Refer to the [`tiny_conv`](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/speech_commands/models.py#L673)
+model architecture. The output probabilities are in four categories:
+`silence`, `unknown`, `yes`, `no`.

-Older versions of Mbed will build the project using C++98. However, TensorFlow Lite
-requires C++11. If needed, run the following Python snippet to modify the Mbed
-configuration files so that it uses C++11:
+The input to the model is 49 spectrographic features, each feature
+consisting of 40 channels of data. The features are generated by the
+Audio Preprocessor model. For more information, please see the
+[training README](train/README.md#preprocessing-speech-input) documentation.

-```
-python -c 'import fileinput, glob;
-for filename in glob.glob("mbed-os/tools/profiles/*.json"):
-    for line in fileinput.input(filename, inplace=True):
-        print(line.replace("\"-std=gnu++98\"","\"-std=c++11\", \"-fpermissive\""))'
-```
-
-Note: Mbed has a dependency to an old version of arm_math.h and cmsis_gcc.h (adapted from the general [CMSIS-NN MBED example](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/kernels/cmsis_nn#example-2---mbed)). 
Therefore you need to copy the newer version as follows:
-```bash
-cp tensorflow/lite/micro/tools/make/downloads/cmsis/CMSIS/DSP/Include/\
-arm_math.h mbed-os/cmsis/TARGET_CORTEX_M/arm_math.h
-cp tensorflow/lite/micro/tools/make/downloads/cmsis/CMSIS/Core/Include/\
-cmsis_gcc.h mbed-os/cmsis/TARGET_CORTEX_M/cmsis_gcc.h
-```
+![model architecture](images/micro_speech_quantized.png)

-Finally, run the following command to compile:
+*This image was derived from visualizing the 'models/micro_speech_quantized.tflite' file in
+[Netron](https://github.com/lutzroeder/netron)*

-```
-mbed compile -m DISCO_F746NG -t GCC_ARM
-```
+## Run the C++ tests on a development machine

-This should result in a binary at the following path:
+To compile and test this example on a desktop Linux or macOS machine, download the
+[TFLM source code](https://github.com/tensorflow/tflite-micro). Then switch
+into the source directory from a terminal using the `cd` command.

+Compile and run a native binary using Bazel:
+```bash
+bazel run tensorflow/lite/micro/examples/micro_speech:micro_speech_test
```
-./BUILD/DISCO_F746NG/GCC_ARM/mbed.bin
-```
-
-To deploy, plug in your STM board and copy the file to it. On macOS, you can do
-this with the following command:
+For a native binary using `make`, run the following command:
+```bash
+make -f tensorflow/lite/micro/tools/make/Makefile test_micro_speech_test
```
-cp ./BUILD/DISCO_F746NG/GCC_ARM/mbed.bin /Volumes/DIS_F746NG/
-```
-
-Copying the file will initiate the flashing process.
-The inference results are logged by the board while the program is running.
-To view it, establish a serial connection to the board
-using a baud rate of `9600`. On OSX and Linux, the following command should
-work, replacing `/dev/tty.devicename` with the name of your device as it appears
-in `/dev`:
-
-```
-screen /dev/tty.devicename 9600
+For an Arm Cortex-M0 binary running in the QEMU emulator:
+```bash
+make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_qemu TARGET_ARCH=cortex-m0 OPTIMIZED_KERNEL_DIR=cmsis_nn BUILD_TYPE=default test_micro_speech_test
```

-You will see a line output for every word that is detected:
+This will take a few minutes, and downloads frameworks the code uses, such as
+[CMSIS](https://developer.arm.com/embedded/cmsis) and
+[flatbuffers](https://google.github.io/flatbuffers/). Once that process has
+finished, you should see a series of files get compiled, followed by some
+logging output from a test, which should conclude with `~~~ALL TESTS PASSED~~~`.

-```
-Heard yes (201) @4056ms
-Heard no (205) @6448ms
-Heard unknown (201) @13696ms
-Heard yes (205) @15000ms
-```
+If you see this, it means that a small program has been built and executed that loads
+the trained TensorFlow Lite model, runs some example inputs through it, and gets the
+expected outputs.

-The number after each detected word is its score. By default, the program only
-considers matches as valid if their score is over 200, so all of the scores you
-see will be at least 200.
-
-To stop viewing the debug output with `screen`, hit `Ctrl+A`, immediately
-followed by the `K` key, then hit the `Y` key.
-
-## Deploy to NXP FRDM K66F
-
-The following instructions will help you build and deploy the example to the
-[NXP FRDM K66F](https://www.nxp.com/design/development-boards/freedom-development-boards/mcu-boards/freedom-development-platform-for-kinetis-k66-k65-and-k26-mcus:FRDM-K66F)
-using [ARM Mbed](https://github.com/ARMmbed/mbed-cli).
-
-1. 
Download - [the TensorFlow source code](https://github.com/tensorflow/tensorflow). -2. Follow instructions from - [mbed website](https://os.mbed.com/docs/mbed-os/v5.13/tools/installation-and-setup.html) - to setup and install mbed CLI. -3. Compile TensorFlow with the following command to generate mbed project: - - ``` - make -f tensorflow/lite/micro/tools/make/Makefile TARGET=mbed TAGS="nxp_k66f" generate_micro_speech_mbed_project - ``` - -4. Change into the following directory that has been generated: - `gen/mbed_cortex-m4/prj/micro_speech/mbed` - -5. Create an Mbed project using the generated files, run ensuring your - environment is using Python 2.7: `mbed config root .` - -6. Next, tell Mbed to download the dependencies and prepare to build: `mbed - deploy` - -7. Finally, we can run the following command to compile the code: `mbed compile - -m K66F -t GCC_ARM` - -8. For some Mbed compilers (such as GCC), you may get compile error in - mbed_rtc_time.cpp. Go to `mbed-os/platform/mbed_rtc_time.h` and comment line - 32 and line 37: - - ``` - //#if !defined(__GNUC__) || defined(__CC_ARM) || defined(__clang__) - struct timeval { - time_t tv_sec; - int32_t tv_usec; - }; - //#endif - ``` - -9. If your system does not recognize the board with the `mbed detect` command. - Follow the instructions for setting up - [DAPLink](https://armmbed.github.io/DAPLink/?board=FRDM-K66F) for the - [K66F](https://os.mbed.com/platforms/FRDM-K66F/). - -10. Connect the USB cable to the micro USB port. When the Ethernet port is - facing towards you, the micro USB port is left of the Ethernet port. - -11. To compile and flash in a single step, add the `--flash` option: - - ``` - mbed compile -m K66F -t GCC_ARM --flash - ``` - -12. Disconnect USB cable from the device to power down the device and connect - back the power cable to start running the model. - -13. Connect to serial port with baud rate of 9600 and correct serial device to - view the output from the MCU. In linux, you can run the following screen - command if the serial device is `/dev/ttyACM0`: - - ``` - sudo screen /dev/ttyACM0 9600 - ``` - -14. Saying "Yes" will print "Yes" and "No" will print "No" on the serial port. - -15. A loopback path from microphone to headset jack is enabled. Headset jack is - in black color. If there is no output on the serial port, you can connect - headphone to headphone port to check if audio loopback path is working. - -## Deploy to CEVA-BX1 - -The following instructions will help you build and deploy the sample to the -[CEVA-BX1](https://www.ceva-dsp.com/product/ceva-bx1-sound/) or [CEVA-SP500](https://www.ceva-dsp.com/product/ceva-senspro/) - -1. Contact CEVA at [sales@ceva-dsp.com](mailto:sales@ceva-dsp.com) -2. For BX1: -2.1. Download and install CEVA-BX Toolbox v18.0.2 -2.2. Set the TARGET_TOOLCHAIN_ROOT variable in - /tensorflow/lite/micro/tools/make/templates/ceva_bx1/ceva_app_makefile.tpl - To your installation location. For example: TARGET_TOOLCHAIN_ROOT := - /home/myuser/work/CEVA-ToolBox/V18/BX -2.3. Generate the Makefile for the project: /tensorflow$ make -f - tensorflow/lite/micro/tools/make/Makefile TARGET=ceva TARGET_ARCH=CEVA_BX1 - generate_micro_speech_make_project -3. For SensPro (SP500): -3.1. Download and install CEVA-SP Toolbox v20 -3.2. Set the TARGET_TOOLCHAIN_ROOT variable in - /tensorflow/lite/micro/tools/make/templates/ceva_SP500/ceva_app_makefile.tpl - To your installation location. For example: TARGET_TOOLCHAIN_ROOT := - /home/myuser/work/CEVA-ToolBox/V20/SensPro -3.3. 
Generate the Makefile for the project: /tensorflow$ make -f - tensorflow/lite/micro/tools/make/Makefile TARGET=ceva TARGET_ARCH=CEVA_SP500 - generate_micro_speech_make_project -5. Build the project: - /gen/ceva_bx1/prj/micro_speech/make$ make -6. This should build the project and create a file called micro_speech.elf. -7. The supplied configuration reads input from a files and expects a file - called input.wav (easily changed in audio_provider.cc) to be placed in the - same directory of the .elf file -8. We used Google's speech command dataset: V0.0.2: - http://download.tensorflow.org/data/speech_commands_v0.02.tar.gz V0.0.1: - http://download.tensorflow.org/data/speech_commands_v0.01.tar.gz -9. Follow CEVA Toolbox instructions for creating a debug target and running the - project. -10. Output should look like: Heard silence (208) @352ms Heard no (201) @1696ms - Heard yes (203) @3904ms - -## Run on macOS - -The example contains an audio provider compatible with macOS. If you have access -to a Mac, you can run the example on your development machine. - -First, use the following command to build it: +To understand how TFLM does this, you can look at the source in the +[micro_speech_test.cc](micro_speech_test.cc) file. +It's a fairly small amount of code that executes the following steps: +1) Create a `TFLM MicroInterpreter` with a handle to the Audio Preprocessor model +that has been compiled into the program +1) Repeatedly execute inference operations using `MicroInterpreter::invoke`, +with audio samples as input, and spectrogram features as output +1) Create a new `TFLM MicroInterpreter` with a handle to the Micro Speech model +that has been compiled into the program +1) Execute a single inference operation using `MicroInterpreter::invoke`, +with the spectrogram features as input, and category probabilities as output +1) Check the largest category probability for a match with the speech sample label. + +## Run the evaluate.py script on a development machine +The [evaluate.py](evaluate.py#L166) script predicts the category of a single audio sample +given by the `sample_path` argument. The output consists of the predictions for +the accumulated spectrogram features across (at most) 49 audio sample window frames. -``` -make -f tensorflow/lite/micro/tools/make/Makefile micro_speech +```bash +bazel build tensorflow/lite/micro/examples/micro_speech:evaluate +bazel-bin/tensorflow/lite/micro/examples/micro_speech/evaluate --sample_path=tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms.wav ``` -Once the build completes, you can run the example with the following command: - -``` -gen/osx_x86_64/bin/micro_speech +The output looks like this: +```bash +Frame #0: [0.0000, 0.0273, 0.0312, 0.9414] +Frame #1: [0.0000, 0.0273, 0.0312, 0.9414] +Frame #2: [0.0000, 0.0273, 0.0312, 0.9414] +Frame #3: [0.0000, 0.0273, 0.0273, 0.9414] +Frame #4: [0.0000, 0.0273, 0.0273, 0.9414] +Frame #5: [0.0000, 0.0273, 0.0273, 0.9414] +Frame #6: [0.0000, 0.0273, 0.0273, 0.9453] +Frame #7: [0.0000, 0.0273, 0.0273, 0.9453] +Frame #8: [0.0000, 0.0273, 0.0273, 0.9453] + +... 
+
+Frame #40: [0.0000, 0.0312, 0.0000, 0.9648]
+Frame #41: [0.0000, 0.0273, 0.0000, 0.9727]
+Frame #42: [0.0000, 0.0312, 0.0000, 0.9688]
+Frame #43: [0.0000, 0.0273, 0.0000, 0.9727]
+Frame #44: [0.0000, 0.0273, 0.0000, 0.9727]
+Frame #45: [0.0000, 0.0352, 0.0000, 0.9648]
+Frame #46: [0.0000, 0.0391, 0.0000, 0.9609]
+Frame #47: [0.0000, 0.0469, 0.0000, 0.9531]
+Frame #48: [0.0000, 0.0547, 0.0000, 0.9453]
+Model predicts the audio sample as <no> with probability 0.95
```

-You might see a pop-up asking for microphone access. If so, grant it, and the
-program will start.
-
-Try saying "yes" and "no". You should see output that looks like the following:
+## Run the evaluate_test.py script on a development machine
+The [evaluate_test.py](evaluate_test.py) script verifies the combination of the
+Audio Preprocessor model and the Micro Speech model to generate correct inference results.
+Four audio samples from the [testdata](testdata/) directory are used as input to
+the Audio Preprocessor model.
+The Audio Preprocessor model is tested with both `int8` and `float32` outputs.
+The results of the audio preprocessing are then used to check predictions by the
+Micro Speech model.

-```
-Heard yes (201) @4056ms
-Heard no (205) @6448ms
-Heard unknown (201) @13696ms
-Heard yes (205) @15000ms
-Heard yes (205) @16856ms
-Heard unknown (204) @18704ms
-Heard no (206) @21000ms
+```bash
+bazel build tensorflow/lite/micro/examples/micro_speech:evaluate_test
+bazel-bin/tensorflow/lite/micro/examples/micro_speech/evaluate_test
```

-The number after each detected word is its score. By default, the recognize
-commands component only considers matches as valid if their score is over 200,
-so all of the scores you see will be at least 200.
+## Converting models or audio samples to C++
+A tool is available to convert your custom model or audio samples into `C++` data
+structures that you can then use in your own wake-word application.
+Keep in mind that audio samples for use with Audio Preprocessor and Micro Speech models
+must be 1000ms in length, 16-bit PCM samples, and single channel (mono).
+The tool can be found here: [generate_cc_arrays.py](../../tools/generate_cc_arrays.py)

-The number after the score is the number of milliseconds since the program was
-started.
-
-If you don't see any output, make sure your Mac's internal microphone is
-selected in the Mac's *Sound* menu, and that its input volume is turned up high
-enough.
-
-## Run the tests on a development machine
-
-To compile and test this example on a desktop Linux or macOS machine, download
-[the TensorFlow source code](https://github.com/tensorflow/tensorflow), `cd`
-into the source directory from a terminal, and then run the following command:
-
-```
-make -f tensorflow/lite/micro/tools/make/Makefile test_micro_speech_test
+The following commands show how to use the tool:
+```bash
+bazel build tensorflow/lite/micro/tools:generate_cc_arrays
+bazel-bin/tensorflow/lite/micro/tools/generate_cc_arrays /tmp/data.cc path_to_custom_sample.wav
+bazel-bin/tensorflow/lite/micro/tools/generate_cc_arrays /tmp/header.h path_to_custom_sample.wav
```

-This will take a few minutes, and downloads frameworks the code uses like
-[CMSIS](https://developer.arm.com/embedded/cmsis) and
-[flatbuffers](https://google.github.io/flatbuffers/). Once that process has
-finished, you should see a series of files get compiled, followed by some
-logging output from a test, which should conclude with `~~~ALL TESTS PASSED~~~`.
- -If you see this, it means that a small program has been built and run that loads -the trained TensorFlow model, runs some example inputs through it, and got the -expected outputs. - -To understand how TensorFlow Lite does this, you can look at the source in -[micro_speech_test.cc](micro_speech_test.cc). -It's a fairly small amount of code that creates an interpreter, gets a handle to -a model that's been compiled into the program, and then invokes the interpreter -with the model and sample inputs. - ## Train your own model So far you have used an existing trained model to run inference on microcontrollers. If you wish to train your own model, follow the instructions -given in the [train/](train/) directory. +given in the [train](train/README.md) directory. diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor.py b/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor.py new file mode 100644 index 00000000000..0379ec6459d --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor.py @@ -0,0 +1,551 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +""" +Audio Sample Preprocessor + +When this module is run, feature generation models are created in the .tflite +format. 
+ +Run: +bazel build tensorflow/lite/micro/examples/micro_speech:audio_preprocessor +bazel-bin/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor +""" + +from __future__ import annotations +from pathlib import Path +from dataclasses import dataclass +import tempfile + +from absl import app +from absl import flags + +import tensorflow as tf +from tensorflow.python.platform import resource_loader +from tflite_micro.python.tflite_micro.signal.ops import window_op +from tflite_micro.python.tflite_micro.signal.ops import fft_ops +from tflite_micro.python.tflite_micro.signal.ops import energy_op +from tflite_micro.python.tflite_micro.signal.ops import filter_bank_ops +from tflite_micro.python.tflite_micro.signal.ops import pcan_op +from tflite_micro.python.tflite_micro import runtime + +_ENABLE_DEBUG = flags.DEFINE_enum( + 'debug_mode', + 'off', + ['off', 'all'], + 'Enable debug output', +) + +_FILE_TO_TEST = flags.DEFINE_enum('file_to_test', 'no', ['no', 'yes'], + 'File to test') + +_OUTPUT_TYPE = flags.DEFINE_enum( + 'output_type', 'int8', ['int8', 'float32'], + 'Type of TfLite output file (.tflite) to generate') + + +def _debug_print(*args): + if _ENABLE_DEBUG.value != 'off': + print(*args) + + +class _GenerateFeature(tf.Module): + """Generate feature tensor from audio window samples""" + + def __init__(self, name: str, params: FeatureParams, detail: str): + super().__init__(name=name) + self._params = params + window_sample_count: int = int(params.window_size_ms * params.sample_rate / + 1000) + hann_window_weights = window_op.hann_window_weights( + window_sample_count, params.window_scaling_bits) + self._hann_window_weights_tensor = tf.constant(hann_window_weights, + name='hann_window_weights') + self._fft_size, self._fft_size_log2 = fft_ops.get_pow2_fft_length( + window_sample_count) + self._filter_bank_index_start, self._filter_bank_index_end = \ + filter_bank_ops.calc_start_end_indices( + self._fft_size, + params.sample_rate, + params.filter_bank_number_of_channels, + params.filter_bank_lower_band_limit_hz, + params.filter_bank_upper_band_limit_hz) + self._detail = detail + + def generate_feature_for_frame(self, audio_frame: tf.Tensor) -> tf.Tensor: + # Graph execution does not handle global variables. Instead, capture the + # global variable(s) within a closure (_debug_print_internal). 
+ def _debug_print_internal(*args): + if _ENABLE_DEBUG.value != 'off' and tf.executing_eagerly(): + print(*args) + + _debug_print('*** generate_feature_for_frame ***') + params = self._params + detail = self._detail + + # update filter_bank_ops constants + filter_bank_ops.FILTER_BANK_WEIGHT_SCALING_BITS = \ + params.filter_bank_scaling_bits + filter_bank_ops.FILTER_BANK_ALIGNMENT = params.filter_bank_alignment + filter_bank_ops.FILTER_BANK_CHANNEL_BLOCK_SIZE = \ + params.filter_bank_channel_block_size + + _debug_print_internal(f'audio frame output [{detail}]: {audio_frame!r}') + + # apply window to audio frame + weights = self._hann_window_weights_tensor + _debug_print_internal(f'window weights output [{detail}]: {weights!r}') + window_output: tf.Tensor = window_op.window(audio_frame, weights, + params.window_scaling_bits) + _debug_print_internal(f'window output [{detail}]: {window_output!r}') + + # pre-scale window output + window_output = tf.reshape(window_output, [-1]) + window_scaled_output, scaling_shift = fft_ops.fft_auto_scale(window_output) + _debug_print_internal(f'scaling shift [{detail}]: {scaling_shift!r}') + + # compute FFT on scaled window output + _debug_print_internal( + f'fft size, log2 [{detail}]: {self._fft_size}, {self._fft_size_log2}') + fft_output: tf.Tensor = fft_ops.rfft(window_scaled_output, self._fft_size) + _debug_print_internal(f'fft output [{detail}]: {fft_output!r}') + + index_start = self._filter_bank_index_start + index_end = self._filter_bank_index_end + # convert fft output complex numbers to energy values + _debug_print_internal( + f'index start, end [{detail}]: {index_start}, {index_end}') + energy_output: tf.Tensor = energy_op.energy(fft_output, index_start, + index_end) + # Energy op does not zero indices outside [index_start,index_end). + # The following operations to zero portions of the energy op output + # could be much more efficiently performed inside the energy op C++ + # code. + # Need to convert to tf.int32 or the TfLite converter will not use + # the correct ops. 
+ energy_output = tf.cast(energy_output, tf.int32) # type: ignore + zeros_head = tf.zeros(index_start, dtype=tf.int32) + number_of_elements = energy_output.shape.num_elements() + zeros_tail = tf.zeros( + number_of_elements - index_end, # type: ignore + dtype=tf.int32) + energy_slice = energy_output[index_start:index_end] + energy_output = tf.concat([zeros_head, energy_slice, zeros_tail], + 0) # type: ignore + energy_output = tf.cast(energy_output, dtype=tf.uint32) # type: ignore + _debug_print_internal(f'energy output [{detail}]: {energy_output!r}') + + # compress energy output into 40 channels + filter_output: tf.Tensor = filter_bank_ops.filter_bank( + energy_output, params.sample_rate, + params.filter_bank_number_of_channels, + params.filter_bank_lower_band_limit_hz, + params.filter_bank_upper_band_limit_hz) + _debug_print_internal(f'filterbank output [{detail}]: {filter_output!r}') + + # scale down filter_output + filter_scaled_output: tf.Tensor = filter_bank_ops.filter_bank_square_root( + filter_output, scaling_shift) + _debug_print_internal( + f'scaled filterbank output [{detail}]: {filter_scaled_output!r}') + + # noise reduction + spectral_sub_bits: int = params.filter_bank_spectral_subtraction_bits + filter_noise_output: tf.Tensor + filter_noise_estimate: tf.Tensor + filter_noise_output, filter_noise_estimate = \ + filter_bank_ops.filter_bank_spectral_subtraction( + filter_scaled_output, + num_channels=params.filter_bank_number_of_channels, + smoothing=params.filter_bank_even_smoothing, + alternate_smoothing=params.filter_bank_odd_smoothing, + smoothing_bits=params.filter_bank_smoothing_bits, + min_signal_remaining=params.filter_bank_min_signal_remaining, + clamping=params.filter_bank_clamping, + spectral_subtraction_bits=spectral_sub_bits, + ) + _debug_print_internal(f'noise output [{detail}]: {filter_noise_output!r}') + + # automatic gain control (PCAN) + correction_bits: int = self._fft_size_log2 - \ + int(params.filter_bank_scaling_bits / 2) + filter_agc_output: tf.Tensor = pcan_op.pcan( + filter_noise_output, + filter_noise_estimate, + strength=params.pcan_strength, + offset=params.pcan_offset, + gain_bits=params.pcan_gain_bits, + smoothing_bits=params.pcan_smoothing_bits, + input_correction_bits=correction_bits) + _debug_print_internal( + f'AGC Noise output [{detail}]: {filter_agc_output!r}') + + # re-scale features from UINT32 to INT16 + feature_post_scale: int = 1 << params.filter_bank_post_scaling_bits + feature_pre_scale_shift: int = correction_bits + feature_rescaled_output: tf.Tensor = filter_bank_ops.filter_bank_log( + filter_agc_output, + output_scale=feature_post_scale, + input_correction_bits=feature_pre_scale_shift) + _debug_print_internal( + f'scaled noise output [{detail}]: {feature_rescaled_output!r}') + + # These scaling values are derived from those used in input_data.py in the + # training pipeline. + # The feature pipeline outputs 16-bit signed integers in roughly a 0 to 670 + # range. In training, these are then arbitrarily divided by 25.6 to get + # float values in the rough range of 0.0 to 26.0. This scaling is performed + # for historical reasons, to match up with the output of other feature + # generators. + # The process is then further complicated when we quantize the model. This + # means we have to scale the 0.0 to 26.0 real values to the -128 to 127 + # signed integer numbers. 
+    # All this means that to get matching values from our integer feature
+    # output into the tensor input, we have to perform:
+    # input = (((feature / 25.6) / 26.0) * 256) - 128
+    # To simplify this and perform it in 32-bit integer math, we rearrange to:
+    # input = (feature * 256) / (25.6 * 26.0) - 128
+    # constexpr int32_t value_scale = 256;
+    # constexpr int32_t value_div =
+    #     static_cast<int32_t>((25.6f * 26.0f) + 0.5f);
+    # int32_t value =
+    #     ((frontend_output.values[i] * value_scale) + (value_div / 2)) /
+    #     value_div;
+    # value -= 128;
+    # if (value < -128) {
+    #   value = -128;
+    # }
+    # if (value > 127) {
+    #   value = 127;
+    # }
+    # output[i] = value;
+
+    feature_output: tf.Tensor
+    if self._params.use_float_output:
+      # feature_rescaled_output is INT16, cast to FLOAT32
+      feature_output = tf.cast(feature_rescaled_output,
+                               tf.float32)  # type: ignore
+      # feature_output will be FLOAT32
+      feature_output /= self._params.legacy_output_scaling
+    else:
+      value_scale = tf.constant(256, dtype=tf.int32)
+      value_div = tf.constant(int((25.6 * 26) + 0.5), dtype=tf.int32)
+      feature_output = tf.cast(feature_rescaled_output,
+                               tf.int32)  # type: ignore
+      feature_output = (feature_output * value_scale) + int(value_div / 2)
+      feature_output = tf.truncatediv(feature_output,
+                                      value_div)  # type: ignore
+      feature_output += tf.constant(-128, dtype=tf.int32)
+      feature_output = tf.clip_by_value(feature_output,
+                                        clip_value_min=-128,
+                                        clip_value_max=127)  # type: ignore
+      feature_output = tf.cast(feature_output, tf.int8)  # type: ignore
+
+    _debug_print_internal(f'feature output [{detail}]: {feature_output!r}')
+
+    return feature_output
+
+
+@dataclass(kw_only=True, frozen=True)
+class FeatureParams:
+  """
+  Feature generator parameters
+
+  Defaults are configured to work with the micro_speech_quantized.tflite model
+  """
+
+  sample_rate: int = 16000
+  """audio sample rate"""
+
+  window_size_ms: int = 30
+  """input window size in milliseconds"""
+
+  window_stride_ms: int = 20
+  """input window stride in milliseconds"""
+
+  window_scaling_bits: int = 12
+  """input window shaping: scaling bits"""
+
+  filter_bank_number_of_channels: int = 40
+  """filter bank channel count"""
+
+  filter_bank_lower_band_limit_hz: float = 125.0
+  """filter bank lower band limit"""
+
+  filter_bank_upper_band_limit_hz: float = 7500.0
+  """filter bank upper band limit"""
+
+  filter_bank_scaling_bits: int = \
+      filter_bank_ops.FILTER_BANK_WEIGHT_SCALING_BITS
+  """filter bank weight scaling bits, updates filter bank constant"""
+
+  filter_bank_alignment: int = 4
+  """filter bank alignment, updates filter bank constant"""
+
+  filter_bank_channel_block_size: int = 4
+  """filter bank channel block size, updates filter bank constant"""
+
+  filter_bank_post_scaling_bits: int = 6
+  """filter bank output log-scaling bits"""
+
+  filter_bank_spectral_subtraction_bits: int = 14
+  """filter bank noise reduction spectral subtraction bits"""
+
+  filter_bank_smoothing_bits: int = 10
+  """filter bank noise reduction smoothing bits"""
+
+  filter_bank_even_smoothing: float = 0.025
+  """filter bank noise reduction even smoothing"""
+
+  filter_bank_odd_smoothing: float = 0.06
+  """filter bank noise reduction odd smoothing"""
+
+  filter_bank_min_signal_remaining: float = 0.05
+  """filter bank noise reduction minimum signal remaining"""
+
+  filter_bank_clamping: bool = False
+  """filter bank noise reduction clamping"""
+
+  pcan_strength: float = 0.95
+  """PCAN gain control strength"""
+
+  pcan_offset: float = 80.0
+  """PCAN gain control offset"""
+
+  pcan_gain_bits: 
int = 21
+  """PCAN gain control bits"""
+
+  pcan_smoothing_bits: int = 10
+  """PCAN gain control smoothing bits"""
+
+  legacy_output_scaling: float = 25.6
+  """Final output scaling, legacy from training"""
+
+  use_float_output: bool = False
+  """Use float output if True, otherwise int8 output"""
+
+
+class AudioPreprocessor:
+  """
+  Audio Preprocessor
+
+  Args:
+    params: FeatureParams, an immutable object supplying parameters for
+      the AudioPreprocessor instance
+    detail: str, used for debug output (optional, for debugging only)
+  """
+
+  def __init__(self, params: FeatureParams, detail: str = 'unknown'):
+    self._detail = detail
+    self._params = params
+    self._samples_per_window = int(params.window_size_ms * params.sample_rate /
+                                   1000)
+    self._tflm_interpreter = None
+    self._feature_generator = None
+    self._feature_generator_concrete_function = None
+    self._model = None
+    self._samples = None
+
+  def _get_feature_generator(self):
+    if self._feature_generator is None:
+      self._feature_generator = _GenerateFeature(name='GenerateFeature',
+                                                 params=self._params,
+                                                 detail=self._detail)
+    return self._feature_generator
+
+  def _get_concrete_function(self):
+    if self._feature_generator_concrete_function is None:
+      shape = [1, self._samples_per_window]
+      fg = self._get_feature_generator()
+      func = tf.function(func=fg.generate_feature_for_frame)
+      self._feature_generator_concrete_function = func.get_concrete_function(
+          tf.TensorSpec(shape=shape, dtype=tf.int16))  # type: ignore
+    return self._feature_generator_concrete_function
+
+  def _get_model(self):
+    if self._model is None:
+      cf = self._get_concrete_function()
+      converter = tf.lite.TFLiteConverter.from_concrete_functions(
+          [cf], self._get_feature_generator())
+      converter.allow_custom_ops = True
+      self._model = converter.convert()
+      if _ENABLE_DEBUG.value != 'off':
+        tf.lite.experimental.Analyzer.analyze(model_content=self._model)
+    return self._model
+
+  def load_samples(self, filename: Path, use_rounding: bool = False):
+    """
+    Load audio samples from file.
+
+    Loads INT16 audio samples from a WAV file.
+    Supports single channel at 16KHz.
+    The audio samples are accessible through the 'samples' property.
+
+    Args:
+      filename: a Path object
+      use_rounding: bool, if True, convert the normalized FLOAT data that
+        has been loaded into INT16, using a standard rounding algorithm.
+        Otherwise use a simple conversion to INT16.
+    """
+    file_data = tf.io.read_file(str(filename))
+    samples: tf.Tensor
+    samples, sample_rate = tf.audio.decode_wav(file_data, desired_channels=1)
+    sample_rate = int(sample_rate)
+    _debug_print(f'Loaded {filename.name}'
+                 f' sample-rate={sample_rate}'
+                 f' sample-count={len(samples)}')
+    assert sample_rate == self._params.sample_rate, 'mismatched sample rate'
+    # convert samples to INT16
+    # i = (((int) ((x * 32767) + 32768.5f)) - 32768);
+    max_value = tf.dtypes.int16.max
+    min_value = tf.dtypes.int16.min
+    if use_rounding:
+      samples = ((samples * max_value) + (-min_value + 0.5)) + min_value
+    else:
+      samples *= -min_value
+    samples = tf.cast(samples, tf.int16)  # type: ignore
+    samples = tf.reshape(samples, [1, -1])
+
+    self._samples = samples
+
+  @property
+  def samples(self) -> tf.Tensor:
+    """
+    Audio samples previously decoded using the load_samples method. 
+
+    Returns:
+      tf.Tensor containing INT16 audio samples
+    """
+    return self._samples
+
+  @property
+  def params(self) -> FeatureParams:
+    """
+    Feature Parameters being used by the AudioPreprocessor object
+
+    Returns:
+      FeatureParams object which is immutable
+    """
+    return self._params
+
+  def generate_feature(self, audio_frame: tf.Tensor) -> tf.Tensor:
+    """
+    Generate a single feature for a single audio frame. Uses TensorFlow
+    eager execution.
+
+    Args:
+      audio_frame: tf.Tensor, a single audio frame (self.params.window_size_ms)
+        with shape (1, audio_samples_count)
+
+    Returns:
+      tf.Tensor, a tensor containing a single audio feature with shape
+      (self.params.filter_bank_number_of_channels,)
+    """
+    fg = self._get_feature_generator()
+    feature = fg.generate_feature_for_frame(audio_frame=audio_frame)
+    return feature
+
+  def generate_feature_using_graph(self, audio_frame: tf.Tensor) -> tf.Tensor:
+    """
+    Generate a single feature for a single audio frame. Uses TensorFlow
+    graph execution.
+
+    Args:
+      audio_frame: tf.Tensor, a single audio frame (self.params.window_size_ms)
+        with shape (1, audio_samples_count)
+
+    Returns:
+      tf.Tensor, a tensor containing a single audio feature with shape
+      (self.params.filter_bank_number_of_channels,)
+    """
+    cf = self._get_concrete_function()
+    feature: tf.Tensor = cf(audio_frame=audio_frame)  # type: ignore
+    return feature
+
+  def generate_feature_using_tflm(self, audio_frame: tf.Tensor) -> tf.Tensor:
+    """
+    Generate a single feature for a single audio frame. Uses TensorFlow
+    graph execution and the TensorFlow model converter to generate a
+    TFLM compatible model. This model is then used by the TFLM
+    MicroInterpreter to execute a single inference operation.
+
+    Args:
+      audio_frame: tf.Tensor, a single audio frame (self.params.window_size_ms)
+        with shape (1, audio_samples_count)
+
+    Returns:
+      tf.Tensor, a tensor containing a single audio feature with shape
+      (self.params.filter_bank_number_of_channels,)
+    """
+    if self._tflm_interpreter is None:
+      model = self._get_model()
+      self._tflm_interpreter = runtime.Interpreter.from_bytes(model)
+
+    self._tflm_interpreter.set_input(audio_frame, 0)
+    self._tflm_interpreter.invoke()
+    result = self._tflm_interpreter.get_output(0)
+    return tf.convert_to_tensor(result)
+
+  def reset_tflm(self):
+    """
+    Reset TFLM interpreter state
+
+    Re-initializes TFLM interpreter state and the internal state
+    of all TFLM kernel operators. Useful for resetting Signal
+    library operator noise estimation and other internal state.
+    """
+    if self._tflm_interpreter is not None:
+      self._tflm_interpreter.reset()
+
+  def generate_tflite_file(self) -> Path:
+    """
+    Create a .tflite model file
+
+    The model output tensor type will depend on the
+    'FeatureParams.use_float_output' parameter. 
+ + Returns: + Path object for the created model file + """ + model = self._get_model() + if self._params.use_float_output: + type_name = 'float' + else: + type_name = 'int8' + fname = Path(tempfile.gettempdir(), + 'audio_preprocessor_' + type_name + '.tflite') + with open(fname, mode='wb') as file_handle: + file_handle.write(model) + return fname + + +def _main(_): + prefix_path = resource_loader.get_path_to_datafile('testdata') + + fname = _FILE_TO_TEST.value + audio_30ms_path = Path(prefix_path, f'{fname}_30ms.wav') + + use_float_output = _OUTPUT_TYPE.value == 'float32' + params = FeatureParams(use_float_output=use_float_output) + pp = AudioPreprocessor(params=params, detail=fname) + + if _ENABLE_DEBUG.value != 'off': + pp.load_samples(audio_30ms_path) + _ = pp.generate_feature(pp.samples) + + output_file_path: Path = pp.generate_tflite_file() + print('\nOutput file:', str(output_file_path), '\n') + + +if __name__ == '__main__': + app.run(_main) diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor_test.py b/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor_test.py new file mode 100644 index 00000000000..e723a72ef9a --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor_test.py @@ -0,0 +1,102 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================= +""" +Audio feature generation testing, using the AudioPreprocessor class + +Run: +bazel build tensorflow/lite/micro/examples/micro_speech:audio_preprocessor_test +bazel-bin/tensorflow/lite/micro/examples/micro_speech/audio_preprocessor_test +""" + +from pathlib import Path +import filecmp + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import resource_loader +from tensorflow.python.platform import test + +import tensorflow as tf +from tflite_micro.tensorflow.lite.micro.examples.micro_speech import audio_preprocessor + + +class AudioPreprocessorTest(test_util.TensorFlowTestCase): + + def setUp(self): + self.sample_prefix_path = resource_loader.get_path_to_datafile('testdata') + + def testFeatureGeneration(self): + feature_params = audio_preprocessor.FeatureParams() + audio_pp = audio_preprocessor.AudioPreprocessor(feature_params) + window_size = int(feature_params.window_size_ms * + feature_params.sample_rate / 1000) + data: tf.Tensor = tf.random.uniform(minval=int(tf.dtypes.int16.min), + maxval=tf.dtypes.int16.max, + seed=42, + shape=(1, window_size), + dtype=tf.int32) + data = tf.cast(data, dtype=tf.int16) # type: ignore + + # test signal ops internal state retained and features do not match + feature_eager1 = audio_pp.generate_feature(data) + feature_eager2 = audio_pp.generate_feature(data) + self.assertNotAllEqual(feature_eager1, feature_eager2) + + # test eager vs graph execution feature match + _ = audio_pp.generate_feature_using_graph(data) + feature_graph = audio_pp.generate_feature_using_graph(data) + self.assertAllEqual(feature_graph, feature_eager2) + + # test eager vs MicroInterpreter execution feature match + feature_tflm = audio_pp.generate_feature_using_tflm(data) + self.assertAllEqual(feature_tflm, feature_eager1) + + # test signal ops internal state reset + audio_pp.reset_tflm() + feature_tflm = audio_pp.generate_feature_using_tflm(data) + self.assertAllEqual(feature_tflm, feature_eager1) + + # test signal ops internal state retained + feature_tflm = audio_pp.generate_feature_using_tflm(data) + self.assertAllEqual(feature_tflm, feature_eager2) + + def testFeatureOutputYes(self): + feature_params = audio_preprocessor.FeatureParams() + audio_pp = audio_preprocessor.AudioPreprocessor(feature_params) + audio_pp.load_samples(Path(self.sample_prefix_path, 'yes_30ms.wav')) + feature = audio_pp.generate_feature_using_tflm(audio_pp.samples) + feature_list = feature.numpy().tolist() + expected = [ + 124, 105, 126, 103, 125, 101, 123, 100, 116, 98, 115, 97, 113, 90, 91, + 82, 104, 96, 117, 97, 121, 103, 126, 101, 125, 104, 126, 104, 125, 101, + 116, 90, 81, 74, 80, 71, 83, 76, 82, 71 + ] + self.assertSequenceEqual(feature_list, expected) + + def testFeatureOutputNo(self): + feature_params = audio_preprocessor.FeatureParams() + audio_pp = audio_preprocessor.AudioPreprocessor(feature_params) + audio_pp.load_samples(Path(self.sample_prefix_path, 'no_30ms.wav')) + feature = audio_pp.generate_feature_using_tflm(audio_pp.samples) + feature_list = feature.numpy().tolist() + expected = [ + 126, 103, 124, 102, 124, 102, 123, 100, 118, 97, 118, 100, 118, 98, + 121, 100, 121, 98, 117, 91, 96, 74, 54, 87, 100, 87, 109, 92, 91, 80, + 64, 55, 83, 74, 74, 78, 114, 95, 101, 81 + ] + self.assertSequenceEqual(feature_list, expected) + + +if __name__ == '__main__': + test.main() diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider.cc 
b/tensorflow/lite/micro/examples/micro_speech/audio_provider.cc deleted file mode 100644 index 5ca425df4cf..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/audio_provider.cc +++ /dev/null @@ -1,38 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" - -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" - -namespace { -int16_t g_dummy_audio_data[kMaxAudioSampleSize]; -int32_t g_latest_audio_timestamp = 0; -} // namespace - -TfLiteStatus GetAudioSamples(int start_ms, int duration_ms, - int* audio_samples_size, int16_t** audio_samples) { - for (int i = 0; i < kMaxAudioSampleSize; ++i) { - g_dummy_audio_data[i] = 0; - } - *audio_samples_size = kMaxAudioSampleSize; - *audio_samples = g_dummy_audio_data; - return kTfLiteOk; -} - -int32_t LatestAudioTimestamp() { - g_latest_audio_timestamp += 100; - return g_latest_audio_timestamp; -} diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider.h b/tensorflow/lite/micro/examples/micro_speech/audio_provider.h deleted file mode 100644 index d3aab2cf051..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/audio_provider.h +++ /dev/null @@ -1,44 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_AUDIO_PROVIDER_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_AUDIO_PROVIDER_H_ - -#include "tensorflow/lite/c/common.h" - -// This is an abstraction around an audio source like a microphone, and is -// expected to return 16-bit PCM sample data for a given point in time. The -// sample data itself should be used as quickly as possible by the caller, since -// to allow memory optimizations there are no guarantees that the samples won't -// be overwritten by new data in the future. In practice, implementations should -// ensure that there's a reasonable time allowed for clients to access the data -// before any reuse. -// The reference implementation can have no platform-specific dependencies, so -// it just returns an array filled with zeros. For real applications, you should -// ensure there's a specialized implementation that accesses hardware APIs. 
-TfLiteStatus GetAudioSamples(int start_ms, int duration_ms, - int* audio_samples_size, int16_t** audio_samples); - -// Returns the time that audio data was last captured in milliseconds. There's -// no contract about what time zero represents, the accuracy, or the granularity -// of the result. Subsequent calls will generally not return a lower value, but -// even that's not guaranteed if there's an overflow wraparound. -// The reference implementation of this function just returns a constantly -// incrementing value for each call, since it would need a non-portable platform -// call to access time information. For real applications, you'll need to write -// your own platform-specific implementation. -int32_t LatestAudioTimestamp(); - -#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_AUDIO_PROVIDER_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc b/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc deleted file mode 100644 index fe3ad1613bd..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock.cc +++ /dev/null @@ -1,54 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" -#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.h" -#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.h" - -namespace { -int16_t g_dummy_audio_data[kMaxAudioSampleSize]; -int32_t g_latest_audio_timestamp = 0; -} // namespace - -TfLiteStatus GetAudioSamples(int start_ms, int duration_ms, - int* audio_samples_size, int16_t** audio_samples) { - const int yes_start = (0 * kAudioSampleFrequency) / 1000; - const int yes_end = (1000 * kAudioSampleFrequency) / 1000; - const int no_start = (4000 * kAudioSampleFrequency) / 1000; - const int no_end = (5000 * kAudioSampleFrequency) / 1000; - const int wraparound = (8000 * kAudioSampleFrequency) / 1000; - const int start_sample = (start_ms * kAudioSampleFrequency) / 1000; - for (int i = 0; i < kMaxAudioSampleSize; ++i) { - const int sample_index = (start_sample + i) % wraparound; - int16_t sample; - if ((sample_index >= yes_start) && (sample_index < yes_end)) { - sample = g_yes_1000ms_audio_data[sample_index - yes_start]; - } else if ((sample_index >= no_start) && (sample_index < no_end)) { - sample = g_no_1000ms_audio_data[sample_index - no_start]; - } else { - sample = 0; - } - g_dummy_audio_data[i] = sample; - } - *audio_samples_size = kMaxAudioSampleSize; - *audio_samples = g_dummy_audio_data; - return kTfLiteOk; -} - -int32_t LatestAudioTimestamp() { - g_latest_audio_timestamp += 100; - return g_latest_audio_timestamp; -} diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc 
b/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc deleted file mode 100644 index b15749e8c27..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/audio_provider_mock_test.cc +++ /dev/null @@ -1,68 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include - -#include "tensorflow/lite/c/common.h" -#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" -#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.h" -#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.h" -#include "tensorflow/lite/micro/testing/micro_test.h" - -TF_LITE_MICRO_TESTS_BEGIN - -TF_LITE_MICRO_TEST(TestAudioProviderMock) { - int audio_samples_size = 0; - int16_t* audio_samples = nullptr; - TfLiteStatus get_status = GetAudioSamples( - 0, kFeatureSliceDurationMs, &audio_samples_size, &audio_samples); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); - TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); - TF_LITE_MICRO_EXPECT(audio_samples != nullptr); - for (int i = 0; i < audio_samples_size; ++i) { - TF_LITE_MICRO_EXPECT_EQ(g_yes_1000ms_audio_data[i], audio_samples[i]); - } - - get_status = GetAudioSamples(500, kFeatureSliceDurationMs, - &audio_samples_size, &audio_samples); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); - TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); - TF_LITE_MICRO_EXPECT(audio_samples != nullptr); - for (int i = 0; i < audio_samples_size; ++i) { - TF_LITE_MICRO_EXPECT_EQ(g_yes_1000ms_audio_data[i + 8000], - audio_samples[i]); - } - - get_status = GetAudioSamples(1500, kFeatureSliceDurationMs, - &audio_samples_size, &audio_samples); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); - TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); - TF_LITE_MICRO_EXPECT(audio_samples != nullptr); - for (int i = 0; i < audio_samples_size; ++i) { - TF_LITE_MICRO_EXPECT_EQ(0, audio_samples[i]); - } - - get_status = GetAudioSamples(12250, kFeatureSliceDurationMs, - &audio_samples_size, &audio_samples); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status); - TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize); - TF_LITE_MICRO_EXPECT(audio_samples != nullptr); - for (int i = 0; i < audio_samples_size; ++i) { - TF_LITE_MICRO_EXPECT_EQ(g_no_1000ms_audio_data[i + 4000], audio_samples[i]); - } -} - -TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc b/tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc deleted file mode 100644 index fb403c08622..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/audio_provider_test.cc +++ /dev/null @@ -1,66 +0,0 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. 
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h"
-
-#include <limits>
-
-#include "tensorflow/lite/c/common.h"
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h"
-#include "tensorflow/lite/micro/testing/micro_test.h"
-
-TF_LITE_MICRO_TESTS_BEGIN
-
-TF_LITE_MICRO_TEST(TestAudioProvider) {
-  int audio_samples_size = 0;
-  int16_t* audio_samples = nullptr;
-  TfLiteStatus get_status = GetAudioSamples(
-      0, kFeatureSliceDurationMs, &audio_samples_size, &audio_samples);
-  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, get_status);
-  TF_LITE_MICRO_EXPECT_LE(audio_samples_size, kMaxAudioSampleSize);
-  TF_LITE_MICRO_EXPECT(audio_samples != nullptr);
-
-  // Make sure we can read all of the returned memory locations.
-  int total = 0;
-  for (int i = 0; i < audio_samples_size; ++i) {
-    total += audio_samples[i];
-  }
-  (void)total;
-}
-
-TF_LITE_MICRO_TEST(TestTimer) {
-  // Make sure that the technically-undefined overflow behavior we rely on
-  // below works on this platform. It's still not guaranteed, but at least
-  // this is a smoke check. Turn off when running with ASan, as it will
-  // complain about the following undefined behavior.
-#ifndef ADDRESS_SANITIZER
-  int32_t overflow_value = std::numeric_limits<int32_t>::max();
-  overflow_value += 1;
-  TF_LITE_MICRO_EXPECT_EQ(std::numeric_limits<int32_t>::min(), overflow_value);
-#endif
-
-  const int32_t first_time = LatestAudioTimestamp();
-  const int32_t second_time = LatestAudioTimestamp();
-
-  // It's possible that the timer may have wrapped around from +BIG_NUM to
-  // -BIG_NUM between the first and second calls, since we're storing
-  // milliseconds in a 32-bit integer. It's not reasonable that the call itself
-  // would have taken more than 2^31 milliseconds though, so look at the
-  // difference and rely on integer overflow to ensure it's accurate.
-  const int32_t time_delta = (second_time - first_time);
-  TF_LITE_MICRO_EXPECT_LE(0, time_delta);
-}
-
-TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/examples/micro_speech/command_responder.h b/tensorflow/lite/micro/examples/micro_speech/command_responder.h
deleted file mode 100644
index a1acb990d35..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/command_responder.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/ - -// Provides an interface to take an action based on an audio command. - -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_COMMAND_RESPONDER_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_COMMAND_RESPONDER_H_ - -#include "tensorflow/lite/c/common.h" - -// Called every time the results of an audio recognition run are available. The -// human-readable name of any recognized command is in the `found_command` -// argument, `score` has the numerical confidence, and `is_new_command` is set -// if the previous command was different to this one. -void RespondToCommand(int32_t current_time, const char* found_command, - uint8_t score, bool is_new_command); - -#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_COMMAND_RESPONDER_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/evaluate.py b/tensorflow/lite/micro/examples/micro_speech/evaluate.py new file mode 100644 index 00000000000..81f74b1ca7c --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/evaluate.py @@ -0,0 +1,203 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +""" +Wake-word model evaluation, with audio preprocessing using MicroInterpreter + +Run: +bazel build tensorflow/lite/micro/examples/micro_speech:evaluate +bazel-bin/tensorflow/lite/micro/examples/micro_speech/evaluate + --sample_path="path to 1 second audio sample in WAV format" +""" + +from absl import app +from absl import flags +import numpy as np +from pathlib import Path + +from tflite_micro.python.tflite_micro import runtime +from tensorflow.python.platform import resource_loader +import tensorflow as tf +from tflite_micro.tensorflow.lite.micro.examples.micro_speech import audio_preprocessor + +_SAMPLE_PATH = flags.DEFINE_string( + name='sample_path', + default='', + help='path for the audio sample to be predicted.', +) + +_FEATURES_SHAPE = (49, 40) + + +def quantize_input_data(data, input_details): + """quantize the input data using scale and zero point + + Args: + data (np.array in float): input data for the interpreter + input_details : output of get_input_details from the tflm interpreter. 
+
+  Returns:
+    np.ndarray: quantized data as int8 dtype
+  """
+  # Get input quantization parameters
+  data_type = input_details['dtype']
+  input_quantization_parameters = input_details['quantization_parameters']
+  input_scale = input_quantization_parameters['scales'][0]
+  input_zero_point = input_quantization_parameters['zero_points'][0]
+  # Quantize the input data
+  data = data / input_scale + input_zero_point
+  return data.astype(data_type)
+
+
+def dequantize_output_data(data: np.ndarray,
+                           output_details: dict) -> np.ndarray:
+  """Dequantize the model output
+
+  Args:
+    data: integer data to be dequantized
+    output_details: TFLM interpreter model output details
+
+  Returns:
+    np.ndarray: dequantized data as float32 dtype
+  """
+  output_quantization_parameters = output_details['quantization_parameters']
+  output_scale = output_quantization_parameters['scales'][0]
+  output_zero_point = output_quantization_parameters['zero_points'][0]
+  # Caveat: tflm_output_quant needs to be converted to float to avoid integer
+  # overflow during dequantization
+  # e.g., (tflm_output_quant - output_zero_point) and
+  # (tflm_output_quant + (-output_zero_point))
+  # can produce different results (int8 calculation)
+  return output_scale * (data.astype(np.float32) - output_zero_point)
+
+
+def predict(interpreter: runtime.Interpreter,
+            features: np.ndarray) -> np.ndarray:
+  """
+  Use TFLM interpreter to predict wake-word from audio sample features
+
+  Args:
+    interpreter: TFLM python interpreter instance
+    features: wake-word model feature data, with shape _FEATURES_SHAPE
+
+  Returns:
+    np.ndarray: predicted probability (softmax) for each model category
+  """
+
+  input_details = interpreter.get_input_details(0)
+  # Quantize the input if the model is quantized
+  # and our features are np.float32
+  if input_details['dtype'] != np.float32 and features.dtype == np.float32:
+    features = quantize_input_data(features, input_details)
+  flattened_features = features.flatten().reshape([1, -1])
+  interpreter.set_input(flattened_features, 0)
+  interpreter.invoke()
+  tflm_output = interpreter.get_output(0)
+
+  output_details = interpreter.get_output_details(0)
+  if output_details['dtype'] == np.float32:
+    return tflm_output[0].astype(np.float32)
+  # Dequantize the output for a quantized model
+  return dequantize_output_data(tflm_output[0], output_details)
+
+
+def generate_features(
+    audio_pp: audio_preprocessor.AudioPreprocessor) -> np.ndarray:
+  """
+  Generate audio sample features
+
+  Args:
+    audio_pp: AudioPreprocessor instance
+
+  Returns:
+    np.ndarray: generated audio sample features with shape _FEATURES_SHAPE
+  """
+  if audio_pp.params.use_float_output:
+    dtype = np.float32
+  else:
+    dtype = np.int8
+  features = np.zeros(_FEATURES_SHAPE, dtype=dtype)
+  start_index = 0
+  window_size = int(audio_pp.params.window_size_ms *
+                    audio_pp.params.sample_rate / 1000)
+  window_stride = int(audio_pp.params.window_stride_ms *
+                      audio_pp.params.sample_rate / 1000)
+  samples = audio_pp.samples[0]
+  frame_number = 0
+  end_index = start_index + window_size
+
+  # Reset the audio preprocessor noise estimates
+  audio_pp.reset_tflm()
+
+  while end_index <= len(samples):
+    frame_tensor: tf.Tensor = tf.convert_to_tensor(
+        samples[start_index:end_index])
+    frame_tensor = tf.reshape(frame_tensor, [1, -1])
+    feature_tensor = audio_pp.generate_feature_using_tflm(frame_tensor)
+    features[frame_number] = feature_tensor.numpy()
+    start_index += window_stride
+    end_index += window_stride
+    frame_number += 1
+
+  return features
+
+
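The scale/zero-point arithmetic above is easy to get backwards, so here is a minimal round-trip sketch with made-up quantization parameters (the scale and zero point are illustrative, not taken from the model):

    import numpy as np

    scale, zero_point = 0.05, -128  # hypothetical int8 quantization params

    x = np.array([0.0, 1.27, 6.35], dtype=np.float32)
    q = (x / scale + zero_point).astype(np.int8)          # quantize
    x_back = scale * (q.astype(np.float32) - zero_point)  # dequantize

    # Convert to float before subtracting, as the caveat above notes: in
    # int8 arithmetic, q - zero_point can overflow (127 - (-128) = 255,
    # which does not fit in an int8).
    assert np.allclose(x, x_back, atol=scale)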
+def get_category_names() -> list[str]:
+  """
+  Get the list of model output category names
+
+  Returns:
+    list[str]: model output category names
+  """
+  return ['silence', 'unknown', 'yes', 'no']
+
+
+def _main(_):
+  sample_path = Path(_SAMPLE_PATH.value)
+  assert sample_path.exists() and sample_path.is_file(), \
+      'Audio sample file does not exist. Please check the path.'
+  model_prefix_path = resource_loader.get_path_to_datafile('models')
+  model_path = Path(model_prefix_path, 'micro_speech_quantized.tflite')
+
+  feature_params = audio_preprocessor.FeatureParams()
+  audio_pp = audio_preprocessor.AudioPreprocessor(feature_params)
+  audio_pp.load_samples(sample_path)
+  features = generate_features(audio_pp)
+
+  tflm_interpreter = runtime.Interpreter.from_file(model_path)
+
+  frame_number = 0
+  test_features = np.zeros(_FEATURES_SHAPE, dtype=np.int8)
+  for feature in features:
+    test_features[frame_number] = feature
+    category_probabilities = predict(tflm_interpreter, test_features)
+    category_probabilities_str = '[' + ', '.join(
+        f'{p:.4f}' for p in category_probabilities) + ']'
+    print(f'Frame #{frame_number}: {category_probabilities_str}')
+    frame_number += 1
+
+  category_probabilities = predict(tflm_interpreter, features)
+  predicted_category = np.argmax(category_probabilities)
+  category_names = get_category_names()
+  print('Model predicts the audio sample as'
+        f' <{category_names[predicted_category]}>'
+        f' with probability {category_probabilities[predicted_category]:.2f}')
+
+
+if __name__ == '__main__':
+  app.run(_main)
diff --git a/tensorflow/lite/micro/examples/micro_speech/evaluate_test.py b/tensorflow/lite/micro/examples/micro_speech/evaluate_test.py
new file mode 100644
index 00000000000..d5d6ac06d88
--- /dev/null
+++ b/tensorflow/lite/micro/examples/micro_speech/evaluate_test.py
@@ -0,0 +1,96 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =============================================================================
+"""
+Wake-word model testing, with audio preprocessing using MicroInterpreter
+
+Run:
+bazel build tensorflow/lite/micro/examples/micro_speech:evaluate_test
+bazel-bin/tensorflow/lite/micro/examples/micro_speech/evaluate_test
+"""
+
+import numpy as np
+from pathlib import Path
+
+from tensorflow.python.framework import test_util
+from tensorflow.python.platform import resource_loader
+from tensorflow.python.platform import test
+from tflite_micro.python.tflite_micro import runtime
+from tflite_micro.tensorflow.lite.micro.examples.micro_speech import audio_preprocessor
+from tflite_micro.tensorflow.lite.micro.examples.micro_speech import evaluate
+
+
+class MicroSpeechTest(test_util.TensorFlowTestCase):
+
+  def setUp(self):
+    model_prefix_path = resource_loader.get_path_to_datafile('models')
+    self.sample_prefix_path = resource_loader.get_path_to_datafile('testdata')
+    model_path = Path(model_prefix_path, 'micro_speech_quantized.tflite')
+    self.tflm_interpreter = runtime.Interpreter.from_file(model_path)
+    self.test_data = [
+        ('no', 'no_1000ms.wav'),
+        ('yes', 'yes_1000ms.wav'),
+        ('silence', 'noise_1000ms.wav'),
+        ('silence', 'silence_1000ms.wav'),
+    ]
+
+  def testModelAccuracyWithInt8Features(self):
+    feature_params = audio_preprocessor.FeatureParams()
+    audio_pp = audio_preprocessor.AudioPreprocessor(feature_params)
+    for label, sample_name in self.test_data:
+      # Load audio sample data
+      sample_path = Path(self.sample_prefix_path, sample_name)
+      audio_pp.load_samples(sample_path)
+
+      # Generate feature data from audio samples.
+      # Note that the noise estimate is reset each time generate_features()
+      # is called.
+      features = evaluate.generate_features(audio_pp)
+
+      # Run inference with the quantized model on the int8 feature data
+      category_probabilities = evaluate.predict(self.tflm_interpreter,
+                                                features)
+
+      # Check the prediction result
+      predicted_category = np.argmax(category_probabilities)
+      category_names = evaluate.get_category_names()
+      self.assertEqual(category_names[predicted_category], label)
+
+  def testModelAccuracyWithFloatFeatures(self):
+    feature_params = audio_preprocessor.FeatureParams(use_float_output=True)
+    audio_pp = audio_preprocessor.AudioPreprocessor(feature_params)
+    for label, sample_name in self.test_data:
+      # Load audio sample data
+      sample_path = Path(self.sample_prefix_path, sample_name)
+      audio_pp.load_samples(sample_path)
+
+      # Generate feature data from audio samples.
+      # Note that the noise estimate is reset each time generate_features()
+      # is called.
+      features = evaluate.generate_features(audio_pp)
+
+      # Run inference with the quantized model on the float feature data
+      category_probabilities = evaluate.predict(self.tflm_interpreter,
+                                                features)
+
+      # Check the prediction result
+      predicted_category = np.argmax(category_probabilities)
+      category_names = evaluate.get_category_names()
+      self.assertEqual(category_names[predicted_category], label)
+
+
+if __name__ == '__main__':
+  test.main()
diff --git a/tensorflow/lite/micro/examples/micro_speech/feature_provider.cc b/tensorflow/lite/micro/examples/micro_speech/feature_provider.cc
deleted file mode 100644
index a4a6635d907..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/feature_provider.cc
+++ /dev/null
@@ -1,119 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
- -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h" - -#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" -#include "tensorflow/lite/micro/micro_log.h" - -FeatureProvider::FeatureProvider(int feature_size, int8_t* feature_data) - : feature_size_(feature_size), - feature_data_(feature_data), - is_first_run_(true) { - // Initialize the feature data to default values. - for (int n = 0; n < feature_size_; ++n) { - feature_data_[n] = 0; - } -} - -FeatureProvider::~FeatureProvider() {} - -TfLiteStatus FeatureProvider::PopulateFeatureData(int32_t last_time_in_ms, - int32_t time_in_ms, - int* how_many_new_slices) { - if (feature_size_ != kFeatureElementCount) { - MicroPrintf("Requested feature_data_ size %d doesn't match %d", - feature_size_, kFeatureElementCount); - return kTfLiteError; - } - - // Quantize the time into steps as long as each window stride, so we can - // figure out which audio data we need to fetch. - const int last_step = (last_time_in_ms / kFeatureSliceStrideMs); - const int current_step = (time_in_ms / kFeatureSliceStrideMs); - - int slices_needed = current_step - last_step; - // If this is the first call, make sure we don't use any cached information. 
-  if (is_first_run_) {
-    TfLiteStatus init_status = InitializeMicroFeatures();
-    if (init_status != kTfLiteOk) {
-      return init_status;
-    }
-    is_first_run_ = false;
-    slices_needed = kFeatureSliceCount;
-  }
-  if (slices_needed > kFeatureSliceCount) {
-    slices_needed = kFeatureSliceCount;
-  }
-  *how_many_new_slices = slices_needed;
-
-  const int slices_to_keep = kFeatureSliceCount - slices_needed;
-  const int slices_to_drop = kFeatureSliceCount - slices_to_keep;
-  // If we can avoid recalculating some slices, just move the existing data
-  // up in the spectrogram, to perform something like this:
-  //     last time = 80ms          current time = 120ms
-  //   +-----------+             +-----------+
-  //   | data@20ms |         --> | data@60ms |
-  //   +-----------+       --    +-----------+
-  //   | data@40ms |     --  --> | data@80ms |
-  //   +-----------+   --  --    +-----------+
-  //   | data@60ms | --  --      |  <empty>  |
-  //   +-----------+   --        +-----------+
-  //   | data@80ms | --          |  <empty>  |
-  //   +-----------+             +-----------+
-  if (slices_to_keep > 0) {
-    for (int dest_slice = 0; dest_slice < slices_to_keep; ++dest_slice) {
-      int8_t* dest_slice_data =
-          feature_data_ + (dest_slice * kFeatureSliceSize);
-      const int src_slice = dest_slice + slices_to_drop;
-      const int8_t* src_slice_data =
-          feature_data_ + (src_slice * kFeatureSliceSize);
-      for (int i = 0; i < kFeatureSliceSize; ++i) {
-        dest_slice_data[i] = src_slice_data[i];
-      }
-    }
-  }
-  // Any slices that need to be filled in with feature data have their
-  // appropriate audio data pulled, and features calculated for that slice.
-  if (slices_needed > 0) {
-    for (int new_slice = slices_to_keep; new_slice < kFeatureSliceCount;
-         ++new_slice) {
-      const int new_step = (current_step - kFeatureSliceCount + 1) + new_slice;
-      const int32_t slice_start_ms = (new_step * kFeatureSliceStrideMs);
-      int16_t* audio_samples = nullptr;
-      int audio_samples_size = 0;
-      // TODO(petewarden): Fix bug that leads to negative slice_start_ms
-      GetAudioSamples((slice_start_ms > 0 ? slice_start_ms : 0),
-                      kFeatureSliceDurationMs, &audio_samples_size,
-                      &audio_samples);
-      if (audio_samples_size < kMaxAudioSampleSize) {
-        MicroPrintf("Audio data size %d too small, want %d",
-                    audio_samples_size, kMaxAudioSampleSize);
-        return kTfLiteError;
-      }
-      int8_t* new_slice_data = feature_data_ + (new_slice * kFeatureSliceSize);
-      size_t num_samples_read;
-      TfLiteStatus generate_status = GenerateMicroFeatures(
-          audio_samples, audio_samples_size, kFeatureSliceSize, new_slice_data,
-          &num_samples_read);
-      if (generate_status != kTfLiteOk) {
-        return generate_status;
-      }
-    }
-  }
-  return kTfLiteOk;
-}
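The shifting logic above is the heart of PopulateFeatureData(); a compact sketch of the same bookkeeping in Python (ignoring the first-run path, with constants taken from micro_model_settings.h later in this diff) may help:

    import numpy as np

    kFeatureSliceCount, kFeatureSliceSize = 49, 40
    kFeatureSliceStrideMs = 20

    spectrogram = np.zeros((kFeatureSliceCount, kFeatureSliceSize), np.int8)

    def populate(last_time_ms, time_ms):
      # Same step arithmetic as PopulateFeatureData() above.
      slices_needed = min(
          time_ms // kFeatureSliceStrideMs -
          last_time_ms // kFeatureSliceStrideMs, kFeatureSliceCount)
      slices_to_keep = kFeatureSliceCount - slices_needed
      # Shift the retained slices up; only the tail needs fresh features.
      spectrogram[:slices_to_keep] = spectrogram[slices_needed:]
      return slices_needed

    # 40 ms elapsed at a 20 ms stride: only two slices are recalculated.
    assert populate(80, 120) == 2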
diff --git a/tensorflow/lite/micro/examples/micro_speech/feature_provider.h b/tensorflow/lite/micro/examples/micro_speech/feature_provider.h
deleted file mode 100644
index 2a2ef8f4b31..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/feature_provider.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_FEATURE_PROVIDER_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_FEATURE_PROVIDER_H_
-
-#include "tensorflow/lite/c/common.h"
-
-// Binds itself to an area of memory intended to hold the input features for an
-// audio-recognition neural network model, and fills that data area with the
-// features representing the current audio input, for example from a microphone.
-// The audio features themselves are a two-dimensional array, made up of
-// horizontal slices representing the frequencies at one point in time, stacked
-// on top of each other to form a spectrogram showing how those frequencies
-// changed over time.
-class FeatureProvider {
- public:
-  // Create the provider, and bind it to an area of memory. This memory should
-  // remain accessible for the lifetime of the provider object, since subsequent
-  // calls will fill it with feature data. The provider does no memory
-  // management of this data.
-  FeatureProvider(int feature_size, int8_t* feature_data);
-  ~FeatureProvider();
-
-  // Fills the feature data with information from audio inputs, and returns how
-  // many feature slices were updated.
-  TfLiteStatus PopulateFeatureData(int32_t last_time_in_ms, int32_t time_in_ms,
-                                   int* how_many_new_slices);
-
- private:
-  int feature_size_;
-  int8_t* feature_data_;
-  // Make sure we don't try to use cached information if this is the first call
-  // into the provider.
-  bool is_first_run_;
-};
-
-#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_FEATURE_PROVIDER_H_
diff --git a/tensorflow/lite/micro/examples/micro_speech/feature_provider_mock_test.cc b/tensorflow/lite/micro/examples/micro_speech/feature_provider_mock_test.cc
deleted file mode 100644
index 6fe5e43ecb8..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/feature_provider_mock_test.cc
+++ /dev/null
@@ -1,58 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/ - -#include "tensorflow/lite/c/common.h" -#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h" -#include "tensorflow/lite/micro/testing/micro_test.h" - -TF_LITE_MICRO_TESTS_BEGIN - -TF_LITE_MICRO_TEST(TestFeatureProviderMockYes) { - int8_t feature_data[kFeatureElementCount]; - FeatureProvider feature_provider(kFeatureElementCount, feature_data); - - int how_many_new_slices = 0; - TfLiteStatus populate_status = feature_provider.PopulateFeatureData( - /* last_time_in_ms= */ 0, /* time_in_ms= */ 970, &how_many_new_slices); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, populate_status); - TF_LITE_MICRO_EXPECT_EQ(kFeatureSliceCount, how_many_new_slices); - - for (int i = 0; i < kFeatureElementCount; ++i) { - TF_LITE_MICRO_EXPECT_EQ(g_yes_micro_f2e59fea_nohash_1_data[i], - feature_data[i]); - } -} - -TF_LITE_MICRO_TEST(TestFeatureProviderMockNo) { - int8_t feature_data[kFeatureElementCount]; - FeatureProvider feature_provider(kFeatureElementCount, feature_data); - - int how_many_new_slices = 0; - TfLiteStatus populate_status = feature_provider.PopulateFeatureData( - /* last_time_in_ms= */ 4000, - /* time_in_ms= */ 4970, &how_many_new_slices); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, populate_status); - TF_LITE_MICRO_EXPECT_EQ(kFeatureSliceCount, how_many_new_slices); - - for (int i = 0; i < kFeatureElementCount; ++i) { - TF_LITE_MICRO_EXPECT_EQ(g_no_micro_f9643d42_nohash_4_data[i], - feature_data[i]); - } -} - -TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/feature_provider_test.cc b/tensorflow/lite/micro/examples/micro_speech/feature_provider_test.cc deleted file mode 100644 index 2582e8c8928..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/feature_provider_test.cc +++ /dev/null @@ -1,35 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h" - -#include "tensorflow/lite/c/common.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" -#include "tensorflow/lite/micro/testing/micro_test.h" - -TF_LITE_MICRO_TESTS_BEGIN - -TF_LITE_MICRO_TEST(TestFeatureProvider) { - int8_t feature_data[kFeatureElementCount]; - FeatureProvider feature_provider(kFeatureElementCount, feature_data); - - int how_many_new_slices = 0; - TfLiteStatus populate_status = feature_provider.PopulateFeatureData( - /* last_time_in_ms= */ 0, /* time_in_ms= */ 10000, &how_many_new_slices); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, populate_status); - TF_LITE_MICRO_EXPECT_EQ(kFeatureSliceCount, how_many_new_slices); -} - -TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/images/animation_on_arduino.gif b/tensorflow/lite/micro/examples/micro_speech/images/animation_on_arduino.gif deleted file mode 100644 index 66ab9c1a593..00000000000 Binary files a/tensorflow/lite/micro/examples/micro_speech/images/animation_on_arduino.gif and /dev/null differ diff --git a/tensorflow/lite/micro/examples/micro_speech/images/audio_preprocessor_int8.png b/tensorflow/lite/micro/examples/micro_speech/images/audio_preprocessor_int8.png new file mode 100644 index 00000000000..a5c91fa2436 Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/images/audio_preprocessor_int8.png differ diff --git a/tensorflow/lite/micro/examples/micro_speech/images/micro_speech_quantized.png b/tensorflow/lite/micro/examples/micro_speech/images/micro_speech_quantized.png new file mode 100644 index 00000000000..59e98c6d780 Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/images/micro_speech_quantized.png differ diff --git a/tensorflow/lite/micro/examples/micro_speech/images/model_architecture.png b/tensorflow/lite/micro/examples/micro_speech/images/model_architecture.png deleted file mode 100644 index ce91faddf67..00000000000 Binary files a/tensorflow/lite/micro/examples/micro_speech/images/model_architecture.png and /dev/null differ diff --git a/tensorflow/lite/micro/examples/micro_speech/main_functions.cc b/tensorflow/lite/micro/examples/micro_speech/main_functions.cc deleted file mode 100644 index c92636a7877..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/main_functions.cc +++ /dev/null @@ -1,163 +0,0 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/main_functions.h" - -#include "tensorflow/lite/micro/examples/micro_speech/audio_provider.h" -#include "tensorflow/lite/micro/examples/micro_speech/command_responder.h" -#include "tensorflow/lite/micro/examples/micro_speech/feature_provider.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h" -#include "tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.h" -#include "tensorflow/lite/micro/examples/micro_speech/recognize_commands.h" -#include "tensorflow/lite/micro/micro_interpreter.h" -#include "tensorflow/lite/micro/micro_log.h" -#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" -#include "tensorflow/lite/micro/system_setup.h" -#include "tensorflow/lite/schema/schema_generated.h" - -// Globals, used for compatibility with Arduino-style sketches. -namespace { -const tflite::Model* model = nullptr; -tflite::MicroInterpreter* interpreter = nullptr; -TfLiteTensor* model_input = nullptr; -FeatureProvider* feature_provider = nullptr; -RecognizeCommands* recognizer = nullptr; -int32_t previous_time = 0; - -// Create an area of memory to use for input, output, and intermediate arrays. -// The size of this will depend on the model you're using, and may need to be -// determined by experimentation. -constexpr int kTensorArenaSize = 10 * 1024; -uint8_t tensor_arena[kTensorArenaSize]; -int8_t feature_buffer[kFeatureElementCount]; -int8_t* model_input_buffer = nullptr; -} // namespace - -// The name of this function is important for Arduino compatibility. -void setup() { - tflite::InitializeTarget(); - - // Map the model into a usable data structure. This doesn't involve any - // copying or parsing, it's a very lightweight operation. - model = tflite::GetModel(g_micro_speech_model_data); - if (model->version() != TFLITE_SCHEMA_VERSION) { - MicroPrintf( - "Model provided is schema version %d not equal " - "to supported version %d.", - model->version(), TFLITE_SCHEMA_VERSION); - return; - } - - // Pull in only the operation implementations we need. - // This relies on a complete list of all the ops needed by this graph. - - // NOLINTNEXTLINE(runtime-global-variables) - static tflite::MicroMutableOpResolver<4> micro_op_resolver; - if (micro_op_resolver.AddDepthwiseConv2D() != kTfLiteOk) { - return; - } - if (micro_op_resolver.AddFullyConnected() != kTfLiteOk) { - return; - } - if (micro_op_resolver.AddSoftmax() != kTfLiteOk) { - return; - } - if (micro_op_resolver.AddReshape() != kTfLiteOk) { - return; - } - - // Build an interpreter to run the model with. - static tflite::MicroInterpreter static_interpreter( - model, micro_op_resolver, tensor_arena, kTensorArenaSize); - interpreter = &static_interpreter; - - // Allocate memory from the tensor_arena for the model's tensors. - TfLiteStatus allocate_status = interpreter->AllocateTensors(); - if (allocate_status != kTfLiteOk) { - MicroPrintf("AllocateTensors() failed"); - return; - } - - // Get information about the memory area to use for the model's input. 
- model_input = interpreter->input(0); - if ((model_input->dims->size != 2) || (model_input->dims->data[0] != 1) || - (model_input->dims->data[1] != - (kFeatureSliceCount * kFeatureSliceSize)) || - (model_input->type != kTfLiteInt8)) { - MicroPrintf("Bad input tensor parameters in model"); - return; - } - model_input_buffer = model_input->data.int8; - - // Prepare to access the audio spectrograms from a microphone or other source - // that will provide the inputs to the neural network. - // NOLINTNEXTLINE(runtime-global-variables) - static FeatureProvider static_feature_provider(kFeatureElementCount, - feature_buffer); - feature_provider = &static_feature_provider; - - static RecognizeCommands static_recognizer; - recognizer = &static_recognizer; - - previous_time = 0; -} - -// The name of this function is important for Arduino compatibility. -void loop() { - // Fetch the spectrogram for the current time. - const int32_t current_time = LatestAudioTimestamp(); - int how_many_new_slices = 0; - TfLiteStatus feature_status = feature_provider->PopulateFeatureData( - previous_time, current_time, &how_many_new_slices); - if (feature_status != kTfLiteOk) { - MicroPrintf("Feature generation failed"); - return; - } - previous_time = current_time; - // If no new audio samples have been received since last time, don't bother - // running the network model. - if (how_many_new_slices == 0) { - return; - } - - // Copy feature buffer to input tensor - for (int i = 0; i < kFeatureElementCount; i++) { - model_input_buffer[i] = feature_buffer[i]; - } - - // Run the model on the spectrogram input and make sure it succeeds. - TfLiteStatus invoke_status = interpreter->Invoke(); - if (invoke_status != kTfLiteOk) { - MicroPrintf("Invoke failed"); - return; - } - - // Obtain a pointer to the output tensor - TfLiteTensor* output = interpreter->output(0); - // Determine whether a command was recognized based on the output of inference - const char* found_command = nullptr; - uint8_t score = 0; - bool is_new_command = false; - TfLiteStatus process_status = recognizer->ProcessLatestResults( - output, current_time, &found_command, &score, &is_new_command); - if (process_status != kTfLiteOk) { - MicroPrintf("RecognizeCommands::ProcessLatestResults() failed"); - return; - } - // Do something based on the recognized command. The default implementation - // just prints to the error console, but you should replace this with your - // own function for a real application. - RespondToCommand(current_time, found_command, score, is_new_command); -} diff --git a/tensorflow/lite/micro/examples/micro_speech/main_functions.h b/tensorflow/lite/micro/examples/micro_speech/main_functions.h deleted file mode 100644 index 0ac06771056..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/main_functions.h +++ /dev/null @@ -1,37 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/ - -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MAIN_FUNCTIONS_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MAIN_FUNCTIONS_H_ - -// Expose a C friendly interface for main functions. -#ifdef __cplusplus -extern "C" { -#endif - -// Initializes all data needed for the example. The name is important, and needs -// to be setup() for Arduino compatibility. -void setup(); - -// Runs one iteration of data gathering and inference. This should be called -// repeatedly from the application code. The name needs to be loop() for Arduino -// compatibility. -void loop(); - -#ifdef __cplusplus -} -#endif - -#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MAIN_FUNCTIONS_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/BUILD b/tensorflow/lite/micro/examples/micro_speech/micro_features/BUILD deleted file mode 100644 index 1077435fe26..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/micro_features/BUILD +++ /dev/null @@ -1,78 +0,0 @@ -# Library for generating feature vectors from audio data -package( - default_visibility = ["//visibility:public"], - # Disabling layering_check because of http://b/177257332 - features = ["-layering_check"], - licenses = ["notice"], -) - -cc_library( - name = "micro_model_settings", - srcs = [ - "micro_model_settings.cc", - ], - hdrs = [ - "micro_model_settings.h", - ], -) - -cc_library( - name = "micro_features_test_data", - srcs = [ - "no_micro_features_data.cc", - "yes_micro_features_data.cc", - ], - hdrs = [ - "no_micro_features_data.h", - "yes_micro_features_data.h", - ], -) - -cc_library( - name = "micro_features_generator", - srcs = [ - "micro_features_generator.cc", - ], - hdrs = [ - "micro_features_generator.h", - ], - deps = [ - ":micro_model_settings", - "//tensorflow/lite/c:common", - "//tensorflow/lite/experimental/microfrontend/lib:frontend", - "//tensorflow/lite/micro:micro_log", - ], -) - -cc_library( - name = "micro_features_generator_test_data", - srcs = [ - "no_feature_data_slice.cc", - "yes_feature_data_slice.cc", - ], - hdrs = [ - "no_feature_data_slice.h", - "yes_feature_data_slice.h", - ], -) - -cc_test( - name = "micro_features_generator_test", - size = "small", - srcs = [ - "micro_features_generator_test.cc", - ], - tags = [ - "noasan", # TODO(b/179930607): Fix with asan. - ], - deps = [ - ":micro_features_generator", - ":micro_features_generator_test_data", - ":micro_model_settings", - "//tensorflow/lite/c:common", - "//tensorflow/lite/micro:micro_framework", - "//tensorflow/lite/micro:micro_log", - "//tensorflow/lite/micro/examples/micro_speech:audio_sample_test_data", - "//tensorflow/lite/micro/testing:micro_test", - ], -) diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc deleted file mode 100644 index 3dbb5d30bb5..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.cc +++ /dev/null @@ -1,113 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. 
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h"
-
-#include <cmath>
-#include <cstring>
-
-#include "tensorflow/lite/experimental/microfrontend/lib/frontend.h"
-#include "tensorflow/lite/experimental/microfrontend/lib/frontend_util.h"
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h"
-#include "tensorflow/lite/micro/micro_log.h"
-
-namespace {
-
-FrontendState g_micro_features_state;
-bool g_is_first_time = true;
-
-}  // namespace
-
-TfLiteStatus InitializeMicroFeatures() {
-  FrontendConfig config;
-  config.window.size_ms = kFeatureSliceDurationMs;
-  config.window.step_size_ms = kFeatureSliceStrideMs;
-  config.noise_reduction.smoothing_bits = 10;
-  config.filterbank.num_channels = kFeatureSliceSize;
-  config.filterbank.lower_band_limit = 125.0;
-  config.filterbank.upper_band_limit = 7500.0;
-  config.noise_reduction.smoothing_bits = 10;
-  config.noise_reduction.even_smoothing = 0.025;
-  config.noise_reduction.odd_smoothing = 0.06;
-  config.noise_reduction.min_signal_remaining = 0.05;
-  config.pcan_gain_control.enable_pcan = 1;
-  config.pcan_gain_control.strength = 0.95;
-  config.pcan_gain_control.offset = 80.0;
-  config.pcan_gain_control.gain_bits = 21;
-  config.log_scale.enable_log = 1;
-  config.log_scale.scale_shift = 6;
-  if (!FrontendPopulateState(&config, &g_micro_features_state,
-                             kAudioSampleFrequency)) {
-    MicroPrintf("FrontendPopulateState() failed");
-    return kTfLiteError;
-  }
-  g_is_first_time = true;
-  return kTfLiteOk;
-}
-
-// This is not exposed in any header, and is only used for testing, to ensure
-// that the state is correctly set up before generating results.
-void SetMicroFeaturesNoiseEstimates(const uint32_t* estimate_presets) {
-  for (int i = 0; i < g_micro_features_state.filterbank.num_channels; ++i) {
-    g_micro_features_state.noise_reduction.estimate[i] = estimate_presets[i];
-  }
-}
-
-TfLiteStatus GenerateMicroFeatures(const int16_t* input, int input_size,
-                                   int output_size, int8_t* output,
-                                   size_t* num_samples_read) {
-  const int16_t* frontend_input;
-  if (g_is_first_time) {
-    frontend_input = input;
-    g_is_first_time = false;
-  } else {
-    frontend_input = input + 160;
-  }
-  FrontendOutput frontend_output = FrontendProcessSamples(
-      &g_micro_features_state, frontend_input, input_size, num_samples_read);
-
-  for (size_t i = 0; i < frontend_output.size; ++i) {
-    // These scaling values are derived from those used in input_data.py in the
-    // training pipeline.
-    // The feature pipeline outputs 16-bit signed integers in roughly a 0 to
-    // 670 range. In training, these are then arbitrarily divided by 25.6 to
-    // get float values in the rough range of 0.0 to 26.0. This scaling is
-    // performed for historical reasons, to match up with the output of other
-    // feature generators.
-    // The process is then further complicated when we quantize the model. This
-    // means we have to scale the 0.0 to 26.0 real values to the -128 to 127
-    // signed integer numbers.
-    // All this means that to get matching values from our integer feature
-    // output into the tensor input, we have to perform:
-    // input = (((feature / 25.6) / 26.0) * 256) - 128
-    // To simplify this and perform it in 32-bit integer math, we rearrange to:
-    // input = (feature * 256) / (25.6 * 26.0) - 128
-    constexpr int32_t value_scale = 256;
-    constexpr int32_t value_div = static_cast<int32_t>((25.6f * 26.0f) + 0.5f);
-    int32_t value =
-        ((frontend_output.values[i] * value_scale) + (value_div / 2)) /
-        value_div;
-    value -= 128;
-    if (value < -128) {
-      value = -128;
-    }
-    if (value > 127) {
-      value = 127;
-    }
-    output[i] = value;
-  }
-
-  return kTfLiteOk;
-}
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h
deleted file mode 100644
index 7ee0d2b46c0..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h
+++ /dev/null
@@ -1,30 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_FEATURES_GENERATOR_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_FEATURES_GENERATOR_H_
-
-#include "tensorflow/lite/c/common.h"
-
-// Sets up any resources needed for the feature generation pipeline.
-TfLiteStatus InitializeMicroFeatures();
-
-// Converts audio sample data into a more compact form that's appropriate for
-// feeding into a neural network.
-TfLiteStatus GenerateMicroFeatures(const int16_t* input, int input_size,
-                                   int output_size, int8_t* output,
-                                   size_t* num_samples_read);
-
-#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_FEATURES_GENERATOR_H_
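To make the rearranged integer math concrete, the same computation in Python (illustrative only; 670 is the rough upper end of the frontend output range described in the comment above):

    value_scale = 256
    value_div = int(25.6 * 26.0 + 0.5)  # 666, i.e. 665.6 rounded up

    def scale_feature(feature):
      # Rounded integer division, then shift into the int8 range.
      value = (feature * value_scale + value_div // 2) // value_div - 128
      return max(-128, min(127, value))

    assert scale_feature(0) == -128
    assert scale_feature(670) == 127  # top of the range clamps to int8 max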
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc
deleted file mode 100644
index 53ab44332b5..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator_test.cc
+++ /dev/null
@@ -1,95 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_features_generator.h"
-
-#include "tensorflow/lite/c/common.h"
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h"
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h"
-#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h"
-#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.h"
-#include "tensorflow/lite/micro/micro_log.h"
-#include "tensorflow/lite/micro/testing/micro_test.h"
-
-// This is a test-only API, not exposed in any public headers, so declare it.
-void SetMicroFeaturesNoiseEstimates(const uint32_t* estimate_presets);
-
-TF_LITE_MICRO_TESTS_BEGIN
-
-TF_LITE_MICRO_TEST(TestMicroFeaturesGeneratorYes) {
-  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, InitializeMicroFeatures());
-
-  // The micro features pipeline retains state from previous calls to help
-  // estimate the background noise. Unfortunately this makes it harder to
-  // exactly reproduce results in a test environment, so use a known snapshot
-  // of the parameters at the point that the golden feature values were
-  // created.
-  const uint32_t yes_estimate_presets[] = {
-      1062898, 2644477, 1257642, 1864718, 412722, 725703, 395721, 474082,
-      173046, 255856, 158966, 153736, 69181, 199100, 144493, 227740,
-      110573, 164330, 79666, 144650, 122947, 476799, 398553, 497493,
-      322152, 1140005, 566716, 690605, 308902, 347481, 109891, 170457,
-      73901, 100975, 42963, 72325, 34183, 20207, 6640, 9468,
-  };
-  SetMicroFeaturesNoiseEstimates(yes_estimate_presets);
-
-  int8_t yes_calculated_data[g_yes_feature_data_slice_size];
-  size_t num_samples_read;
-  TfLiteStatus yes_status = GenerateMicroFeatures(
-      g_yes_30ms_audio_data, g_yes_30ms_audio_data_size,
-      g_yes_feature_data_slice_size, yes_calculated_data, &num_samples_read);
-  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, yes_status);
-
-  for (int i = 0; i < g_yes_feature_data_slice_size; ++i) {
-    const int expected = g_yes_feature_data_slice[i];
-    const int actual = yes_calculated_data[i];
-    TF_LITE_MICRO_EXPECT_EQ(expected, actual);
-    if (expected != actual) {
-      MicroPrintf("Expected value %d but found %d", expected, actual);
-    }
-  }
-}
-
-TF_LITE_MICRO_TEST(TestMicroFeaturesGeneratorNo) {
-  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, InitializeMicroFeatures());
-  // As we did for the previous features, set known good noise state
-  // parameters.
-  const uint32_t no_estimate_presets[] = {
-      2563964, 1909393, 559801, 538670, 203643, 175959, 75088, 139491,
-      59691, 95307, 43865, 129263, 52517, 80058, 51330, 100731,
-      76674, 76262, 15497, 22598, 13778, 21460, 8946, 17806,
-      10023, 18810, 8002, 10842, 7578, 9983, 6267, 10759,
-      8946, 18488, 9691, 39785, 9939, 17835, 9671, 18512,
-  };
-  SetMicroFeaturesNoiseEstimates(no_estimate_presets);
-
-  int8_t no_calculated_data[g_no_feature_data_slice_size];
-  size_t num_samples_read;
-  TfLiteStatus no_status = GenerateMicroFeatures(
-      g_no_30ms_audio_data, g_no_30ms_audio_data_size,
-      g_no_feature_data_slice_size, no_calculated_data, &num_samples_read);
-  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, no_status);
-
-  for (size_t i = 0; i < g_no_feature_data_slice_size; ++i) {
-    const int expected = g_no_feature_data_slice[i];
-    const int actual = no_calculated_data[i];
-    TF_LITE_MICRO_EXPECT_EQ(expected, actual);
-    if (expected != actual) {
-      MicroPrintf("Expected value %d but found %d", expected, actual);
-    }
-  }
-}
-
-TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h
deleted file mode 100644
index e542213e8d1..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h
+++ /dev/null
@@ -1,43 +0,0 @@
-/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_MODEL_SETTINGS_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_MODEL_SETTINGS_H_
-
-// Keeping these as constant expressions allows us to allocate fixed-sized
-// arrays on the stack for our working memory.
-
-// The size of the input time series data we pass to the FFT to produce the
-// frequency information. This has to be a power of two, and since we're
-// dealing with 30ms of 16KHz inputs, which means 480 samples, this is the
-// next value.
-constexpr int kMaxAudioSampleSize = 512;
-constexpr int kAudioSampleFrequency = 16000;
-
-// The following values are derived from values used during model training.
-// If you change the way you preprocess the input, update all these constants.
-constexpr int kFeatureSliceSize = 40;
-constexpr int kFeatureSliceCount = 49;
-constexpr int kFeatureElementCount = (kFeatureSliceSize * kFeatureSliceCount);
-constexpr int kFeatureSliceStrideMs = 20;
-constexpr int kFeatureSliceDurationMs = 30;
-
-// Variables for the model's output categories.
-constexpr int kSilenceIndex = 0;
-constexpr int kUnknownIndex = 1;
-// If you modify the output categories, you need to update the following
-// values.
-constexpr int kCategoryCount = 4;
-extern const char* kCategoryLabels[kCategoryCount];
-
-#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_MODEL_SETTINGS_H_
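The relationships between the constants in the header above are easy to lose track of; a short sanity check using its values:

    kAudioSampleFrequency = 16000
    kFeatureSliceDurationMs = 30
    kFeatureSliceStrideMs = 20
    kFeatureSliceSize = 40
    kFeatureSliceCount = 49

    # 30 ms at 16 kHz is 480 samples; 512 is the next power of two (FFT size).
    assert kAudioSampleFrequency * kFeatureSliceDurationMs // 1000 == 480

    # A 1000 ms clip yields 49 overlapping 30 ms slices at a 20 ms stride.
    assert (1000 - kFeatureSliceDurationMs) // kFeatureSliceStrideMs + 1 == 49

    # The flattened model input holds 49 * 40 = 1960 feature elements.
    assert kFeatureSliceCount * kFeatureSliceSize == 1960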
-constexpr int kCategoryCount = 4;
-extern const char* kCategoryLabels[kCategoryCount];
-
-#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_MICRO_MODEL_SETTINGS_H_
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h
deleted file mode 100644
index 01e6605b844..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_feature_data_slice.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// This data was extracted from the larger feature data held in
-// no_features_data.cc and consists of the 29th spectrogram slice of 40 values.
-// This is the expected result of running the sample data in
-// no_30ms_sample_data.cc through the preprocessing pipeline.
-
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_FEATURE_DATA_SLICE_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_FEATURE_DATA_SLICE_H_
-
-#include <cstdint>
-
-constexpr int g_no_feature_data_slice_size = 40;
-extern const int8_t g_no_feature_data_slice[];
-
-#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_NO_FEATURE_DATA_SLICE_H_
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc
deleted file mode 100644
index f4814867d92..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.cc
+++ /dev/null
@@ -1,188 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h"
-
-// Golden test values for the expected spectrogram from a "no" sample file
-// speech_commands_test_set_v0.02/no/f9643d42_nohash_4.wav.
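For reference, the arithmetic connecting the settings above to the golden data below: 30 ms of 16 kHz audio is 480 samples, which the pipeline rounds up to the next power of two (512) for the FFT, and each one-second utterance becomes a 40x49 int8 spectrogram of 1960 values. A standalone compile-time sketch of those relationships (the constants are restated locally here, rather than taken from the headers in this diff):

constexpr int kSampleRateHz = 16000;
constexpr int kWindowMs = 30;
constexpr int kWindowSamples = kSampleRateHz * kWindowMs / 1000;  // 480
static_assert(kWindowSamples == 480, "30 ms at 16 kHz is 480 samples");
static_assert(kWindowSamples <= 512 && 512 < 2 * kWindowSamples,
              "512 is the next power of two above 480");
static_assert(40 * 49 == 1960, "40 features x 49 slices fill the model input");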
- -const int g_no_micro_f9643d42_nohash_4_width = 40; -const int g_no_micro_f9643d42_nohash_4_height = 49; -alignas(16) const signed char g_no_micro_f9643d42_nohash_4_data[] = { - 103, 78, 64, 76, 75, 54, 53, 67, 77, 60, 56, 70, - 76, 71, 68, 58, 74, 32, 23, -2, -18, 11, 13, 15, - 9, 20, 5, -7, -18, -2, -10, -18, -10, -12, 9, 7, - -33, -12, -4, -18, 57, 17, 55, 62, 70, 45, 61, 37, - 67, 52, 48, 47, 55, 46, 57, 47, 73, 17, 27, 20, - 19, 8, 15, -6, -1, 10, -12, -29, -6, -23, -18, -3, - -1, 5, 3, -4, -12, -8, -1, -14, 65, 48, 58, 43, - 48, 19, 39, 39, 57, 57, 58, 55, 67, 58, 49, 50, - 70, 27, 9, 16, 37, 4, 25, 4, 11, 9, 7, -33, - -7, -12, 3, -6, -29, -7, -7, -18, -12, -18, -2, -1, - 0, 31, 60, -8, 51, 59, 70, 40, 71, 57, 52, 38, - 66, 48, 17, 6, 59, 8, 15, 7, 18, 4, 18, -23, - -8, -4, -3, -12, -3, -26, 1, 10, 2, -29, -29, -37, - -7, -4, 6, -33, 67, 44, 59, -4, 64, 51, 68, 55, - 74, 9, 40, 15, 57, 33, 60, 18, 40, 25, 27, -20, - 25, -16, 6, 17, -10, -12, -23, -43, -23, -23, -29, -37, - -4, -16, -16, -60, -20, -23, -10, -29, -12, 15, 12, -37, - 27, 15, 61, 44, 50, 8, 48, 22, 49, -18, 46, 33, - 42, 34, 46, -8, 4, -18, -43, -43, -10, 1, -10, -16, - -10, -77, -16, -33, 11, -26, -23, -37, 0, -8, -16, -29, - 42, 40, 68, 24, 47, 46, 53, -128, 30, 2, 42, 21, - 21, -4, 43, 2, 43, 5, 32, -26, 7, -37, -43, -23, - -2, -8, 2, -37, -50, -60, -1, -7, -33, -77, -6, -18, - -16, -50, -12, -33, 53, 8, 52, 18, 51, 35, 69, 26, - 44, 8, 27, -128, 21, -33, 17, -14, 38, -128, -14, -18, - 17, -20, -14, -37, 8, -60, -33, -33, -33, -43, -12, -29, - -12, -128, -33, -60, -26, -77, -26, -50, 57, 29, 11, 30, - 53, -10, 45, 15, 18, -10, 42, 2, 31, -29, 10, -4, - 42, -37, -50, -128, -4, -43, -20, -77, -14, -26, -33, -128, - -12, -43, -8, -33, -33, -60, -43, -77, -12, -60, -26, -50, - 40, -23, 36, 35, 50, -2, 37, 27, 26, -77, 49, -7, - 28, -43, 6, 11, 41, -37, 33, -26, -14, -12, -6, -33, - -16, -26, -20, -77, -14, -43, -8, -50, -14, -37, -26, -77, - -26, -77, -14, -29, 50, -60, 25, -26, 57, 38, 51, 1, - 50, 1, 53, -18, 30, -23, 11, -128, 18, -43, 20, -26, - -10, -26, -12, -128, -50, -60, -37, -77, -20, -43, -50, -128, - -77, -128, -77, -128, -33, -77, -20, -60, 53, -10, -37, -128, - 10, -128, 60, 18, -8, 13, 37, -37, 8, -128, 3, -77, - 32, -29, 14, 10, -12, -77, -37, -77, -37, -60, -23, -128, - -43, -50, -16, -77, -6, -33, 0, -60, -43, -128, -16, -60, - 20, -2, 51, 19, 43, 2, 63, 20, 60, -4, 42, -50, - 4, -128, 2, -3, 32, -33, -26, -128, -18, -128, -33, -43, - -7, -60, -50, -77, -29, -77, -23, -128, -16, -26, -23, -60, - -37, -77, -37, -128, -1, -33, 39, 48, 60, 5, 8, -128, - 44, 11, 4, 0, 13, -77, -2, -20, 33, -128, -33, -77, - -8, -128, -14, -128, -33, -18, -12, -77, -16, -128, -37, -128, - -12, -77, -60, -128, -23, -60, -23, -128, 36, -50, 46, -128, - 66, 39, 18, -14, -12, -77, -20, -6, 24, -128, 28, -26, - 21, -77, -6, -33, 1, -128, -43, -128, -1, -50, -37, -128, - -50, -128, -33, -128, -18, -128, -60, -8, -7, -60, -60, -128, - -6, -29, 20, -1, 73, 40, -43, -14, 33, -43, 33, -3, - 15, -29, 29, -43, 20, -60, -29, -128, -20, -26, 4, -77, - -16, -60, -33, -50, -29, -128, -60, -128, -77, -128, -37, -50, - 0, -77, -33, -128, 39, 8, 47, 10, 62, 16, 2, 1, - 10, 7, 4, -7, 6, -128, -77, -50, 19, -77, -77, -128, - -77, -128, -50, -128, -60, -60, -33, -50, -37, -128, -128, -128, - -60, -128, -37, -60, -18, -128, -33, -77, 37, 23, 29, -128, - -128, -128, -16, -128, -16, -33, 21, -20, -8, -60, -2, -60, - 11, -128, -50, -128, -50, -128, -29, -77, -16, -128, -26, -128, - -50, -77, -43, -128, -128, -128, -50, -128, -33, -128, -33, -50, - 
-23, -128, 24, -128, -128, -77, 4, -23, 32, -128, 1, -26, - -14, -128, 10, -77, -4, -128, 1, -50, -8, -77, -77, -77, - -23, -128, -50, -43, -33, -128, -43, -128, -128, -128, -43, -128, - -50, -128, -128, -128, 44, 15, 14, -128, 9, -128, 21, 0, - 29, -7, 18, -7, -7, -128, -33, -50, 14, -60, -60, -128, - -60, -128, -37, -128, -43, -128, -20, -128, -50, -128, -43, -77, - -26, -128, -60, -50, -60, -128, -77, -128, -3, -128, 14, -77, - -26, 11, 47, -77, -7, -77, 45, -43, -12, 14, 37, -60, - 22, -4, 5, -77, -14, -128, -10, -60, 22, -77, -12, -60, - -50, -128, -60, -128, -60, -128, -43, -128, -50, -128, -77, -50, - 27, -37, 33, -128, 4, -29, -4, -50, -20, -128, 6, -37, - -33, -128, -50, -128, 34, 15, -43, -128, -20, -50, -3, -37, - -37, -77, -77, -128, -43, -128, -128, -128, 4, -26, -26, 27, - 0, -128, -29, -60, 35, -26, 23, -128, -29, -77, 19, 14, - 28, -128, -16, -7, 31, -1, 17, 11, 60, 44, 8, 11, - 18, -128, -33, -60, -1, -128, -43, -128, -23, -128, -128, -128, - 59, 43, 35, 61, 37, -77, -77, -50, 116, 88, 98, 69, - 78, 53, 78, 40, 48, 7, 29, -18, -2, -14, 5, 12, - 65, 35, 31, -12, 33, -2, -6, -1, 44, -29, -14, -60, - -4, -43, -37, -128, 29, 18, 38, 51, 8, -128, -12, -37, - 115, 91, 113, 77, 89, 36, 60, 44, 49, 36, 27, 31, - 63, 30, 62, 14, 55, 49, 42, 0, 45, 17, -23, 1, - 30, -37, -50, -77, -8, -60, 9, -60, -12, -50, 13, 4, - 23, -6, 28, 13, 107, 78, 101, 73, 89, 46, 63, 17, - 34, -43, -6, 30, 67, 40, 77, 21, 53, 39, 38, 12, - -6, 5, 28, -2, 18, -43, 0, -128, -29, -77, 18, -128, - -2, -77, 39, 35, 38, 35, 50, 29, 100, 70, 94, 69, - 86, 50, 45, 38, 45, 12, 58, 64, 74, 36, 77, 45, - 78, 62, 8, -60, 38, 6, 21, 7, 8, -37, -1, -20, - 48, -37, 8, -10, 8, 13, 45, 39, 38, 22, 49, 25, - 94, 63, 87, 66, 84, -128, 29, 20, 55, 51, 80, 36, - 62, 30, 81, 72, 68, 37, 51, 27, 54, 22, 16, -29, - 4, 9, 57, 15, 35, -43, -77, -20, 4, 6, 37, -1, - 40, 31, 47, 14, 89, 68, 96, 83, 111, 96, 115, 87, - 99, 76, 105, 84, 105, 86, 113, 91, 108, 87, 110, 78, - 80, 46, 22, 74, 88, 72, 103, 86, 80, 68, 48, 24, - 68, 48, 55, 36, 108, 90, 90, 63, 83, 63, 87, 64, - 90, 92, 113, 88, 102, 79, 109, 83, 100, 89, 109, 60, - 56, 21, 75, 62, 81, 45, 63, 73, 93, 65, 94, 80, - 89, 81, 73, 3, 43, 60, 102, 70, 84, 67, 99, 74, - 78, 57, 79, 50, 93, 82, 98, 56, 77, 70, 91, 71, - 85, 82, 86, 13, 45, -18, 48, 40, 53, 28, 85, 60, - 65, 52, 86, 78, 76, 46, 73, 19, 35, 54, 75, 40, - 71, 60, 82, 37, 69, 42, 62, 40, 96, 70, 85, 77, - 70, 68, 103, 84, 94, 69, 81, -128, -128, -128, -43, -37, - 40, 2, 48, 45, 76, 37, 65, 16, 43, 18, 58, 20, - 27, 12, 71, 31, 53, 44, 88, 47, 50, 33, 39, 8, - 89, 57, 88, 69, 72, 63, 100, 68, 81, -77, -10, -128, - -128, -128, -128, -128, 13, -77, 8, 27, 60, 28, 41, -128, - -37, -128, 28, -43, -18, -128, 47, -37, 45, 27, 51, -29, - 15, 39, 52, 30, 49, -33, 65, 15, 76, 71, 90, 19, - 46, -128, -16, -128, -128, -128, -128, -128, -128, -128, -18, -128, - -20, -128, 32, -128, 21, -33, 45, -128, -128, -128, -12, -128, - -6, -14, 43, -128, -128, -128, -128, -128, 52, -18, 69, -43, - 78, 55, 42, -128, -29, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, 14, -128, -16, -128, -128, -128, 7, -128, - -128, -128, -128, -128, -128, -128, 12, -128, -128, -128, -128, -16, - 59, -50, 35, -128, 42, 0, 47, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -33, -128, -23, -128, - -128, -128, -23, -128, -128, -128, -128, -128, -128, -128, -33, -128, - -128, -128, -128, -128, -128, -128, -8, -128, 36, -50, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, 
- -128, -128, -37, -128, -128, -60, -10, -128, -128, -128, -128, -128, - -128, -128, 21, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -12, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -77, -128, -128, -128, -29, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -29, -128, -128, -128, -128, -128, -128, -128, -128, -128, -50, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -}; diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h deleted file mode 100644 index 18faadcf971..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_feature_data_slice.h +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. 
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// This data was extracted from the larger feature data held in
-// yes_micro_features_data.cc and consists of the 26th spectrogram slice of 40
-// values. This is the expected result of running the sample data in
-// yes_30ms_sample_data.cc through the preprocessing pipeline.
-
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_FEATURE_DATA_SLICE_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_FEATURE_DATA_SLICE_H_
-
-#include <cstdint>
-
-constexpr int g_yes_feature_data_slice_size = 40;
-extern const int8_t g_yes_feature_data_slice[];
-
-#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_FEATURE_DATA_SLICE_H_
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc
deleted file mode 100644
index 7ee5387bc81..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.cc
+++ /dev/null
@@ -1,188 +0,0 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h"
-
-// Golden test values for the expected spectrogram from a "yes" sample file
-// speech_commands_test_set_v0.02/yes/f2e59fea_nohash_1.wav.
- -const int g_yes_micro_f2e59fea_nohash_1_width = 40; -const int g_yes_micro_f2e59fea_nohash_1_height = 49; -alignas(16) const signed char g_yes_micro_f2e59fea_nohash_1_data[] = { - 116, 98, 118, 95, 106, 85, 101, 81, 67, -18, -33, -12, - -26, -128, 9, 34, 56, 45, 9, -12, 5, 30, 23, 28, - 0, -18, 0, -128, -60, -50, -50, -37, -60, -60, -50, -26, - -33, -50, -33, -50, 83, 61, 81, 55, 76, 61, 73, 64, - 38, -8, -37, -20, -18, -20, 48, 29, 52, 41, 55, 18, - 25, 37, 44, 37, 8, 15, -6, -60, -128, -50, -37, -37, - -18, -37, -26, -29, -37, -60, -50, -60, 95, 59, 52, -4, - 54, -18, 68, 43, 31, -18, -26, -33, -37, -29, 33, 7, - -3, 8, 26, 24, 36, 6, 36, 23, 14, 8, -29, -37, - -37, -37, -50, -50, -26, -8, -26, -37, -18, -37, -60, -77, - 50, 48, 83, 44, 56, -128, -33, -60, 1, -26, -60, -43, - -14, -23, -18, -43, -26, -33, 13, -77, -43, -77, -33, -37, - 16, -12, -37, -50, -50, -77, -20, -43, -60, -128, -60, -77, - -37, -77, -60, -128, 37, -10, 65, -7, 28, -128, 10, -77, - -37, -128, -77, -128, -77, -43, -128, -128, -77, -128, -128, -128, - -128, -128, -14, -128, -43, -50, -37, -77, -128, -128, -77, -43, - -29, -43, -20, -60, -37, -43, -50, -128, -77, -128, -18, -128, - -60, -128, -128, -128, -77, -128, -77, -128, -128, -128, -60, -37, - -20, -128, -60, -128, -128, -128, -60, -128, -77, -60, -128, -50, - -60, -128, -77, -128, -50, -60, -37, -60, -50, -77, -77, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -37, -128, - -128, -128, -128, -128, -77, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -77, -60, -128, -128, -50, -128, -50, -128, - -50, -128, -77, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -77, -128, -77, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -77, -128, -77, -128, -77, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -77, -128, -128, -128, - -128, -77, -50, -128, -128, -77, -77, -128, -128, -128, -50, -128, - 85, 43, 65, 53, 69, 60, 45, 3, 46, -12, 9, -23, - 32, -1, -128, -128, -128, -128, -1, 37, 38, 33, 43, 36, - 58, 70, 68, 39, 6, 10, 32, 6, 8, -23, -77, -128, - -29, -128, -77, -128, 101, 87, 102, 91, 110, 88, 101, 83, - 110, 95, 111, 83, 81, 84, 106, 90, 93, 82, 98, 91, - 108, 95, 118, 97, 118, 97, 116, 96, 113, 90, 110, 96, - 107, 85, 94, 66, 69, 36, 29, 0, 100, 60, 105, 68, - 92, 93, 113, 92, 107, 85, 107, 83, 104, 91, 105, 85, - 112, 88, 101, 80, 101, 79, 96, 80, 98, 80, 105, 83, - 98, 81, 103, 71, 100, 79, 83, 78, 91, 47, 50, 13, - 108, 81, 93, 78, 98, 76, 105, 76, 98, 40, 77, 72, - 81, 62, 93, 77, 96, 80, 98, 61, 97, 69, 88, 61, - 71, 56, 98, 68, 97, 72, 89, 51, 81, 61, 88, 75, - 86, 56, 48, 13, 71, 22, 84, 66, 76, -7, 48, 61, - 77, 62, 91, 65, 95, 74, 88, 59, 75, 58, 83, 55, - 87, 55, 76, 43, 76, -3, 56, 60, 79, 57, 71, 54, - 82, 33, 74, 71, 91, 45, 18, -7, 61, 56, 77, 41, - 73, 42, 82, 49, 59, 63, 82, 65, 66, 38, 83, 34, - 48, -8, 46, 20, 54, 33, 54, 6, 48, 16, 60, 37, - 58, 22, 58, 14, 65, 53, 75, -4, 42, 16, 16, -50, - 22, -128, 80, 54, 43, -50, 42, -128, -10, -77, 28, -29, - 68, 43, 73, 2, 25, -60, 47, 14, 45, 7, 66, 4, - 62, 37, 71, 7, 46, -10, 44, 22, 55, 53, 57, -29, - 26, -10, -3, -128, 38, -128, 46, -10, 16, -128, -10, -26, - 60, -7, 65, 38, 70, -60, 35, -8, 42, -29, 6, -128, - 34, -128, 36, -60, 44, -12, -2, -128, -7, -60, -60, -128, - -23, -128, 31, -33, 22, -77, -37, -43, -128, -128, 3, -128, - -23, -128, 17, -77, 
43, -77, -7, -128, -20, -128, 17, -43, - 32, -128, -43, -128, -128, -77, 21, -128, -50, -128, -128, -128, - -128, -128, -128, -128, -37, -128, -16, -128, -50, -26, -6, -128, - -128, -128, -128, -128, -23, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -16, -128, 36, -7, 16, -128, -128, -128, -128, -128, - -77, -128, -37, -128, -50, -128, -128, -128, -128, -128, -18, -128, - 11, -128, -16, -77, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -26, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -20, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -50, -128, -77, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -77, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -1, -18, 5, -128, - 40, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, 4, -128, 63, 66, 75, -128, - 70, 60, 34, -128, -128, -128, -128, -128, -128, -128, -128, -128, - 87, 86, 95, 76, 91, 62, 72, -6, -50, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, 64, 83, 104, 70, - 98, 90, 111, 89, 109, 80, 71, -128, -128, -128, -128, -128, - -20, -6, 27, 33, 86, 88, 108, 75, 108, 76, 98, 64, - 75, 61, 71, 66, 85, -1, -77, -128, 46, 61, 92, 69, - 100, 93, 113, 80, 108, 93, 113, 91, 110, 80, 85, 15, - -33, -128, 12, -50, 34, 50, 70, 55, 84, 72, 108, 81, - 111, 88, 100, 80, 84, 73, 97, 86, 99, 65, 85, 43, - 96, 78, 107, 94, 118, 98, 115, 92, 118, 94, 111, 93, - 111, 86, 99, 52, 32, -16, 48, 31, 81, 74, 85, 64, - 78, 64, 98, 70, 110, 92, 96, 73, 100, 72, 94, 73, - 98, 76, 85, 67, 101, 83, 101, 83, 112, 89, 98, 85, - 105, 78, 98, 72, 102, 80, 95, 23, 19, -8, 52, 57, - 103, 91, 95, 65, 74, 8, 77, 49, 96, 76, 100, 87, - 105, 81, 94, 62, 94, 78, 81, 72, 99, 82, 101, 78, - 108, 65, 82, 70, 100, 63, 79, 58, 80, 59, 87, 48, - 50, 57, 93, 67, 86, 80, 103, 56, 77, 31, 81, 57, - 62, 41, 96, 85, 91, 71, 101, 76, 89, 78, 95, 76, - 96, 79, 103, 81, 103, 48, 70, 57, 88, 66, 84, 11, - 85, 67, 104, 37, 38, 67, 90, 54, 81, 62, 90, 52, - 78, -60, 54, -8, 68, 40, 55, 8, 77, 52, 66, 31, - 55, 13, 60, 26, 69, 42, 63, -29, 57, -128, -3, -128, - 3, -128, -29, -60, 52, -43, 63, 56, 86, 75, 95, 75, - 85, 63, 82, 10, 50, -128, 31, -77, 0, -77, -23, -128, - 12, -77, 51, -3, 58, -14, 44, 0, 48, 4, 53, 47, - 28, -128, -128, -128, -37, -128, -3, -128, 49, 61, 100, 90, - 117, 88, 107, 94, 112, 64, 96, 83, -128, -128, 7, -128, - -77, -128, -23, -128, -23, -128, 16, -37, 65, -8, 48, 20, - 14, -77, 57, -18, -43, -128, -128, -128, -128, -128, -128, -128, - 24, 12, 74, 76, 105, 76, 99, 80, 108, 79, 103, 85, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - 42, -128, -8, -128, -50, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -60, -128, -128, 5, 73, 53, 93, 70, 101, 73, - 94, 57, 86, 66, -18, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -50, -128, 36, -128, 
-128, -128, -128, -128, -20, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, 23, 37, - 75, 54, 97, 70, 83, 52, 85, 65, 7, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -43, -128, 23, -128, -43, -128, - -33, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -26, -37, 65, 33, 76, 37, 73, 50, 77, 47, - -12, -128, -128, -128, -128, -128, -128, -128, -128, -128, -7, -14, - -4, -128, -14, -128, 18, -60, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -26, -60, 71, 42, 68, 53, - 81, 49, 73, 36, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -18, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, 15, -26, - 44, -18, 59, 39, 57, 20, 62, 26, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, 49, -128, 30, 8, 69, 27, 62, 38, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -43, -128, 28, -37, 48, -10, - 48, 11, 74, 37, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -77, -128, 11, -128, -7, -60, -77, -4, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -8, -128, -50, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, -128, - -128, -128, -128, -128, -}; diff --git 
a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h b/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h deleted file mode 100644 index cd1ad10888e..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h +++ /dev/null @@ -1,23 +0,0 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ - -extern const int g_yes_micro_f2e59fea_nohash_1_width; -extern const int g_yes_micro_f2e59fea_nohash_1_height; -extern const signed char g_yes_micro_f2e59fea_nohash_1_data[]; - -#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_FEATURES_YES_MICRO_FEATURES_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_model_settings.h b/tensorflow/lite/micro/examples/micro_speech/micro_model_settings.h new file mode 100644 index 00000000000..9d5b04621eb --- /dev/null +++ b/tensorflow/lite/micro/examples/micro_speech/micro_model_settings.h @@ -0,0 +1,37 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_MODEL_SETTINGS_H_ +#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_MODEL_SETTINGS_H_ + +// The following values are derived from values used during model training. +// If you change the way you preprocess the input, update all these constants. +constexpr int kAudioSampleFrequency = 16000; +constexpr int kFeatureSize = 40; +constexpr int kFeatureCount = 49; +constexpr int kFeatureElementCount = (kFeatureSize * kFeatureCount); +constexpr int kFeatureStrideMs = 20; +constexpr int kFeatureDurationMs = 30; + +// Variables for the model's output categories. 
+constexpr int kCategoryCount = 4;
+constexpr const char* kCategoryLabels[kCategoryCount] = {
+    "silence",
+    "unknown",
+    "yes",
+    "no",
+};
+
+#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_MICRO_MODEL_SETTINGS_H_
diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc b/tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc
index 56cb156e739..f31728c3707 100644
--- a/tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc
+++ b/tensorflow/lite/micro/examples/micro_speech/micro_speech_test.cc
@@ -1,4 +1,4 @@
-/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -13,132 +13,275 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
 
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/no_micro_features_data.h"
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/yes_micro_features_data.h"
-#include "tensorflow/lite/micro/examples/micro_speech/micro_speech_model_data.h"
+#include <algorithm>
+#include <cstdint>
+#include <iterator>
+
+#include "tensorflow/lite/core/c/common.h"
+#include "tensorflow/lite/micro/examples/micro_speech/micro_model_settings.h"
+#include "tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_int8_model_data.h"
+#include "tensorflow/lite/micro/examples/micro_speech/models/micro_speech_quantized_model_data.h"
+#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_1000ms_audio_data.h"
+#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h"
+#include "tensorflow/lite/micro/examples/micro_speech/testdata/noise_1000ms_audio_data.h"
+#include "tensorflow/lite/micro/examples/micro_speech/testdata/silence_1000ms_audio_data.h"
+#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_1000ms_audio_data.h"
+#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.h"
 #include "tensorflow/lite/micro/micro_interpreter.h"
 #include "tensorflow/lite/micro/micro_log.h"
 #include "tensorflow/lite/micro/micro_mutable_op_resolver.h"
 #include "tensorflow/lite/micro/testing/micro_test.h"
-#include "tensorflow/lite/schema/schema_generated.h"
 
-TF_LITE_MICRO_TESTS_BEGIN
+#define TF_LITE_MICRO_CHECK_FAIL()    \
+  do {                                \
+    if (micro_test::did_test_fail) {  \
+      return kTfLiteError;            \
+    }                                 \
+  } while (false)
+
+namespace {
+
+// Arena size is a guesstimate, followed by use of
+// MicroInterpreter::arena_used_bytes() on both the AudioPreprocessor and
+// MicroSpeech models and using the larger of the two results.
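One way to arrive at such a value, sketched below under the assumption that a model and op resolver are already in scope (g_trial_arena, kTrialArenaSize, and MeasureArenaUse are names invented for this sketch, not part of the change): allocate against a deliberately oversized trial arena, then read back the high-water mark.

constexpr size_t kTrialArenaSize = 64 * 1024;  // generous first guess
alignas(16) uint8_t g_trial_arena[kTrialArenaSize];

// Returns the arena high-water mark for one model, or 0 if the trial arena
// itself was too small. Run this for both models and keep the larger result,
// as the comment above describes.
size_t MeasureArenaUse(const tflite::Model* model,
                       const tflite::MicroOpResolver& op_resolver) {
  tflite::MicroInterpreter interpreter(model, op_resolver, g_trial_arena,
                                       kTrialArenaSize);
  if (interpreter.AllocateTensors() != kTfLiteOk) {
    return 0;  // enlarge kTrialArenaSize and try again
  }
  return interpreter.arena_used_bytes();
}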
+constexpr size_t kArenaSize = 28584; // xtensa p6 +alignas(16) uint8_t g_arena[kArenaSize]; + +using Features = int8_t[kFeatureCount][kFeatureSize]; +Features g_features; + +constexpr int kAudioSampleDurationCount = + kFeatureDurationMs * kAudioSampleFrequency / 1000; +constexpr int kAudioSampleStrideCount = + kFeatureStrideMs * kAudioSampleFrequency / 1000; + +using MicroSpeechOpResolver = tflite::MicroMutableOpResolver<4>; +using AudioPreprocessorOpResolver = tflite::MicroMutableOpResolver<18>; -TF_LITE_MICRO_TEST(TestInvoke) { +TfLiteStatus RegisterOps(MicroSpeechOpResolver& op_resolver) { + TF_LITE_ENSURE_STATUS(op_resolver.AddReshape()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFullyConnected()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDepthwiseConv2D()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSoftmax()); + return kTfLiteOk; +} + +TfLiteStatus RegisterOps(AudioPreprocessorOpResolver& op_resolver) { + TF_LITE_ENSURE_STATUS(op_resolver.AddReshape()); + TF_LITE_ENSURE_STATUS(op_resolver.AddCast()); + TF_LITE_ENSURE_STATUS(op_resolver.AddStridedSlice()); + TF_LITE_ENSURE_STATUS(op_resolver.AddConcatenation()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMul()); + TF_LITE_ENSURE_STATUS(op_resolver.AddAdd()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDiv()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMinimum()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMaximum()); + TF_LITE_ENSURE_STATUS(op_resolver.AddWindow()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFftAutoScale()); + TF_LITE_ENSURE_STATUS(op_resolver.AddRfft()); + TF_LITE_ENSURE_STATUS(op_resolver.AddEnergy()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBank()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBankSquareRoot()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBankSpectralSubtraction()); + TF_LITE_ENSURE_STATUS(op_resolver.AddPCAN()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBankLog()); + return kTfLiteOk; +} + +TfLiteStatus LoadMicroSpeechModelAndPerformInference( + const Features& features, const char* expected_label) { // Map the model into a usable data structure. This doesn't involve any // copying or parsing, it's a very lightweight operation. - const tflite::Model* model = ::tflite::GetModel(g_micro_speech_model_data); - if (model->version() != TFLITE_SCHEMA_VERSION) { - MicroPrintf( - "Model provided is schema version %d not equal " - "to supported version %d.\n", - model->version(), TFLITE_SCHEMA_VERSION); - } + const tflite::Model* model = + tflite::GetModel(g_micro_speech_quantized_model_data); + TF_LITE_MICRO_EXPECT(model->version() == TFLITE_SCHEMA_VERSION); + TF_LITE_MICRO_CHECK_FAIL(); - // Pull in only the operation implementations we need. - // This relies on a complete list of all the ops needed by this graph. - - tflite::MicroMutableOpResolver<4> micro_op_resolver; - micro_op_resolver.AddDepthwiseConv2D(); - micro_op_resolver.AddFullyConnected(); - micro_op_resolver.AddReshape(); - micro_op_resolver.AddSoftmax(); - - // Create an area of memory to use for input, output, and intermediate arrays. -#if (defined(XTENSA) && defined(VISION_P6)) - constexpr int tensor_arena_size = 28 * 1024; -#elif defined(XTENSA) - constexpr int tensor_arena_size = 15 * 1024; -#elif defined(HEXAGON) - constexpr int tensor_arena_size = 25 * 1024; -#else - constexpr int tensor_arena_size = 10 * 1024; -#endif - alignas(16) uint8_t tensor_arena[tensor_arena_size]; - - // Build an interpreter to run the model with. 
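Note that the template argument on tflite::MicroMutableOpResolver above is its capacity: <4> leaves room for exactly the four operators the MicroSpeech graph uses, and <18> for the preprocessor's. Registration is checked at runtime, not at compile time, which is why each AddXxx() status is propagated with TF_LITE_ENSURE_STATUS. A hypothetical sketch of the failure mode when the capacity is understated (the function and variable names here are invented for illustration):

TfLiteStatus DemonstrateCapacityOverflow() {
  // Room for only three registrations, but the speech model needs four.
  tflite::MicroMutableOpResolver<3> too_small;
  TF_LITE_ENSURE_STATUS(too_small.AddReshape());
  TF_LITE_ENSURE_STATUS(too_small.AddFullyConnected());
  TF_LITE_ENSURE_STATUS(too_small.AddDepthwiseConv2D());
  // With the resolver already full, this Add should fail, and
  // TF_LITE_ENSURE_STATUS propagates the error to the caller.
  TF_LITE_ENSURE_STATUS(too_small.AddSoftmax());
  return kTfLiteOk;
}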
- tflite::MicroInterpreter interpreter(model, micro_op_resolver, tensor_arena,
- tensor_arena_size);
- interpreter.AllocateTensors();
-
- // Get information about the memory area to use for the model's input.
- TfLiteTensor* input = interpreter.input(0);
+ MicroSpeechOpResolver op_resolver;
+ TF_LITE_MICRO_EXPECT(RegisterOps(op_resolver) == kTfLiteOk);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ tflite::MicroInterpreter interpreter(model, op_resolver, g_arena, kArenaSize);
- // Make sure the input has the properties we expect.
+ TF_LITE_MICRO_EXPECT(interpreter.AllocateTensors() == kTfLiteOk);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ MicroPrintf("MicroSpeech model arena size = %u",
+ interpreter.arena_used_bytes());
+
+ TfLiteTensor* input = interpreter.input(0);
 TF_LITE_MICRO_EXPECT(input != nullptr);
- TF_LITE_MICRO_EXPECT_EQ(2, input->dims->size);
- TF_LITE_MICRO_EXPECT_EQ(1, input->dims->data[0]);
- TF_LITE_MICRO_EXPECT_EQ(1960, input->dims->data[1]);
- TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, input->type);
-
- // Copy a spectrogram created from a .wav audio file of someone saying "Yes",
- // into the memory area used for the input.
- const int8_t* yes_features_data = g_yes_micro_f2e59fea_nohash_1_data;
- for (size_t i = 0; i < input->bytes; ++i) {
- input->data.int8[i] = yes_features_data[i];
+ TF_LITE_MICRO_CHECK_FAIL();
+ // check input shape is compatible with our feature data size
+ TF_LITE_MICRO_EXPECT_EQ(kFeatureElementCount,
+ input->dims->data[input->dims->size - 1]);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ TfLiteTensor* output = interpreter.output(0);
+ TF_LITE_MICRO_EXPECT(output != nullptr);
+ TF_LITE_MICRO_CHECK_FAIL();
+ // check output shape is compatible with our number of prediction categories
+ TF_LITE_MICRO_EXPECT_EQ(kCategoryCount,
+ output->dims->data[output->dims->size - 1]);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ float output_scale = output->params.scale;
+ int output_zero_point = output->params.zero_point;
+
+ std::copy_n(&features[0][0], kFeatureElementCount,
+ tflite::GetTensorData<int8_t>(input));
+ TF_LITE_MICRO_EXPECT(interpreter.Invoke() == kTfLiteOk);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ // Dequantize output values
+ float category_predictions[kCategoryCount];
+ MicroPrintf("MicroSpeech category predictions for <%s>", expected_label);
+ for (int i = 0; i < kCategoryCount; i++) {
+ category_predictions[i] =
+ (tflite::GetTensorData<int8_t>(output)[i] - output_zero_point) *
+ output_scale;
+ MicroPrintf(" %.4f %s", static_cast<double>(category_predictions[i]),
+ kCategoryLabels[i]);
 }
+ int prediction_index =
+ std::distance(std::begin(category_predictions),
+ std::max_element(std::begin(category_predictions),
+ std::end(category_predictions)));
+ TF_LITE_MICRO_EXPECT_STRING_EQ(expected_label,
+ kCategoryLabels[prediction_index]);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ return kTfLiteOk;
+}
+
+TfLiteStatus GenerateSingleFeature(const int16_t* audio_data,
+ const int audio_data_size,
+ int8_t* feature_output,
+ tflite::MicroInterpreter* interpreter) {
+ TfLiteTensor* input = interpreter->input(0);
+ TF_LITE_MICRO_EXPECT(input != nullptr);
+ TF_LITE_MICRO_CHECK_FAIL();
+ // check input shape is compatible with our audio sample size
+ TF_LITE_MICRO_EXPECT_EQ(kAudioSampleDurationCount, audio_data_size);
+ TF_LITE_MICRO_CHECK_FAIL();
+ TF_LITE_MICRO_EXPECT_EQ(kAudioSampleDurationCount,
+ input->dims->data[input->dims->size - 1]);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ TfLiteTensor* output = interpreter->output(0);
+ TF_LITE_MICRO_EXPECT(output != nullptr);
+ TF_LITE_MICRO_CHECK_FAIL();
+ // check output shape is compatible with our feature size
+ TF_LITE_MICRO_EXPECT_EQ(kFeatureSize,
+ output->dims->data[output->dims->size - 1]);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ std::copy_n(audio_data, audio_data_size,
+ tflite::GetTensorData<int16_t>(input));
+ TF_LITE_MICRO_EXPECT(interpreter->Invoke() == kTfLiteOk);
+ TF_LITE_MICRO_CHECK_FAIL();
+ std::copy_n(tflite::GetTensorData<int8_t>(output), kFeatureSize,
+ feature_output);
- // Run the model on this input and make sure it succeeds.
- TfLiteStatus invoke_status = interpreter.Invoke();
- if (invoke_status != kTfLiteOk) {
- MicroPrintf("Invoke failed\n");
+ return kTfLiteOk;
+}
+
+TfLiteStatus GenerateFeatures(const int16_t* audio_data,
+ const size_t audio_data_size,
+ Features* features_output) {
+ // Map the model into a usable data structure. This doesn't involve any
+ // copying or parsing, it's a very lightweight operation.
+ const tflite::Model* model =
+ tflite::GetModel(g_audio_preprocessor_int8_model_data);
+ TF_LITE_MICRO_EXPECT(model->version() == TFLITE_SCHEMA_VERSION);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ AudioPreprocessorOpResolver op_resolver;
+ TF_LITE_MICRO_EXPECT(RegisterOps(op_resolver) == kTfLiteOk);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ tflite::MicroInterpreter interpreter(model, op_resolver, g_arena, kArenaSize);
+
+ TF_LITE_MICRO_EXPECT(interpreter.AllocateTensors() == kTfLiteOk);
+ TF_LITE_MICRO_CHECK_FAIL();
+
+ MicroPrintf("AudioPreprocessor model arena size = %u",
+ interpreter.arena_used_bytes());
+
+ size_t remaining_samples = audio_data_size;
+ size_t feature_index = 0;
+ while (remaining_samples >= kAudioSampleDurationCount &&
+ feature_index < kFeatureCount) {
+ TF_LITE_ENSURE_STATUS(
+ GenerateSingleFeature(audio_data, kAudioSampleDurationCount,
+ (*features_output)[feature_index], &interpreter));
+ feature_index++;
+ audio_data += kAudioSampleStrideCount;
+ remaining_samples -= kAudioSampleStrideCount;
 }
- TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status);
-
- // Get the output from the model, and make sure it's the expected size and
- // type.
- TfLiteTensor* output = interpreter.output(0);
- TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size);
- TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]);
- TF_LITE_MICRO_EXPECT_EQ(4, output->dims->data[1]);
- TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type);
-
- // There are four possible classes in the output, each with a score.
- const int kSilenceIndex = 0;
- const int kUnknownIndex = 1;
- const int kYesIndex = 2;
- const int kNoIndex = 3;
-
- // Make sure that the expected "Yes" score is higher than the other classes.
- uint8_t silence_score = output->data.int8[kSilenceIndex] + 128;
- uint8_t unknown_score = output->data.int8[kUnknownIndex] + 128;
- uint8_t yes_score = output->data.int8[kYesIndex] + 128;
- uint8_t no_score = output->data.int8[kNoIndex] + 128;
- TF_LITE_MICRO_EXPECT_GT(yes_score, silence_score);
- TF_LITE_MICRO_EXPECT_GT(yes_score, unknown_score);
- TF_LITE_MICRO_EXPECT_GT(yes_score, no_score);
-
- // Now test with a different input, from a recording of "No".
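Spelled out, the sliding window in GenerateFeatures consumes kFeatureDurationMs (30 ms, 480 samples) per frame and advances by kFeatureStrideMs (20 ms, 320 samples), so a 1000 ms clip yields exactly the 49 frames the model expects. A standalone restatement of that arithmetic (a sketch; the constants are duplicated locally so it compiles on its own):

constexpr int kSr = 16000;                // samples per second
constexpr int kWindow = 30 * kSr / 1000;  // 480 samples per feature frame
constexpr int kStride = 20 * kSr / 1000;  // 320 samples between frames
static_assert(kWindow == 480 && kStride == 320, "window/stride sample counts");
static_assert((kSr - kWindow) / kStride + 1 == 49,
              "one second of audio produces 49 feature frames");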
- const int8_t* no_features_data = g_no_micro_f9643d42_nohash_4_data; - for (size_t i = 0; i < input->bytes; ++i) { - input->data.int8[i] = no_features_data[i]; + return kTfLiteOk; +} + +TfLiteStatus TestAudioSample(const char* label, const int16_t* audio_data, + const size_t audio_data_size) { + TF_LITE_ENSURE_STATUS( + GenerateFeatures(audio_data, audio_data_size, &g_features)); + TF_LITE_ENSURE_STATUS( + LoadMicroSpeechModelAndPerformInference(g_features, label)); + return kTfLiteOk; +} + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +TF_LITE_MICRO_TEST(NoFeatureTest) { + int8_t expected_feature[kFeatureSize] = { + 126, 103, 124, 102, 124, 102, 123, 100, 118, 97, 118, 100, 118, 98, + 121, 100, 121, 98, 117, 91, 96, 74, 54, 87, 100, 87, 109, 92, + 91, 80, 64, 55, 83, 74, 74, 78, 114, 95, 101, 81, + }; + + TF_LITE_ENSURE_STATUS(GenerateFeatures( + g_no_30ms_audio_data, g_no_30ms_audio_data_size, &g_features)); + for (size_t i = 0; i < kFeatureSize; i++) { + TF_LITE_MICRO_EXPECT_EQ(g_features[0][i], expected_feature[i]); + TF_LITE_MICRO_CHECK_FAIL(); } +} + +TF_LITE_MICRO_TEST(YesFeatureTest) { + int8_t expected_feature[kFeatureSize] = { + 124, 105, 126, 103, 125, 101, 123, 100, 116, 98, 115, 97, 113, 90, + 91, 82, 104, 96, 117, 97, 121, 103, 126, 101, 125, 104, 126, 104, + 125, 101, 116, 90, 81, 74, 80, 71, 83, 76, 82, 71, + }; - // Run the model on this "No" input. - invoke_status = interpreter.Invoke(); - if (invoke_status != kTfLiteOk) { - MicroPrintf("Invoke failed\n"); + TF_LITE_ENSURE_STATUS(GenerateFeatures( + g_yes_30ms_audio_data, g_yes_30ms_audio_data_size, &g_features)); + for (size_t i = 0; i < kFeatureSize; i++) { + TF_LITE_MICRO_EXPECT_EQ(g_features[0][i], expected_feature[i]); + TF_LITE_MICRO_CHECK_FAIL(); } - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, invoke_status); - - // Get the output from the model, and make sure it's the expected size and - // type. - output = interpreter.output(0); - TF_LITE_MICRO_EXPECT_EQ(2, output->dims->size); - TF_LITE_MICRO_EXPECT_EQ(1, output->dims->data[0]); - TF_LITE_MICRO_EXPECT_EQ(4, output->dims->data[1]); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt8, output->type); - - // Make sure that the expected "No" score is higher than the other classes. 
- silence_score = output->data.int8[kSilenceIndex] + 128; - unknown_score = output->data.int8[kUnknownIndex] + 128; - yes_score = output->data.int8[kYesIndex] + 128; - no_score = output->data.int8[kNoIndex] + 128; - TF_LITE_MICRO_EXPECT_GT(no_score, silence_score); - TF_LITE_MICRO_EXPECT_GT(no_score, unknown_score); - TF_LITE_MICRO_EXPECT_GT(no_score, yes_score); - - MicroPrintf("Ran successfully\n"); +} + +TF_LITE_MICRO_TEST(NoTest) { + TestAudioSample("no", g_no_1000ms_audio_data, g_no_1000ms_audio_data_size); +} + +TF_LITE_MICRO_TEST(YesTest) { + TestAudioSample("yes", g_yes_1000ms_audio_data, g_yes_1000ms_audio_data_size); +} + +TF_LITE_MICRO_TEST(SilenceTest) { + TestAudioSample("silence", g_silence_1000ms_audio_data, + g_silence_1000ms_audio_data_size); +} + +TF_LITE_MICRO_TEST(NoiseTest) { + TestAudioSample("silence", g_noise_1000ms_audio_data, + g_noise_1000ms_audio_data_size); } TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_float.tflite b/tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_float.tflite new file mode 100644 index 00000000000..8f91ec7839d Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_float.tflite differ diff --git a/tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_int8.tflite b/tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_int8.tflite new file mode 100644 index 00000000000..790087b7669 Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/models/audio_preprocessor_int8.tflite differ diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_speech.tflite b/tensorflow/lite/micro/examples/micro_speech/models/micro_speech_quantized.tflite similarity index 100% rename from tensorflow/lite/micro/examples/micro_speech/micro_speech.tflite rename to tensorflow/lite/micro/examples/micro_speech/models/micro_speech_quantized.tflite diff --git a/tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc b/tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc deleted file mode 100644 index 99edb47f7bc..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/recognize_commands.cc +++ /dev/null @@ -1,139 +0,0 @@ -/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/micro_speech/recognize_commands.h"
-
-#include <limits>
-
-#include "tensorflow/lite/micro/micro_log.h"
-
-RecognizeCommands::RecognizeCommands(int32_t average_window_duration_ms,
- uint8_t detection_threshold,
- int32_t suppression_ms,
- int32_t minimum_count)
- : average_window_duration_ms_(average_window_duration_ms),
- detection_threshold_(detection_threshold),
- suppression_ms_(suppression_ms),
- minimum_count_(minimum_count),
- previous_results_() {
- previous_top_label_ = "silence";
- previous_top_label_time_ = std::numeric_limits<int32_t>::min();
-}
-
-TfLiteStatus RecognizeCommands::ProcessLatestResults(
- const TfLiteTensor* latest_results, const int32_t current_time_ms,
- const char** found_command, uint8_t* score, bool* is_new_command) {
- if ((latest_results->dims->size != 2) ||
- (latest_results->dims->data[0] != 1) ||
- (latest_results->dims->data[1] != kCategoryCount)) {
- MicroPrintf(
- "The results for recognition should contain %d elements, but there are "
- "%d in an %d-dimensional shape",
- kCategoryCount, latest_results->dims->data[1],
- latest_results->dims->size);
- return kTfLiteError;
- }
-
- if (latest_results->type != kTfLiteInt8) {
- MicroPrintf(
- "The results for recognition should be int8_t elements, but are %d",
- latest_results->type);
- return kTfLiteError;
- }
-
- if ((!previous_results_.empty()) &&
- (current_time_ms < previous_results_.front().time_)) {
- MicroPrintf(
- "Results must be fed in increasing time order, but received a "
- "timestamp of %d that was earlier than the previous one of %d",
- current_time_ms, previous_results_.front().time_);
- return kTfLiteError;
- }
-
- // Add the latest results to the head of the queue.
- previous_results_.push_back({current_time_ms, latest_results->data.int8});
-
- // Prune any earlier results that are too old for the averaging window.
- const int64_t time_limit = current_time_ms - average_window_duration_ms_;
- while ((!previous_results_.empty()) &&
- previous_results_.front().time_ < time_limit) {
- previous_results_.pop_front();
- }
-
- // If there are too few results, assume the result will be unreliable and
- // bail.
- const int64_t how_many_results = previous_results_.size();
- const int64_t earliest_time = previous_results_.front().time_;
- const int64_t samples_duration = current_time_ms - earliest_time;
- if ((how_many_results < minimum_count_) ||
- (samples_duration < (average_window_duration_ms_ / 4))) {
- *found_command = previous_top_label_;
- *score = 0;
- *is_new_command = false;
- return kTfLiteOk;
- }
-
- // Calculate the average score across all the results in the window.
- int32_t average_scores[kCategoryCount];
- for (int offset = 0; offset < previous_results_.size(); ++offset) {
- PreviousResultsQueue::Result previous_result =
- previous_results_.from_front(offset);
- const int8_t* scores = previous_result.scores;
- for (int i = 0; i < kCategoryCount; ++i) {
- if (offset == 0) {
- average_scores[i] = scores[i] + 128;
- } else {
- average_scores[i] += scores[i] + 128;
- }
- }
- }
- for (int i = 0; i < kCategoryCount; ++i) {
- average_scores[i] /= how_many_results;
- }
-
- // Find the current highest scoring category.
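The deleted averaging code above offsets each int8_t score by +128, mapping the raw range [-128, 127] onto [0, 255] so that the windowed sums and the uint8_t detection threshold share one unsigned scale. A standalone restatement of that mapping (sketch only):

#include <cstdint>

// Map an int8 logit onto the 0..255 scale used by the averaging code.
constexpr int32_t ToUnsignedScore(int8_t score) { return score + 128; }
static_assert(ToUnsignedScore(-128) == 0, "lowest raw score maps to 0");
static_assert(ToUnsignedScore(127) == 255, "highest raw score maps to 255");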
- int current_top_index = 0;
- int32_t current_top_score = 0;
- for (int i = 0; i < kCategoryCount; ++i) {
- if (average_scores[i] > current_top_score) {
- current_top_score = average_scores[i];
- current_top_index = i;
- }
- }
- const char* current_top_label = kCategoryLabels[current_top_index];
-
- // If we've recently had another label trigger, assume one that occurs too
- // soon afterwards is a bad result.
- int64_t time_since_last_top;
- if ((previous_top_label_ == kCategoryLabels[0]) ||
- (previous_top_label_time_ == std::numeric_limits<int32_t>::min())) {
- time_since_last_top = std::numeric_limits<int64_t>::max();
- } else {
- time_since_last_top = current_time_ms - previous_top_label_time_;
- }
- if ((current_top_score > detection_threshold_) &&
- ((current_top_label != previous_top_label_) ||
- (time_since_last_top > suppression_ms_))) {
- previous_top_label_ = current_top_label;
- previous_top_label_time_ = current_time_ms;
- *is_new_command = true;
- } else {
- *is_new_command = false;
- }
- *found_command = current_top_label;
- *score = current_top_score;
-
- return kTfLiteOk;
-}
diff --git a/tensorflow/lite/micro/examples/micro_speech/recognize_commands.h b/tensorflow/lite/micro/examples/micro_speech/recognize_commands.h
deleted file mode 100644
index 8a5a895698d..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/recognize_commands.h
+++ /dev/null
@@ -1,151 +0,0 @@
-/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_RECOGNIZE_COMMANDS_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_RECOGNIZE_COMMANDS_H_
-
-#include <cstdint>
-
-#include "tensorflow/lite/c/common.h"
-#include "tensorflow/lite/micro/examples/micro_speech/micro_features/micro_model_settings.h"
-#include "tensorflow/lite/micro/micro_log.h"
-
-// Partial implementation of std::deque, just providing the functionality
-// that's needed to keep a record of previous neural network results over a
-// short time period, so they can be averaged together to produce a more
-// accurate overall prediction. This doesn't use any dynamic memory allocation
-// so it's a better fit for microcontroller applications, but this does mean
-// there are hard limits on the number of results it can store.
-class PreviousResultsQueue {
- public:
- PreviousResultsQueue() : front_index_(0), size_(0) {}
-
- // Data structure that holds an inference result, and the time when it
- // was recorded.
- struct Result {
- Result() : time_(0), scores() {}
- Result(int32_t time, int8_t* input_scores) : time_(time) {
- for (int i = 0; i < kCategoryCount; ++i) {
- scores[i] = input_scores[i];
- }
- }
- int32_t time_;
- int8_t scores[kCategoryCount];
- };
-
- int size() { return size_; }
- bool empty() { return size_ == 0; }
- Result& front() { return results_[front_index_]; }
- Result& back() {
- int back_index = front_index_ + (size_ - 1);
- if (back_index >= kMaxResults) {
- back_index -= kMaxResults;
- }
- return results_[back_index];
- }
-
- void push_back(const Result& entry) {
- if (size() >= kMaxResults) {
- MicroPrintf("Couldn't push_back latest result, too many already!");
- return;
- }
- size_ += 1;
- back() = entry;
- }
-
- Result pop_front() {
- if (size() <= 0) {
- MicroPrintf("Couldn't pop_front result, none present!");
- return Result();
- }
- Result result = front();
- front_index_ += 1;
- if (front_index_ >= kMaxResults) {
- front_index_ = 0;
- }
- size_ -= 1;
- return result;
- }
-
- // Most of the functions are duplicates of deque containers, but this
- // is a helper that makes it easy to iterate through the contents of the
- // queue.
- Result& from_front(int offset) {
- if ((offset < 0) || (offset >= size_)) {
- MicroPrintf("Attempt to read beyond the end of the queue!");
- offset = size_ - 1;
- }
- int index = front_index_ + offset;
- if (index >= kMaxResults) {
- index -= kMaxResults;
- }
- return results_[index];
- }
-
- private:
- static constexpr int kMaxResults = 50;
- Result results_[kMaxResults];
-
- int front_index_;
- int size_;
-};
-
-// This class is designed to apply a very primitive decoding model on top of the
-// instantaneous results from running an audio recognition model on a single
-// window of samples. It applies smoothing over time so that noisy individual
-// label scores are averaged, increasing the confidence that apparent matches
-// are real.
-// To use it, you should create a class object with the configuration you
-// want, and then feed results from running a TensorFlow model into the
-// processing method. The timestamp for each subsequent call should be
-// increasing from the previous, since the class is designed to process a stream
-// of data over time.
-class RecognizeCommands {
- public:
- // labels should be a list of the strings associated with each one-hot score.
- // The window duration controls the smoothing. Longer durations will give a
- // higher confidence that the results are correct, but may miss some commands.
- // The detection threshold has a similar effect, with high values increasing
- // the precision at the cost of recall. The minimum count controls how many
- // results need to be in the averaging window before it's seen as a reliable
- // average. This prevents erroneous results, for example when the averaging
- // window is initially being populated. The suppression argument disables
- // further recognitions for a set time after one has been triggered, which can
- // help reduce spurious recognitions.
- explicit RecognizeCommands(int32_t average_window_duration_ms = 1000,
- uint8_t detection_threshold = 200,
- int32_t suppression_ms = 1500,
- int32_t minimum_count = 3);
-
- // Call this with the results of running a model on sample data.
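PreviousResultsQueue above is a fixed-capacity ring buffer: logical offsets that run past the end of the backing array wrap by subtracting kMaxResults rather than by a modulo, which is safe because an offset can never exceed 2 * kMaxResults - 1. For instance, with front_index_ = 48 and size_ = 3, the back element lives at index 48 + 2 - 50 = 0. A standalone restatement of that index math (sketch; kQueueCapacity and WrapIndex are names invented here):

constexpr int kQueueCapacity = 50;  // mirrors PreviousResultsQueue::kMaxResults

// Translate a logical offset from the front into a physical array index.
constexpr int WrapIndex(int front, int offset) {
  const int index = front + offset;
  return index >= kQueueCapacity ? index - kQueueCapacity : index;
}
static_assert(WrapIndex(48, 2) == 0, "offsets wrap past the end of the array");
static_assert(WrapIndex(10, 5) == 15, "offsets within bounds are unchanged");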
- TfLiteStatus ProcessLatestResults(const TfLiteTensor* latest_results, - const int32_t current_time_ms, - const char** found_command, uint8_t* score, - bool* is_new_command); - - private: - // Configuration - int32_t average_window_duration_ms_; - uint8_t detection_threshold_; - int32_t suppression_ms_; - int32_t minimum_count_; - - // Working variables - PreviousResultsQueue previous_results_; - const char* previous_top_label_; - int32_t previous_top_label_time_; -}; - -#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_RECOGNIZE_COMMANDS_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc b/tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc deleted file mode 100644 index 7c1e4c6cfbf..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/recognize_commands_test.cc +++ /dev/null @@ -1,199 +0,0 @@ -/* Copyright 2017 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/recognize_commands.h" - -#include "tensorflow/lite/micro/test_helpers.h" -#include "tensorflow/lite/micro/testing/micro_test.h" - -TF_LITE_MICRO_TESTS_BEGIN - -TF_LITE_MICRO_TEST(PreviousResultsQueueBasic) { - PreviousResultsQueue queue; - TF_LITE_MICRO_EXPECT_EQ(0, queue.size()); - - int8_t scores_a[4] = {0, 0, 0, 1}; - queue.push_back({0, scores_a}); - TF_LITE_MICRO_EXPECT_EQ(1, queue.size()); - TF_LITE_MICRO_EXPECT_EQ(0, queue.front().time_); - TF_LITE_MICRO_EXPECT_EQ(0, queue.back().time_); - - int8_t scores_b[4] = {0, 0, 1, 0}; - queue.push_back({1, scores_b}); - TF_LITE_MICRO_EXPECT_EQ(2, queue.size()); - TF_LITE_MICRO_EXPECT_EQ(0, queue.front().time_); - TF_LITE_MICRO_EXPECT_EQ(1, queue.back().time_); - - PreviousResultsQueue::Result pop_result = queue.pop_front(); - TF_LITE_MICRO_EXPECT_EQ(0, pop_result.time_); - TF_LITE_MICRO_EXPECT_EQ(1, queue.size()); - TF_LITE_MICRO_EXPECT_EQ(1, queue.front().time_); - TF_LITE_MICRO_EXPECT_EQ(1, queue.back().time_); - - int8_t scores_c[4] = {0, 1, 0, 0}; - queue.push_back({2, scores_c}); - TF_LITE_MICRO_EXPECT_EQ(2, queue.size()); - TF_LITE_MICRO_EXPECT_EQ(1, queue.front().time_); - TF_LITE_MICRO_EXPECT_EQ(2, queue.back().time_); -} - -TF_LITE_MICRO_TEST(PreviousResultsQueuePushPop) { - PreviousResultsQueue queue; - TF_LITE_MICRO_EXPECT_EQ(0, queue.size()); - - for (int i = 0; i < 123; ++i) { - int8_t scores[4] = {0, 0, 0, 1}; - queue.push_back({i, scores}); - TF_LITE_MICRO_EXPECT_EQ(1, queue.size()); - TF_LITE_MICRO_EXPECT_EQ(i, queue.front().time_); - TF_LITE_MICRO_EXPECT_EQ(i, queue.back().time_); - - PreviousResultsQueue::Result pop_result = queue.pop_front(); - TF_LITE_MICRO_EXPECT_EQ(i, pop_result.time_); - TF_LITE_MICRO_EXPECT_EQ(0, queue.size()); - } -} - -TF_LITE_MICRO_TEST(RecognizeCommandsTestBasic) { - RecognizeCommands recognize_commands; - - const int8_t result_data[] = {127, -128, -128, -128}; - int result_dims[] = {2, 1, 4}; - TfLiteTensor results 
= tflite::testing::CreateQuantizedTensor( - result_data, tflite::testing::IntArrayFromInts(result_dims), -128.0f, - 127.0f); - - const char* found_command; - uint8_t score; - bool is_new_command; - TF_LITE_MICRO_EXPECT_EQ( - kTfLiteOk, recognize_commands.ProcessLatestResults( - &results, 0, &found_command, &score, &is_new_command)); -} - -TF_LITE_MICRO_TEST(RecognizeCommandsTestFindCommands) { - RecognizeCommands recognize_commands(1000, 51); - - const int8_t yes_data[] = {-128, -128, 127, -128}; - int yes_dims[] = {2, 1, 4}; - TfLiteTensor yes_results = tflite::testing::CreateQuantizedTensor( - yes_data, tflite::testing::IntArrayFromInts(yes_dims), -128.0f, 127.0f); - - bool has_found_new_command = false; - const char* new_command; - for (int i = 0; i < 10; ++i) { - const char* found_command; - uint8_t score; - bool is_new_command; - int32_t current_time_ms = 0 + (i * 100); - TF_LITE_MICRO_EXPECT_EQ( - kTfLiteOk, recognize_commands.ProcessLatestResults( - &yes_results, current_time_ms, &found_command, &score, - &is_new_command)); - if (is_new_command) { - TF_LITE_MICRO_EXPECT(!has_found_new_command); - has_found_new_command = true; - new_command = found_command; - } - } - TF_LITE_MICRO_EXPECT(has_found_new_command); - if (has_found_new_command) { - TF_LITE_MICRO_EXPECT_EQ(0, tflite::testing::TestStrcmp("yes", new_command)); - } - - const int8_t no_data[] = {-128, -128, -128, 127}; - int no_dims[] = {2, 1, 4}; - TfLiteTensor no_results = tflite::testing::CreateQuantizedTensor( - no_data, tflite::testing::IntArrayFromInts(no_dims), -128.0f, 127.0f); - has_found_new_command = false; - new_command = ""; - uint8_t score; - for (int i = 0; i < 10; ++i) { - const char* found_command; - bool is_new_command; - int32_t current_time_ms = 1000 + (i * 100); - TF_LITE_MICRO_EXPECT_EQ( - kTfLiteOk, recognize_commands.ProcessLatestResults( - &no_results, current_time_ms, &found_command, &score, - &is_new_command)); - if (is_new_command) { - TF_LITE_MICRO_EXPECT(!has_found_new_command); - has_found_new_command = true; - new_command = found_command; - } - } - TF_LITE_MICRO_EXPECT(has_found_new_command); - if (has_found_new_command) { - TF_LITE_MICRO_EXPECT_EQ(231, score); - TF_LITE_MICRO_EXPECT_EQ(0, tflite::testing::TestStrcmp("no", new_command)); - } -} - -TF_LITE_MICRO_TEST(RecognizeCommandsTestBadInputLength) { - RecognizeCommands recognize_commands(1000, 51); - - const int8_t bad_data[] = {-128, -128, 127}; - int bad_dims[] = {2, 1, 3}; - TfLiteTensor bad_results = tflite::testing::CreateQuantizedTensor( - bad_data, tflite::testing::IntArrayFromInts(bad_dims), -128.0f, 127.0f); - - const char* found_command; - uint8_t score; - bool is_new_command; - TF_LITE_MICRO_EXPECT_NE( - kTfLiteOk, recognize_commands.ProcessLatestResults( - &bad_results, 0, &found_command, &score, &is_new_command)); -} - -TF_LITE_MICRO_TEST(RecognizeCommandsTestBadInputTimes) { - RecognizeCommands recognize_commands(1000, 51); - - const int8_t result_data[] = {-128, -128, 127, -128}; - int result_dims[] = {2, 1, 4}; - TfLiteTensor results = tflite::testing::CreateQuantizedTensor( - result_data, tflite::testing::IntArrayFromInts(result_dims), -128.0f, - 127.0f); - - const char* found_command; - uint8_t score; - bool is_new_command; - TF_LITE_MICRO_EXPECT_EQ( - kTfLiteOk, recognize_commands.ProcessLatestResults( - &results, 100, &found_command, &score, &is_new_command)); - TF_LITE_MICRO_EXPECT_NE( - kTfLiteOk, recognize_commands.ProcessLatestResults( - &results, 0, &found_command, &score, &is_new_command)); -} - 
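The deleted tests above exercise a decoding scheme that is easy to lose in the diff: per-category scores are averaged over a recent window, a command fires only when the averaged top score clears the detection threshold, and repeats of the same label are suppressed for a hold-off period. As a reviewer aid, here is a minimal standalone sketch of that scheme; `SimpleCommandFilter`, its labels, and its default thresholds are illustrative only, and a cumulative average stands in for the deleted class's sliding window and minimum-count bookkeeping.

```cpp
#include <cstdint>
#include <cstdio>

constexpr int kCategories = 4;
const char* kLabels[kCategories] = {"silence", "unknown", "yes", "no"};

// Hypothetical, simplified stand-in for the deleted RecognizeCommands class.
struct SimpleCommandFilter {
  int32_t sums[kCategories] = {};    // running score totals per category
  int32_t count = 0;                 // number of results accumulated
  const char* last_label = nullptr;  // last label that fired
  int32_t last_time_ms = 0;          // when it fired

  // Feed one inference result; returns the label if a new command fired at
  // `now_ms`, or nullptr otherwise.
  const char* Process(const int8_t scores[kCategories], int32_t now_ms,
                      int32_t threshold = 50, int32_t suppression_ms = 1500) {
    for (int i = 0; i < kCategories; ++i) sums[i] += scores[i];
    ++count;
    int top = 0;
    for (int i = 1; i < kCategories; ++i) {
      if (sums[i] > sums[top]) top = i;
    }
    const int32_t average = sums[top] / count;
    // Fire on a strong average, unless the same label fired too recently.
    if (average > threshold && (kLabels[top] != last_label ||
                                now_ms - last_time_ms > suppression_ms)) {
      last_label = kLabels[top];
      last_time_ms = now_ms;
      return last_label;
    }
    return nullptr;
  }
};

int main() {
  SimpleCommandFilter filter;
  const int8_t yes_scores[kCategories] = {-20, -20, 100, -20};
  for (int32_t t = 0; t < 1000; t += 100) {
    if (const char* command = filter.Process(yes_scores, t)) {
      std::printf("new command '%s' at %d ms\n", command, static_cast<int>(t));
    }
  }
  return 0;
}
```

Run as written, this prints a single "new command 'yes' at 0 ms" line and then stays silent, which is the same once-per-utterance behavior that RecognizeCommandsTestFindCommands checks for above.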
-TF_LITE_MICRO_TEST(RecognizeCommandsTestTooFewInputs) {
-  RecognizeCommands recognize_commands(1000, 51);
-
-  const int8_t result_data[] = {-128, -128, 127, -128};
-  int result_dims[] = {2, 1, 4};
-  TfLiteTensor results = tflite::testing::CreateQuantizedTensor(
-      result_data, tflite::testing::IntArrayFromInts(result_dims), -128.0f,
-      127.0f);
-
-  const char* found_command;
-  uint8_t score;
-  bool is_new_command;
-  TF_LITE_MICRO_EXPECT_EQ(
-      kTfLiteOk, recognize_commands.ProcessLatestResults(
-                     &results, 100, &found_command, &score, &is_new_command));
-  TF_LITE_MICRO_EXPECT_EQ(0, score);
-  TF_LITE_MICRO_EXPECT_EQ(false, is_new_command);
-}
-
-TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/CMSIS/simple_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/CMSIS/simple_features_generator.cc
deleted file mode 100644
index 33c1e248aea..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/simple_features/CMSIS/simple_features_generator.cc
+++ /dev/null
@@ -1,96 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h"
-
-#include "tensorflow/lite/micro/micro_log.h"
-
-extern "C" {
-#define IFFT_FLAG_R 0
-#define BIT_REVERSE_FLAG 1
-#define FFT_SIZE 512
-#define FFT_SIZE_DIV2 256
-#include <arm_math.h>
-
-#include "arm_cmplx_mag_squared_q10p6.h"
-#include "tensorflow/lite/micro/examples/micro_speech/CMSIS/hanning.h"
-}
-
-void quantize(q15_t* bufA, q15_t* bufB, uint8_t* output);
-
-q15_t bufA[FFT_SIZE];
-q15_t bufB[FFT_SIZE];
-arm_rfft_instance_q15 S_arm_fft;
-arm_status arm_math_status;
-
-namespace {
-// These constants allow us to allocate fixed-sized arrays on the stack for our
-// working memory.
-constexpr int kInputSize = 512;
-constexpr int kAverageWindowSize = 6;
-constexpr int kOutputSize =
-    ((kInputSize / 2) + (kAverageWindowSize - 1)) / kAverageWindowSize;
-}  // namespace
-
-TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size,
-                                    int output_size, uint8_t* output) {
-  if (input_size > kInputSize) {
-    MicroPrintf("Input size %d larger than %d", input_size, kInputSize);
-    return kTfLiteError;
-  }
-  if (output_size != kOutputSize) {
-    MicroPrintf("Requested output size %d doesn't match %d", output_size,
-                kOutputSize);
-    return kTfLiteError;
-  }
-
-  // 30ms at 16 kHz = 480 samples
-  // We want to pad the rest of the 512-sample buffer with zeros
-  arm_mult_q15((q15_t*)input, g_hanning, bufB, 480);
-  int i;
-  for (i = 480; i < 512; i++) {
-    bufB[i] = 0;
-  }
-
-  // Should move init code outside of Preprocess() function
-  arm_math_status =
-      arm_rfft_init_q15(&S_arm_fft, FFT_SIZE, IFFT_FLAG_R, BIT_REVERSE_FLAG);
-  arm_rfft_q15(&S_arm_fft, bufB, bufA);
-
-  // The rfft function packs data as follows:
-  // {real[0], real[N/2], real[1], imag[1], ..., real[N/2-1], imag[N/2-1]}
-  // Below we pack as follows:
-  // {real[0], 0, real[1], imag[1], ..., real[N/2-1], imag[N/2-1], real[N/2], 0}
-  bufA[FFT_SIZE_DIV2] = bufA[1];
-  bufA[FFT_SIZE_DIV2 + 1] = 0;
-  bufA[1] = 0;
-  arm_cmplx_mag_squared_q10p6(bufA, bufB, FFT_SIZE_DIV2 + 1);
-
-  quantize(bufA, bufB, output);
-
-  return kTfLiteOk;
-}
-
-void quantize(q15_t* bufA, q15_t* bufB, uint8_t* output) {
-  int i;
-  for (i = 0; i < 42; i++) {
-    arm_mean_q15(bufB + 6 * i, 6, bufA + i);
-  }
-  arm_mean_q15(bufB + 252, 5, bufA + 42);
-
-  for (i = 0; i < 43; i++) {
-    output[i] = (uint8_t)(bufA[i] >> 5);
-  }
-}
diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/fixed_point/simple_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/fixed_point/simple_features_generator.cc
deleted file mode 100644
index 03e8b27af0a..00000000000
--- a/tensorflow/lite/micro/examples/micro_speech/simple_features/fixed_point/simple_features_generator.cc
+++ /dev/null
@@ -1,212 +0,0 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-==============================================================================*/
-
-// Reference implementation of the preprocessing pipeline, with the same
-// results as the audio tutorial at
-// https://www.tensorflow.org/tutorials/sequences/audio_recognition
-// This module takes 30ms of PCM-encoded signed 16-bit audio samples (at
-// 16 kHz, so 480 values), and extracts a power spectrum of frequencies. There
-// are 43 frequency bands in the result, derived from the original 256 outputs
-// from the discrete Fourier transform, and averaged together in groups of 6.
-// It's expected that most platforms will have optimized versions of the
-// functions used here, for example replacing the DFT with an FFT, so this
-// version shouldn't be used where performance is critical.
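An editorial note on the geometry shared by the deleted CMSIS variant above and the fixed-point reference that follows: the 43-band output size is a ceiling division of the spectrum length by the averaging window, not a free-standing constant. A self-contained check (illustrative; not part of the deleted sources):

```cpp
#include <cstdio>

int main() {
  // Geometry from the deleted generators: a 512-point transform over 480
  // samples (30 ms at 16 kHz). The CMSIS variant keeps 257 magnitude bins
  // (DC through Nyquist) and averages 42 full groups of 6 plus a final
  // group of 5, since 42 * 6 + 5 = 257.
  constexpr int kInputSize = 512;
  constexpr int kAverageWindowSize = 6;
  // Ceiling division, exactly as kOutputSize is computed above.
  constexpr int kOutputSize =
      ((kInputSize / 2) + (kAverageWindowSize - 1)) / kAverageWindowSize;
  static_assert(kOutputSize == 43, "43 feature bands per 30 ms window");
  std::printf("%d bins -> %d bands\n", kInputSize / 2 + 1, kOutputSize);
  return 0;
}
```

The fixed-point reference deleted below arrives at the same count through kFeatureSliceSize, clipping its final group at the 256-bin boundary instead of averaging through Nyquist.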
-// This implementation uses fixed point for any non-constant calculations,
-// instead of floating point, to help show how this can work on platforms that
-// don't have good float support.
-
-#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h"
-
-#include <cmath>
-
-#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h"
-#include "tensorflow/lite/micro/micro_log.h"
-
-namespace {
-
-// q format notation: qx.y => 1 sign bit, x-1 integer bits, y fraction bits.
-// Use standard (non-saturating) arithmetic with signed ints of size x+y bits.
-// Sacrifice some precision to avoid use of 64-bit ints.
-
-// q1.15 * q1.15 => q2.30
-inline int32_t Q1_15_FixedMultiply_Q2_30(int16_t a, int16_t b) {
-  int32_t big_a = a;
-  int32_t big_b = b;
-  return big_a * big_b;
-}
-
-// q2.30 * q2.30 => q10.22
-inline int32_t Q2_30_FixedMultiply_Q10_22(int32_t a, int32_t b) {
-  // q2.30 result
-  int32_t tmp = (a >> 15) * (b >> 15);
-  // q10.22 result
-  return tmp >> 8;
-}
-
-// q10.22 * q10.22 => q10.22
-// Will overflow if product is >= 512.
-// Largest product in small test set is 465.25
-inline int32_t Q10_22_FixedMultiply_Q10_22(int32_t a, int32_t b) {
-  // q10.22 result
-  return (a >> 11) * (b >> 11);
-}
-
-// float => q2.30
-// No checking for saturation. Only used for inputs in range [-1, 1].
-inline int32_t FloatToFixed_Q2_30(float input) {
-  return static_cast<int32_t>(roundf(input * (1 << 30)));
-}
-
-// Performs a discrete Fourier transform on the real inputs. This corresponds to
-// rdft() in the FFT package at http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html,
-// and to kiss_fftr() in KISSFFT at https://github.com/mborgerding/kissfft.
-// It takes in an array of float real values, and returns a result of the same
-// length with q10.22 fixed point real and imaginary components interleaved, so
-// fourier_output[0] is the first real value, fourier_output[1] is the first
-// imaginary, fourier_output[2] is the second real, and so on.
-// The calling function should ensure that the array passed in as fourier_output
-// is at least time_series_size in length. Most optimized FFT implementations
-// require the length to be a power of two as well, but this version doesn't
-// enforce that.
-
-// input: q2.30 fixed point. output: q10.22 fixed point.
-// Outputs interpreted as q10.22 fixed point are un-scaled.
-void CalculateDiscreteFourierTransform(int32_t* time_series,
-                                       int time_series_size,
-                                       int32_t* fourier_output) {
-  for (int i = 0; i < time_series_size / 2; ++i) {
-    int32_t real = 0;
-    for (int j = 0; j < time_series_size; ++j) {
-      const int32_t real_scale =
-          FloatToFixed_Q2_30(cos(j * i * M_PI * 2 / time_series_size));
-      real += Q2_30_FixedMultiply_Q10_22(time_series[j], real_scale);
-    }
-    int32_t imaginary = 0;
-    for (int j = 0; j < time_series_size; ++j) {
-      const int32_t imaginary_scale =
-          FloatToFixed_Q2_30(sin(j * i * M_PI * 2 / time_series_size));
-      imaginary -= Q2_30_FixedMultiply_Q10_22(time_series[j], imaginary_scale);
-    }
-    fourier_output[(i * 2) + 0] = real;
-    fourier_output[(i * 2) + 1] = imaginary;
-  }
-}
-
-// Produces a simple sine curve that is used to ensure frequencies at the center
-// of the current sample window are weighted more heavily than those at the end.
-// q1.15 output format.
-void CalculatePeriodicHann(int window_length, int16_t* window_function) {
-  for (int i = 0; i < window_length; ++i) {
-    const float real_value = (0.5 - 0.5 * cos((2 * M_PI * i) / window_length));
-    int tmp = static_cast<int>(roundf(real_value * (1 << 15)));
-    // Saturate the 0x8000 value to 0x7fff
-    if (tmp > 0x7fff) tmp = 0x7fff;
-    window_function[i] = tmp;
-  }
-}
-
-}  // namespace
-
-TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size,
-                                    int output_size, uint8_t* output) {
-  // Ensure our input and output data arrays are valid.
-  if (input_size > kMaxAudioSampleSize) {
-    MicroPrintf("Input size %d larger than %d", input_size,
-                kMaxAudioSampleSize);
-    return kTfLiteError;
-  }
-  if (output_size != kFeatureSliceSize) {
-    MicroPrintf("Requested output size %d doesn't match %d", output_size,
-                kFeatureSliceSize);
-    return kTfLiteError;
-  }
-
-  // Pre-calculate the window function we'll be applying to the input data.
-  // In a real application, we'd calculate this table once in an initialization
-  // function and store it for repeated reuse.
-  // q1.15 format.
-  int16_t window_function[kMaxAudioSampleSize];
-  CalculatePeriodicHann(input_size, window_function);
-
-  // Apply the window function to our time series input, and pad it with zeroes
-  // to the next power of two.
-  int32_t fixed_input[kMaxAudioSampleSize];
-  for (int i = 0; i < kMaxAudioSampleSize; ++i) {
-    if (i < input_size) {
-      // input is int16_t. Treat as q1.15 fixed point value in range [-1,1)
-      // window_function is also a q1.15 fixed point number
-      fixed_input[i] = Q1_15_FixedMultiply_Q2_30(input[i], window_function[i]);
-    } else {
-      fixed_input[i] = 0;
-    }
-  }
-
-  // Pull the frequency data from the time series sample.
-  // Calculated in q10.22 format from q2.30 inputs.
-  int32_t fourier_values[kMaxAudioSampleSize];
-  CalculateDiscreteFourierTransform(fixed_input, kMaxAudioSampleSize,
-                                    fourier_values);
-
-  // We have the complex numbers giving us information about each frequency
-  // band, but all we want to know is how strong each frequency is, so calculate
-  // the squared magnitude by adding together the squares of each component.
-  int32_t power_spectrum[kMaxAudioSampleSize / 2];
-  for (int i = 0; i < (kMaxAudioSampleSize / 2); ++i) {
-    const int32_t real = fourier_values[(i * 2) + 0];
-    const int32_t imaginary = fourier_values[(i * 2) + 1];
-    // q10.22 results
-    power_spectrum[i] = Q10_22_FixedMultiply_Q10_22(real, real) +
-                        Q10_22_FixedMultiply_Q10_22(imaginary, imaginary);
-  }
-
-  // Finally, reduce the size of the output by averaging together six adjacent
-  // frequencies into each slot, producing an array of 43 values.
-  // power_spectrum numbers are q10.22. Divide by kAverageWindowSize inside
-  // loop to prevent overflow.
-  for (int i = 0; i < kFeatureSliceSize; ++i) {
-    int32_t average = 0;
-    for (int j = 0; j < kAverageWindowSize; ++j) {
-      const int index = (i * kAverageWindowSize) + j;
-      if (index < (kMaxAudioSampleSize / 2)) {
-        average += power_spectrum[index] / kAverageWindowSize;
-      }
-    }
-    // Quantize the result into eight bits, effectively multiplying by two.
-    // The 127.5 constant here has to match the features_max value defined in
-    // tensorflow/examples/speech_commands/input_data.py, and this also assumes
-    // that features_min is zero.
- // - // q10.22 input - // integer output - // - // output = (input - features_min) * - // (output_max - output_min) / (features_max - features_min) - // == (input) * (255) / (127.5) - // == input * 2 - // == input << 1 - // Also want to round to nearest integer and only keep integer bits - // => ((input << 1) + 0x200000) >> 22 - // == (input + 0x100000) >> 21 - int32_t quantized_average = (average + 0x100000) >> 21; - if (quantized_average < 0) { - quantized_average = 0; - } - if (quantized_average > 255) { - quantized_average = 255; - } - output[i] = quantized_average; - } - return kTfLiteOk; -} diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/model.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/model.cc deleted file mode 100644 index e8fea5b765b..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/model.cc +++ /dev/null @@ -1,1674 +0,0 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -// This is a standard TensorFlow Lite FlatBuffer model file that has been -// converted into a C data array, so it can be easily compiled into a binary -// for devices that don't have a file system. 
It was created using the command: -// xxd -i model.tflite > model.cc - -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/model.h" - -const unsigned char g_model[] = { - 0x18, 0x00, 0x00, 0x00, 0x54, 0x46, 0x4c, 0x33, 0x00, 0x00, 0x0e, 0x00, - 0x18, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x14, 0x00, - 0x0e, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x08, 0x4d, 0x00, 0x00, - 0x0c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0xf4, 0x47, 0x00, 0x00, 0x0f, 0x00, 0x00, 0x00, - 0x54, 0x4f, 0x43, 0x4f, 0x20, 0x43, 0x6f, 0x6e, 0x76, 0x65, 0x72, 0x74, - 0x65, 0x64, 0x2e, 0x00, 0x09, 0x00, 0x00, 0x00, 0xd4, 0x47, 0x00, 0x00, - 0xb4, 0x47, 0x00, 0x00, 0xe4, 0x02, 0x00, 0x00, 0xb4, 0x02, 0x00, 0x00, - 0xac, 0x02, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x0c, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xb8, 0xb3, 0xff, 0xff, - 0xbc, 0xb3, 0xff, 0xff, 0xc0, 0xb3, 0xff, 0xff, 0x1e, 0xb4, 0xff, 0xff, - 0x04, 0x00, 0x00, 0x00, 0x80, 0x02, 0x00, 0x00, 0x89, 0xa5, 0xe8, 0xc1, - 0xb1, 0x89, 0x5b, 0xc6, 0x4f, 0x9b, 0xd3, 0x74, 0x93, 0x88, 0xff, 0xaf, - 0x89, 0xff, 0xf4, 0x70, 0xcc, 0x75, 0x78, 0xbf, 0x92, 0xcd, 0xa9, 0xa8, - 0xd6, 0x6a, 0x6f, 0x7b, 0x7f, 0xd8, 0xa8, 0xb1, 0xe6, 0x32, 0x21, 0x70, - 0xa0, 0x9c, 0x6f, 0xc8, 0xc6, 0x59, 0x67, 0x93, 0x97, 0xca, 0x3f, 0xde, - 0xcb, 0x74, 0x7c, 0xb5, 0xa4, 0xd9, 0x66, 0xc6, 0x87, 0x98, 0xa5, 0xd0, - 0xbb, 0xb9, 0xc2, 0xb2, 0xaa, 0x79, 0x25, 0xb9, 0x6d, 0x5a, 0xc8, 0x7f, - 0x70, 0x85, 0x79, 0xbc, 0x6a, 0x9b, 0xd1, 0x9a, 0x9c, 0x51, 0x53, 0x71, - 0x89, 0xc0, 0xb4, 0xac, 0xae, 0x47, 0x67, 0x70, 0x79, 0xd2, 0x81, 0xa5, - 0xd2, 0x09, 0x38, 0x82, 0x74, 0xc9, 0x5d, 0xaf, 0xc1, 0x4f, 0x53, 0x99, - 0xcb, 0xb7, 0x3a, 0xba, 0xe8, 0x7f, 0x76, 0xb9, 0xb3, 0xd3, 0x60, 0xc0, - 0x93, 0x9f, 0x87, 0xbd, 0xd0, 0xb8, 0xca, 0xc1, 0xb6, 0x6c, 0x01, 0xc1, - 0x5c, 0x5d, 0xb2, 0x82, 0x76, 0x77, 0x39, 0xbc, 0x72, 0x6a, 0xc3, 0xb4, - 0x79, 0x21, 0x48, 0x42, 0x86, 0xa6, 0xbd, 0xaf, 0xae, 0x23, 0x9c, 0x69, - 0x78, 0xc3, 0x6b, 0xb3, 0xab, 0x43, 0xb2, 0x88, 0x71, 0xc6, 0x6b, 0xbe, - 0xc3, 0x75, 0xc2, 0xc3, 0xa5, 0xcf, 0x32, 0xbe, 0xcb, 0xb0, 0xb8, 0xc1, - 0x9c, 0xcf, 0x64, 0xc4, 0xb4, 0x96, 0xa8, 0xb9, 0xcb, 0xc0, 0xc0, 0xb8, - 0xb8, 0x77, 0x65, 0xc0, 0xc4, 0xb3, 0xc5, 0x77, 0x9b, 0x61, 0xd4, 0xac, - 0x7e, 0x36, 0xb1, 0xae, 0x36, 0x36, 0xb8, 0x39, 0x6b, 0x70, 0x9c, 0xb5, - 0x88, 0x5c, 0xb3, 0x6a, 0xad, 0xc5, 0x7b, 0xb4, 0xad, 0xaa, 0xc4, 0x84, - 0x5e, 0xc4, 0x67, 0xc1, 0xde, 0xba, 0xcf, 0xbd, 0xa0, 0xd3, 0x35, 0xb3, - 0xe7, 0xc8, 0xb8, 0xb8, 0xaf, 0xb4, 0x59, 0xb8, 0xb4, 0xac, 0xac, 0xaa, - 0xc7, 0xad, 0xc8, 0xb6, 0xac, 0x99, 0xa0, 0xcb, 0xc1, 0xc8, 0xcb, 0x89, - 0xc3, 0xac, 0xca, 0x8b, 0x97, 0x1f, 0xbd, 0xbf, 0x13, 0xad, 0xc8, 0x41, - 0x56, 0x3c, 0x86, 0xb2, 0x61, 0xc4, 0xbb, 0x71, 0xba, 0x92, 0x8d, 0xc3, - 0x86, 0xcb, 0xc5, 0x8d, 0x88, 0xc8, 0x6a, 0xbf, 0x9c, 0xcd, 0xcd, 0xc0, - 0x81, 0xb1, 0x47, 0xb5, 0xf0, 0xce, 0xb1, 0xc1, 0xaa, 0xa8, 0x54, 0xcb, - 0xbc, 0xc7, 0xc5, 0x8e, 0xc3, 0xce, 0xc7, 0xb9, 0xb9, 0xa1, 0xc5, 0xbd, - 0xb8, 0xb8, 0xb7, 0x81, 0xb6, 0xba, 0xd2, 0x90, 0xbc, 0x96, 0xbe, 0xba, - 0x53, 0xb5, 0xc7, 0x3c, 0x3c, 0x1f, 0x90, 0xaa, 0x5a, 0xb8, 0xba, 0x7e, - 0xbc, 0x9e, 0xc2, 0xb1, 0x6e, 0xc0, 0xc4, 0x91, 0xf0, 0xb5, 0x60, 0xad, - 0x73, 0xba, 0xcd, 0xba, 0x6e, 0x94, 0x39, 0xb5, 0xe4, 0xbe, 0xb4, 0xb5, - 0xa0, 0xa9, 0x51, 0xac, 0xbc, 0xc2, 0xb3, 0x8a, 0xbd, 0x9a, 0xca, 0xb3, - 0xbf, 0xaf, 0xb5, 0x9a, 0xb9, 0xc3, 0xb6, 0x92, 0xb5, 0xc1, 0xb0, 0x95, - 0xd6, 0xcc, 0xbb, 0xbb, 0xa9, 
0xb9, 0xac, 0x4a, 0x62, 0x27, 0xa7, 0xa7, - 0x30, 0xbd, 0xb1, 0x73, 0xa1, 0x74, 0xc2, 0xb7, 0x58, 0xc0, 0xae, 0x8f, - 0xe1, 0xac, 0x4e, 0xb0, 0x55, 0xc9, 0xc8, 0x9f, 0x83, 0x8e, 0x3e, 0xd5, - 0xb5, 0xbe, 0xcd, 0xb2, 0xa6, 0xc8, 0x64, 0xac, 0xc0, 0xc8, 0xaf, 0x99, - 0xc5, 0x9e, 0xb8, 0xbd, 0xa9, 0xc2, 0xb3, 0x81, 0xb4, 0xc2, 0xb4, 0x8f, - 0xbc, 0xb8, 0x9c, 0x88, 0xbe, 0xc6, 0xbf, 0xba, 0xc8, 0xb4, 0xab, 0x5b, - 0x92, 0x51, 0xb1, 0x9a, 0x44, 0xb9, 0xab, 0x80, 0xa5, 0x3e, 0xc0, 0xa5, - 0x5c, 0xb6, 0xa8, 0xa2, 0xb3, 0x9a, 0x6b, 0xb3, 0x34, 0xc6, 0x7e, 0x96, - 0xcb, 0x88, 0x48, 0xc6, 0xa3, 0xbb, 0xd2, 0xa2, 0xaf, 0xd0, 0x6e, 0xae, - 0xb4, 0xce, 0xc8, 0x8f, 0xd7, 0xad, 0xc8, 0xb0, 0xae, 0xb7, 0xb2, 0x70, - 0xb9, 0xad, 0xc1, 0xa0, 0xcb, 0xa2, 0xb0, 0x9b, 0xbe, 0xd3, 0xca, 0xb6, - 0xbd, 0xaf, 0xa9, 0x82, 0xa1, 0xd7, 0xbc, 0x9b, 0x8b, 0xac, 0xaa, 0xac, - 0xad, 0x37, 0xb7, 0xb6, 0x46, 0xae, 0xa9, 0xbd, 0x6b, 0x90, 0x5e, 0xcd, - 0x23, 0xa4, 0x76, 0xa1, 0xc4, 0x96, 0x50, 0xcc, 0x95, 0x99, 0x93, 0xa7, - 0xb2, 0xe1, 0x7c, 0xbd, 0xbd, 0xb5, 0xbf, 0x9a, 0xca, 0x80, 0xd7, 0xae, - 0x79, 0xa8, 0xaa, 0xb2, 0xbc, 0x51, 0xda, 0xa3, 0x80, 0x8b, 0xa2, 0xc8, - 0xd1, 0x94, 0xe1, 0xc4, 0xbd, 0xae, 0xae, 0xcc, 0xb3, 0xca, 0xd5, 0xa1, - 0xd5, 0xa7, 0xaf, 0xd2, 0xb4, 0x8d, 0xcc, 0xc8, 0x63, 0xa3, 0xa4, 0xdf, - 0x6f, 0x7e, 0x98, 0xdf, 0x1b, 0x7b, 0x43, 0x99, 0xb0, 0x99, 0x71, 0xdb, - 0x63, 0x7b, 0x69, 0x9c, 0xba, 0xcd, 0x90, 0xd0, 0xb6, 0xa6, 0x9e, 0x95, - 0x50, 0xb6, 0xff, 0xff, 0xae, 0xb6, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, - 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0xc7, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x77, 0x00, 0x00, 0x00, - 0xda, 0xb6, 0xff, 0xff, 0x04, 0x00, 0x00, 0x00, 0xc0, 0x44, 0x00, 0x00, - 0x2c, 0x30, 0x38, 0x5a, 0x3d, 0x4c, 0x44, 0x3b, 0x48, 0x48, 0x44, 0x57, - 0x3f, 0x43, 0x45, 0x3a, 0x24, 0x32, 0x21, 0x5c, 0x3f, 0x3a, 0x38, 0x3a, - 0x35, 0x35, 0x2f, 0x51, 0x3c, 0x3a, 0x45, 0x3a, 0x3b, 0x41, 0x39, 0x55, - 0x3c, 0x41, 0x39, 0x44, 0x3a, 0x40, 0x37, 0x48, 0x33, 0x47, 0x36, 0x3e, - 0x3c, 0x41, 0x3f, 0x3e, 0x3e, 0x47, 0x36, 0x3e, 0x41, 0x33, 0x3e, 0x3b, - 0x3a, 0x46, 0x45, 0x40, 0x48, 0x3a, 0x35, 0x4b, 0x45, 0x4d, 0x3c, 0x49, - 0x42, 0x44, 0x3c, 0x4c, 0x3e, 0x3c, 0x44, 0x32, 0x33, 0x41, 0x36, 0x4b, - 0x38, 0x3b, 0x3c, 0x38, 0x3b, 0x45, 0x34, 0x46, 0x40, 0x4e, 0x44, 0x35, - 0x43, 0x36, 0x3d, 0x40, 0x3e, 0x48, 0x40, 0x34, 0x3a, 0x46, 0x45, 0x43, - 0x45, 0x3f, 0x47, 0x37, 0x36, 0x35, 0x44, 0x3a, 0x3e, 0x37, 0x39, 0x40, - 0x3a, 0x3f, 0x3f, 0x4c, 0x3e, 0x41, 0x43, 0x35, 0x3f, 0x3d, 0x3d, 0x4c, - 0x3c, 0x4a, 0x46, 0x3c, 0x3a, 0x41, 0x40, 0x4e, 0x36, 0x47, 0x40, 0x3b, - 0x47, 0x42, 0x38, 0x4d, 0x48, 0x47, 0x3c, 0x3c, 0x33, 0x3b, 0x3e, 0x42, - 0x3f, 0x3e, 0x3a, 0x3d, 0x32, 0x39, 0x41, 0x46, 0x3a, 0x3a, 0x3e, 0x3e, - 0x47, 0x48, 0x4e, 0x36, 0x44, 0x40, 0x41, 0x45, 0x3a, 0x3c, 0x38, 0x55, - 0x2e, 0x26, 0x2f, 0x32, 0x3f, 0x41, 0x3e, 0x4c, 0x45, 0x36, 0x40, 0x31, - 0x17, 0x2e, 0x14, 0x53, 0x34, 0x30, 0x34, 0x3f, 0x2e, 0x44, 0x2b, 0x4e, - 0x34, 0x3e, 0x34, 0x43, 0x3d, 0x35, 0x3f, 0x46, 0x39, 0x40, 0x38, 0x3e, - 0x35, 0x3b, 0x35, 0x45, 0x3d, 0x40, 0x38, 0x37, 0x40, 0x3e, 0x32, 0x3e, - 0x41, 0x39, 0x30, 0x41, 0x3a, 0x32, 0x3e, 0x3d, 0x39, 0x31, 0x33, 0x3e, - 0x41, 0x47, 0x40, 0x47, 0x35, 0x33, 0x3c, 0x32, 0x40, 0x3c, 0x42, 0x49, - 0x34, 0x38, 0x39, 0x37, 0x39, 0x35, 0x40, 0x4d, 0x37, 0x43, 0x42, 0x3e, - 0x3f, 0x3c, 0x3e, 0x51, 0x36, 0x37, 0x42, 0x41, 0x36, 0x31, 0x43, 0x3d, - 0x46, 0x43, 0x37, 0x46, 0x32, 
0x45, 0x42, 0x36, 0x3f, 0x42, 0x42, 0x41, - 0x3d, 0x46, 0x39, 0x41, 0x3c, 0x3f, 0x38, 0x3c, 0x43, 0x43, 0x3d, 0x3c, - 0x3d, 0x41, 0x38, 0x42, 0x3a, 0x3d, 0x43, 0x42, 0x41, 0x40, 0x39, 0x36, - 0x3a, 0x3c, 0x3c, 0x4f, 0x44, 0x36, 0x39, 0x35, 0x46, 0x46, 0x36, 0x4a, - 0x3a, 0x42, 0x43, 0x39, 0x3f, 0x3d, 0x3c, 0x47, 0x38, 0x3f, 0x43, 0x40, - 0x36, 0x3c, 0x45, 0x3b, 0x33, 0x36, 0x3b, 0x39, 0x3c, 0x35, 0x40, 0x38, - 0x40, 0x3e, 0x3f, 0x48, 0x3f, 0x34, 0x40, 0x53, 0x26, 0x2c, 0x29, 0x39, - 0x2a, 0x38, 0x3f, 0x45, 0x32, 0x31, 0x4a, 0x37, 0x1c, 0x28, 0x09, 0x43, - 0x35, 0x3b, 0x33, 0x3c, 0x32, 0x3f, 0x28, 0x41, 0x36, 0x35, 0x3a, 0x37, - 0x41, 0x39, 0x32, 0x3c, 0x40, 0x3c, 0x3c, 0x32, 0x38, 0x39, 0x37, 0x44, - 0x3a, 0x33, 0x41, 0x36, 0x37, 0x3c, 0x35, 0x3a, 0x3d, 0x30, 0x3d, 0x41, - 0x37, 0x3c, 0x45, 0x3a, 0x37, 0x2f, 0x36, 0x3c, 0x3a, 0x3d, 0x39, 0x48, - 0x46, 0x33, 0x3a, 0x3e, 0x40, 0x3d, 0x3b, 0x52, 0x38, 0x45, 0x34, 0x47, - 0x39, 0x36, 0x37, 0x56, 0x42, 0x3f, 0x33, 0x36, 0x38, 0x3f, 0x40, 0x53, - 0x3e, 0x37, 0x3d, 0x3c, 0x48, 0x3a, 0x3d, 0x33, 0x39, 0x40, 0x3e, 0x35, - 0x3d, 0x46, 0x38, 0x36, 0x37, 0x43, 0x3a, 0x3c, 0x40, 0x38, 0x39, 0x3b, - 0x39, 0x3a, 0x42, 0x3d, 0x34, 0x3f, 0x35, 0x43, 0x3a, 0x35, 0x46, 0x3a, - 0x48, 0x38, 0x3b, 0x48, 0x3c, 0x35, 0x42, 0x3d, 0x3a, 0x3d, 0x38, 0x42, - 0x3e, 0x3c, 0x33, 0x39, 0x34, 0x30, 0x42, 0x44, 0x41, 0x3d, 0x3c, 0x39, - 0x3c, 0x3a, 0x39, 0x41, 0x3d, 0x44, 0x3c, 0x40, 0x3f, 0x3e, 0x42, 0x3f, - 0x37, 0x40, 0x39, 0x3b, 0x42, 0x43, 0x49, 0x37, 0x39, 0x46, 0x35, 0x3c, - 0x3e, 0x39, 0x45, 0x52, 0x24, 0x2d, 0x38, 0x35, 0x3a, 0x3a, 0x3c, 0x44, - 0x39, 0x32, 0x51, 0x3f, 0x16, 0x34, 0x0a, 0x49, 0x39, 0x38, 0x39, 0x3e, - 0x2f, 0x36, 0x24, 0x3f, 0x37, 0x34, 0x38, 0x3b, 0x34, 0x34, 0x30, 0x3b, - 0x3d, 0x36, 0x35, 0x42, 0x33, 0x40, 0x37, 0x35, 0x43, 0x3f, 0x3f, 0x39, - 0x3a, 0x43, 0x36, 0x3e, 0x39, 0x3d, 0x3f, 0x3d, 0x47, 0x3b, 0x39, 0x37, - 0x35, 0x42, 0x3f, 0x3b, 0x41, 0x3a, 0x42, 0x4b, 0x3d, 0x3f, 0x3d, 0x3e, - 0x38, 0x3b, 0x34, 0x4e, 0x3f, 0x39, 0x36, 0x43, 0x39, 0x35, 0x41, 0x4d, - 0x3c, 0x39, 0x43, 0x33, 0x37, 0x3b, 0x41, 0x48, 0x3c, 0x3f, 0x39, 0x32, - 0x35, 0x3d, 0x42, 0x35, 0x3d, 0x3e, 0x37, 0x3b, 0x38, 0x3a, 0x44, 0x36, - 0x42, 0x35, 0x48, 0x40, 0x3a, 0x44, 0x44, 0x39, 0x43, 0x41, 0x3c, 0x37, - 0x47, 0x3b, 0x42, 0x42, 0x45, 0x3a, 0x40, 0x46, 0x35, 0x3f, 0x3a, 0x48, - 0x35, 0x44, 0x3f, 0x37, 0x33, 0x3e, 0x45, 0x49, 0x39, 0x43, 0x47, 0x37, - 0x3f, 0x3f, 0x3b, 0x44, 0x38, 0x3d, 0x39, 0x42, 0x37, 0x3e, 0x40, 0x45, - 0x3b, 0x3f, 0x40, 0x34, 0x42, 0x3f, 0x43, 0x3c, 0x43, 0x41, 0x38, 0x38, - 0x38, 0x41, 0x55, 0x33, 0x33, 0x39, 0x39, 0x3c, 0x35, 0x39, 0x38, 0x42, - 0x27, 0x26, 0x32, 0x41, 0x41, 0x32, 0x3f, 0x47, 0x3a, 0x38, 0x48, 0x37, - 0x11, 0x27, 0x08, 0x49, 0x35, 0x42, 0x3c, 0x2e, 0x34, 0x43, 0x25, 0x3b, - 0x3a, 0x33, 0x37, 0x30, 0x3c, 0x36, 0x2d, 0x3c, 0x3b, 0x39, 0x3b, 0x40, - 0x46, 0x3a, 0x30, 0x42, 0x35, 0x32, 0x36, 0x3a, 0x3a, 0x34, 0x34, 0x33, - 0x3d, 0x30, 0x3b, 0x42, 0x41, 0x3f, 0x3d, 0x3b, 0x44, 0x3d, 0x41, 0x41, - 0x3d, 0x3f, 0x40, 0x51, 0x42, 0x42, 0x36, 0x45, 0x30, 0x40, 0x32, 0x4f, - 0x3a, 0x3c, 0x40, 0x39, 0x3d, 0x3b, 0x3e, 0x4b, 0x3d, 0x37, 0x42, 0x46, - 0x40, 0x40, 0x47, 0x3d, 0x35, 0x3c, 0x3f, 0x46, 0x37, 0x37, 0x3a, 0x2e, - 0x3d, 0x3c, 0x3a, 0x46, 0x3a, 0x44, 0x3c, 0x3a, 0x32, 0x44, 0x31, 0x41, - 0x43, 0x36, 0x49, 0x39, 0x3d, 0x37, 0x3f, 0x41, 0x3b, 0x3b, 0x3c, 0x42, - 0x3c, 0x34, 0x3f, 0x3b, 0x40, 0x3e, 0x48, 0x47, 0x3e, 0x3c, 0x38, 0x39, - 0x3f, 0x35, 0x39, 0x3f, 0x3e, 0x3e, 0x3b, 0x43, 0x41, 0x40, 0x43, 0x41, - 0x3f, 0x37, 0x39, 0x41, 0x46, 
0x32, 0x3d, 0x41, 0x36, 0x3f, 0x3e, 0x3f, - 0x36, 0x48, 0x43, 0x3d, 0x43, 0x3f, 0x34, 0x3d, 0x34, 0x35, 0x4f, 0x32, - 0x3c, 0x3f, 0x3d, 0x3f, 0x39, 0x3c, 0x3d, 0x47, 0x23, 0x36, 0x33, 0x45, - 0x37, 0x2e, 0x42, 0x42, 0x39, 0x34, 0x4f, 0x3f, 0x19, 0x2b, 0x01, 0x50, - 0x35, 0x3f, 0x37, 0x3c, 0x33, 0x35, 0x25, 0x32, 0x38, 0x3e, 0x40, 0x40, - 0x2f, 0x38, 0x35, 0x3d, 0x31, 0x42, 0x44, 0x3c, 0x3a, 0x3d, 0x2d, 0x3e, - 0x3b, 0x3e, 0x3d, 0x31, 0x3b, 0x37, 0x35, 0x31, 0x36, 0x35, 0x34, 0x31, - 0x41, 0x3a, 0x33, 0x32, 0x3c, 0x31, 0x3e, 0x3d, 0x40, 0x3b, 0x34, 0x45, - 0x36, 0x39, 0x3e, 0x3f, 0x3c, 0x45, 0x37, 0x4b, 0x42, 0x3d, 0x33, 0x43, - 0x3e, 0x40, 0x35, 0x4e, 0x38, 0x36, 0x3a, 0x33, 0x38, 0x44, 0x3f, 0x3c, - 0x3f, 0x40, 0x3a, 0x3c, 0x3c, 0x3c, 0x44, 0x29, 0x3a, 0x40, 0x35, 0x3a, - 0x3d, 0x48, 0x3b, 0x30, 0x45, 0x41, 0x45, 0x40, 0x37, 0x32, 0x3a, 0x35, - 0x3f, 0x38, 0x3b, 0x43, 0x3b, 0x3f, 0x33, 0x40, 0x3b, 0x40, 0x38, 0x33, - 0x39, 0x3c, 0x3c, 0x3f, 0x43, 0x33, 0x43, 0x40, 0x43, 0x3d, 0x33, 0x42, - 0x40, 0x32, 0x3e, 0x36, 0x40, 0x38, 0x43, 0x40, 0x44, 0x38, 0x34, 0x3c, - 0x3e, 0x39, 0x47, 0x43, 0x40, 0x3b, 0x3f, 0x3f, 0x3c, 0x3b, 0x4b, 0x33, - 0x36, 0x49, 0x32, 0x41, 0x48, 0x45, 0x57, 0x3a, 0x40, 0x42, 0x40, 0x46, - 0x36, 0x35, 0x3c, 0x46, 0x22, 0x2e, 0x33, 0x3e, 0x3c, 0x39, 0x44, 0x4d, - 0x3f, 0x41, 0x51, 0x44, 0x15, 0x2e, 0x02, 0x4e, 0x39, 0x3a, 0x3c, 0x35, - 0x30, 0x38, 0x1e, 0x31, 0x40, 0x3b, 0x39, 0x3d, 0x3a, 0x37, 0x35, 0x36, - 0x46, 0x36, 0x3c, 0x3e, 0x39, 0x3e, 0x32, 0x40, 0x3b, 0x35, 0x42, 0x41, - 0x41, 0x38, 0x41, 0x35, 0x42, 0x36, 0x3c, 0x42, 0x3d, 0x41, 0x35, 0x31, - 0x3f, 0x44, 0x3e, 0x41, 0x3f, 0x35, 0x42, 0x4b, 0x3e, 0x36, 0x37, 0x34, - 0x36, 0x3d, 0x40, 0x49, 0x41, 0x3e, 0x3d, 0x3b, 0x38, 0x37, 0x40, 0x47, - 0x35, 0x32, 0x43, 0x38, 0x36, 0x3b, 0x33, 0x47, 0x33, 0x34, 0x3d, 0x47, - 0x3c, 0x37, 0x3d, 0x2b, 0x3a, 0x36, 0x3b, 0x3d, 0x43, 0x38, 0x35, 0x32, - 0x32, 0x37, 0x43, 0x36, 0x3f, 0x48, 0x38, 0x30, 0x3a, 0x3c, 0x42, 0x34, - 0x37, 0x3c, 0x37, 0x40, 0x48, 0x3e, 0x35, 0x3b, 0x3f, 0x38, 0x39, 0x3e, - 0x37, 0x35, 0x36, 0x3d, 0x3b, 0x3c, 0x40, 0x3d, 0x34, 0x40, 0x46, 0x42, - 0x3f, 0x3c, 0x3c, 0x3e, 0x40, 0x40, 0x3d, 0x3f, 0x3f, 0x44, 0x46, 0x41, - 0x32, 0x43, 0x40, 0x41, 0x3c, 0x42, 0x39, 0x38, 0x48, 0x44, 0x3d, 0x38, - 0x34, 0x40, 0x4e, 0x31, 0x3c, 0x42, 0x39, 0x48, 0x3c, 0x33, 0x3e, 0x40, - 0x20, 0x27, 0x39, 0x45, 0x45, 0x36, 0x47, 0x4c, 0x35, 0x3e, 0x4a, 0x36, - 0x16, 0x2f, 0x04, 0x4f, 0x3a, 0x35, 0x36, 0x3a, 0x2d, 0x36, 0x21, 0x34, - 0x3b, 0x32, 0x3d, 0x3c, 0x3c, 0x3f, 0x3b, 0x3b, 0x41, 0x46, 0x40, 0x3d, - 0x3b, 0x44, 0x33, 0x42, 0x34, 0x33, 0x3e, 0x45, 0x3f, 0x46, 0x39, 0x33, - 0x3b, 0x37, 0x37, 0x37, 0x42, 0x47, 0x3c, 0x35, 0x31, 0x41, 0x44, 0x3a, - 0x3b, 0x33, 0x39, 0x44, 0x42, 0x33, 0x3d, 0x3f, 0x43, 0x33, 0x41, 0x4a, - 0x35, 0x46, 0x36, 0x3e, 0x39, 0x41, 0x41, 0x4c, 0x34, 0x3d, 0x38, 0x33, - 0x3c, 0x3f, 0x43, 0x44, 0x37, 0x35, 0x35, 0x3c, 0x43, 0x34, 0x3e, 0x2d, - 0x3f, 0x35, 0x38, 0x3c, 0x33, 0x35, 0x43, 0x2a, 0x40, 0x33, 0x34, 0x40, - 0x3d, 0x38, 0x36, 0x2d, 0x36, 0x3c, 0x43, 0x3d, 0x37, 0x3d, 0x39, 0x38, - 0x3b, 0x3e, 0x3c, 0x46, 0x35, 0x35, 0x43, 0x44, 0x39, 0x40, 0x34, 0x39, - 0x3d, 0x34, 0x40, 0x45, 0x38, 0x35, 0x3e, 0x39, 0x3c, 0x44, 0x48, 0x44, - 0x41, 0x3e, 0x3c, 0x45, 0x3a, 0x3c, 0x3c, 0x46, 0x3a, 0x40, 0x39, 0x43, - 0x35, 0x35, 0x3e, 0x45, 0x3a, 0x34, 0x3c, 0x39, 0x46, 0x3a, 0x4f, 0x35, - 0x32, 0x3d, 0x36, 0x41, 0x32, 0x38, 0x3f, 0x45, 0x2d, 0x34, 0x2a, 0x35, - 0x43, 0x3f, 0x41, 0x49, 0x41, 0x3c, 0x4b, 0x3f, 0x17, 0x31, 0x02, 0x4f, - 0x30, 0x38, 0x39, 0x40, 0x33, 
0x3a, 0x25, 0x38, 0x35, 0x3c, 0x39, 0x35, - 0x34, 0x41, 0x34, 0x43, 0x40, 0x40, 0x46, 0x3d, 0x40, 0x38, 0x3f, 0x3b, - 0x35, 0x39, 0x3c, 0x39, 0x34, 0x38, 0x3f, 0x36, 0x3a, 0x38, 0x44, 0x3f, - 0x3f, 0x38, 0x3c, 0x33, 0x41, 0x42, 0x38, 0x33, 0x3c, 0x3b, 0x3c, 0x46, - 0x38, 0x3b, 0x3f, 0x33, 0x3f, 0x48, 0x3b, 0x49, 0x3f, 0x3a, 0x3d, 0x3f, - 0x47, 0x3d, 0x30, 0x45, 0x36, 0x42, 0x3d, 0x36, 0x43, 0x38, 0x3b, 0x3d, - 0x3c, 0x30, 0x3b, 0x43, 0x3d, 0x41, 0x34, 0x2e, 0x43, 0x3d, 0x43, 0x46, - 0x43, 0x3c, 0x3c, 0x2e, 0x3c, 0x43, 0x34, 0x43, 0x3e, 0x43, 0x3f, 0x2b, - 0x45, 0x40, 0x3a, 0x43, 0x36, 0x39, 0x3f, 0x3d, 0x3a, 0x3c, 0x35, 0x3b, - 0x36, 0x3f, 0x45, 0x3e, 0x45, 0x40, 0x3f, 0x36, 0x45, 0x42, 0x35, 0x3e, - 0x3a, 0x3a, 0x3f, 0x40, 0x3e, 0x3c, 0x39, 0x46, 0x43, 0x3e, 0x3f, 0x3f, - 0x40, 0x3c, 0x40, 0x4b, 0x41, 0x35, 0x3b, 0x3e, 0x49, 0x32, 0x3e, 0x41, - 0x31, 0x37, 0x3d, 0x3b, 0x3f, 0x45, 0x50, 0x3a, 0x3f, 0x3c, 0x44, 0x36, - 0x43, 0x37, 0x3d, 0x4b, 0x29, 0x39, 0x2f, 0x38, 0x45, 0x36, 0x40, 0x4e, - 0x39, 0x3f, 0x48, 0x43, 0x23, 0x3c, 0x06, 0x51, 0x37, 0x3b, 0x3e, 0x3b, - 0x28, 0x45, 0x2b, 0x37, 0x3f, 0x33, 0x3f, 0x41, 0x31, 0x36, 0x33, 0x3a, - 0x3a, 0x35, 0x3b, 0x33, 0x3e, 0x36, 0x35, 0x40, 0x3a, 0x34, 0x3a, 0x38, - 0x34, 0x3a, 0x3a, 0x34, 0x42, 0x45, 0x40, 0x3e, 0x40, 0x38, 0x39, 0x34, - 0x38, 0x37, 0x3f, 0x3e, 0x3c, 0x32, 0x3f, 0x46, 0x3f, 0x44, 0x3b, 0x3e, - 0x44, 0x45, 0x36, 0x3e, 0x36, 0x3f, 0x3b, 0x40, 0x39, 0x34, 0x38, 0x41, - 0x42, 0x3e, 0x3d, 0x47, 0x3e, 0x45, 0x33, 0x40, 0x3e, 0x3a, 0x44, 0x3d, - 0x3c, 0x3a, 0x3a, 0x2c, 0x3a, 0x3d, 0x35, 0x45, 0x3c, 0x41, 0x36, 0x30, - 0x32, 0x32, 0x3a, 0x3b, 0x35, 0x3c, 0x43, 0x2d, 0x35, 0x3f, 0x41, 0x37, - 0x3f, 0x46, 0x34, 0x39, 0x3c, 0x43, 0x40, 0x3e, 0x3e, 0x36, 0x3e, 0x3c, - 0x37, 0x3a, 0x3d, 0x3a, 0x3c, 0x38, 0x44, 0x41, 0x3f, 0x3b, 0x3c, 0x47, - 0x40, 0x3b, 0x41, 0x47, 0x3e, 0x45, 0x39, 0x3e, 0x37, 0x45, 0x4b, 0x4c, - 0x37, 0x37, 0x37, 0x3c, 0x3c, 0x3d, 0x40, 0x38, 0x39, 0x3e, 0x43, 0x3f, - 0x38, 0x45, 0x51, 0x3c, 0x31, 0x34, 0x3b, 0x48, 0x46, 0x41, 0x40, 0x40, - 0x2c, 0x39, 0x32, 0x42, 0x3c, 0x2e, 0x49, 0x4d, 0x3c, 0x3f, 0x45, 0x38, - 0x20, 0x38, 0x03, 0x55, 0x33, 0x3e, 0x32, 0x39, 0x32, 0x3b, 0x24, 0x2b, - 0x42, 0x35, 0x45, 0x32, 0x2e, 0x3b, 0x2f, 0x3f, 0x3c, 0x37, 0x39, 0x3b, - 0x34, 0x34, 0x3d, 0x36, 0x3d, 0x39, 0x3b, 0x30, 0x3c, 0x3e, 0x40, 0x32, - 0x3d, 0x3c, 0x3c, 0x3e, 0x33, 0x33, 0x3f, 0x3a, 0x33, 0x3e, 0x46, 0x36, - 0x3a, 0x3d, 0x40, 0x40, 0x3f, 0x41, 0x3a, 0x42, 0x34, 0x32, 0x34, 0x46, - 0x3b, 0x31, 0x40, 0x37, 0x37, 0x32, 0x3e, 0x47, 0x3f, 0x3b, 0x3e, 0x43, - 0x49, 0x45, 0x3a, 0x3d, 0x3e, 0x44, 0x40, 0x31, 0x39, 0x3e, 0x3b, 0x2d, - 0x3b, 0x3a, 0x33, 0x3d, 0x39, 0x37, 0x3e, 0x32, 0x41, 0x3c, 0x3a, 0x37, - 0x3b, 0x40, 0x39, 0x2f, 0x3e, 0x3f, 0x47, 0x32, 0x3e, 0x3b, 0x3e, 0x3e, - 0x40, 0x3e, 0x40, 0x3c, 0x41, 0x39, 0x38, 0x46, 0x45, 0x32, 0x47, 0x31, - 0x36, 0x47, 0x37, 0x49, 0x3a, 0x3f, 0x47, 0x3a, 0x41, 0x3b, 0x3c, 0x4f, - 0x3e, 0x36, 0x3b, 0x47, 0x35, 0x39, 0x41, 0x4e, 0x3d, 0x3e, 0x3b, 0x46, - 0x38, 0x39, 0x3b, 0x45, 0x3e, 0x3f, 0x44, 0x42, 0x44, 0x3f, 0x55, 0x3b, - 0x41, 0x3d, 0x43, 0x43, 0x37, 0x3f, 0x3d, 0x4c, 0x28, 0x3d, 0x36, 0x3c, - 0x3e, 0x3e, 0x48, 0x50, 0x3e, 0x39, 0x45, 0x41, 0x22, 0x37, 0x07, 0x4f, - 0x2e, 0x33, 0x38, 0x3f, 0x31, 0x3a, 0x1b, 0x36, 0x34, 0x38, 0x3c, 0x37, - 0x37, 0x3e, 0x36, 0x35, 0x36, 0x3b, 0x3d, 0x38, 0x42, 0x48, 0x3d, 0x40, - 0x40, 0x44, 0x3d, 0x39, 0x37, 0x3b, 0x3d, 0x33, 0x3d, 0x35, 0x42, 0x3c, - 0x39, 0x3e, 0x43, 0x2d, 0x3c, 0x40, 0x43, 0x43, 0x45, 0x35, 0x3c, 0x44, - 0x34, 0x3c, 0x3d, 0x31, 0x39, 
0x40, 0x39, 0x3d, 0x3e, 0x34, 0x3e, 0x3b, - 0x40, 0x38, 0x42, 0x4a, 0x40, 0x3b, 0x35, 0x3d, 0x36, 0x38, 0x35, 0x42, - 0x3c, 0x3c, 0x3d, 0x3b, 0x38, 0x39, 0x45, 0x28, 0x3a, 0x37, 0x37, 0x35, - 0x3a, 0x3d, 0x35, 0x2a, 0x3c, 0x3f, 0x37, 0x34, 0x37, 0x3f, 0x3e, 0x2b, - 0x39, 0x43, 0x3b, 0x45, 0x35, 0x36, 0x36, 0x42, 0x33, 0x38, 0x3b, 0x35, - 0x31, 0x3f, 0x41, 0x41, 0x3c, 0x41, 0x45, 0x42, 0x3b, 0x3c, 0x39, 0x46, - 0x3c, 0x3e, 0x3a, 0x41, 0x39, 0x3d, 0x41, 0x4b, 0x40, 0x3f, 0x43, 0x3d, - 0x39, 0x39, 0x44, 0x44, 0x37, 0x42, 0x3f, 0x44, 0x3e, 0x37, 0x42, 0x35, - 0x44, 0x3f, 0x40, 0x42, 0x3f, 0x3a, 0x47, 0x3d, 0x38, 0x3a, 0x3b, 0x3a, - 0x42, 0x36, 0x3a, 0x97, 0x32, 0x31, 0x30, 0x36, 0x47, 0x3e, 0x46, 0x51, - 0x42, 0x34, 0x50, 0x34, 0x26, 0x3b, 0x06, 0x55, 0x3c, 0x3b, 0x2d, 0x3a, - 0x37, 0x37, 0x1b, 0x32, 0x39, 0x3d, 0x36, 0x40, 0x3b, 0x3f, 0x33, 0x33, - 0x3d, 0x37, 0x35, 0x37, 0x44, 0x3f, 0x35, 0x39, 0x33, 0x3c, 0x43, 0x39, - 0x3f, 0x42, 0x3e, 0x34, 0x38, 0x38, 0x39, 0x3c, 0x48, 0x3c, 0x2f, 0x30, - 0x40, 0x3c, 0x41, 0x3e, 0x3f, 0x3e, 0x36, 0x43, 0x40, 0x3c, 0x36, 0x43, - 0x43, 0x38, 0x3a, 0x47, 0x3e, 0x37, 0x39, 0x3a, 0x43, 0x45, 0x38, 0x43, - 0x3b, 0x45, 0x37, 0x44, 0x36, 0x45, 0x3a, 0x3e, 0x3e, 0x3e, 0x3d, 0x33, - 0x39, 0x36, 0x48, 0x33, 0x30, 0x42, 0x33, 0x39, 0x37, 0x3a, 0x3f, 0x34, - 0x34, 0x40, 0x40, 0x40, 0x3f, 0x3d, 0x3f, 0x33, 0x41, 0x40, 0x3b, 0x43, - 0x3b, 0x3a, 0x40, 0x3a, 0x38, 0x3e, 0x38, 0x3b, 0x38, 0x42, 0x40, 0x40, - 0x41, 0x35, 0x37, 0x38, 0x3b, 0x3c, 0x39, 0x4b, 0x32, 0x39, 0x42, 0x3c, - 0x36, 0x3d, 0x32, 0x52, 0x3a, 0x31, 0x40, 0x40, 0x3a, 0x43, 0x3d, 0x46, - 0x3c, 0x3e, 0x3e, 0x33, 0x3f, 0x41, 0x4d, 0x37, 0x39, 0x39, 0x3e, 0x3b, - 0x40, 0x39, 0x53, 0x2d, 0x46, 0x3c, 0x32, 0x42, 0x3d, 0x40, 0x40, 0x4d, - 0x2e, 0x34, 0x39, 0x3b, 0x46, 0x3b, 0x42, 0x4f, 0x3d, 0x39, 0x4e, 0x36, - 0x1a, 0x31, 0x0e, 0x56, 0x36, 0x42, 0x38, 0x44, 0x36, 0x3a, 0x20, 0x30, - 0x36, 0x34, 0x37, 0x38, 0x40, 0x41, 0x2a, 0x35, 0x3b, 0x3b, 0x3a, 0x38, - 0x33, 0x39, 0x36, 0x41, 0x43, 0x39, 0x35, 0x3d, 0x37, 0x3d, 0x33, 0x31, - 0x45, 0x33, 0x3f, 0x3b, 0x44, 0x38, 0x39, 0x34, 0x38, 0x39, 0x38, 0x3d, - 0x3a, 0x3a, 0x41, 0x40, 0x44, 0x3e, 0x3f, 0x45, 0x34, 0x31, 0x34, 0x43, - 0x3b, 0x34, 0x42, 0x3c, 0x3c, 0x43, 0x35, 0x45, 0x36, 0x38, 0x3d, 0x3c, - 0x3f, 0x3d, 0x3e, 0x45, 0x41, 0x43, 0x35, 0x3f, 0x40, 0x3f, 0x3a, 0x34, - 0x3d, 0x32, 0x41, 0x3d, 0x48, 0x42, 0x37, 0x2a, 0x3c, 0x3a, 0x3e, 0x49, - 0x38, 0x36, 0x38, 0x2e, 0x36, 0x37, 0x34, 0x3e, 0x3c, 0x43, 0x43, 0x39, - 0x39, 0x3b, 0x44, 0x46, 0x44, 0x43, 0x37, 0x46, 0x43, 0x34, 0x3b, 0x35, - 0x42, 0x41, 0x3f, 0x3d, 0x3d, 0x3a, 0x42, 0x3e, 0x38, 0x47, 0x3d, 0x49, - 0x45, 0x49, 0x3a, 0x3c, 0x3e, 0x37, 0x40, 0x46, 0x41, 0x33, 0x45, 0x36, - 0x37, 0x44, 0x49, 0x3b, 0x44, 0x40, 0x33, 0x46, 0x37, 0x39, 0x4e, 0x3a, - 0x43, 0x38, 0x3a, 0x42, 0x3a, 0x3d, 0x45, 0x50, 0x26, 0x34, 0x3b, 0x3c, - 0x46, 0x46, 0x4c, 0x54, 0x3f, 0x35, 0x4e, 0x47, 0x21, 0x39, 0x0e, 0x54, - 0x3a, 0x3a, 0x2f, 0x40, 0x2d, 0x3a, 0x1f, 0x31, 0x31, 0x42, 0x34, 0x45, - 0x37, 0x36, 0x30, 0x3b, 0x3a, 0x3a, 0x36, 0x40, 0x32, 0x36, 0x3c, 0x3c, - 0x37, 0x42, 0x35, 0x3e, 0x39, 0x47, 0x36, 0x32, 0x41, 0x30, 0x42, 0x39, - 0x39, 0x44, 0x37, 0x30, 0x41, 0x3b, 0x3d, 0x3d, 0x43, 0x3b, 0x38, 0x45, - 0x3b, 0x3a, 0x39, 0x3a, 0x31, 0x33, 0x43, 0x46, 0x3f, 0x41, 0x44, 0x3f, - 0x3b, 0x44, 0x3a, 0x4c, 0x33, 0x33, 0x33, 0x3e, 0x37, 0x3e, 0x45, 0x45, - 0x36, 0x42, 0x3e, 0x43, 0x40, 0x34, 0x36, 0x31, 0x38, 0x34, 0x41, 0x3b, - 0x32, 0x38, 0x3e, 0x29, 0x47, 0x33, 0x37, 0x45, 0x3c, 0x3d, 0x43, 0x2c, - 0x36, 0x3a, 0x3c, 0x40, 0x3d, 
0x46, 0x3c, 0x37, 0x40, 0x44, 0x37, 0x38, - 0x3e, 0x41, 0x3c, 0x40, 0x33, 0x3f, 0x44, 0x32, 0x44, 0x3a, 0x43, 0x42, - 0x3e, 0x38, 0x44, 0x3b, 0x41, 0x48, 0x3f, 0x4e, 0x3f, 0x44, 0x35, 0x45, - 0x34, 0x3f, 0x42, 0x4b, 0x37, 0x37, 0x3e, 0x45, 0x46, 0x45, 0x46, 0x3d, - 0x3e, 0x39, 0x3b, 0x3a, 0x46, 0x3a, 0x56, 0x35, 0x46, 0x3d, 0x40, 0x3b, - 0x36, 0x39, 0x3f, 0x54, 0x27, 0x2b, 0x34, 0x3c, 0x48, 0x3d, 0x49, 0x4c, - 0x3e, 0x3d, 0x4e, 0x42, 0x25, 0x3b, 0x10, 0x4d, 0x30, 0x36, 0x3e, 0x36, - 0x2e, 0x31, 0x1d, 0x37, 0x3a, 0x39, 0x33, 0x3f, 0x39, 0x38, 0x2e, 0x36, - 0x44, 0x3e, 0x41, 0x37, 0x3b, 0x30, 0x3b, 0x48, 0x31, 0x39, 0x41, 0x3e, - 0x37, 0x37, 0x34, 0x2f, 0x35, 0x3b, 0x3a, 0x3e, 0x45, 0x3e, 0x3f, 0x35, - 0x39, 0x39, 0x3b, 0x44, 0x43, 0x3c, 0x3e, 0x46, 0x40, 0x3a, 0x36, 0x45, - 0x41, 0x40, 0x36, 0x44, 0x3a, 0x37, 0x47, 0x47, 0x3d, 0x36, 0x43, 0x4e, - 0x3b, 0x38, 0x40, 0x48, 0x44, 0x43, 0x45, 0x3f, 0x43, 0x3c, 0x3b, 0x37, - 0x43, 0x41, 0x39, 0x2f, 0x3d, 0x45, 0x3e, 0x3e, 0x42, 0x40, 0x41, 0x2f, - 0x47, 0x38, 0x3a, 0x48, 0x3e, 0x35, 0x37, 0x2a, 0x34, 0x38, 0x41, 0x3b, - 0x3d, 0x37, 0x3b, 0x35, 0x38, 0x3e, 0x41, 0x3c, 0x41, 0x43, 0x3d, 0x46, - 0x47, 0x47, 0x3d, 0x35, 0x48, 0x41, 0x3d, 0x3e, 0x34, 0x47, 0x38, 0x38, - 0x39, 0x3e, 0x38, 0x4d, 0x43, 0x36, 0x42, 0x40, 0x3e, 0x41, 0x3f, 0x4c, - 0x3e, 0x3e, 0x37, 0x44, 0x3e, 0x3b, 0x47, 0x3e, 0x3f, 0x3b, 0x39, 0x3c, - 0x3c, 0x3c, 0x53, 0x3b, 0x3b, 0x32, 0x3e, 0x3f, 0x32, 0x3c, 0x37, 0x4b, - 0x33, 0x30, 0x2f, 0x41, 0x47, 0x42, 0x49, 0x4f, 0x3b, 0x42, 0x4c, 0x44, - 0x1f, 0x37, 0x16, 0x4e, 0x3b, 0x3f, 0x30, 0x36, 0x35, 0x38, 0x26, 0x36, - 0x32, 0x3b, 0x38, 0x3c, 0x30, 0x3e, 0x34, 0x3e, 0x3d, 0x34, 0x39, 0x3c, - 0x36, 0x47, 0x34, 0x41, 0x31, 0x39, 0x44, 0x3e, 0x39, 0x41, 0x32, 0x36, - 0x3b, 0x3f, 0x32, 0x3d, 0x36, 0x3e, 0x40, 0x3d, 0x45, 0x32, 0x45, 0x42, - 0x38, 0x43, 0x40, 0x42, 0x34, 0x3a, 0x43, 0x38, 0x47, 0x3f, 0x41, 0x47, - 0x34, 0x44, 0x41, 0x39, 0x3c, 0x46, 0x36, 0x4f, 0x41, 0x3e, 0x38, 0x38, - 0x3a, 0x3b, 0x43, 0x44, 0x37, 0x3f, 0x35, 0x43, 0x34, 0x3d, 0x40, 0x32, - 0x3a, 0x3b, 0x3d, 0x34, 0x35, 0x43, 0x31, 0x2c, 0x3b, 0x36, 0x38, 0x41, - 0x3c, 0x38, 0x3d, 0x31, 0x45, 0x46, 0x42, 0x41, 0x33, 0x3f, 0x3f, 0x3a, - 0x36, 0x3f, 0x3c, 0x3c, 0x3c, 0x3e, 0x39, 0x3e, 0x40, 0x37, 0x47, 0x3e, - 0x35, 0x39, 0x3d, 0x3d, 0x37, 0x36, 0x3e, 0x45, 0x38, 0x3d, 0x45, 0x43, - 0x3a, 0x32, 0x3b, 0x3a, 0x32, 0x3c, 0x3d, 0x43, 0x3d, 0x33, 0x3b, 0x3d, - 0x46, 0x3a, 0x44, 0x45, 0x3b, 0x3e, 0x3c, 0x42, 0x37, 0x37, 0x52, 0x2a, - 0x3a, 0x35, 0x35, 0x3f, 0x40, 0x38, 0x40, 0x5b, 0x35, 0x32, 0x2b, 0x3d, - 0x4a, 0x3c, 0x46, 0x56, 0x44, 0x30, 0x4d, 0x39, 0x20, 0x32, 0x0f, 0x4f, - 0x33, 0x3c, 0x35, 0x35, 0x3a, 0x45, 0x29, 0x3b, 0x31, 0x38, 0x34, 0x38, - 0x42, 0x45, 0x37, 0x3e, 0x37, 0x2e, 0x36, 0x43, 0x3f, 0x38, 0x2f, 0x41, - 0x3f, 0x41, 0x3c, 0x31, 0x37, 0x36, 0x37, 0x39, 0x41, 0x3a, 0x3a, 0x40, - 0x3e, 0x47, 0x3d, 0x37, 0x3c, 0x38, 0x35, 0x39, 0x3a, 0x43, 0x3f, 0x42, - 0x42, 0x38, 0x3e, 0x40, 0x3c, 0x3a, 0x45, 0x48, 0x37, 0x3a, 0x3e, 0x35, - 0x3a, 0x3d, 0x45, 0x4a, 0x3d, 0x37, 0x38, 0x3a, 0x3d, 0x46, 0x46, 0x41, - 0x37, 0x41, 0x40, 0x48, 0x37, 0x34, 0x3b, 0x2c, 0x39, 0x34, 0x37, 0x35, - 0x3a, 0x43, 0x39, 0x2e, 0x39, 0x3f, 0x40, 0x3e, 0x40, 0x40, 0x3c, 0x2d, - 0x3e, 0x3c, 0x37, 0x39, 0x3c, 0x3b, 0x3d, 0x3f, 0x41, 0x48, 0x3b, 0x3d, - 0x3b, 0x41, 0x45, 0x3e, 0x3a, 0x38, 0x3f, 0x3c, 0x3d, 0x3e, 0x40, 0x42, - 0x46, 0x38, 0x43, 0x34, 0x35, 0x47, 0x3d, 0x46, 0x3f, 0x3e, 0x32, 0x3f, - 0x3e, 0x3d, 0x47, 0x46, 0x38, 0x41, 0x45, 0x3f, 0x34, 0x3f, 0x41, 0x43, - 0x3e, 0x3e, 0x44, 0x3b, 0x3b, 
0x36, 0x51, 0x32, 0x37, 0x3c, 0x42, 0x43, - 0x33, 0x39, 0x42, 0x61, 0x2c, 0x3b, 0x2e, 0x39, 0x42, 0x39, 0x42, 0x54, - 0x3c, 0x3a, 0x48, 0x35, 0x26, 0x34, 0x15, 0x51, 0x35, 0x40, 0x36, 0x3c, - 0x2d, 0x37, 0x25, 0x38, 0x33, 0x3d, 0x3d, 0x39, 0x3e, 0x3b, 0x2e, 0x4b, - 0x3d, 0x3b, 0x42, 0x37, 0x37, 0x40, 0x37, 0x40, 0x35, 0x45, 0x37, 0x37, - 0x3f, 0x41, 0x36, 0x39, 0x3c, 0x32, 0x3e, 0x38, 0x41, 0x40, 0x3e, 0x3f, - 0x3b, 0x3c, 0x43, 0x35, 0x3e, 0x3d, 0x44, 0x44, 0x3a, 0x36, 0x39, 0x3f, - 0x3a, 0x31, 0x42, 0x4d, 0x40, 0x33, 0x40, 0x45, 0x44, 0x3d, 0x40, 0x49, - 0x41, 0x3f, 0x42, 0x3a, 0x34, 0x46, 0x38, 0x46, 0x42, 0x34, 0x3a, 0x40, - 0x40, 0x41, 0x3d, 0x32, 0x35, 0x48, 0x35, 0x3e, 0x44, 0x41, 0x40, 0x2c, - 0x46, 0x38, 0x38, 0x3f, 0x36, 0x40, 0x38, 0x2a, 0x43, 0x41, 0x3e, 0x35, - 0x46, 0x3a, 0x45, 0x46, 0x46, 0x42, 0x3a, 0x3b, 0x40, 0x38, 0x35, 0x43, - 0x38, 0x3d, 0x3b, 0x41, 0x36, 0x44, 0x3f, 0x3f, 0x34, 0x3e, 0x3c, 0x3d, - 0x49, 0x36, 0x37, 0x4b, 0x38, 0x3c, 0x43, 0x37, 0x3a, 0x3f, 0x31, 0x45, - 0x3b, 0x39, 0x3f, 0x40, 0x37, 0x3c, 0x42, 0x3f, 0x3c, 0x33, 0x40, 0x3b, - 0x32, 0x3c, 0x52, 0x31, 0x3d, 0x44, 0x3b, 0x31, 0x46, 0x38, 0x40, 0x60, - 0x2b, 0x3c, 0x37, 0x34, 0x43, 0x38, 0x45, 0x57, 0x37, 0x39, 0x49, 0x33, - 0x2d, 0x3f, 0x18, 0x4e, 0x39, 0x39, 0x32, 0x3b, 0x34, 0x3b, 0x2c, 0x45, - 0x33, 0x37, 0x45, 0x42, 0x3d, 0x37, 0x2a, 0x4c, 0x3d, 0x3f, 0x3c, 0x36, - 0x37, 0x3c, 0x39, 0x47, 0x3d, 0x44, 0x3d, 0x40, 0x3d, 0x41, 0x34, 0x3e, - 0x40, 0x34, 0x3b, 0x3a, 0x41, 0x36, 0x37, 0x40, 0x3e, 0x3f, 0x3a, 0x36, - 0x3e, 0x35, 0x3b, 0x48, 0x41, 0x40, 0x3c, 0x42, 0x34, 0x41, 0x3f, 0x44, - 0x34, 0x39, 0x33, 0x39, 0x39, 0x47, 0x40, 0x48, 0x38, 0x3a, 0x43, 0x43, - 0x48, 0x3a, 0x3f, 0x46, 0x35, 0x3a, 0x33, 0x36, 0x32, 0x3c, 0x40, 0x34, - 0x40, 0x3a, 0x42, 0x3a, 0x39, 0x38, 0x41, 0x35, 0x3a, 0x3f, 0x35, 0x40, - 0x3f, 0x39, 0x39, 0x36, 0x38, 0x40, 0x3e, 0x3e, 0x3a, 0x31, 0x32, 0x44, - 0x40, 0x47, 0x3a, 0x3c, 0x43, 0x43, 0x46, 0x48, 0x40, 0x35, 0x3d, 0x37, - 0x44, 0x37, 0x33, 0x44, 0x3b, 0x3e, 0x3f, 0x37, 0x36, 0x3a, 0x38, 0x47, - 0x3a, 0x44, 0x36, 0x42, 0x3e, 0x44, 0x34, 0x46, 0x33, 0x43, 0x44, 0x3e, - 0x30, 0x48, 0x37, 0x38, 0x33, 0x3c, 0x46, 0x42, 0x38, 0x3d, 0x50, 0x39, - 0x33, 0x38, 0x3e, 0x40, 0x3b, 0x2b, 0x3b, 0x5f, 0x2b, 0x32, 0x2f, 0x37, - 0x3f, 0x3a, 0x40, 0x4e, 0x34, 0x38, 0x47, 0x37, 0x27, 0x2b, 0x1b, 0x4f, - 0x36, 0x38, 0x3a, 0x3a, 0x3b, 0x38, 0x2e, 0x3f, 0x3f, 0x42, 0x42, 0x42, - 0x36, 0x3e, 0x3c, 0x55, 0x39, 0x40, 0x44, 0x43, 0x3e, 0x33, 0x3c, 0x43, - 0x38, 0x44, 0x3b, 0x46, 0x3f, 0x45, 0x34, 0x38, 0x3c, 0x41, 0x42, 0x3d, - 0x42, 0x36, 0x43, 0x3f, 0x3c, 0x39, 0x3e, 0x39, 0x39, 0x42, 0x33, 0x47, - 0x36, 0x3d, 0x3f, 0x3b, 0x40, 0x39, 0x3b, 0x49, 0x36, 0x40, 0x3d, 0x41, - 0x40, 0x34, 0x3b, 0x4e, 0x3b, 0x36, 0x3b, 0x45, 0x40, 0x32, 0x3b, 0x49, - 0x37, 0x38, 0x3a, 0x47, 0x37, 0x40, 0x3e, 0x38, 0x40, 0x3f, 0x3c, 0x3a, - 0x47, 0x41, 0x42, 0x30, 0x40, 0x3c, 0x42, 0x3f, 0x31, 0x44, 0x39, 0x38, - 0x3b, 0x38, 0x42, 0x43, 0x41, 0x35, 0x3a, 0x39, 0x3e, 0x38, 0x39, 0x3e, - 0x3c, 0x42, 0x3d, 0x49, 0x47, 0x3c, 0x3f, 0x35, 0x41, 0x3a, 0x36, 0x43, - 0x43, 0x3b, 0x39, 0x3b, 0x36, 0x43, 0x43, 0x4e, 0x3e, 0x35, 0x37, 0x3b, - 0x3f, 0x37, 0x41, 0x48, 0x32, 0x44, 0x43, 0x32, 0x38, 0x39, 0x45, 0x39, - 0x3e, 0x3d, 0x35, 0x39, 0x35, 0x39, 0x50, 0x37, 0x39, 0x40, 0x43, 0x47, - 0x32, 0x2a, 0x40, 0x62, 0x24, 0x30, 0x36, 0x3e, 0x41, 0x32, 0x47, 0x58, - 0x39, 0x36, 0x44, 0x34, 0x26, 0x34, 0x1e, 0x50, 0x3c, 0x3b, 0x3f, 0x42, - 0x35, 0x3d, 0x2a, 0x4e, 0x40, 0x38, 0x36, 0x31, 0x3a, 0x30, 0x37, 0x4b, - 0x3c, 0x3b, 0x3b, 0x41, 0x3b, 
[elided: the remainder of this hunk deletes several thousand machine-generated hex byte values (comma-separated 0x.. literals, apparently a generated C/C++ test-data array); the raw bytes carry no human-readable content and are not reproduced here]
0x33, 0x39, 0x42, 0x43, 0x3f, 0x33, 0x3d, - 0x33, 0x3e, 0x48, 0x6b, 0x48, 0x43, 0x36, 0x47, 0x49, 0x44, 0x4a, 0x49, - 0x3c, 0x31, 0x35, 0x3e, 0x5c, 0x34, 0x73, 0x53, 0x33, 0x3c, 0x32, 0x3b, - 0x43, 0x27, 0x59, 0x4e, 0x2b, 0x51, 0x4f, 0x37, 0x36, 0x34, 0x56, 0x34, - 0x32, 0x4f, 0x46, 0x50, 0x40, 0x40, 0x3c, 0x3e, 0x34, 0x37, 0x50, 0x49, - 0x43, 0x47, 0x3e, 0x52, 0x44, 0x38, 0x3b, 0x4f, 0x3a, 0x3d, 0x2b, 0x4c, - 0x40, 0x38, 0x3a, 0x35, 0x3a, 0x3a, 0x3d, 0x38, 0x3d, 0x3b, 0x37, 0x48, - 0x3d, 0x3d, 0x32, 0x30, 0x3a, 0x34, 0x3f, 0x3a, 0x3b, 0x3e, 0x35, 0x2f, - 0x3b, 0x3a, 0x45, 0x3d, 0x42, 0x33, 0x33, 0x24, 0x44, 0x39, 0x3c, 0x3d, - 0x41, 0x3c, 0x37, 0x2c, 0x3b, 0x36, 0x34, 0x41, 0x3d, 0x3f, 0x39, 0x32, - 0x3c, 0x40, 0x44, 0x3d, 0x41, 0x3d, 0x3a, 0x29, 0x3e, 0x3e, 0x43, 0x33, - 0x3f, 0x3e, 0x3e, 0x31, 0x38, 0x3a, 0x34, 0x3d, 0x3f, 0x3e, 0x3a, 0x3d, - 0x3e, 0x48, 0x45, 0x3d, 0x44, 0x37, 0x33, 0x3d, 0x45, 0x39, 0x40, 0x40, - 0x42, 0x3f, 0x3f, 0x3d, 0x3a, 0x3b, 0x41, 0x33, 0x41, 0x3c, 0x32, 0x55, - 0x43, 0x3a, 0x32, 0x40, 0x3c, 0x3e, 0x40, 0x43, 0x37, 0x3f, 0x40, 0x38, - 0x43, 0x41, 0x36, 0x42, 0x44, 0x3c, 0x32, 0x3f, 0x38, 0x42, 0x46, 0x59, - 0x4c, 0x41, 0x39, 0x47, 0x46, 0x46, 0x44, 0x44, 0x35, 0x42, 0x32, 0x39, - 0x4f, 0x34, 0x6d, 0x55, 0x31, 0x3b, 0x3a, 0x3f, 0x44, 0x2c, 0x5d, 0x43, - 0x26, 0x4a, 0x4f, 0x40, 0x36, 0x32, 0x4d, 0x33, 0x2f, 0x50, 0x4d, 0x57, - 0x3b, 0x40, 0x42, 0x44, 0x41, 0x3f, 0x52, 0x4e, 0x35, 0x41, 0x44, 0x52, - 0x40, 0x35, 0x39, 0x4b, 0x45, 0x34, 0x2c, 0x4a, 0x3b, 0x41, 0x31, 0x33, - 0x3f, 0x3a, 0x36, 0x3c, 0x3c, 0x33, 0x30, 0x38, 0x43, 0x3f, 0x32, 0x2d, - 0x3f, 0x3a, 0x38, 0x41, 0x39, 0x45, 0x36, 0x2e, 0x3c, 0x38, 0x45, 0x3f, - 0x40, 0x3f, 0x3e, 0x26, 0x41, 0x37, 0x3c, 0x44, 0x3f, 0x3f, 0x35, 0x37, - 0x46, 0x34, 0x37, 0x3e, 0x48, 0x38, 0x36, 0x34, 0x33, 0x39, 0x40, 0x3c, - 0x42, 0x3d, 0x3b, 0x31, 0x38, 0x3b, 0x44, 0x42, 0x45, 0x38, 0x41, 0x30, - 0x3d, 0x42, 0x36, 0x3f, 0x3b, 0x45, 0x37, 0x32, 0x3c, 0x37, 0x3d, 0x42, - 0x38, 0x3d, 0x2f, 0x31, 0x39, 0x40, 0x3f, 0x44, 0x3a, 0x41, 0x44, 0x46, - 0x3d, 0x3a, 0x32, 0x3b, 0x34, 0x47, 0x36, 0x4c, 0x47, 0x35, 0x3c, 0x33, - 0x3b, 0x3c, 0x30, 0x43, 0x43, 0x3f, 0x31, 0x40, 0x3a, 0x37, 0x30, 0x46, - 0x39, 0x3b, 0x42, 0x40, 0x2d, 0x3f, 0x3e, 0x6a, 0x50, 0x3b, 0x31, 0x54, - 0x47, 0x3d, 0x48, 0x4e, 0x3b, 0x41, 0x3a, 0x39, 0x49, 0x36, 0x64, 0x4e, - 0x32, 0x39, 0x3d, 0x37, 0x42, 0x2c, 0x5c, 0x43, 0x2a, 0x4b, 0x4b, 0x46, - 0x30, 0x29, 0x52, 0x31, 0x35, 0x44, 0x4a, 0x4b, 0x3d, 0x3b, 0x4e, 0x42, - 0x3d, 0x39, 0x42, 0x52, 0x3f, 0x36, 0x3e, 0x50, 0x3f, 0x32, 0x35, 0x3a, - 0x40, 0x39, 0x35, 0x48, 0x3b, 0x3e, 0x41, 0x43, 0x43, 0x45, 0x2f, 0x36, - 0x38, 0x34, 0x3f, 0x44, 0x32, 0x3f, 0x37, 0x33, 0x33, 0x35, 0x2e, 0x41, - 0x37, 0x3e, 0x38, 0x28, 0x49, 0x30, 0x46, 0x39, 0x3b, 0x30, 0x38, 0x28, - 0x3b, 0x3d, 0x3a, 0x43, 0x3f, 0x34, 0x43, 0x36, 0x39, 0x3c, 0x3e, 0x3e, - 0x39, 0x3b, 0x39, 0x32, 0x3c, 0x36, 0x3e, 0x38, 0x34, 0x3c, 0x3a, 0x2a, - 0x46, 0x3d, 0x40, 0x37, 0x3b, 0x39, 0x3b, 0x34, 0x38, 0x31, 0x43, 0x46, - 0x3b, 0x43, 0x39, 0x2b, 0x38, 0x40, 0x3e, 0x39, 0x35, 0x3d, 0x2c, 0x36, - 0x37, 0x40, 0x36, 0x40, 0x41, 0x38, 0x32, 0x3f, 0x36, 0x46, 0x34, 0x31, - 0x40, 0x3e, 0x3c, 0x4e, 0x42, 0x3d, 0x36, 0x3f, 0x42, 0x3f, 0x33, 0x40, - 0x34, 0x37, 0x3c, 0x3b, 0x31, 0x47, 0x32, 0x3c, 0x34, 0x3d, 0x42, 0x3b, - 0x37, 0x41, 0x3b, 0x64, 0x52, 0x40, 0x36, 0x4e, 0x46, 0x3f, 0x3f, 0x47, - 0x3c, 0x3a, 0x3a, 0x41, 0x4a, 0x32, 0x5e, 0x50, 0x2d, 0x39, 0x3a, 0x38, - 0x3d, 0x2c, 0x5a, 0x3e, 0x2e, 0x47, 0x3e, 0x3e, 0x33, 0x29, 0x4c, 0x35, - 0x30, 0x4d, 0x4d, 0x4d, 0x38, 
0x42, 0x51, 0x47, 0x39, 0x3c, 0x43, 0x4b, - 0x42, 0x3f, 0x3a, 0x4b, 0x44, 0x3f, 0x3a, 0x44, 0x3e, 0x37, 0x30, 0x45, - 0x3d, 0x36, 0x34, 0x3f, 0x36, 0x35, 0x37, 0x36, 0x43, 0x3b, 0x37, 0x3e, - 0x35, 0x3e, 0x32, 0x34, 0x32, 0x38, 0x3c, 0x3a, 0x3a, 0x3c, 0x30, 0x2b, - 0x31, 0x37, 0x30, 0x42, 0x36, 0x37, 0x36, 0x2c, 0x3c, 0x31, 0x41, 0x37, - 0x44, 0x41, 0x3b, 0x37, 0x41, 0x3f, 0x38, 0x3b, 0x3a, 0x3a, 0x3c, 0x2f, - 0x47, 0x41, 0x3e, 0x33, 0x42, 0x3a, 0x32, 0x34, 0x44, 0x40, 0x43, 0x3d, - 0x34, 0x41, 0x38, 0x35, 0x35, 0x3b, 0x45, 0x38, 0x32, 0x37, 0x3c, 0x2e, - 0x39, 0x40, 0x30, 0x3e, 0x42, 0x35, 0x3d, 0x36, 0x3e, 0x3d, 0x39, 0x46, - 0x3f, 0x36, 0x37, 0x49, 0x41, 0x39, 0x3d, 0x3d, 0x33, 0x44, 0x42, 0x50, - 0x3d, 0x3c, 0x3e, 0x3f, 0x42, 0x42, 0x3b, 0x3d, 0x41, 0x31, 0x39, 0x3a, - 0x44, 0x34, 0x38, 0x47, 0x44, 0x38, 0x3b, 0x42, 0x30, 0x42, 0x44, 0x57, - 0x49, 0x3a, 0x39, 0x4f, 0x41, 0x3e, 0x40, 0x43, 0x37, 0x42, 0x3b, 0x48, - 0x50, 0x29, 0x5b, 0x44, 0x2c, 0x40, 0x3f, 0x3c, 0x46, 0x34, 0x5c, 0x41, - 0x2c, 0x48, 0x46, 0x46, 0x35, 0x32, 0x4c, 0x35, 0x2f, 0x3b, 0x48, 0x44, - 0x41, 0x41, 0x49, 0x45, 0x34, 0x37, 0x44, 0x45, 0x43, 0x3b, 0x42, 0x44, - 0x3a, 0x37, 0x48, 0x49, 0x34, 0x39, 0x33, 0x4a, 0x40, 0x3d, 0x33, 0x39, - 0x39, 0x3b, 0x30, 0x31, 0x3d, 0x47, 0x3c, 0x3a, 0x34, 0x3c, 0x3a, 0x2b, - 0x3a, 0x34, 0x41, 0x40, 0x42, 0x36, 0x44, 0x2c, 0x40, 0x47, 0x3b, 0x37, - 0x38, 0x42, 0x44, 0x29, 0x36, 0x3d, 0x3d, 0x36, 0x42, 0x3b, 0x35, 0x36, - 0x43, 0x39, 0x41, 0x3d, 0x45, 0x41, 0x31, 0x32, 0x40, 0x3d, 0x3c, 0x41, - 0x3e, 0x3d, 0x35, 0x34, 0x32, 0x38, 0x36, 0x3f, 0x3b, 0x3d, 0x39, 0x36, - 0x40, 0x3e, 0x3d, 0x3a, 0x3a, 0x3b, 0x3c, 0x32, 0x40, 0x34, 0x3a, 0x36, - 0x42, 0x47, 0x3e, 0x33, 0x3a, 0x44, 0x30, 0x39, 0x40, 0x3a, 0x36, 0x44, - 0x3c, 0x3b, 0x3f, 0x33, 0x3e, 0x3c, 0x35, 0x53, 0x43, 0x3c, 0x3f, 0x43, - 0x3d, 0x44, 0x33, 0x47, 0x42, 0x40, 0x37, 0x3b, 0x43, 0x3f, 0x33, 0x41, - 0x38, 0x42, 0x44, 0x3d, 0x2d, 0x3f, 0x46, 0x49, 0x4e, 0x3f, 0x36, 0x45, - 0x45, 0x39, 0x40, 0x42, 0x39, 0x39, 0x3a, 0x42, 0x45, 0x2c, 0x61, 0x44, - 0x30, 0x45, 0x38, 0x3a, 0x40, 0x37, 0x58, 0x39, 0x31, 0x3e, 0x3a, 0x3e, - 0x37, 0x32, 0x4a, 0x39, 0x2e, 0x47, 0x3e, 0x4e, 0x3f, 0x3e, 0x48, 0x45, - 0x3f, 0x48, 0x3a, 0x3f, 0x40, 0x36, 0x3a, 0x44, 0x36, 0x3e, 0x3d, 0x41, - 0x45, 0x36, 0x36, 0x4b, 0x3a, 0x3d, 0x45, 0x48, 0x38, 0x45, 0x39, 0x38, - 0x38, 0x3a, 0x42, 0x34, 0x3f, 0x34, 0x39, 0x34, 0x32, 0x3f, 0x3c, 0x3d, - 0x3d, 0x47, 0x3a, 0x2f, 0x3c, 0x3e, 0x3f, 0x39, 0x35, 0x42, 0x3c, 0x2a, - 0x3b, 0x35, 0x42, 0x44, 0x46, 0x39, 0x38, 0x39, 0x43, 0x3a, 0x38, 0x42, - 0x3d, 0x3a, 0x40, 0x35, 0x34, 0x39, 0x3a, 0x38, 0x43, 0x42, 0x42, 0x2d, - 0x31, 0x3b, 0x33, 0x40, 0x3b, 0x47, 0x35, 0x30, 0x3a, 0x3c, 0x3b, 0x47, - 0x3a, 0x3c, 0x38, 0x35, 0x3c, 0x35, 0x3e, 0x3e, 0x39, 0x3d, 0x39, 0x40, - 0x37, 0x33, 0x49, 0x38, 0x3c, 0x43, 0x34, 0x40, 0x39, 0x42, 0x3c, 0x3b, - 0x3e, 0x45, 0x3e, 0x51, 0x3d, 0x3f, 0x3b, 0x34, 0x37, 0x3c, 0x40, 0x47, - 0x3c, 0x41, 0x3f, 0x41, 0x37, 0x3e, 0x36, 0x3c, 0x42, 0x40, 0x3f, 0x3a, - 0x3b, 0x42, 0x44, 0x4b, 0x4b, 0x37, 0x41, 0x4d, 0x41, 0x45, 0x40, 0x41, - 0x40, 0x38, 0x37, 0x40, 0x42, 0x2c, 0x57, 0x43, 0x2d, 0x49, 0x3a, 0x3e, - 0x37, 0x2f, 0x52, 0x37, 0x31, 0x42, 0x3b, 0x3f, 0x39, 0x38, 0x48, 0x3c, - 0x37, 0x3d, 0x3a, 0x39, 0x3a, 0x45, 0x4b, 0x49, 0x3e, 0x44, 0x48, 0x49, - 0x3d, 0x39, 0x3c, 0x41, 0x41, 0x38, 0x45, 0x38, 0x33, 0x3d, 0x37, 0x47, - 0x34, 0x3f, 0x3b, 0x3d, 0x39, 0x34, 0x30, 0x39, 0x44, 0x36, 0x34, 0x3c, - 0x37, 0x38, 0x45, 0x34, 0x40, 0x33, 0x41, 0x3a, 0x3e, 0x3c, 0x3b, 0x3a, - 0x40, 0x3f, 0x3b, 0x3d, 0x3b, 
0x46, 0x41, 0x2a, 0x3a, 0x3c, 0x42, 0x46, - 0x33, 0x3f, 0x2d, 0x3a, 0x45, 0x45, 0x38, 0x3b, 0x44, 0x34, 0x35, 0x3f, - 0x34, 0x43, 0x38, 0x3e, 0x41, 0x3b, 0x42, 0x38, 0x3d, 0x3f, 0x38, 0x45, - 0x3b, 0x35, 0x39, 0x3c, 0x43, 0x43, 0x38, 0x34, 0x44, 0x43, 0x2e, 0x39, - 0x39, 0x40, 0x39, 0x41, 0x41, 0x34, 0x3e, 0x44, 0x3d, 0x43, 0x3a, 0x3a, - 0x3b, 0x3b, 0x36, 0x45, 0x3c, 0x43, 0x3d, 0x48, 0x36, 0x36, 0x39, 0x55, - 0x35, 0x40, 0x3e, 0x49, 0x40, 0x3a, 0x3d, 0x3d, 0x34, 0x47, 0x40, 0x41, - 0x40, 0x47, 0x39, 0x3e, 0x3b, 0x38, 0x3c, 0x3a, 0x35, 0x3e, 0x41, 0x4a, - 0x4b, 0x3f, 0x36, 0x3d, 0x40, 0x3c, 0x39, 0x32, 0x33, 0x36, 0x30, 0x42, - 0x42, 0x36, 0x54, 0x48, 0x2e, 0x4c, 0x34, 0x3c, 0x39, 0x36, 0x4e, 0x37, - 0x2f, 0x3e, 0x30, 0x3d, 0x36, 0x3b, 0x45, 0x36, 0x37, 0x3e, 0x41, 0x4b, - 0x3b, 0x36, 0x45, 0x3b, 0x38, 0x45, 0x3e, 0x43, 0x48, 0x46, 0x44, 0x44, - 0x3e, 0x3b, 0x37, 0x3b, 0x3a, 0x3f, 0x3d, 0x44, 0x39, 0x38, 0x45, 0x43, - 0x3d, 0x35, 0x39, 0x2c, 0x44, 0x41, 0x36, 0x40, 0x3d, 0x39, 0x3d, 0x2f, - 0x3d, 0x39, 0x42, 0x3d, 0x36, 0x46, 0x43, 0x2c, 0x41, 0x3a, 0x30, 0x45, - 0x3f, 0x41, 0x35, 0x2b, 0x3b, 0x38, 0x3a, 0x44, 0x32, 0x32, 0x39, 0x3c, - 0x3a, 0x3a, 0x3c, 0x3a, 0x35, 0x40, 0x3b, 0x31, 0x36, 0x33, 0x35, 0x34, - 0x3c, 0x3b, 0x3d, 0x36, 0x48, 0x3b, 0x3f, 0x42, 0x3e, 0x33, 0x2f, 0x3a, - 0x49, 0x41, 0x39, 0x3e, 0x3c, 0x44, 0x3c, 0x39, 0x33, 0x39, 0x36, 0x35, - 0x3d, 0x42, 0x34, 0x3e, 0x38, 0x45, 0x40, 0x45, 0x3d, 0x48, 0x42, 0x4a, - 0x3f, 0x45, 0x38, 0x42, 0x44, 0x40, 0x34, 0x49, 0x44, 0x3d, 0x3a, 0x39, - 0x3e, 0x3a, 0x42, 0x3e, 0x48, 0x42, 0x3e, 0x3a, 0x3f, 0x3f, 0x32, 0x3b, - 0x38, 0x41, 0x3c, 0x39, 0x33, 0x45, 0x44, 0x3c, 0x48, 0x41, 0x41, 0x3d, - 0x3a, 0x3c, 0x37, 0x33, 0x41, 0x3f, 0x38, 0x3a, 0x3f, 0x37, 0x51, 0x3c, - 0x37, 0x3a, 0x43, 0x37, 0x40, 0x31, 0x4f, 0x34, 0x3b, 0x44, 0x45, 0x39, - 0x40, 0x33, 0x49, 0x33, 0x3e, 0x35, 0x44, 0x3d, 0x3b, 0x3f, 0x43, 0x41, - 0x43, 0x43, 0x48, 0x44, 0x46, 0x3b, 0x43, 0x3f, 0x3c, 0x3f, 0x3e, 0x3d, - 0x3b, 0x41, 0x3c, 0x43, 0x30, 0x34, 0x39, 0x33, 0x3f, 0x38, 0x36, 0x2e, - 0x33, 0x3f, 0x3c, 0x40, 0x3d, 0x3b, 0x3b, 0x31, 0x36, 0x41, 0x3b, 0x38, - 0x46, 0x36, 0x34, 0x31, 0x42, 0x44, 0x33, 0x35, 0x3f, 0x36, 0x3c, 0x30, - 0x3f, 0x31, 0x39, 0x3e, 0x3f, 0x47, 0x3e, 0x34, 0x36, 0x36, 0x34, 0x39, - 0x37, 0x46, 0x40, 0x33, 0x3b, 0x3a, 0x3f, 0x41, 0x37, 0x44, 0x3a, 0x3f, - 0x34, 0x45, 0x37, 0x33, 0x3f, 0x47, 0x41, 0x36, 0x39, 0x3e, 0x40, 0x38, - 0x41, 0x3d, 0x3d, 0x36, 0x40, 0x3a, 0x3b, 0x3b, 0x41, 0x3b, 0x3a, 0x3f, - 0x3f, 0x3b, 0x35, 0x42, 0x46, 0x3a, 0x30, 0x45, 0x40, 0x37, 0x39, 0x39, - 0x3d, 0x38, 0x3f, 0x45, 0x3f, 0x31, 0x32, 0x3b, 0x35, 0x3e, 0x3b, 0x38, - 0x3b, 0x44, 0x37, 0x39, 0x37, 0x42, 0x3f, 0x44, 0x38, 0x36, 0x37, 0x44, - 0x45, 0x46, 0x41, 0x3b, 0x46, 0x42, 0x43, 0x43, 0x3a, 0x4b, 0x37, 0x35, - 0x3b, 0x40, 0x32, 0x38, 0x41, 0x38, 0x4f, 0x3e, 0x36, 0x3f, 0x47, 0x3b, - 0x47, 0x3b, 0x4a, 0x2e, 0x3d, 0x45, 0x3b, 0x46, 0x3e, 0x38, 0x43, 0x38, - 0x41, 0x48, 0x3a, 0x39, 0x40, 0x45, 0x3b, 0x43, 0x40, 0x3e, 0x43, 0x41, - 0x41, 0x3e, 0x39, 0x3f, 0x35, 0x42, 0x33, 0x3f, 0x3d, 0x32, 0x45, 0x3c, - 0x41, 0x31, 0x45, 0x38, 0x43, 0x45, 0x41, 0x35, 0x35, 0x40, 0x44, 0x36, - 0x3a, 0x3b, 0x3c, 0x2c, 0x3e, 0x41, 0x33, 0x3d, 0x46, 0x34, 0x3b, 0x30, - 0x30, 0x42, 0x43, 0x3d, 0x3d, 0x3d, 0x43, 0x31, 0x3f, 0x40, 0x3a, 0x3f, - 0x48, 0x3e, 0x3b, 0x39, 0x44, 0x43, 0x3b, 0x3a, 0x42, 0x38, 0x38, 0x3b, - 0x3f, 0x44, 0x37, 0x3e, 0x45, 0x40, 0x41, 0x3b, 0x3c, 0x3a, 0x38, 0x37, - 0x3b, 0x33, 0x3f, 0x35, 0x43, 0x3d, 0x33, 0x41, 0x3b, 0x46, 0x39, 0x32, - 0x39, 0x3f, 0x3b, 0x39, 0x47, 
0x3c, 0x3f, 0x39, 0x34, 0x3d, 0x3c, 0x46, - 0x3f, 0x3e, 0x3e, 0x44, 0x34, 0x40, 0x3f, 0x39, 0x3c, 0x38, 0x36, 0x45, - 0x42, 0x46, 0x3b, 0x44, 0x3a, 0x3d, 0x3b, 0x42, 0x3b, 0x3b, 0x3c, 0x45, - 0x42, 0x3d, 0x36, 0x37, 0x3d, 0x43, 0x3f, 0x48, 0xa6, 0xfb, 0xff, 0xff, - 0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0xb3, 0x00, 0x00, 0x00, - 0x39, 0xff, 0xff, 0xff, 0xe5, 0xff, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, - 0x68, 0xfb, 0xff, 0xff, 0xbc, 0xfc, 0xff, 0xff, 0x20, 0x00, 0x00, 0x00, - 0x14, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0xe8, 0x03, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x03, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x70, 0x02, 0x00, 0x00, - 0x70, 0x03, 0x00, 0x00, 0xf0, 0x00, 0x00, 0x00, 0xf0, 0x01, 0x00, 0x00, - 0x80, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x50, 0x01, 0x00, 0x00, - 0xa4, 0x02, 0x00, 0x00, 0xba, 0xfc, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, - 0x24, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x24, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x6c, 0x61, 0x62, 0x65, - 0x6c, 0x73, 0x5f, 0x73, 0x6f, 0x66, 0x74, 0x6d, 0x61, 0x78, 0x00, 0x00, - 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, - 0x3c, 0xfd, 0xff, 0xff, 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, - 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x80, 0x3b, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x3f, - 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0xfd, 0xff, 0xff, - 0x00, 0x00, 0x00, 0x03, 0x1c, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, - 0x08, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, - 0x61, 0x64, 0x64, 0x5f, 0x31, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xa4, 0xfd, 0xff, 0xff, - 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x58, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x09, 0x97, 0xf5, 0x3f, - 0x01, 0x00, 0x00, 0x00, 0x87, 0x35, 0xa0, 0x43, 0x01, 0x00, 0x00, 0x00, - 0xd6, 0xd7, 0x28, 0xc3, 0x92, 0xfd, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, - 0x1c, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x52, 0x65, 0x6c, 0x75, - 0x00, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x19, 0x00, 0x00, 0x00, 0x16, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x14, 0xfe, 0xff, 0xff, 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, - 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x05, 0x80, 0xbf, 0x3f, 0x01, 0x00, 0x00, 0x00, 0x85, 0xc0, 0xbe, 0x43, - 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xfe, 0xff, 0xff, - 0x00, 0x00, 0x00, 0x03, 0x3c, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, - 0x08, 0x00, 0x00, 0x00, 0x44, 0x00, 0x00, 0x00, 0x25, 0x00, 0x00, 0x00, - 0x77, 0x65, 0x69, 0x67, 0x68, 0x74, 0x73, 0x5f, 0x71, 0x75, 0x61, 0x6e, - 0x74, 0x2f, 0x46, 0x61, 0x6b, 0x65, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x57, - 0x69, 0x74, 0x68, 0x4d, 0x69, 0x6e, 0x4d, 0x61, 0x78, 0x56, 0x61, 0x72, - 0x73, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x0a, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0xa4, 0xfe, 0xff, 0xff, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, - 0x18, 0x00, 0x00, 0x00, 0x04, 
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0xae, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x17, 0xac, 0x6e, 0x3a, 0x01, 0x00, 0x00, 0x00, - 0x20, 0x4e, 0x97, 0x3d, 0x01, 0x00, 0x00, 0x00, 0xaf, 0x27, 0x21, 0xbe, - 0x96, 0xfe, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, 0x20, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x28, 0x00, 0x00, 0x00, - 0x09, 0x00, 0x00, 0x00, 0x52, 0x65, 0x73, 0x68, 0x61, 0x70, 0x65, 0x5f, - 0x31, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x31, 0x00, 0x00, 0x00, 0x2b, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x1c, 0xff, 0xff, 0xff, 0x2c, 0x00, 0x00, 0x00, 0x20, 0x00, 0x00, 0x00, - 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x3f, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0xff, 0x42, - 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0a, 0xff, 0xff, 0xff, - 0x00, 0x00, 0x00, 0x02, 0x20, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, - 0x08, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, - 0x43, 0x6f, 0x6e, 0x76, 0x32, 0x44, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0xfc, 0xfe, 0xff, 0xff, - 0x14, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x17, 0xac, 0xee, 0x39, 0x5a, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x03, - 0x48, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x54, 0x00, 0x00, 0x00, 0x31, 0x00, 0x00, 0x00, 0x77, 0x65, 0x69, 0x67, - 0x68, 0x74, 0x73, 0x5f, 0x71, 0x75, 0x61, 0x6e, 0x74, 0x5f, 0x31, 0x2f, - 0x46, 0x61, 0x6b, 0x65, 0x51, 0x75, 0x61, 0x6e, 0x74, 0x57, 0x69, 0x74, - 0x68, 0x4d, 0x69, 0x6e, 0x4d, 0x61, 0x78, 0x56, 0x61, 0x72, 0x73, 0x2f, - 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x73, 0x65, 0x00, 0x00, 0x00, - 0x02, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x30, 0x11, 0x00, 0x00, - 0x0c, 0x00, 0x14, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, - 0x0c, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, - 0x18, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x3d, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x9d, 0xaf, 0xd0, 0x3a, 0x01, 0x00, 0x00, 0x00, - 0xe7, 0x29, 0x9e, 0x3e, 0x01, 0x00, 0x00, 0x00, 0x5b, 0x91, 0xc3, 0xbd, - 0x00, 0x00, 0x0e, 0x00, 0x18, 0x00, 0x08, 0x00, 0x07, 0x00, 0x0c, 0x00, - 0x10, 0x00, 0x14, 0x00, 0x0e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, - 0x20, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x28, 0x00, 0x00, 0x00, 0x0b, 0x00, 0x00, 0x00, 0x4d, 0x61, 0x74, 0x4d, - 0x75, 0x6c, 0x5f, 0x62, 0x69, 0x61, 0x73, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x62, 0x1b, 0x1c, 0x3b, - 0x03, 0x00, 0x00, 0x00, 0xb4, 0x00, 0x00, 0x00, 0x5c, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0xc0, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x09, - 0x02, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, - 0x24, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x06, 0x00, 0x08, 0x00, 0x04, 0x00, - 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x3f, 0x14, 0x00, 0x1c, 0x00, - 0x08, 0x00, 0x0c, 0x00, 0x10, 0x00, 0x07, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x18, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x08, - 0x01, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, - 0x28, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, - 0x02, 0x00, 0x00, 0x00, 0x07, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x04, 0x00, 0x04, 0x00, 0x00, 0x00, 0x14, 0x00, 0x18, 0x00, - 0x00, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x07, 0x00, 0x10, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x14, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, - 0x1c, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x30, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, - 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0c, 0x00, 0x10, 0x00, - 0x00, 0x00, 0x04, 0x00, 0x08, 0x00, 0x0c, 0x00, 0x0c, 0x00, 0x00, 0x00, - 0x02, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, - 0x03, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x14, 0x00, 0x00, 0x00, - 0x04, 0x00, 0x00, 0x00, 0xfa, 0xff, 0xff, 0xff, 0x00, 0x19, 0x06, 0x00, - 0x06, 0x00, 0x05, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x09, 0x06, 0x00, - 0x08, 0x00, 0x07, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04}; -const int g_model_len = 19800; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h deleted file mode 100644 index f20362349f2..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -// This data was extracted from the larger feature data held in -// no_features_data.cc and consists of the 29th spectrogram slice of 43 values. -// This is the expected result of running the sample data in -// no_30ms_sample_data.cc through the preprocessing pipeline. 
- -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_POWER_SPECTRUM_DATA_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_POWER_SPECTRUM_DATA_H_ - -#include <cstdint> - -constexpr int g_no_power_spectrum_data_size = 43; -extern const uint8_t g_no_power_spectrum_data[]; - -#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_NO_POWER_SPECTRUM_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.cc deleted file mode 100644 index 2d7ae623010..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.cc +++ /dev/null @@ -1,152 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/no_simple_features_data.h" - -/* File automatically created by - * tensorflow/examples/speech_commands/wav_to_features.py \ - * --sample_rate=16000 \ - * --clip_duration_ms=1000 \ - * --window_size_ms=30 \ - * --window_stride_ms=20 \ - * --feature_bin_count=40 \ - * --quantize=1 \ - * --preprocess="average" \ - * --input_wav="speech_commands_test_set_v0.02/no/f9643d42_nohash_4.wav" \ - * --output_c_file="no_simple_features_data.cc" \ - */ - -const int g_no_simple_f9643d42_nohash_4_width = 43; -const int g_no_simple_f9643d42_nohash_4_height = 49; -const unsigned char g_no_simple_f9643d42_nohash_4_data[] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 67, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 139, 2, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 195, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 230, 2, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 7, - 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 255, 7, 16, 1, 1, 0, 2, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 7, 22, 0, 1, 0, - 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 238, 5, 20, 3, 4, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 144, 4, 19, 3, 5, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 6, 3, - 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 3, 1, 5, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 1, 3, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, -}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc deleted file mode 100644 index 3733912e121..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.cc +++ /dev/null @@ -1,149 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -// Reference implementation of the preprocessing pipeline, with the same -// results as the audio tutorial at -// https://www.tensorflow.org/tutorials/sequences/audio_recognition -// This module takes 30ms of PCM-encoded signed 16-bit audio samples (at 16KHz, -// so 480 values), and extracts a power spectrum of frequencies. There are 43 -// frequency bands in the result, derived from the original 256 output from the -// discrete Fourier transform, and averaged together in groups of 6. -// It's expected that most platforms will have optimized versions of the -// functions used here, for example replacing the DFT with an FFT, so this -// version shouldn't be used where performance is critical. 
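The module comment above ends by noting that optimized platforms would swap the reference DFT for an FFT. As a hedged sketch (not part of the original sources), here is how the CalculateDiscreteFourierTransform routine defined just below could be backed by KISS FFT's kiss_fftr(), which the comment itself cites; it assumes KISS FFT is built with its default float kiss_fft_scalar, that time_series_size is even, and that kMaxAudioSampleSize from simple_model_settings.h bounds the input length:

#include "kiss_fftr.h"

void CalculateDiscreteFourierTransformFast(float* time_series,
                                           int time_series_size,
                                           float* fourier_output) {
  kiss_fftr_cfg cfg =
      kiss_fftr_alloc(time_series_size, /*inverse_fft=*/0, nullptr, nullptr);
  // kiss_fftr() writes N/2 + 1 complex bins for an N-point real input.
  kiss_fft_cpx bins[kMaxAudioSampleSize / 2 + 1];
  kiss_fftr(cfg, time_series, bins);
  // Repack into the interleaved real/imaginary layout the reference DFT
  // emits; both use the same e^(-i...) sign convention, so no negation needed.
  for (int i = 0; i < time_series_size / 2; ++i) {
    fourier_output[(i * 2) + 0] = bins[i].r;
    fourier_output[(i * 2) + 1] = bins[i].i;
  }
  kiss_fftr_free(cfg);
}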
- -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h" - -#include - -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h" -#include "tensorflow/lite/micro/micro_log.h" - -namespace { - -// Needed because some platforms don't have M_PI defined. -constexpr float kPi = 3.14159265358979323846f; - -// Performs a discrete Fourier transform on the real inputs. This corresponds to -// rdft() in the FFT package at http://www.kurims.kyoto-u.ac.jp/~ooura/fft.html, -// and to kiss_fftr() in KISSFFT at https://github.com/mborgerding/kissfft. -// It takes in an array of float real values, and returns a result of the same -// length with float real and imaginary components interleaved, so -// fourier_output[0] is the first real value, fourier_output[1] is the first -// imaginary, fourier_output[2] is the second real, and so on. -// The calling function should ensure that the array passed in as fourier_output -// is at least time_series_size in length. Most optimized FFT implementations -// require the length to be a power of two as well, but this version doesn't -// enforce that. -void CalculateDiscreteFourierTransform(float* time_series, int time_series_size, - float* fourier_output) { - for (int i = 0; i < time_series_size / 2; ++i) { - float real = 0; - for (int j = 0; j < time_series_size; ++j) { - real += time_series[j] * std::cos(j * i * kPi * 2 / time_series_size); - } - float imaginary = 0; - for (int j = 0; j < time_series_size; ++j) { - imaginary -= - time_series[j] * std::sin(j * i * kPi * 2 / time_series_size); - } - fourier_output[(i * 2) + 0] = real; - fourier_output[(i * 2) + 1] = imaginary; - } -} - -// Produces a simple sine curve that is used to ensure frequencies at the center -// of the current sample window are weighted more heavily than those at the end. -void CalculatePeriodicHann(int window_length, float* window_function) { - for (int i = 0; i < window_length; ++i) { - window_function[i] = 0.5f - 0.5f * std::cos((2 * kPi * i) / window_length); - } -} - -} // namespace - -TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size, - int output_size, uint8_t* output) { - // Ensure our input and output data arrays are valid. - if (input_size > kMaxAudioSampleSize) { - MicroPrintf("Input size %d larger than %d", input_size, - kMaxAudioSampleSize); - return kTfLiteError; - } - if (output_size != kFeatureSliceSize) { - MicroPrintf("Requested output size %d doesn't match %d", output_size, - kFeatureSliceSize); - return kTfLiteError; - } - - // Pre-calculate the window function we'll be applying to the input data. - // In a real application, we'd calculate this table once in an initialization - // function and store it for repeated reuse. - float window_function[kMaxAudioSampleSize]; - CalculatePeriodicHann(input_size, window_function); - - // Apply the window function to our time series input, and pad it with zeroes - // to the next power of two. - float float_input[kMaxAudioSampleSize]; - for (int i = 0; i < kMaxAudioSampleSize; ++i) { - if (i < input_size) { - float_input[i] = - (input[i] * window_function[i]) / static_cast(1 << 15); - } else { - float_input[i] = 0.0f; - } - } - - // Pull the frequency data from the time series sample. 
- float fourier_values[kMaxAudioSampleSize]; - CalculateDiscreteFourierTransform(float_input, kMaxAudioSampleSize, - fourier_values); - - // We have the complex numbers giving us information about each frequency - // band, but all we want to know is how strong each frequency is, so calculate - // the squared magnitude by adding together the squares of each component. - float power_spectrum[kMaxAudioSampleSize / 2]; - for (int i = 0; i < (kMaxAudioSampleSize / 2); ++i) { - const float real = fourier_values[(i * 2) + 0]; - const float imaginary = fourier_values[(i * 2) + 1]; - power_spectrum[i] = (real * real) + (imaginary * imaginary); - } - - // Finally, reduce the size of the output by averaging together six adjacent - // frequencies into each slot, producing an array of 43 values. - for (int i = 0; i < kFeatureSliceSize; ++i) { - float total = 0.0f; - for (int j = 0; j < kAverageWindowSize; ++j) { - const int index = (i * kAverageWindowSize) + j; - if (index < (kMaxAudioSampleSize / 2)) { - total += power_spectrum[index]; - } - } - const float average = total / kAverageWindowSize; - // Quantize the result into eight bits, effectively multiplying by two. - // The 127.5 constant here has to match the features_max value defined in - // tensorflow/examples/speech_commands/input_data.py, and this also assumes - // that features_min is zero. If it wasn't, we'd have to subtract it first. - int quantized_average = roundf(average * (255.0f / 127.5f)); - if (quantized_average < 0) { - quantized_average = 0; - } - if (quantized_average > 255) { - quantized_average = 255; - } - output[i] = quantized_average; - } - return kTfLiteOk; -} diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h deleted file mode 100644 index 7beccead5f2..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_FEATURES_GENERATOR_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_FEATURES_GENERATOR_H_ - -#include "tensorflow/lite/c/common.h" - -// Converts audio sample data into a more compact form that's appropriate for -// feeding into a neural network. There are reference implementations that use -// both floating point and fixed point available, but because the calculations -// involved can be time-consuming, it's recommended that you use or write -// specialized versions for your platform. 
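As a quick worked check of the quantization step in the generator above (illustrative values, not from the sources): since 255.0f / 127.5f is exactly 2, the mapping is quantized = roundf(2 * average), clamped to [0, 255]. An average power of 0.0 maps to 0, 31.7 maps to roundf(63.4) = 63, 127.5 maps to exactly 255, and anything larger, say 200.0, would map to 400 and is clamped to 255; this is why the features_max value used during training must also be 127.5.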
-TfLiteStatus GenerateSimpleFeatures(const int16_t* input, int input_size, - int output_size, uint8_t* output); - -#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_FEATURES_GENERATOR_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc deleted file mode 100644 index f3babd12ddb..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator_test.cc +++ /dev/null @@ -1,59 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/simple_features_generator.h" - -#include "tensorflow/lite/c/common.h" -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/no_power_spectrum_data.h" -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h" -#include "tensorflow/lite/micro/examples/micro_speech/testdata/no_30ms_audio_data.h" -#include "tensorflow/lite/micro/examples/micro_speech/testdata/yes_30ms_audio_data.h" -#include "tensorflow/lite/micro/micro_log.h" -#include "tensorflow/lite/micro/testing/micro_test.h" - -TF_LITE_MICRO_TESTS_BEGIN - -TF_LITE_MICRO_TEST(TestSimpleFeaturesGenerator) { - uint8_t yes_calculated_data[g_yes_power_spectrum_data_size]; - TfLiteStatus yes_status = GenerateSimpleFeatures( - g_yes_30ms_audio_data, g_yes_30ms_audio_data_size, - g_yes_power_spectrum_data_size, yes_calculated_data); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, yes_status); - - for (int i = 0; i < g_yes_power_spectrum_data_size; ++i) { - TF_LITE_MICRO_EXPECT_EQ(g_yes_power_spectrum_data[i], - yes_calculated_data[i]); - if (g_yes_power_spectrum_data[i] != yes_calculated_data[i]) { - MicroPrintf("Expected value %d but found %d", - g_yes_power_spectrum_data[i], yes_calculated_data[i]); - } - } - - uint8_t no_calculated_data[g_no_power_spectrum_data_size]; - TfLiteStatus no_status = - GenerateSimpleFeatures(g_no_30ms_audio_data, g_no_30ms_audio_data_size, - g_no_power_spectrum_data_size, no_calculated_data); - TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, no_status); - - for (int i = 0; i < g_no_power_spectrum_data_size; ++i) { - TF_LITE_MICRO_EXPECT_EQ(g_no_power_spectrum_data[i], no_calculated_data[i]); - if (g_no_power_spectrum_data[i] != no_calculated_data[i]) { - MicroPrintf("Expected value %d but found %d", g_no_power_spectrum_data[i], - no_calculated_data[i]); - } - } -} - -TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h deleted file mode 100644 index 9d129c8a86f..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/simple_model_settings.h +++ /dev/null @@ -1,43 +0,0 @@ -/* 
Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_MODEL_SETTINGS_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_MODEL_SETTINGS_H_ - -// Keeping these as constant expressions allows us to allocate fixed-sized -// arrays on the stack for our working memory. - -// The size of the input time series data we pass to the FFT to produce the -// frequency information. This has to be a power of two, and since we're dealing -// with 30ms of 16KHz inputs, which means 480 samples, 512 is the next power of -// two above that. -constexpr int kMaxAudioSampleSize = 512; -constexpr int kAudioSampleFrequency = 16000; - -// All of these values are derived from the values used during model training; -// if you change your model, you'll need to update these constants. -constexpr int kAverageWindowSize = 6; -// Ceiling division: (512 / 2) frequency bins averaged in groups of 6 gives 43. -constexpr int kFeatureSliceSize = - ((kMaxAudioSampleSize / 2) + (kAverageWindowSize - 1)) / kAverageWindowSize; -constexpr int kFeatureSliceCount = 49; -constexpr int kFeatureElementCount = (kFeatureSliceSize * kFeatureSliceCount); -constexpr int kFeatureSliceStrideMs = 20; -constexpr int kFeatureSliceDurationMs = 30; - -constexpr int kCategoryCount = 4; -constexpr int kSilenceIndex = 0; -constexpr int kUnknownIndex = 1; -extern const char* kCategoryLabels[kCategoryCount]; - -#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_SIMPLE_MODEL_SETTINGS_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h deleted file mode 100644 index 5264e6262fc..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_power_spectrum_data.h +++ /dev/null @@ -1,29 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -// This data was extracted from the larger feature data held in -// yes_features_data.cc and consists of the 26th spectrogram slice of 43 values. -// This is the expected result of running the sample data in -// yes_30ms_sample_data.cc through the preprocessing pipeline. 
- -#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_POWER_SPECTRUM_DATA_H_ -#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_POWER_SPECTRUM_DATA_H_ - -#include <cstdint> - -constexpr int g_yes_power_spectrum_data_size = 43; -extern const uint8_t g_yes_power_spectrum_data[]; - -#endif  // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_POWER_SPECTRUM_DATA_H_ diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.cc b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.cc deleted file mode 100644 index 078f78d5428..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.cc +++ /dev/null @@ -1,158 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -#include "tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h" - -/* File automatically created by - * tensorflow/examples/speech_commands/wav_to_features.py \ - * --sample_rate=16000 \ - * --clip_duration_ms=1000 \ - * --window_size_ms=30 \ - * --window_stride_ms=20 \ - * --feature_bin_count=40 \ - * --quantize=1 \ - * --preprocess="average" \ - * --input_wav="speech_commands_test_set_v0.02/yes/f2e59fea_nohash_1.wav" \ - * --output_c_file="yes_simple_features_data.cc" \ - */ - -const int g_yes_simple_f2e59fea_nohash_1_width = 43; -const int g_yes_simple_f2e59fea_nohash_1_height = 49; -const unsigned char g_yes_simple_f2e59fea_nohash_1_data[] = { - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 4, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 19, 1, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 3, 3, 1, 1, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 8, 89, 8, 0, 0, 0, 0, 0, 0, 0, 0, 4, 13, - 1, 6, 23, 20, 6, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 177, 42, 1, - 1, 0, 0, 0, 0, 2, 3, 119, 51, 5, 139, 92, 58, 58, 15, 2, 1, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 13, 165, 176, 3, 1, 1, 0, 0, 1, 1, 32, 214, - 26, 19, 113, 103, 28, 22, 27, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 55, 128, - 27, 1, 1, 0, 1, 4, 2, 52, 93, 10, 28, 156, 10, 21, 21, 3, 3, - 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 14, 99, 32, 65, 7, 1, 2, 2, 6, 13, 121, - 36, 15, 11, 112, 125, 14, 5, 13, 4, 4, 2, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 25, - 32, 5, 1, 0, 0, 0, 1, 0, 7, 5, 1, 1, 3, 3, 0, 3, 3, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 13, 13, 5, 1, 0, 0, 0, 0, 0, 3, - 4, 1, 0, 1, 2, 3, 1, 1, 1, 4, 8, 1, 2, 1, 3, 1, 1, - 0, 1, 1, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 8, 2, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 1, 2, 0, 2, - 1, 0, 2, 0, 2, 2, 3, 1, 1, 0, 1, 1, 4, 5, 1, 0, 1, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 2, 1, 0, 1, 3, 1, - 1, 3, 1, 1, 6, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 2, 6, 2, 4, 2, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, - 0, 0, 1, 2, 1, 1, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 
0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 2, 3, 5, 2, 0, - 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 1, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 2, 3, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -}; diff --git a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h b/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h deleted file mode 100644 index 98c7e429fee..00000000000 --- a/tensorflow/lite/micro/examples/micro_speech/simple_features/yes_simple_features_data.h +++ /dev/null @@ -1,23 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-==============================================================================*/
-
-#ifndef TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_SIMPLE_FEATURES_DATA_H_
-#define TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_SIMPLE_FEATURES_DATA_H_
-
-extern const int g_yes_simple_f2e59fea_nohash_1_width;
-extern const int g_yes_simple_f2e59fea_nohash_1_height;
-extern const unsigned char g_yes_simple_f2e59fea_nohash_1_data[];
-
-#endif // TENSORFLOW_LITE_MICRO_EXAMPLES_MICRO_SPEECH_SIMPLE_FEATURES_YES_SIMPLE_FEATURES_DATA_H_
diff --git a/tensorflow/lite/micro/examples/micro_speech/testdata/noise_1000ms.wav b/tensorflow/lite/micro/examples/micro_speech/testdata/noise_1000ms.wav
new file mode 100644
index 00000000000..8e0896adb5d
Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/testdata/noise_1000ms.wav differ
diff --git a/tensorflow/lite/micro/examples/micro_speech/testdata/silence_1000ms.wav b/tensorflow/lite/micro/examples/micro_speech/testdata/silence_1000ms.wav
new file mode 100644
index 00000000000..294dfc6d96b
Binary files /dev/null and b/tensorflow/lite/micro/examples/micro_speech/testdata/silence_1000ms.wav differ
diff --git a/tensorflow/lite/micro/examples/micro_speech/train/README.md b/tensorflow/lite/micro/examples/micro_speech/train/README.md
index 18a68465326..636f70cbd03 100644
--- a/tensorflow/lite/micro/examples/micro_speech/train/README.md
+++ b/tensorflow/lite/micro/examples/micro_speech/train/README.md
@@ -1,7 +1,7 @@
# Micro Speech Training

-This example shows how to train a 20 kB model that can recognize 2 keywords,
+This example shows how to train a model of less than 20 kB that can recognize 2 keywords,
"yes" and "no", from speech data.

If the input does not belong to either category, it is classified as "unknown"
@@ -89,19 +89,17 @@ includes the following 3 model files:
**Fully quantized implies that the model is **strictly int8** quantized
**including** the input(s) and output(s).*

-
## Model Architecture

-This is a simple model comprising of a Convolutional 2D layer, a Fully Connected
+This is a simple model composed of a Convolutional 2D layer, a Fully Connected
Layer or a MatMul Layer (output: logits) and a Softmax layer
-(output: probabilities) as shown below. Refer to the [`tiny_conv`](https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/examples/speech_commands/models.py#L673)
+(output: probabilities) as shown below. Refer to the [`tiny_conv`](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/examples/speech_commands/models.py#L673)
model architecture.

-![model_architecture.png](../images/model_architecture.png)
+[model architecture](../images/micro_speech_quantized.png)

-*This image was derived from visualizing the 'model.tflite' file in
+*This image was derived from visualizing the 'models/micro_speech_quantized.tflite' file in
[Netron](https://github.com/lutzroeder/netron)*

This doesn't produce a highly accurate model, but it's designed to be used as
@@ -109,7 +107,7 @@ the first stage of a pipeline, running on a low-energy piece of hardware that
can always be on, and then wake higher-power chips when a possible utterance
has been found, so that more accurate analysis can be done. Additionally, the
model takes in preprocessed speech input, as a result of which we can leverage a
-simpler model for accurate results.
+simpler model for inference results.

## Dataset

@@ -124,49 +122,32 @@ downloaded you won't need to do this again.
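The "strictly int8, including the input(s) and output(s)" property of the quantized model file above comes from the converter settings used when the trained model is exported. A minimal sketch of such an export, assuming a SavedModel at a placeholder path and a placeholder spectrogram input shape of 1x49x40x1 (matching the preprocessing described in the next section):

```
import numpy as np
import tensorflow as tf

def representative_dataset():
  # A set of preprocessed spectrograms drives the quantization ranges;
  # random data is only a stand-in here.
  for _ in range(100):
    yield [np.random.rand(1, 49, 40, 1).astype(np.float32)]

converter = tf.lite.TFLiteConverter.from_saved_model("/tmp/micro_speech_model")
converter.optimizations = [tf.lite.Optimize.DEFAULT]
converter.representative_dataset = representative_dataset
# Restrict to int8 ops and make the input/output tensors int8 as well.
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
converter.inference_input_type = tf.int8
converter.inference_output_type = tf.int8

with open("/tmp/micro_speech_quantized.tflite", "wb") as f:
  f.write(converter.convert())
```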
## Preprocessing Speech Input

In this section we discuss spectrograms, the preprocessed speech input to the
-model. Here's an illustration of the process:
-
-![spectrogram diagram](https://storage.googleapis.com/download.tensorflow.org/example_images/spectrogram_diagram.png)
+model.

The model doesn't take in raw audio sample data; instead, it works with
spectrograms, which are two-dimensional arrays made up of slices of
frequency information, each taken from a different time window.

The recipe for creating the spectrogram data is that each frequency slice is
-created by running an FFT across a 30ms section of the audio sample data. The
+created by running an FFT across a 30ms window of the audio sample data. The
input samples are treated as being between -1 and +1 as real values (encoded as
--32,768 and 32,767 in 16-bit signed integer samples).
+-32,768 and 32,767 in 16-bit signed integer samples). The audio sampling window
+stride is 20ms, so adjacent windows overlap by 10ms.

-This results in an FFT with 256 entries. Every sequence of six entries is
-averaged together, giving a total of 43 frequency buckets in the final slice.
-The results are stored as unsigned eight-bit values, where 0 represents a real
-number of zero, and 255 represents 127.5 as a real number.
+This results in an FFT with 257 entries. Every sequence of approximately six
+entries is averaged together, giving a total of 40 frequency buckets in the
+slice. The results are further processed by down-scaling, noise reduction,
+automatic gain control, and a final down-scaling.

Each adjacent frequency entry is stored in ascending memory order (frequency
bucket 0 at data[0], bucket 1 at data[1], etc). The window for the frequency
analysis is then moved forward by 20ms, and the process repeated, storing the
-results in the next memory row (for example bucket 0 in this moved window would
-be in data[43 + 0], etc). This process happens 49 times in total, producing a
-single channel image that is 43 pixels wide, and 49 rows high.
-
-In a complete application these spectrograms would be calculated at runtime from
-microphone inputs, but the code for doing that is not yet included in this
-sample code. The test uses spectrograms that have been pre-calculated from
-one-second WAV files in the test dataset generated by running the following
-commands:
-
-```
-python tensorflow/tensorflow/examples/speech_commands/wav_to_features.py \
---input_wav=/tmp/speech_dataset/yes/f2e59fea_nohash_1.wav \
---output_c_file=/tmp/yes_features_data.cc \
---window_stride=20 --preprocess=average --quantize=1
-
-python tensorflow/tensorflow/examples/speech_commands/wav_to_features.py \
---input_wav=/tmp/speech_dataset/no/f9643d42_nohash_4.wav \
---output_c_file=/tmp/no_features_data.cc \
---window_stride=20 --preprocess=average --quantize=1
-```
-
+results of the new frequency slice in the next memory row.
+The training is configured for raw audio samples of 1000ms in length.
+With a window size of 30ms and stride of 20ms, 49 frequency slices can
+be created from 1000ms of audio data.
+Thus, the preprocessing produces a single-channel image that is 40 pixels wide
+and 49 rows high.
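The slice arithmetic above can be checked directly. A small sketch, using the window and stride values from the description and the 16 kHz sample rate used by the speech_commands tooling:

```
# Spectrogram slice arithmetic for a 1000ms clip, 30ms window, 20ms stride.
sample_rate = 16000  # Hz, per the speech_commands feature-generation settings
clip_ms, window_ms, stride_ms = 1000, 30, 20

window_samples = sample_rate * window_ms // 1000  # 480 samples per FFT window
stride_samples = sample_rate * stride_ms // 1000  # 320 samples between windows

# Full windows that fit in the clip: 1 + (1000 - 30) // 20 = 49 slices.
num_slices = 1 + (clip_ms - window_ms) // stride_ms
num_buckets = 40  # frequency buckets per slice after averaging

print(num_slices, num_buckets)  # -> 49 40, i.e. a 40-wide, 49-row image
```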
## Other Training Methods diff --git a/tensorflow/lite/micro/examples/mnist_lstm/BUILD b/tensorflow/lite/micro/examples/mnist_lstm/BUILD index 6df2eef7400..7d818b21565 100644 --- a/tensorflow/lite/micro/examples/mnist_lstm/BUILD +++ b/tensorflow/lite/micro/examples/mnist_lstm/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_test") load("@tflm_pip_deps//:requirements.bzl", "requirement") py_binary( @@ -6,7 +7,7 @@ py_binary( srcs_version = "PY3", deps = [ requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) @@ -17,6 +18,7 @@ py_binary( deps = [ "//python/tflite_micro:runtime", "@absl_py//absl:app", + requirement("pillow"), ], ) diff --git a/tensorflow/lite/micro/examples/network_tester/network_tester_test.cc b/tensorflow/lite/micro/examples/network_tester/network_tester_test.cc index e62e0c42ab8..23e50c9e1d5 100644 --- a/tensorflow/lite/micro/examples/network_tester/network_tester_test.cc +++ b/tensorflow/lite/micro/examples/network_tester/network_tester_test.cc @@ -1,4 +1,4 @@ -/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -40,7 +40,7 @@ limitations under the License. #define NUM_INFERENCES 1 #endif -uint8_t tensor_arena[TENSOR_ARENA_SIZE]; +alignas(16) uint8_t tensor_arena[TENSOR_ARENA_SIZE]; #ifdef NUM_BYTES_TO_PRINT inline void print_output_data(TfLiteTensor* output) { @@ -92,15 +92,19 @@ TF_LITE_MICRO_TEST(TestInvoke) { model->version(), TFLITE_SCHEMA_VERSION); return kTfLiteError; } +#ifdef ETHOS_U + tflite::MicroMutableOpResolver<1> resolver; + resolver.AddEthosU(); - tflite::MicroMutableOpResolver<6> resolver; +#else + tflite::MicroMutableOpResolver<5> resolver; resolver.AddAveragePool2D(tflite::Register_AVERAGE_POOL_2D_INT8()); resolver.AddConv2D(tflite::Register_CONV_2D_INT8()); resolver.AddDepthwiseConv2D(tflite::Register_DEPTHWISE_CONV_2D_INT8()); - resolver.AddEthosU(); resolver.AddReshape(); resolver.AddSoftmax(tflite::Register_SOFTMAX_INT8()); +#endif tflite::MicroInterpreter interpreter(model, resolver, tensor_arena, TENSOR_ARENA_SIZE); @@ -152,7 +156,8 @@ TF_LITE_MICRO_TEST(TestInvoke) { } #endif } - MicroPrintf("Ran successfully\n"); + + MicroPrintf("~~~ALL TESTS PASSED~~~\n"); } TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/examples/person_detection/README.md b/tensorflow/lite/micro/examples/person_detection/README.md index a490629c5dd..2b3333439b4 100644 --- a/tensorflow/lite/micro/examples/person_detection/README.md +++ b/tensorflow/lite/micro/examples/person_detection/README.md @@ -7,6 +7,7 @@ network to recognize people in images. - [Run the tests on a development machine](#run-the-tests-on-a-development-machine) - [Training your own model](#training-your-own-model) +- [Additional makefile targets](#additional-makefile-targets) ## Run the tests on a development machine @@ -26,6 +27,19 @@ and checks that the network correctly identifies them. To understand how TensorFlow Lite does this, you can look at [person_detection_test.cc](person_detection_test.cc). 
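Besides the C++ test, the TFLM Python runtime that this patch standardizes on (`//python/tflite_micro:runtime`) can drive a similar check from a script. A rough sketch, assuming a person-detection model at a placeholder path and the 96x96 grayscale int8 input this example uses; the score printout corresponds to the makefile target's output shown below:

```
import numpy as np
from tflite_micro.python.tflite_micro import runtime

# Load the person detection model (placeholder path).
interpreter = runtime.Interpreter.from_file("/tmp/person_detect.tflite")

# A single 96x96 grayscale frame; zeros stand in for real camera data.
frame = np.zeros((1, 96, 96, 1), dtype=np.int8)
interpreter.set_input(frame, 0)
interpreter.invoke()

# Two int8 scores (person / no person), in the order defined by the
# example's model settings.
scores = interpreter.get_output(0)
print(scores)
```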
+## Additional makefile targets +``` +make -f tensorflow/lite/micro/tools/make/Makefile person_detection +make -f tensorflow/lite/micro/tools/make/Makefile person_detection_bin +make -f tensorflow/lite/micro/tools/make/Makefile run_person_detection +``` + +The `run_person_detection` target will produce continuous output similar +to the following: +``` +person score:-72 no person score 72 +``` + ## Training your own model You can train your own model with some easy-to-use scripts. See diff --git a/tensorflow/lite/micro/examples/person_detection/utils/BUILD b/tensorflow/lite/micro/examples/person_detection/utils/BUILD index 980d803a327..7f5c815b7a4 100644 --- a/tensorflow/lite/micro/examples/person_detection/utils/BUILD +++ b/tensorflow/lite/micro/examples/person_detection/utils/BUILD @@ -1,4 +1,5 @@ load("@tflm_pip_deps//:requirements.bzl", "requirement") +load("@rules_python//python:defs.bzl", "py_binary", "py_library", "py_test") package( features = ["-layering_check"], @@ -36,7 +37,7 @@ py_test( ], deps = [ ":raw_to_bitmap_lib", - requirement("tensorflow-cpu"), + requirement("tensorflow"), requirement("numpy"), ], ) diff --git a/tensorflow/lite/micro/examples/recipes/BUILD b/tensorflow/lite/micro/examples/recipes/BUILD index 475e552b619..3572fdb19fe 100644 --- a/tensorflow/lite/micro/examples/recipes/BUILD +++ b/tensorflow/lite/micro/examples/recipes/BUILD @@ -1,9 +1,21 @@ +load("@rules_python//python:defs.bzl", "py_library", "py_test") load("@tflm_pip_deps//:requirements.bzl", "requirement") package( licenses = ["notice"], ) +py_library( + name = "add_four_numbers", + srcs = ["add_four_numbers.py"], + srcs_version = "PY3", + visibility = ["//:__subpackages__"], + deps = [ + requirement("numpy"), + requirement("tensorflow"), + ], +) + py_library( name = "resource_variables_lib", srcs = ["resource_variables_lib.py"], @@ -11,7 +23,7 @@ py_library( visibility = ["//:__subpackages__"], deps = [ requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) @@ -26,8 +38,6 @@ py_test( ], deps = [ ":resource_variables_lib", - # TODO(b/286456378): update tflm_runtime to runtime when we are ready to - # remove the alias. - "//tensorflow/lite/micro/python/interpreter/src:tflm_runtime", + "//python/tflite_micro:runtime", ], ) diff --git a/tensorflow/lite/micro/examples/recipes/add_four_numbers.py b/tensorflow/lite/micro/examples/recipes/add_four_numbers.py new file mode 100644 index 00000000000..f564141bc68 --- /dev/null +++ b/tensorflow/lite/micro/examples/recipes/add_four_numbers.py @@ -0,0 +1,62 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= +"""Simple TF model creation using resource variables.""" + +import numpy as np +import tensorflow as tf + + +""" +Generates a simple TfLite model that adds 4 numbers. 
+ +Basic Usage: + + model = generate_model(False) + +Usage where you want model written to file: + + file_path = "some file path" + model = generate_model(True, file_path) +""" + +class AddFourNumbers(tf.Module): + @tf.function( + input_signature=[ + tf.TensorSpec(shape=[1], dtype=tf.float32, name="a"), + tf.TensorSpec(shape=[1], dtype=tf.float32, name="b"), + tf.TensorSpec(shape=[1], dtype=tf.float32, name="c"), + tf.TensorSpec(shape=[1], dtype=tf.float32, name="d"), + ] + ) + def __call__(self, a, b, c, d): + return a + b + c + d + + +def get_model_from_concrete_function(): + """Accumulator model built via TF concrete functions.""" + model = AddFourNumbers("AddFourNumbers") + concrete_func = model.__call__.get_concrete_function() + converter = tf.lite.TFLiteConverter.from_concrete_functions( + [concrete_func], model + ) + return converter.convert() + + +def generate_model(write_file=True, filename="/tmp/add.tflite"): + model = get_model_from_concrete_function() + if write_file: + with open(filename, "wb") as f: + f.write(model) + return model diff --git a/tensorflow/lite/micro/examples/recipes/resource_variables_test.py b/tensorflow/lite/micro/examples/recipes/resource_variables_test.py index ad8c79eb94f..686a6506fb1 100644 --- a/tensorflow/lite/micro/examples/recipes/resource_variables_test.py +++ b/tensorflow/lite/micro/examples/recipes/resource_variables_test.py @@ -18,9 +18,7 @@ from tensorflow.python.platform import test from tflite_micro.tensorflow.lite.micro.examples.recipes import resource_variables_lib -# TODO(b/286456378): change tflm_runtime to runtime when we all other usage has -# been updated. -from tflite_micro.tensorflow.lite.micro.python.interpreter.src import tflm_runtime +from tflite_micro.python.tflite_micro import runtime as tflm_runtime class ResourceVariablesTest(test_util.TensorFlowTestCase): diff --git a/tensorflow/lite/micro/fake_micro_context.cc b/tensorflow/lite/micro/fake_micro_context.cc index 03ea6dfc75b..5787ffd0648 100644 --- a/tensorflow/lite/micro/fake_micro_context.cc +++ b/tensorflow/lite/micro/fake_micro_context.cc @@ -15,28 +15,18 @@ limitations under the License. #include "tensorflow/lite/micro/fake_micro_context.h" +#include "tensorflow/lite/c/c_api_types.h" #include "tensorflow/lite/kernels/internal/compatibility.h" #include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" -#include "tensorflow/lite/micro/micro_allocator.h" #include "tensorflow/lite/micro/micro_arena_constants.h" #include "tensorflow/lite/micro/micro_log.h" namespace tflite { -namespace { -// Dummy static variables to allow creation of dummy MicroAllocator. -// All tests are guarateed to run serially. 
-static constexpr int KDummyTensorArenaSize = 256; -static uint8_t dummy_tensor_arena[KDummyTensorArenaSize]; -} // namespace FakeMicroContext::FakeMicroContext(TfLiteTensor* tensors, SingleArenaBufferAllocator* allocator, MicroGraph* micro_graph) - : MicroContext( - MicroAllocator::Create(dummy_tensor_arena, KDummyTensorArenaSize), - nullptr, micro_graph), - tensors_(tensors), - allocator_(allocator) {} + : graph_(*micro_graph), tensors_(tensors), allocator_(allocator) {} TfLiteTensor* FakeMicroContext::AllocateTempTfLiteTensor(int tensor_index) { allocated_temp_count_++; @@ -113,4 +103,13 @@ void* FakeMicroContext::GetScratchBuffer(int buffer_index) { return scratch_buffers_[buffer_index]; } +TfLiteStatus FakeMicroContext::set_external_context( + void* external_context_payload) { + return kTfLiteError; +} + +void* FakeMicroContext::external_context() { return nullptr; } + +MicroGraph& FakeMicroContext::graph() { return graph_; } + } // namespace tflite diff --git a/tensorflow/lite/micro/fake_micro_context.h b/tensorflow/lite/micro/fake_micro_context.h index b068f32634e..46d8a9b1ec4 100644 --- a/tensorflow/lite/micro/fake_micro_context.h +++ b/tensorflow/lite/micro/fake_micro_context.h @@ -27,6 +27,8 @@ namespace tflite { class FakeMicroContext : public MicroContext { public: + ~FakeMicroContext() = default; + FakeMicroContext(TfLiteTensor* tensors, SingleArenaBufferAllocator* allocator, MicroGraph* micro_graph); @@ -44,9 +46,14 @@ class FakeMicroContext : public MicroContext { TfLiteEvalTensor* GetEvalTensor(int tensor_index) override; + TfLiteStatus set_external_context(void* external_context_payload) override; + void* external_context() override; + MicroGraph& graph() override; + private: static constexpr int kNumScratchBuffers_ = 12; + MicroGraph& graph_; int scratch_buffer_count_ = 0; uint8_t* scratch_buffers_[kNumScratchBuffers_]; diff --git a/tensorflow/lite/micro/fake_micro_context_test.cc b/tensorflow/lite/micro/fake_micro_context_test.cc index 264b7e79acf..e792238f7ed 100644 --- a/tensorflow/lite/micro/fake_micro_context_test.cc +++ b/tensorflow/lite/micro/fake_micro_context_test.cc @@ -18,6 +18,7 @@ limitations under the License. 
#include "tensorflow/lite/micro/arena_allocator/single_arena_buffer_allocator.h" #include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/mock_micro_graph.h" #include "tensorflow/lite/micro/test_helpers.h" #include "tensorflow/lite/micro/testing/micro_test.h" @@ -58,7 +59,7 @@ TF_LITE_MICRO_TEST(TestGetBeforeRequestScratchBufferWouldReturnNull) { uint8_t arena_buffer[kArenaSize]; tflite::SingleArenaBufferAllocator simple_memory_allocator(arena_buffer, kArenaSize); - tflite::MicroGraph dummy_micro_graph(nullptr, nullptr, nullptr, nullptr); + tflite::MockMicroGraph dummy_micro_graph{&simple_memory_allocator}; tflite::FakeMicroContext micro_context = tflite::CreateFakeMicroContext( &simple_memory_allocator, &dummy_micro_graph); @@ -71,7 +72,7 @@ TF_LITE_MICRO_TEST(TestRequestScratchBufferAndThenGetShouldSucceed) { uint8_t arena_buffer[kArenaSize]; tflite::SingleArenaBufferAllocator simple_memory_allocator(arena_buffer, kArenaSize); - tflite::MicroGraph dummy_micro_graph(nullptr, nullptr, nullptr, nullptr); + tflite::MockMicroGraph dummy_micro_graph{&simple_memory_allocator}; tflite::FakeMicroContext micro_context = tflite::CreateFakeMicroContext( &simple_memory_allocator, &dummy_micro_graph); diff --git a/tensorflow/lite/micro/integration_tests/BUILD b/tensorflow/lite/micro/integration_tests/BUILD index 1e96ba8b60f..5b90e5d86a5 100644 --- a/tensorflow/lite/micro/integration_tests/BUILD +++ b/tensorflow/lite/micro/integration_tests/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_binary") load("@tflm_pip_deps//:requirements.bzl", "requirement") package(licenses = ["notice"]) @@ -18,7 +19,7 @@ py_binary( "@absl_py//absl:app", "@absl_py//absl/flags", requirement("mako"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), "//tensorflow/lite/micro/tools:generate_test_for_model", "//tensorflow/lite/python:schema_py", "//tensorflow/lite/python:schema_util", diff --git a/tensorflow/lite/micro/kernels/BUILD b/tensorflow/lite/micro/kernels/BUILD index 11ece2debc0..f2ccb067a36 100644 --- a/tensorflow/lite/micro/kernels/BUILD +++ b/tensorflow/lite/micro/kernels/BUILD @@ -1,8 +1,10 @@ +load("@bazel_skylib//rules:common_settings.bzl", "string_flag") load("//tensorflow/lite/micro:build_def.bzl", "micro_copts", "tflm_kernel_cc_library") load( "//tensorflow:extra_rules.bzl", "tflm_kernel_friends", "xtensa_fusion_f1_config", + "xtensa_hifi_3_config", "xtensa_hifi_3z_config", "xtensa_hifi_5_config", "xtensa_vision_p6_config", @@ -38,6 +40,7 @@ package_group( cc_library( name = "activation_utils", hdrs = ["activation_utils.h"], + copts = micro_copts(), deps = [ "//tensorflow/lite/c:common", "//tensorflow/lite/kernels/internal:cppmath", @@ -52,6 +55,7 @@ cc_library( hdrs = [ "circular_buffer_flexbuffers_generated_data.h", ], + copts = micro_copts(), ) cc_library( @@ -62,6 +66,7 @@ cc_library( hdrs = [ "conv_test.h", ], + copts = micro_copts(), deps = [ ":kernel_runner", ":micro_ops", @@ -79,6 +84,7 @@ cc_library( hdrs = [ "detection_postprocess_flexbuffers_generated_data.h", ], + copts = micro_copts(), ) cc_library( @@ -87,6 +93,7 @@ cc_library( "kernel_runner.cc", ], hdrs = ["kernel_runner.h"], + copts = micro_copts(), visibility = [ "//visibility:public", ], @@ -107,6 +114,7 @@ cc_library( "kernel_util.cc", ], hdrs = ["kernel_util.h"], + copts = micro_copts(), visibility = [ ":kernel_friends", ":tflite_micro", @@ -126,6 +134,7 @@ cc_library( hdrs = [ "lstm_shared.h", ], + copts = micro_copts(), visibility = 
["//tensorflow/lite/micro/kernels/testdata:__pkg__"], ) @@ -134,6 +143,7 @@ cc_library( hdrs = [ "lstm_eval_test.h", ], + copts = micro_copts(), deps = [ ":kernel_util", ":micro_ops", @@ -149,6 +159,7 @@ cc_library( "micro_tensor_utils.cc", ], hdrs = ["micro_tensor_utils.h"], + copts = micro_copts(), deps = [ "//tensorflow/lite/c:common", "//tensorflow/lite/core:macros", @@ -160,6 +171,11 @@ cc_library( ], ) +HIFI3_COPTS = [ + "-DXTENSA=1", + "-DHIFI3=1", +] + HIFI4_COPTS = [ "-DXTENSA=1", "-DHIFI4=1", @@ -185,6 +201,7 @@ tflm_kernel_cc_library( "add_n.cc", "arg_min_max.cc", "assign_variable.cc", + "batch_matmul.cc", "batch_to_space_nd.cc", "broadcast_args.cc", "broadcast_to.cc", @@ -266,6 +283,7 @@ tflm_kernel_cc_library( "squared_difference.cc", "squeeze.cc", "strided_slice.cc", + "strided_slice_common.cc", "sub.cc", "sub_common.cc", "svdf.cc", @@ -303,10 +321,13 @@ tflm_kernel_cc_library( "reduce.h", "reshape.h", "softmax.h", + "strided_slice.h", "sub.h", "svdf.h", + "transpose_conv.h", ] + select({ xtensa_fusion_f1_config(): glob(["xtensa/**/*.h"]), + xtensa_hifi_3_config(): glob(["xtensa/**/*.h"]), xtensa_hifi_3z_config(): glob(["xtensa/**/*.h"]), xtensa_hifi_5_config(): glob(["xtensa/**/*.h"]), xtensa_vision_p6_config(): glob(["xtensa/**/*.h"]), @@ -314,12 +335,14 @@ tflm_kernel_cc_library( }), accelerated_srcs = { xtensa_fusion_f1_config(): glob(["xtensa/**/*.cc"]), + xtensa_hifi_3_config(): glob(["xtensa/**/*.cc"]), xtensa_hifi_3z_config(): glob(["xtensa/**/*.cc"]), xtensa_hifi_5_config(): glob(["xtensa/**/*.cc"]), xtensa_vision_p6_config(): glob(["xtensa/**/*.cc"]), }, copts = micro_copts() + select({ xtensa_fusion_f1_config(): HIFI4_COPTS, + xtensa_hifi_3_config(): HIFI3_COPTS, xtensa_hifi_3z_config(): HIFI4_COPTS, xtensa_hifi_5_config(): HIFI5_COPTS, xtensa_vision_p6_config(): VP6_COPTS, @@ -355,6 +378,7 @@ tflm_kernel_cc_library( "@flatbuffers//:runtime_cc", ] + select({ xtensa_fusion_f1_config(): ["//third_party/xtensa/nnlib_hifi4:nnlib_hifi4_lib"], + xtensa_hifi_3_config(): ["//third_party/xtensa/nnlib_hifi4:nnlib_hifi4_lib"], xtensa_hifi_3z_config(): ["//third_party/xtensa/nnlib_hifi4:nnlib_hifi4_lib"], xtensa_hifi_5_config(): ["//third_party/xtensa/nnlib_hifi5:nnlib_hifi5_lib"], xtensa_vision_p6_config(): ["//third_party/xtensa/xi_tflmlib_vision_p6:xi_tflmlib_vision_p6_lib"], @@ -423,6 +447,20 @@ cc_test( ], ) +cc_test( + name = "batch_matmul_test", + srcs = [ + "batch_matmul_test.cc", + ], + deps = [ + ":kernel_runner", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:test_helpers", + "//tensorflow/lite/micro/testing:micro_test", + ], +) + cc_test( name = "batch_to_space_nd_test", srcs = [ @@ -1426,30 +1464,54 @@ cc_test( # Bazel config settings. #################################### +# Command line flag to select which set of optimized kernels to use. +# Each value should have a `config_setting` which is selected on in the +# `micro_ops` target to pickup optimized kernel sources. An empty value +# indicates only reference kernels should be used. 
+string_flag( + name = "optimized_kernels", + build_setting_default = "", + values = [ + "", + "xtensa_fusion_f1", + "xtensa_hifi_3", + "xtensa_hifi_3z", + "xtensa_hifi_5", + "xtensa_vision_p6", + ], +) + config_setting( name = "xtensa_fusion_f1_default", - values = { - "cpu": "F1_190305_swupgrade", + flag_values = { + ":optimized_kernels": "xtensa_fusion_f1", + }, +) + +config_setting( + name = "xtensa_hifi_3_default", + flag_values = { + ":optimized_kernels": "xtensa_hifi_3", }, ) config_setting( name = "xtensa_hifi_3z_default", - values = { - "cpu": "HIFI_190304_swupgrade", + flag_values = { + ":optimized_kernels": "xtensa_hifi_3z", }, ) config_setting( name = "xtensa_hifi_5_default", - values = { - "cpu": "AE_HiFi5_LE5_AO_FP_XC", + flag_values = { + ":optimized_kernels": "xtensa_hifi_5", }, ) config_setting( name = "xtensa_vision_p6_default", - values = { - "cpu": "P6_200528", + flag_values = { + ":optimized_kernels": "xtensa_vision_p6", }, ) diff --git a/tensorflow/lite/micro/kernels/Makefile.inc b/tensorflow/lite/micro/kernels/Makefile.inc index 926ea8ae552..0bd846bc679 100644 --- a/tensorflow/lite/micro/kernels/Makefile.inc +++ b/tensorflow/lite/micro/kernels/Makefile.inc @@ -1,4 +1,4 @@ -# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -48,12 +48,60 @@ $(eval $(call microlite_test,unidirectional_sequence_lstm_test,\ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc,\ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/testdata/lstm_test_data.h)) +$(eval $(call microlite_test,kernel_signal_delay_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/delay_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/delay_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/delay_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_signal_energy_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/energy_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/energy_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/energy_flexbuffers_generated_data.h)) + $(eval $(call microlite_test,kernel_signal_fft_test,\ $(TENSORFLOW_ROOT)signal/micro/kernels/fft_test.cc \ $(TENSORFLOW_ROOT)signal/micro/kernels/fft_flexbuffers_generated_data.cc \ $(TENSORFLOW_ROOT)signal/testdata/fft_test_data.cc, \ $(TENSORFLOW_ROOT)signal/micro/kernels/fft_flexbuffers_generated_data.h)) +$(eval $(call microlite_test,kernel_signal_framer_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/framer_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/framer_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/framer_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_signal_filter_bank_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_signal_filter_bank_log_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_log_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_log_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_signal_filter_bank_square_root_test,\ + 
$(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_square_root_test.cc)) + +$(eval $(call microlite_test,kernel_signal_filter_bank_spectral_subtraction_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_spectral_subtraction_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_spectral_subtraction_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_signal_overlap_add_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/overlap_add_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/overlap_add_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/overlap_add_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_signal_pcan_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/pcan_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/pcan_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/pcan_flexbuffers_generated_data.h)) + +$(eval $(call microlite_test,kernel_signal_stacker_test,\ + $(TENSORFLOW_ROOT)signal/micro/kernels/stacker_test.cc \ + $(TENSORFLOW_ROOT)signal/micro/kernels/stacker_flexbuffers_generated_data.cc, \ + $(TENSORFLOW_ROOT)signal/micro/kernels/stacker_flexbuffers_generated_data.h)) + $(eval $(call microlite_test,kernel_signal_window_test,\ $(TENSORFLOW_ROOT)signal/micro/kernels/window_test.cc \ $(TENSORFLOW_ROOT)signal/micro/kernels/window_flexbuffers_generated_data.cc, \ @@ -66,6 +114,7 @@ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/activations_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_n_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/arg_min_max_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/batch_matmul_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/batch_to_space_nd_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_args_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_to_test.cc \ diff --git a/tensorflow/lite/micro/kernels/add.cc b/tensorflow/lite/micro/kernels/add.cc index b27206c6774..fde047a4bdd 100644 --- a/tensorflow/lite/micro/kernels/add.cc +++ b/tensorflow/lite/micro/kernels/add.cc @@ -38,7 +38,7 @@ TfLiteStatus EvalAdd(TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { switch (output->type) { case kTfLiteFloat32: { - tflite::ArithmeticParams op_params; + tflite::ArithmeticParams op_params = {}; SetActivationParams(data->output_activation_min_f32, data->output_activation_max_f32, &op_params); if (data->requires_broadcast) { @@ -59,7 +59,7 @@ TfLiteStatus EvalAdd(TfLiteContext* context, TfLiteNode* node, } } break; case kTfLiteInt32: { - tflite::ArithmeticParams op_params; + tflite::ArithmeticParams op_params = {}; SetActivationParams(std::numeric_limits::lowest(), std::numeric_limits::max(), &op_params); if (data->requires_broadcast) { @@ -93,7 +93,7 @@ TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input1, const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { - tflite::ArithmeticParams op_params; + tflite::ArithmeticParams op_params = {}; op_params.left_shift = data->left_shift; op_params.input1_offset = data->input1_offset; op_params.input1_multiplier = data->input1_multiplier; diff --git a/tensorflow/lite/micro/kernels/arc_mli/depthwise_conv.cc b/tensorflow/lite/micro/kernels/arc_mli/depthwise_conv.cc index 
1fa1d19ad1a..c2c9cd5c438 100644 --- a/tensorflow/lite/micro/kernels/arc_mli/depthwise_conv.cc +++ b/tensorflow/lite/micro/kernels/arc_mli/depthwise_conv.cc @@ -660,6 +660,30 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { bias, output); } break; + case kTfLiteInt16: { + switch (filter->type) { + case kTfLiteInt8: { + reference_integer_ops::DepthwiseConvPerChannel( + DepthwiseConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + default: + MicroPrintf("Filter type %s (%d) for input type %s not supported.", + TfLiteTypeGetName(filter->type), filter->type, + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + break; + } default: MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), input->type); diff --git a/tensorflow/lite/micro/kernels/batch_matmul.cc b/tensorflow/lite/micro/kernels/batch_matmul.cc new file mode 100644 index 00000000000..bd621f4c2cb --- /dev/null +++ b/tensorflow/lite/micro/kernels/batch_matmul.cc @@ -0,0 +1,558 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/kernels/internal/reference/batch_matmul.h" + +#include +#include +#include + +#include "tensorflow/lite/core/c/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/transpose.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +constexpr int kInputLhsTensor = 0; +constexpr int kInputRhsTensor = 1; +constexpr int kOutputTensor = 0; + +struct QuantizationOpData { + // The scaling factor from input to output (aka the 'real multiplier') can + // be represented as a fixed point multiplier plus a left shift. + int32_t output_multiplier; + int output_shift; // exponent + + // The range of the fused activation layer. For example for kNone and + // int8_t these would be -128 and 127. 
+ int32_t output_activation_min; + int32_t output_activation_max; + + int32_t lhs_zero_point; + int32_t rhs_zero_point; + int32_t output_zero_point; +}; + +struct OpData { + QuantizationOpData* quantization; + + // Transpose tensors and state + TfLiteEvalTensor* lhs_transposed_tensor; + TfLiteEvalTensor* rhs_transposed_tensor; + bool rhs_is_transposed; + bool lhs_is_constant_tensor; + bool rhs_is_constant_tensor; +}; + +struct OpContext { + OpContext(TfLiteContext* context, TfLiteNode* node) + : params(static_cast(node->builtin_data)), + op_data(static_cast(node->user_data)) {} + + TfLiteBatchMatMulParams* params; + OpData* op_data; +}; + +struct PrepareOpContext : OpContext { + PrepareOpContext(TfLiteContext* context, TfLiteNode* node) + : OpContext(context, node), + micro_context_(GetMicroContext(context)), + lhs(micro_context_->AllocateTempInputTensor(node, kInputLhsTensor)), + rhs(micro_context_->AllocateTempInputTensor(node, kInputRhsTensor)), + output(micro_context_->AllocateTempOutputTensor(node, kOutputTensor)) {} + + ~PrepareOpContext() { + if (lhs != nullptr) { + micro_context_->DeallocateTempTfLiteTensor(lhs); + } + if (rhs != nullptr) { + micro_context_->DeallocateTempTfLiteTensor(rhs); + } + if (output != nullptr) { + micro_context_->DeallocateTempTfLiteTensor(output); + } + } + + private: + MicroContext* micro_context_; + + public: + TfLiteTensor* lhs; + TfLiteTensor* rhs; + TfLiteTensor* output; +}; + +struct EvalOpContext : OpContext { + EvalOpContext(TfLiteContext* context, TfLiteNode* node) + : OpContext(context, node), + lhs(tflite::micro::GetEvalInput(context, node, kInputLhsTensor)), + rhs(tflite::micro::GetEvalInput(context, node, kInputRhsTensor)), + output(tflite::micro::GetEvalOutput(context, node, kOutputTensor)) {} + + const TfLiteEvalTensor* lhs; + const TfLiteEvalTensor* rhs; + TfLiteEvalTensor* output; +}; + +TfLiteStatus ReshapeOutputTensor(TfLiteContext* context, TfLiteNode* node, + const RuntimeShape& extended_lhs_shape, + const RuntimeShape& extended_rhs_shape, + bool adj_x, bool adj_y, int output_rank, + TfLiteTensor* output) { + int64_t orig_size = NumElements(output); + + // make sure the new output dims rank does not exceed the original rank + TF_LITE_ENSURE(context, output_rank <= NumDimensions(output)); + + // make sure output tensor dims are not in the FlatBuffer + TfLiteEvalTensor* output_eval = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + TF_LITE_ENSURE_OK(context, tflite::micro::CreateWritableTensorDimsWithCopy( + context, output, output_eval)); + + // Fill in any broadcast dimensions. + for (int i = 0; i < output_rank - 2; ++i) { + const int lhs_dim = extended_lhs_shape.Dims(i); + const int rhs_dim = extended_rhs_shape.Dims(i); + int broadcast_dim = lhs_dim; + if ((lhs_dim != rhs_dim) && (lhs_dim == 1)) { + broadcast_dim = rhs_dim; + } + output->dims->data[i] = broadcast_dim; + } + // Fill in the matmul dimensions. + int lhs_rows_index = adj_x ? output_rank - 1 : output_rank - 2; + int rhs_cols_index = adj_y ? output_rank - 2 : output_rank - 1; + + output->dims->data[output_rank - 2] = extended_lhs_shape.Dims(lhs_rows_index); + output->dims->data[output_rank - 1] = extended_rhs_shape.Dims(rhs_cols_index); + output->dims->size = output_rank; + + // Check that output tensor has not been resized + // since TFLM doesn't support tensor resizing. 
+ TF_LITE_ENSURE_EQ(context, orig_size, NumElements(output)); + + return kTfLiteOk; +} + +TfLiteEvalTensor* AllocInitTransposeTensorFromTfLiteTensor( + TfLiteContext* context, const TfLiteTensor& tensor) { + MicroContext* micro_context = GetMicroContext(context); + TfLiteEvalTensor* eval_tensor = static_cast( + micro_context->AllocatePersistentBuffer(sizeof(TfLiteEvalTensor))); + if (eval_tensor == nullptr) { + return nullptr; + } + + eval_tensor->type = tensor.type; + + const int tensor_rank = NumDimensions(&tensor); + const size_t eval_dims_size = TfLiteIntArrayGetSizeInBytes(tensor_rank); + eval_tensor->dims = static_cast( + micro_context->AllocatePersistentBuffer(eval_dims_size)); + if (eval_tensor->dims == nullptr) { + return nullptr; + } + eval_tensor->dims->size = tensor_rank; + for (int i = 0; i < tensor_rank - 2; ++i) { + eval_tensor->dims->data[i] = tensor.dims->data[i]; + } + // Swap last two dimensions. + eval_tensor->dims->data[tensor_rank - 2] = tensor.dims->data[tensor_rank - 1]; + eval_tensor->dims->data[tensor_rank - 1] = tensor.dims->data[tensor_rank - 2]; + + const size_t eval_data_size = static_cast(NumElements(&tensor)) * + TfLiteTypeGetSize(tensor.type); + eval_tensor->data.data = + micro_context->AllocatePersistentBuffer(eval_data_size); + if (eval_tensor->data.data == nullptr) { + return nullptr; + } + + return eval_tensor; +} + +// Initializes tensors to store transposed operands. +// Allocate storage for hybrid quantization if needed. +// Allocate normal quantization data if needed. +TfLiteStatus InitializeTemporaries(TfLiteContext* context, TfLiteNode* node, + const PrepareOpContext& op_context) { + OpData* op_data = op_context.op_data; + const TfLiteTensor* lhs = op_context.lhs; + const TfLiteTensor* rhs = op_context.rhs; + MicroContext* micro_context = GetMicroContext(context); + + op_data->quantization = nullptr; + op_data->lhs_transposed_tensor = nullptr; + op_data->rhs_transposed_tensor = nullptr; + + if (lhs->type == kTfLiteInt8 || lhs->type == kTfLiteInt16) { + op_data->quantization = static_castquantization)>( + micro_context->AllocatePersistentBuffer( + sizeof(*op_data->quantization))); + TF_LITE_ENSURE(context, op_data->quantization != nullptr); + } + + // tensor for Transposed LHS; + if (op_context.params->adj_x) { + op_data->lhs_transposed_tensor = + AllocInitTransposeTensorFromTfLiteTensor(context, *lhs); + TF_LITE_ENSURE(context, op_data->lhs_transposed_tensor != nullptr); + } + + // We need a buffer for the RHS if we need to transpose the RHS. We + // transpose by default, so that the two inputs (LHS and RHS) are in a proper + // layout for our fast matrix multiplication routines. If the transpose flag + // is set by the caller, the data is already in the desired layout. + if (!op_context.params->adj_y) { + op_data->rhs_transposed_tensor = + AllocInitTransposeTensorFromTfLiteTensor(context, *rhs); + TF_LITE_ENSURE(context, op_data->rhs_transposed_tensor != nullptr); + } + + return kTfLiteOk; +} + +template +void TransposeRowsColumnsImpl(const TfLiteEvalTensor& tensor_in, + TfLiteEvalTensor* tensor_out) { + const Scalar* input = tflite::micro::GetTensorData(&tensor_in); + Scalar* output = tflite::micro::GetTensorData(tensor_out); + RuntimeShape transposed_shape(tflite::micro::GetTensorShape(&tensor_in)); + RuntimeShape shape(transposed_shape); + TransposeParams params; + const int rank = shape.DimensionsCount(); + params.perm_count = rank; + for (int i = 0; i < rank - 2; ++i) { + params.perm[i] = i; + } + // Transpose the last two dimensions. 
+ params.perm[rank - 2] = rank - 1; + params.perm[rank - 1] = rank - 2; + transposed_shape.SetDim(rank - 1, shape.Dims(rank - 2)); + transposed_shape.SetDim(rank - 2, shape.Dims(rank - 1)); + reference_ops::Transpose(params, shape, input, transposed_shape, output); +} + +TfLiteStatus TransposeRowsColumns(const TfLiteEvalTensor& tensor_in, + TfLiteEvalTensor* tensor_out) { + if (tensor_in.type == kTfLiteFloat32) { + TransposeRowsColumnsImpl(tensor_in, tensor_out); + return kTfLiteOk; + } else if (tensor_in.type == kTfLiteInt8) { + TransposeRowsColumnsImpl(tensor_in, tensor_out); + return kTfLiteOk; + } else if (tensor_in.type == kTfLiteInt16) { + TransposeRowsColumnsImpl(tensor_in, tensor_out); + return kTfLiteOk; + } else { + MicroPrintf( + "BATCH_MATMUL can only transpose tensors with FLOAT32, INT8, INT16 " + "type."); + } + return kTfLiteError; +} + +RuntimeShape SwapRowColumnDims(const RuntimeShape& shape) { + RuntimeShape swapped_shape(shape); + const int32_t dims = shape.DimensionsCount(); + swapped_shape.SetDim(dims - 2, shape.Dims(dims - 1)); + swapped_shape.SetDim(dims - 1, shape.Dims(dims - 2)); + return swapped_shape; +} + +void* BatchMatMulInit(TfLiteContext* context, const char* buffer, + size_t length) { + // This is a builtin op, so we don't use the contents in 'buffer', if any. + // Instead, we allocate a new object to carry information from Prepare() to + // Eval(). + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + MicroContext* micro_context = GetMicroContext(context); + return micro_context->AllocatePersistentBuffer(sizeof(OpData)); +} + +TfLiteStatus BatchMatMulPrepare(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + + PrepareOpContext op_context(context, node); + const TfLiteTensor* lhs_data = op_context.lhs; + TF_LITE_ENSURE(context, lhs_data != nullptr); + const TfLiteTensor* rhs_data = op_context.rhs; + TF_LITE_ENSURE(context, rhs_data != nullptr); + TfLiteTensor* output = op_context.output; + TF_LITE_ENSURE(context, output != nullptr); + + TF_LITE_ENSURE(context, lhs_data->type == kTfLiteFloat32 || + lhs_data->type == kTfLiteInt8 || + lhs_data->type == kTfLiteInt16); + TF_LITE_ENSURE(context, rhs_data->type == kTfLiteFloat32 || + rhs_data->type == kTfLiteInt8 || + rhs_data->type == kTfLiteInt16); + // Both inputs should be of the same type. + // Hybrid input (FLOAT32 LHS, INT8 RHS) is not supported. + TF_LITE_ENSURE(context, lhs_data->type == rhs_data->type); + // LHS input must match output type. INT32 output not supported. + TF_LITE_ENSURE(context, lhs_data->type == output->type); + + const int lhs_rank = NumDimensions(lhs_data); + const int rhs_rank = NumDimensions(rhs_data); + // Support dimensions between 2 and 5, inclusive. + TF_LITE_ENSURE(context, lhs_rank >= 2); + TF_LITE_ENSURE(context, lhs_rank <= 5); + TF_LITE_ENSURE(context, rhs_rank >= 2); + TF_LITE_ENSURE(context, rhs_rank <= 5); + + TF_LITE_ENSURE_OK(context, InitializeTemporaries(context, node, op_context)); + + OpData* op_data = op_context.op_data; + // If the RHS is constant, we only transpose once. + op_data->rhs_is_transposed = false; + op_data->lhs_is_constant_tensor = IsConstantTensor(lhs_data); + op_data->rhs_is_constant_tensor = IsConstantTensor(rhs_data); + + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. 
+ if (lhs_data->type == kTfLiteInt8 || lhs_data->type == kTfLiteInt16) { + TF_LITE_ENSURE(context, op_data->quantization != nullptr); + double real_multiplier = 0.0; + TF_LITE_ENSURE_STATUS(GetQuantizedConvolutionMultipler( + context, lhs_data, rhs_data, output, &real_multiplier)); + QuantizeMultiplier(real_multiplier, + &op_data->quantization->output_multiplier, + &op_data->quantization->output_shift); + // BatchMatMul has no fused activation functions. Therefore, set + // output activation min and max to min and max of int8_t or int16_t type. + if (lhs_data->type == kTfLiteInt8) { + op_data->quantization->output_activation_min = + std::numeric_limits::min(); + op_data->quantization->output_activation_max = + std::numeric_limits::max(); + } else { + op_data->quantization->output_activation_min = + std::numeric_limits::min(); + op_data->quantization->output_activation_max = + std::numeric_limits::max(); + + TF_LITE_ENSURE_EQ(context, lhs_data->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, rhs_data->params.zero_point, 0); + TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); + } + + op_data->quantization->lhs_zero_point = lhs_data->params.zero_point; + op_data->quantization->rhs_zero_point = rhs_data->params.zero_point; + op_data->quantization->output_zero_point = output->params.zero_point; + } + + const int output_rank = std::max(lhs_rank, rhs_rank); + const RuntimeShape extended_lhs_shape = + RuntimeShape::ExtendedShape(output_rank, GetTensorShape(lhs_data)); + const RuntimeShape extended_rhs_shape = + RuntimeShape::ExtendedShape(output_rank, GetTensorShape(rhs_data)); + + // Ensure any batch dimensions obey broacasting rules. + for (int i = 0; i < output_rank - 2; ++i) { + const int lhs_dim = extended_lhs_shape.Dims(i); + const int rhs_dim = extended_rhs_shape.Dims(i); + if (lhs_dim != rhs_dim) { + if (lhs_dim != 1) { + TF_LITE_ENSURE_EQ(context, rhs_dim, 1); + } + } + } + bool adj_x = op_context.params->adj_x; + bool adj_y = op_context.params->adj_y; + // Ensure other dimensions work for matrix multiplication. + int accum_dim_lhs = adj_x ? extended_lhs_shape.Dims(output_rank - 2) + : extended_lhs_shape.Dims(output_rank - 1); + int accum_dim_rhs = adj_y ? extended_rhs_shape.Dims(output_rank - 1) + : extended_rhs_shape.Dims(output_rank - 2); + + TF_LITE_ENSURE_EQ(context, accum_dim_lhs, accum_dim_rhs); + TfLiteStatus status = + ReshapeOutputTensor(context, node, extended_lhs_shape, extended_rhs_shape, + adj_x, adj_y, output_rank, output); + return status; +} + +TfLiteStatus EvalInt8(TfLiteContext* context, const OpData& data, + const RuntimeShape& lhs_shape, + const TfLiteEvalTensor& lhs, + const RuntimeShape& rhs_shape, + const TfLiteEvalTensor& rhs, + const RuntimeShape& output_shape, + TfLiteEvalTensor* output) { + TF_LITE_ENSURE(context, data.quantization != nullptr); + // Reuse params struct from FullyConnected Op. 
+ FullyConnectedParams op_params; + op_params.input_offset = -data.quantization->lhs_zero_point; + op_params.weights_offset = + -data.quantization->rhs_zero_point; // filter offset + op_params.output_offset = data.quantization->output_zero_point; + op_params.output_multiplier = data.quantization->output_multiplier; + op_params.output_shift = data.quantization->output_shift; + op_params.quantized_activation_min = data.quantization->output_activation_min; + op_params.quantized_activation_max = data.quantization->output_activation_max; + op_params.lhs_cacheable = data.lhs_is_constant_tensor; + op_params.rhs_cacheable = data.rhs_is_constant_tensor; + + // Note we pass RHS args first, LHS args second. See note for Eval. + reference_ops::BatchMatMul( + op_params, rhs_shape, tflite::micro::GetTensorData(&rhs), + lhs_shape, tflite::micro::GetTensorData(&lhs), output_shape, + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} + +TfLiteStatus EvalInt16(TfLiteContext* context, const OpData& data, + const RuntimeShape& lhs_shape, + const TfLiteEvalTensor& lhs, + const RuntimeShape& rhs_shape, + const TfLiteEvalTensor& rhs, + const RuntimeShape& output_shape, + TfLiteEvalTensor* output) { + TF_LITE_ENSURE(context, data.quantization != nullptr); + // Reuse params struct from FullyConnected Op. + FullyConnectedParams op_params; + op_params.input_offset = -data.quantization->lhs_zero_point; + op_params.weights_offset = + -data.quantization->rhs_zero_point; // filter offset + op_params.output_offset = data.quantization->output_zero_point; + op_params.output_multiplier = data.quantization->output_multiplier; + op_params.output_shift = data.quantization->output_shift; + op_params.quantized_activation_min = data.quantization->output_activation_min; + op_params.quantized_activation_max = data.quantization->output_activation_max; + op_params.lhs_cacheable = data.lhs_is_constant_tensor; + op_params.rhs_cacheable = data.rhs_is_constant_tensor; + + // Note we pass RHS args first, LHS args second. See note for Eval. + reference_ops::BatchMatMul( + op_params, rhs_shape, tflite::micro::GetTensorData(&rhs), + lhs_shape, tflite::micro::GetTensorData(&lhs), output_shape, + tflite::micro::GetTensorData(output)); + + return kTfLiteOk; +} + +// Perform a batch matrix multiply on +// LHS <..., A, B> X RHS<..., B, C> +// where the leading dimensions of LHS and RHS obey broadcasting rules +// (this Op will apply broadcasting rules). +// We assume that LHS and RHS are both row oriented (adjacent values in memory +// are in the same row) and will output in the same memory layout. However, +// our fast GEMM libraries assume RCC layout (LHS row oriented, +// RHS column oriented, output column oriented). Therefore, we perform +// RHS <..., C, B> X LHS <..., B, A> +// where output is a C X A column-oriented, which is equivalent to +// A X C row-oriented. +TfLiteStatus BatchMatMulEval(TfLiteContext* context, TfLiteNode* node) { + EvalOpContext op_context(context, node); + OpData* op_data = op_context.op_data; + const TfLiteEvalTensor* lhs = op_context.lhs; + const TfLiteEvalTensor* rhs = op_context.rhs; + TfLiteEvalTensor* output = op_context.output; + RuntimeShape orig_lhs_shape = tflite::micro::GetTensorShape(lhs); + RuntimeShape orig_rhs_shape = tflite::micro::GetTensorShape(rhs); + + bool adj_y = op_context.params->adj_y; + bool adj_x = op_context.params->adj_x; + + // Compress BatchMatMul when third from last RHS dimension is one. 
+ int32_t rhs_dims_count = orig_rhs_shape.DimensionsCount(); + int32_t lhs_dims_count = orig_lhs_shape.DimensionsCount(); + // Compress ops where rhs shape is [..., 1, X, Y] and lhs shape is + // [..., Q, R, S] which is equivalent to rhs: [..., X, Y] and + // lhs: [..., Q * R, S]. + if (rhs_dims_count > 2 && lhs_dims_count > 2) { + int rhs_one = orig_rhs_shape.DimsData()[rhs_dims_count - 3]; + if (rhs_one == 1) { + int32_t* lhs_dims = orig_lhs_shape.DimsData(); + int32_t* rhs_dims = orig_rhs_shape.DimsData(); + RuntimeShape tmp_l(lhs_dims_count - 1, lhs_dims); + tmp_l.SetDim(lhs_dims_count - 3, + lhs_dims[lhs_dims_count - 3] * lhs_dims[lhs_dims_count - 2]); + tmp_l.SetDim(lhs_dims_count - 2, lhs_dims[lhs_dims_count - 1]); + orig_lhs_shape.ReplaceWith(tmp_l.DimensionsCount(), tmp_l.DimsData()); + RuntimeShape tmp_r(rhs_dims_count - 1, orig_rhs_shape.DimsData()); + tmp_r.SetDim(rhs_dims_count - 3, rhs_dims[rhs_dims_count - 2]); + tmp_r.SetDim(rhs_dims_count - 2, rhs_dims[rhs_dims_count - 1]); + orig_rhs_shape.ReplaceWith(tmp_r.DimensionsCount(), tmp_r.DimsData()); + rhs_dims_count = orig_rhs_shape.DimensionsCount(); + lhs_dims_count = orig_lhs_shape.DimensionsCount(); + } + } + + TfLiteEvalTensor* rhs_tensor = adj_y ? const_cast(rhs) + : op_data->rhs_transposed_tensor; + TfLiteEvalTensor* lhs_tensor = adj_x ? op_data->lhs_transposed_tensor + : const_cast(lhs); + TF_LITE_ENSURE(context, rhs_tensor != nullptr); + TF_LITE_ENSURE(context, lhs_tensor != nullptr); + if (!adj_y) { + // TODO(b/154760341): Constant tensors should already be transposed, but + // we transpose once if necessary for now. + if (!(op_data->rhs_is_constant_tensor && op_data->rhs_is_transposed)) { + TransposeRowsColumns(*rhs, rhs_tensor); + op_data->rhs_is_transposed = true; + } + } + if (adj_x) { + TransposeRowsColumns(*lhs, lhs_tensor); + } + RuntimeShape rhs_shape = + adj_y ? orig_rhs_shape : SwapRowColumnDims(orig_rhs_shape); + RuntimeShape lhs_shape = + adj_x ? orig_lhs_shape : SwapRowColumnDims(orig_lhs_shape); + + switch (lhs->type) { + case kTfLiteFloat32: + // Note we pass RHS args first, LHS args second. See note above. + reference_ops::BatchMatMul( + rhs_shape, tflite::micro::GetTensorData(rhs_tensor), lhs_shape, + tflite::micro::GetTensorData(lhs_tensor), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + case kTfLiteInt8: + return EvalInt8(context, *op_data, lhs_shape, *lhs_tensor, rhs_shape, + *rhs_tensor, tflite::micro::GetTensorShape(output), + output); + case kTfLiteInt16: + return EvalInt16(context, *op_data, lhs_shape, *lhs_tensor, rhs_shape, + *rhs_tensor, tflite::micro::GetTensorShape(output), + output); + default: + MicroPrintf("BATCH_MATMUL doesn't support input type %s", + TfLiteTypeGetName(lhs->type)); + return kTfLiteError; + } + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_BATCH_MATMUL() { + return tflite::micro::RegisterOp(BatchMatMulInit, BatchMatMulPrepare, + BatchMatMulEval); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/batch_matmul_test.cc b/tensorflow/lite/micro/kernels/batch_matmul_test.cc new file mode 100644 index 00000000000..abba7577764 --- /dev/null +++ b/tensorflow/lite/micro/kernels/batch_matmul_test.cc @@ -0,0 +1,736 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
diff --git a/tensorflow/lite/micro/kernels/batch_matmul_test.cc b/tensorflow/lite/micro/kernels/batch_matmul_test.cc
new file mode 100644
index 00000000000..abba7577764
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/batch_matmul_test.cc
@@ -0,0 +1,736 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <iterator>
+#include <limits>
+#include <numeric>
+#include <type_traits>
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/kernels/kernel_runner.h"
+#include "tensorflow/lite/micro/test_helpers.h"
+#include "tensorflow/lite/micro/testing/micro_test.h"
+
+namespace tflite {
+namespace testing {
+namespace {
+
+constexpr float kFloatTolerance = 1e-5;
+
+constexpr int kNumInputs = 2;
+constexpr int kNumOutputs = 1;
+constexpr int kLhsInputTensorIndex = 0;
+constexpr int kRhsInputTensorIndex = 1;
+constexpr int kOutputTensorIndex = 2;
+
+// data_min/data_max are used to compute symmetric scale, zero-point is 0
+// scale should be 0 to use data_min/data_max
+template <typename T, size_t kNumElements>
+struct TestQuantizationParams {
+  // quantization parameters
+  float scale;  // if 0, use data_min and data_max
+  int zero_point;
+  float data_min;  // input data minimum value
+  float data_max;  // input data maximum value
+
+  T quantized_data[kNumElements];  // quantized storage
+};
+
+micro::KernelRunner* GetKernelRunnerInstance(
+    TfLiteTensor* tensors, int tensors_count,
+    const TfLiteBatchMatMulParams& params, bool need_init_prepare) {
+  static int kInputArrayData[] = {kNumInputs, kLhsInputTensorIndex,
+                                  kRhsInputTensorIndex};
+  TfLiteIntArray* inputs_array = IntArrayFromInts(kInputArrayData);
+  static int kOutputArrayData[] = {kNumOutputs, kOutputTensorIndex};
+  TfLiteIntArray* outputs_array = IntArrayFromInts(kOutputArrayData);
+
+  static const TFLMRegistration registration = tflite::Register_BATCH_MATMUL();
+
+  alignas(micro::KernelRunner) static char
+      kernel_runner_buffer[sizeof(micro::KernelRunner)] = {};
+
+  static micro::KernelRunner* runner = nullptr;
+  if (runner == nullptr || need_init_prepare) {
+    runner = new (kernel_runner_buffer)
+        micro::KernelRunner(registration, tensors, tensors_count, inputs_array,
+                            outputs_array, &params);
+
+    TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner->InitAndPrepare());
+  }
+
+  return runner;
+}
+
+void TestBatchMatMulFloat(const TfLiteBatchMatMulParams& params,
+                          const int* input_dims_data[kNumInputs],
+                          const float* input_data_lhs,
+                          const float* input_data_rhs, const int* expected_dims,
+                          const float* expected_data, float* output_data,
+                          bool need_constant_rhs = false,
+                          bool need_init_prepare = true) {
+  TfLiteIntArray* input_dims_lhs = IntArrayFromInts(input_dims_data[0]);
+  TfLiteIntArray* input_dims_rhs = IntArrayFromInts(input_dims_data[1]);
+  TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims);
+  const int kOutputCount = ElementCount(*output_dims);
+
+  static TfLiteTensor tensors[kNumInputs + kNumOutputs];
+
+  if (need_init_prepare) {
+    tensors[kLhsInputTensorIndex] =
+        CreateTensor(input_data_lhs, input_dims_lhs);
+    tensors[kRhsInputTensorIndex] =
+        CreateTensor(input_data_rhs, input_dims_rhs);
+    if (need_constant_rhs) {
+      tensors[kRhsInputTensorIndex].allocation_type = kTfLiteMmapRo;
+    }
+    tensors[kOutputTensorIndex] = CreateTensor(output_data, output_dims);
+  }
+
+  constexpr int kTensorCount = std::extent<decltype(tensors)>::value;
+  micro::KernelRunner* runner = GetKernelRunnerInstance(
+      tensors, kTensorCount, params, need_init_prepare);
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner->Invoke());
+
+  // check output data against expected
+  for (int i = 0; i < kOutputCount; i++) {
+    TF_LITE_MICRO_EXPECT_NEAR(expected_data[i], output_data[i],
+                              kFloatTolerance);
+  }
+
+  // check output dimensions (relocated) against original dimensions
+  TF_LITE_MICRO_EXPECT_EQ(output_dims->size,
+                          tensors[kOutputTensorIndex].dims->size);
+  for (int i = 0; i < output_dims->size; i++) {
+    TF_LITE_MICRO_EXPECT_EQ(output_dims->data[i],
+                            tensors[kOutputTensorIndex].dims->data[i]);
+  }
+}
+
+template <typename T, size_t kNumElements>
+void SetScaleAndZeroPoint(TestQuantizationParams<T, kNumElements>* q_params) {
+  if (q_params->scale == 0.0f || q_params->data_max != 0 ||
+      q_params->data_min != 0) {
+    q_params->scale =
+        ScaleFromMinMax<T>(q_params->data_min, q_params->data_max);
+    q_params->zero_point =
+        ZeroPointFromMinMax<T>(q_params->data_min, q_params->data_max);
+  }
+}
+
+template <typename T, size_t kNumLhsElements, size_t kNumRhsElements,
+          size_t kNumOutputElements>
+void TestBatchMatMulQuantized(
+    const TfLiteBatchMatMulParams& params,
+    TestQuantizationParams<T, kNumLhsElements>* quantization_lhs,
+    TestQuantizationParams<T, kNumRhsElements>* quantization_rhs,
+    TestQuantizationParams<T, kNumOutputElements>* quantization_output,
+    const int* input_dims_data[kNumInputs], const float* input_data_lhs,
+    const float* input_data_rhs, const int* expected_dims,
+    const T* expected_data, const float* output_data) {
+  TfLiteIntArray* input_dims_lhs = IntArrayFromInts(input_dims_data[0]);
+  TfLiteIntArray* input_dims_rhs = IntArrayFromInts(input_dims_data[1]);
+  TfLiteIntArray* output_dims = IntArrayFromInts(expected_dims);
+  const int kOutputCount = ElementCount(*output_dims);
+
+  static TfLiteTensor tensors[kNumInputs + kNumOutputs];
+
+  SetScaleAndZeroPoint(quantization_lhs);
+  tensors[kLhsInputTensorIndex] = CreateQuantizedTensor(
+      input_data_lhs, quantization_lhs->quantized_data, input_dims_lhs,
+      quantization_lhs->scale, quantization_lhs->zero_point);
+  SetScaleAndZeroPoint(quantization_rhs);
+  tensors[kRhsInputTensorIndex] = CreateQuantizedTensor(
+      input_data_rhs, quantization_rhs->quantized_data, input_dims_rhs,
+      quantization_rhs->scale, quantization_rhs->zero_point);
+  SetScaleAndZeroPoint(quantization_output);
+  tensors[kOutputTensorIndex] = CreateQuantizedTensor(
+      quantization_output->quantized_data, output_dims,
+      quantization_output->scale, quantization_output->zero_point);
+
+  constexpr int kTensorCount = std::extent<decltype(tensors)>::value;
+  micro::KernelRunner* runner =
+      GetKernelRunnerInstance(tensors, kTensorCount, params, true);
+  TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner->Invoke());
+
+  // check output data against expected
+  for (int i = 0; i < kOutputCount; i++) {
+    TF_LITE_MICRO_EXPECT_EQ(expected_data[i],
+                            quantization_output->quantized_data[i]);
+  }
+  // check dequantized output data against expected
+  for (int i = 0; i < kOutputCount; i++) {
+    float dequantized_value = (quantization_output->quantized_data[i] -
+                               quantization_output->zero_point) *
+                              quantization_output->scale;
+    TF_LITE_MICRO_EXPECT_NEAR(output_data[i], dequantized_value,
+                              kFloatTolerance);
+  }
+
+  // check output dimensions (relocated) against original dimensions
+  TF_LITE_MICRO_EXPECT_EQ(output_dims->size,
+                          tensors[kOutputTensorIndex].dims->size);
+  for (int i = 0; i < output_dims->size; i++) {
+    TF_LITE_MICRO_EXPECT_EQ(output_dims->data[i],
+                            tensors[kOutputTensorIndex].dims->data[i]);
+  }
+}
+
+}  // namespace
+}  // namespace testing
+}  // namespace tflite
+
+TF_LITE_MICRO_TESTS_BEGIN
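The quantized harness above checks each output twice: once as raw quantized values and once after the affine dequantization `(q - zero_point) * scale` in the loop above. A minimal standalone sketch of that round trip (illustrative helper names, not the test_helpers API):

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Affine quantization round trip mirroring the checks above:
//   q = clamp(round(x / scale) + zero_point), x' = (q - zero_point) * scale.
static int8_t Quantize(float x, float scale, int zero_point) {
  int q = static_cast<int>(std::lround(x / scale)) + zero_point;
  return static_cast<int8_t>(std::min(127, std::max(-128, q)));
}

static float Dequantize(int8_t q, float scale, int zero_point) {
  return (q - zero_point) * scale;
}

int main() {
  const float scale = 0.5f;   // illustrative values, not from the tests
  const int zero_point = 0;
  const float x = 23.0f;      // one of the expected matmul outputs above
  int8_t q = Quantize(x, scale, zero_point);
  // The dequantized value differs from x by at most scale / 2.
  std::printf("q=%d dequantized=%f\n", q, Dequantize(q, scale, zero_point));
  return 0;
}
```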
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Ones) {
+  constexpr int kLhsInputDims[] = {4, 3, 2, 1, 4};
+  constexpr int kRhsInputDims[] = {4, 3, 1, 4, 1};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr size_t kLhsInputSize = 24;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr size_t kRhsInputSize = 12;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 1);
+
+  constexpr float kExpect[] = {30, 70, 278, 382, 782, 950};
+  constexpr int kOutputDims[] = {4, 3, 2, 1, 1};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Flatten) {
+  constexpr int kLhsInputDims[] = {4, 3, 2, 2, 4};
+  constexpr int kRhsInputDims[] = {4, 3, 1, 4, 1};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr size_t kLhsInputSize = 48;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr size_t kRhsInputSize = 12;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 1);
+
+  constexpr float kExpect[] = {30,  70,  110,  150,  486,  590,
+                               694, 798, 1454, 1622, 1790, 1958};
+  constexpr int kOutputDims[] = {4, 3, 2, 2, 1};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Simple) {
+  constexpr int kLhsInputDims[] = {3, 1, 2, 3};
+  constexpr int kRhsInputDims[] = {3, 1, 3, 4};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr size_t kLhsInputSize = 6;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr size_t kRhsInputSize = 12;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {74., 80., 86., 92., 173., 188., 203., 218.};
+  constexpr int kOutputDims[] = {3, 1, 2, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_SimpleRHSAdjoint) {
+  constexpr int kLhsInputDims[] = {3, 1, 2, 3};
+  constexpr int kRhsInputDims[] = {3, 1, 4, 3};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr size_t kLhsInputSize = 6;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr float kRhsInput[] = {7, 11, 15, 8, 12, 16, 9, 13, 17, 10, 14, 18};
+
+  constexpr float kExpect[] = {74., 80., 86., 92., 173., 188., 203., 218.};
+  constexpr int kOutputDims[] = {3, 1, 2, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      true,   // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        kRhsInput, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_SimpleLHSAdjoint) {
+  constexpr int kLhsInputDims[] = {3, 1, 3, 2};
+  constexpr int kRhsInputDims[] = {3, 1, 3, 4};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+  constexpr float kLhsInput[] = {1, 4, 2, 5, 3, 6};
+
+  constexpr size_t kRhsInputSize = 12;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {74., 80., 86., 92., 173., 188., 203., 218.};
+  constexpr int kOutputDims[] = {3, 1, 2, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      true,   // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, kLhsInput,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_BatchSizeTwo) {
+  constexpr int kLhsInputDims[] = {3, 2, 2, 3};
+  constexpr int kRhsInputDims[] = {3, 2, 3, 4};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+  constexpr size_t kLhsInputSize = 12;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr size_t kRhsInputSize = 24;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {74.,  80.,  86.,  92.,  173., 188., 203., 218.,
+                               560., 584., 608., 632., 767., 800., 833., 866.};
+  constexpr int kOutputDims[] = {3, 2, 2, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Broadcast) {
+  constexpr int kLhsInputDims[] = {3, 2, 2, 3};
+  constexpr int kRhsInputDims[] = {2, 3, 4};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+  constexpr size_t kLhsInputSize = 12;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr size_t kRhsInputSize = 12;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {74.,  80.,  86.,  92.,  173., 188., 203., 218.,
+                               272., 296., 320., 344., 371., 404., 437., 470.};
+  constexpr int kOutputDims[] = {3, 2, 2, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_BroadcastLHSAdjoint) {
+  constexpr int kLhsInputDims[] = {3, 2, 3, 2};
+  constexpr int kRhsInputDims[] = {2, 3, 4};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr float kLhsInput[] = {1, 4, 2, 5, 3, 6, 7, 10, 8, 11, 9, 12};
+
+  constexpr size_t kRhsInputSize = 12;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {74.,  80.,  86.,  92.,  173., 188., 203., 218.,
+                               272., 296., 320., 344., 371., 404., 437., 470.};
+  constexpr int kOutputDims[] = {3, 2, 2, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      true,   // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, kLhsInput,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Broadcast2) {
+  constexpr int kLhsInputDims[] = {4, 2, 1, 3, 2};
+  constexpr int kRhsInputDims[] = {3, 3, 2, 4};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr size_t kLhsInputSize = 12;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr size_t kRhsInputSize = 24;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {
+      29.,  32.,  35.,  38.,  65.,  72.,  79.,  86.,  101., 112., 123., 134.,
+      53.,  56.,  59.,  62.,  121., 128., 135., 142., 189., 200., 211., 222.,
+      77.,  80.,  83.,  86.,  177., 184., 191., 198., 277., 288., 299., 310.,
+      137., 152., 167., 182., 173., 192., 211., 230., 209., 232., 255., 278.,
+      257., 272., 287., 302., 325., 344., 363., 382., 393., 416., 439., 462.,
+      377., 392., 407., 422., 477., 496., 515., 534., 577., 600., 623., 646.};
+  constexpr int kOutputDims[] = {4, 2, 3, 3, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Broadcast2LHSAdjoint) {
+  constexpr int kLhsInputDims[] = {4, 2, 1, 2, 3};
+  constexpr int kRhsInputDims[] = {3, 3, 2, 4};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr float kLhsInput[] = {1, 3, 5, 2, 4, 6, 7, 9, 11, 8, 10, 12};
+
+  constexpr size_t kRhsInputSize = 24;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {
+      29.,  32.,  35.,  38.,  65.,  72.,  79.,  86.,  101., 112., 123., 134.,
+      53.,  56.,  59.,  62.,  121., 128., 135., 142., 189., 200., 211., 222.,
+      77.,  80.,  83.,  86.,  177., 184., 191., 198., 277., 288., 299., 310.,
+      137., 152., 167., 182., 173., 192., 211., 230., 209., 232., 255., 278.,
+      257., 272., 287., 302., 325., 344., 363., 382., 393., 416., 439., 462.,
+      377., 392., 407., 422., 477., 496., 515., 534., 577., 600., 623., 646.};
+  constexpr int kOutputDims[] = {4, 2, 3, 3, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      true,   // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, kLhsInput,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Broadcast2RHSAdjoint) {
+  constexpr int kLhsInputDims[] = {4, 2, 1, 3, 2};
+  constexpr int kRhsInputDims[] = {3, 3, 4, 2};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr size_t kLhsInputSize = 12;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr float kRhsInput[] = {7,  11, 8,  12, 9,  13, 10, 14,
+                                 15, 19, 16, 20, 17, 21, 18, 22,
+                                 23, 27, 24, 28, 25, 29, 26, 30};
+
+  constexpr float kExpect[] = {
+      29.,  32.,  35.,  38.,  65.,  72.,  79.,  86.,  101., 112., 123., 134.,
+      53.,  56.,  59.,  62.,  121., 128., 135., 142., 189., 200., 211., 222.,
+      77.,  80.,  83.,  86.,  177., 184., 191., 198., 277., 288., 299., 310.,
+      137., 152., 167., 182., 173., 192., 211., 230., 209., 232., 255., 278.,
+      257., 272., 287., 302., 325., 344., 363., 382., 393., 416., 439., 462.,
+      377., 392., 407., 422., 477., 496., 515., 534., 577., 600., 623., 646.};
+  constexpr int kOutputDims[] = {4, 2, 3, 3, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      true,   // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        kRhsInput, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_Broadcast2BothAdjoint) {
+  constexpr int kLhsInputDims[] = {4, 2, 1, 2, 3};
+  constexpr int kRhsInputDims[] = {3, 3, 4, 2};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr float kLhsInput[] = {1, 3, 5, 2, 4, 6, 7, 9, 11, 8, 10, 12};
+
+  constexpr float kRhsInput[] = {7,  11, 8,  12, 9,  13, 10, 14,
+                                 15, 19, 16, 20, 17, 21, 18, 22,
+                                 23, 27, 24, 28, 25, 29, 26, 30};
+
+  constexpr float kExpect[] = {
+      29.,  32.,  35.,  38.,  65.,  72.,  79.,  86.,  101., 112., 123., 134.,
+      53.,  56.,  59.,  62.,  121., 128., 135., 142., 189., 200., 211., 222.,
+      77.,  80.,  83.,  86.,  177., 184., 191., 198., 277., 288., 299., 310.,
+      137., 152., 167., 182., 173., 192., 211., 230., 209., 232., 255., 278.,
+      257., 272., 287., 302., 325., 344., 363., 382., 393., 416., 439., 462.,
+      377., 392., 407., 422., 477., 496., 515., 534., 577., 600., 623., 646.};
+  constexpr int kOutputDims[] = {4, 2, 3, 3, 4};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      true,  // adj_x
+      true,  // adj_y
+      false  // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, kLhsInput,
+                                        kRhsInput, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(BatchMatMulOpTestFloat32Test_BroadcastFromRHS) {
+  constexpr int kLhsInputDims[] = {2, 4, 5};
+  constexpr int kRhsInputDims[] = {4, 3, 1, 5, 2};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr size_t kLhsInputSize = 20;
+  float lhs_input[kLhsInputSize];
+  std::iota(std::begin(lhs_input), std::end(lhs_input), 1);
+
+  constexpr size_t kRhsInputSize = 30;
+  float rhs_input[kRhsInputSize];
+  std::iota(std::begin(rhs_input), std::end(rhs_input), 7);
+
+  constexpr float kExpect[] = {185.,  200.,  460.,  500.,  735.,  800.,
+                               1010., 1100., 335.,  350.,  860.,  900.,
+                               1385., 1450., 1910., 2000., 485.,  500.,
+                               1260., 1300., 2035., 2100., 2810., 2900.};
+  constexpr int kOutputDims[] = {4, 3, 1, 4, 2};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, lhs_input,
+                                        rhs_input, kOutputDims, kExpect,
+                                        output_data);
+}
+
+TF_LITE_MICRO_TEST(ConstRHSBatchMatMulOpModelRHSNotAdjoint) {
+  constexpr int kLhsInputDims[] = {3, 1, 6, 2};
+  constexpr int kRhsInputDims[] = {2, 2, 3};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr float kLhsInput[] = {6, 3, 7, 4, 6, 9, 2, 6, 7, 4, 3, 7};
+
+  constexpr float kRhsInput[] = {6, 3, 7, 4, 6, 9};
+
+  constexpr float kExpect[] = {48, 36, 69, 58, 45, 85, 72, 72, 123,
+                               36, 42, 68, 58, 45, 85, 46, 51, 84};
+  constexpr int kOutputDims[] = {3, 1, 6, 3};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  float output_data[kOutputCount];
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, kLhsInput,
+                                        kRhsInput, kOutputDims, kExpect,
+                                        output_data, true);
+  // Eval twice to make sure constant transposed RHS is persistent.
+  tflite::testing::TestBatchMatMulFloat(params, kInputDims, kLhsInput,
+                                        kRhsInput, kOutputDims, kExpect,
+                                        output_data, true, false);
+}
+
+TF_LITE_MICRO_TEST(QuantizedBatchMatMulOpTestSimpleTestQuantizedInt8) {
+  constexpr int kLhsInputDims[] = {2, 2, 10};
+  constexpr int kRhsInputDims[] = {2, 10, 3};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr float kLhsInput[] = {
+      1, 2, 3, 4, 5, 6, 7, 8,  -9, -10,  // b = 0
+      1, 2, 3, 4, 5, 6, 7, -8, 9,  -10,  // b = 1
+  };
+  constexpr int kLhsInputCount = std::extent<decltype(kLhsInput)>::value;
+
+  constexpr float kRhsInput[] = {
+      1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5,  5,  5,
+      6, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10,
+  };
+  constexpr int kRhsInputCount = std::extent<decltype(kRhsInput)>::value;
+
+  constexpr int8_t kExpect[] = {22, 22, 22, 56, 56, 56};
+  constexpr int kOutputDims[] = {2, 2, 3};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  constexpr float output_data[kOutputCount] = {23, 23, 23, 57, 57, 57};
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestQuantizationParams<int8_t, kLhsInputCount>
+      quantization_params_lhs = {0.0f,    // scale
+                                 0,       // zero_point
+                                 -63.5f,  // data_min
+                                 64.0f,   // data_max
+                                 {}};
+  tflite::testing::TestQuantizationParams<int8_t, kRhsInputCount>
+      quantization_params_rhs = {0.0f,    // scale
+                                 0,       // zero_point
+                                 -63.5f,  // data_min
+                                 64.0f,   // data_max
+                                 {}};
+  tflite::testing::TestQuantizationParams<int8_t, kOutputCount>
+      quantization_params_output = {0.0f,     // scale
+                                    0,        // zero_point
+                                    -127.0f,  // data_min
+                                    128.0f,   // data_max
+                                    {}};
+
+  tflite::testing::TestBatchMatMulQuantized(
+      params, &quantization_params_lhs, &quantization_params_rhs,
+      &quantization_params_output, kInputDims, kLhsInput, kRhsInput,
+      kOutputDims, kExpect, output_data);
+}
+
+TF_LITE_MICRO_TEST(QuantizedBatchMatMulOpTestSimpleTestQuantizedInt16) {
+  constexpr int kLhsInputDims[] = {2, 2, 10};
+  constexpr int kRhsInputDims[] = {2, 10, 3};
+  const int* kInputDims[tflite::testing::kNumInputs] = {kLhsInputDims,
+                                                        kRhsInputDims};
+
+  constexpr float kLhsInput[] = {
+      1, 2, 3, 4, 5, 6, 7, 8,  -9, -10,  // b = 0
+      1, 2, 3, 4, 5, 6, 7, -8, 9,  -10,  // b = 1
+  };
+  constexpr int kLhsInputCount = std::extent<decltype(kLhsInput)>::value;
+
+  constexpr float kRhsInput[] = {
+      1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5,  5,  5,
+      6, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10,
+  };
+  constexpr int kRhsInputCount = std::extent<decltype(kRhsInput)>::value;
+
+  constexpr int16_t kExpect[] = {23, 23, 23, 57, 57, 57};
+  constexpr int kOutputDims[] = {2, 2, 3};
+  constexpr int kOutputCount = std::extent<decltype(kExpect)>::value;
+  constexpr float output_data[kOutputCount] = {23, 23, 23, 57, 57, 57};
+
+  constexpr TfLiteBatchMatMulParams params = {
+      false,  // adj_x
+      false,  // adj_y
+      false   // asymmetric_quantize_inputs
+  };
+
+  tflite::testing::TestQuantizationParams<int16_t, kLhsInputCount>
+      quantization_params_lhs = {};
+  quantization_params_lhs.scale = 10.0f / std::numeric_limits<int16_t>::max();
+  tflite::testing::TestQuantizationParams<int16_t, kRhsInputCount>
+      quantization_params_rhs = {};
+  quantization_params_rhs.scale = 10.0f / std::numeric_limits<int16_t>::max();
+
+  tflite::testing::TestQuantizationParams<int16_t, kOutputCount>
+      quantization_params_output = {};
+  quantization_params_output.scale = 1.0f;
+
+  tflite::testing::TestBatchMatMulQuantized(
+      params, &quantization_params_lhs, &quantization_params_rhs,
+      &quantization_params_output, kInputDims, kLhsInput, kRhsInput,
+      kOutputDims, kExpect, output_data);
+}
+
+TF_LITE_MICRO_TESTS_END
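The int16 test above only sets a scale because TFLite quantizes int16 activations symmetrically: the zero point stays 0 and the chosen full-scale range (here ±10) maps onto ±32767. A small worked check of that arithmetic (standalone, not test code):

```cpp
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <limits>

int main() {
  // Symmetric int16 quantization as in the test above: zero_point == 0,
  // scale = full_scale / 32767.
  const float scale = 10.0f / std::numeric_limits<int16_t>::max();
  const float x = 7.0f;  // a representative input magnitude from the test
  const int16_t q = static_cast<int16_t>(std::lround(x / scale));
  // q == 22937; multiplying back by scale recovers ~7.0 within one step.
  std::printf("scale=%g q=%d round-trip=%f\n", scale, q,
              static_cast<double>(q) * scale);
  return 0;
}
```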
diff --git a/tensorflow/lite/micro/kernels/batch_to_space_nd.cc b/tensorflow/lite/micro/kernels/batch_to_space_nd.cc
index 090a040aaaf..31a1c28d589 100644
--- a/tensorflow/lite/micro/kernels/batch_to_space_nd.cc
+++ b/tensorflow/lite/micro/kernels/batch_to_space_nd.cc
@@ -38,7 +38,7 @@ constexpr int kOutputTensor = 0;
 const int kInputOutputMinDimensionNum = 3;
 const int kInputOutputMaxDimensionNum = 4;
 
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus BatchToSpaceNDPrepare(TfLiteContext* context, TfLiteNode* node) {
   TF_LITE_ENSURE_EQ(context, NumInputs(node), 3);
   TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
 
@@ -62,7 +62,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
   return kTfLiteOk;
 }
 
-TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus BatchToSpaceNDEval(TfLiteContext* context, TfLiteNode* node) {
   const TfLiteEvalTensor* input =
       tflite::micro::GetEvalInput(context, node, kInputTensor);
   const TfLiteEvalTensor* block_shape =
@@ -106,7 +106,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
 }  // namespace.
 
 TFLMRegistration Register_BATCH_TO_SPACE_ND() {
-  return tflite::micro::RegisterOp(nullptr, Prepare, Eval);
+  return tflite::micro::RegisterOp(nullptr, BatchToSpaceNDPrepare,
+                                   BatchToSpaceNDEval);
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/call_once.cc b/tensorflow/lite/micro/kernels/call_once.cc
index 8ad1c201f22..65857ef4cfe 100644
--- a/tensorflow/lite/micro/kernels/call_once.cc
+++ b/tensorflow/lite/micro/kernels/call_once.cc
@@ -36,12 +36,12 @@ struct OpData {
   bool has_run;
 };
 
-void* Init(TfLiteContext* context, const char* buffer, size_t length) {
+void* CallOnceInit(TfLiteContext* context, const char* buffer, size_t length) {
   TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
   return context->AllocatePersistentBuffer(context, sizeof(OpData));
 }
 
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus CallOncePrepare(TfLiteContext* context, TfLiteNode* node) {
   OpData* op_data = reinterpret_cast<OpData*>(node->user_data);
   const auto* params =
       reinterpret_cast<TfLiteCallOnceParams*>(node->builtin_data);
@@ -60,7 +60,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
   return kTfLiteOk;
 }
 
-TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus CallOnceEval(TfLiteContext* context, TfLiteNode* node) {
   OpData* op_data = reinterpret_cast<OpData*>(node->user_data);
 
   // Call once only runs one time then is a no-op for every subsequent call.
@@ -82,7 +82,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
 }  // namespace.
 
 TFLMRegistration Register_CALL_ONCE() {
-  return tflite::micro::RegisterOp(Init, Prepare, Eval);
+  return tflite::micro::RegisterOp(CallOnceInit, CallOncePrepare, CallOnceEval);
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/cast.cc b/tensorflow/lite/micro/kernels/cast.cc
index a493618ac52..0b450d68040 100644
--- a/tensorflow/lite/micro/kernels/cast.cc
+++ b/tensorflow/lite/micro/kernels/cast.cc
@@ -1,4 +1,4 @@
-/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -25,7 +25,7 @@ namespace {
 constexpr int kInputTensor = 0;
 constexpr int kOutputTensor = 0;
 
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus CastPrepare(TfLiteContext* context, TfLiteNode* node) {
   TF_LITE_ENSURE_EQ(context, NumInputs(node), 1);
   TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1);
 
@@ -63,6 +63,9 @@ TfLiteStatus copyToTensor(TfLiteContext* context, const FromT* in,
     case kTfLiteInt32:
       copyCast(in, out->data.i32, num_elements);
       break;
+    case kTfLiteUInt32:
+      copyCast(in, out->data.u32, num_elements);
+      break;
     case kTfLiteFloat32:
       copyCast(in, tflite::micro::GetTensorData<float>(out), num_elements);
       break;
@@ -74,7 +77,7 @@ TfLiteStatus copyToTensor(TfLiteContext* context, const FromT* in,
   return kTfLiteOk;
 }
 
-TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus CastEval(TfLiteContext* context, TfLiteNode* node) {
   const TfLiteEvalTensor* input =
       tflite::micro::GetEvalInput(context, node, kInputTensor);
   TfLiteEvalTensor* output =
@@ -98,6 +101,9 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
     case kTfLiteFloat32:
      return copyToTensor(context, tflite::micro::GetTensorData<float>(input),
                          output, num_elements);
+    case kTfLiteBool:
+      return copyToTensor(context, tflite::micro::GetTensorData<bool>(input),
+                          output, num_elements);
     default:
       // Unsupported type.
       MicroPrintf("Input type %s (%d) not supported.",
@@ -108,7 +114,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
 }  // namespace
 
 TFLMRegistration Register_CAST() {
-  return tflite::micro::RegisterOp(nullptr, Prepare, Eval);
+  return tflite::micro::RegisterOp(nullptr, CastPrepare, CastEval);
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/cast_test.cc b/tensorflow/lite/micro/kernels/cast_test.cc
index f5ab660f555..8625572ce47 100644
--- a/tensorflow/lite/micro/kernels/cast_test.cc
+++ b/tensorflow/lite/micro/kernels/cast_test.cc
@@ -1,4 +1,4 @@
-/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -113,4 +113,28 @@ TF_LITE_MICRO_TEST(CastInt32ToInt16) {
   tflite::testing::TestCast(input_dims, input_values, golden, output_data);
 }
 
+TF_LITE_MICRO_TEST(CastUInt32ToInt32) {
+  int32_t output_data[6];
+  int input_dims[] = {2, 2, 3};
+  const uint32_t input_values[] = {100, 200, 300, 400, 500, 600};
+  const int32_t golden[] = {100, 200, 300, 400, 500, 600};
+  tflite::testing::TestCast(input_dims, input_values, golden, output_data);
+}
+
+TF_LITE_MICRO_TEST(CastInt32ToUInt32) {
+  uint32_t output_data[6];
+  int input_dims[] = {2, 2, 3};
+  const int32_t input_values[] = {100, 200, 300, 400, 500, 600};
+  const uint32_t golden[] = {100, 200, 300, 400, 500, 600};
+  tflite::testing::TestCast(input_dims, input_values, golden, output_data);
+}
+
+TF_LITE_MICRO_TEST(CastBoolToFloat) {
+  float output_data[6];
+  int input_dims[] = {2, 2, 3};
+  const bool input_values[] = {true, true, false, true, false, true};
+  const float golden[] = {1.f, 1.0f, 0.f, 1.0f, 0.0f, 1.0f};
+  tflite::testing::TestCast(input_dims, input_values, golden, output_data);
+}
+
 TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/kernels/ceil.cc b/tensorflow/lite/micro/kernels/ceil.cc
index 46b55e7c7e8..36139f9d464 100644
--- a/tensorflow/lite/micro/kernels/ceil.cc
+++ b/tensorflow/lite/micro/kernels/ceil.cc
@@ -27,7 +27,7 @@ namespace {
 constexpr int kInputTensor = 0;
 constexpr int kOutputTensor = 0;
 
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus CeilPrepare(TfLiteContext* context, TfLiteNode* node) {
   MicroContext* micro_context = GetMicroContext(context);
 
   TfLiteTensor* input =
@@ -50,7 +50,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
   return kTfLiteOk;
 }
 
-TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus CeilEval(TfLiteContext* context, TfLiteNode* node) {
   const TfLiteEvalTensor* input =
       tflite::micro::GetEvalInput(context, node, kInputTensor);
   TfLiteEvalTensor* output =
@@ -67,7 +67,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
 }  // namespace
 
 TFLMRegistration Register_CEIL() {
-  return tflite::micro::RegisterOp(nullptr, Prepare, Eval);
+  return tflite::micro::RegisterOp(nullptr, CeilPrepare, CeilEval);
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/ceva/depthwise_conv.cc b/tensorflow/lite/micro/kernels/ceva/depthwise_conv.cc
index 533014fbffb..eed5d9ee566 100644
--- a/tensorflow/lite/micro/kernels/ceva/depthwise_conv.cc
+++ b/tensorflow/lite/micro/kernels/ceva/depthwise_conv.cc
@@ -224,6 +224,30 @@ TfLiteStatus EvalCEVA(TfLiteContext* context, TfLiteNode* node) {
       EvalQuantizedPerChannel(context, node, params, data, input, filter,
                               bias, output);
       break;
+    case kTfLiteInt16: {
+      switch (filter->type) {
+        case kTfLiteInt8: {
+          reference_integer_ops::DepthwiseConvPerChannel(
+              DepthwiseConvParamsQuantized(*params, data),
+              data.per_channel_output_multiplier, data.per_channel_output_shift,
+              tflite::micro::GetTensorShape(input),
+              tflite::micro::GetTensorData<int16_t>(input),
+              tflite::micro::GetTensorShape(filter),
+              tflite::micro::GetTensorData<int8_t>(filter),
+              tflite::micro::GetTensorShape(bias),
+              tflite::micro::GetOptionalTensorData<std::int64_t>(bias),
+              tflite::micro::GetTensorShape(output),
+              tflite::micro::GetTensorData<int16_t>(output));
+          break;
+        }
+        default:
+          MicroPrintf("Filter type %s (%d) for input type %s not supported.",
+                      TfLiteTypeGetName(filter->type), filter->type,
+                      TfLiteTypeGetName(input->type));
+          return kTfLiteError;
+      }
+      break;
+    }
     default:
       MicroPrintf("Type %s (%d) not supported.",
                  TfLiteTypeGetName(input->type), input->type);
diff --git a/tensorflow/lite/micro/kernels/circular_buffer.cc b/tensorflow/lite/micro/kernels/circular_buffer.cc
index 0bed5cb437d..3e901047bbc 100644
--- a/tensorflow/lite/micro/kernels/circular_buffer.cc
+++ b/tensorflow/lite/micro/kernels/circular_buffer.cc
@@ -99,8 +99,7 @@ TfLiteStatus CircularBufferEval(TfLiteContext* context, TfLiteNode* node) {
   if (--data->cycles_until_run != 0) {
     // Signal the interpreter to end current run if the delay before op invoke
     // has not been reached.
-    // TODO(b/149795762): Add kTfLiteAbort to TfLiteStatus enum.
-    return static_cast<TfLiteStatus>(kTfLiteAbort);
+    return kTfLiteAbort;
   }
 
   data->cycles_until_run = data->cycles_max;
diff --git a/tensorflow/lite/micro/kernels/circular_buffer.h b/tensorflow/lite/micro/kernels/circular_buffer.h
index 51adf746db3..c0a55b8418f 100644
--- a/tensorflow/lite/micro/kernels/circular_buffer.h
+++ b/tensorflow/lite/micro/kernels/circular_buffer.h
@@ -30,9 +30,6 @@ extern const int kCircularBufferOutputTensor;
 // Elements in the vectors are ordered alphabetically by parameter name.
 extern const int kCircularBufferCyclesMaxIndex;  // 'cycles_max'
 
-// TODO(b/149795762): Add this to TfLiteStatus enum.
-extern const TfLiteStatus kTfLiteAbort;
-
 // These fields control the stride period of a strided streaming model. This op
 // returns kTfLiteAbort until cycles_until_run-- is zero. At this time,
 // cycles_until_run is reset to cycles_max.
diff --git a/tensorflow/lite/micro/kernels/circular_buffer_common.cc b/tensorflow/lite/micro/kernels/circular_buffer_common.cc
index 81db6e65f3d..bf45c06f61c 100644
--- a/tensorflow/lite/micro/kernels/circular_buffer_common.cc
+++ b/tensorflow/lite/micro/kernels/circular_buffer_common.cc
@@ -35,9 +35,6 @@ const int kCircularBufferOutputTensor = 0;
 // Elements in the vectors are ordered alphabetically by parameter name.
 const int kCircularBufferCyclesMaxIndex = 0;  // 'cycles_max'
 
-// TODO(b/149795762): Add this to TfLiteStatus enum.
-const TfLiteStatus kTfLiteAbort = static_cast<TfLiteStatus>(-9);
-
 TfLiteStatus CircularBufferPrepare(TfLiteContext* context, TfLiteNode* node) {
   MicroContext* micro_context = GetMicroContext(context);
 
diff --git a/tensorflow/lite/micro/kernels/circular_buffer_test.cc b/tensorflow/lite/micro/kernels/circular_buffer_test.cc
index faf27940834..8fde7c51ec1 100644
--- a/tensorflow/lite/micro/kernels/circular_buffer_test.cc
+++ b/tensorflow/lite/micro/kernels/circular_buffer_test.cc
@@ -27,9 +27,6 @@ namespace {
 
 constexpr int kRunPeriod = 2;
 
-// TODO(b/149795762): Add this to TfLiteStatus enum.
-const TfLiteStatus kTfLiteAbort = static_cast<TfLiteStatus>(-9);
-
 }  // namespace
 }  // namespace testing
 }  // namespace tflite
@@ -102,7 +99,7 @@ TF_LITE_MICRO_TEST(OutputTensorLength4) {
     if (i % tflite::testing::kRunPeriod == tflite::testing::kRunPeriod - 1) {
       TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, status);
     } else {
-      TF_LITE_MICRO_EXPECT_EQ(tflite::testing::kTfLiteAbort, status);
+      TF_LITE_MICRO_EXPECT_EQ(tflite::kTfLiteAbort, status);
    }
  }
}
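The comment retained in circular_buffer.h describes the stride contract these hunks rely on: Eval returns kTfLiteAbort until `cycles_until_run--` reaches zero, then runs and resets the countdown to `cycles_max`. A minimal standalone sketch of that countdown (illustrative field and function names, not the kernel itself):

```cpp
#include <cstdio>

// Models the strided-streaming countdown: "invoke" succeeds only every
// cycles_max-th call; all other calls signal an early end of the run.
struct FakeOpData {
  int cycles_max = 2;  // corresponds to kRunPeriod in the test above
  int cycles_until_run = 2;
};

// Returns true for kTfLiteOk (downstream ops run), false for kTfLiteAbort.
bool StridedInvoke(FakeOpData* data) {
  if (--data->cycles_until_run != 0) {
    return false;  // kTfLiteAbort: skip the rest of this invoke.
  }
  data->cycles_until_run = data->cycles_max;
  return true;
}

int main() {
  FakeOpData data;
  for (int i = 0; i < 4; ++i) {
    std::printf("invoke %d -> %s\n", i,
                StridedInvoke(&data) ? "kTfLiteOk" : "kTfLiteAbort");
  }
  return 0;  // prints Abort, Ok, Abort, Ok for a run period of 2.
}
```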
diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/README.md b/tensorflow/lite/micro/kernels/cmsis_nn/README.md
index e4a4de31606..dc531b77e9b 100644
--- a/tensorflow/lite/micro/kernels/cmsis_nn/README.md
+++ b/tensorflow/lite/micro/kernels/cmsis_nn/README.md
@@ -1,12 +1,14 @@
-# Info
+# General Info
 
 CMSIS-NN is a library containing kernel optimizations for Arm(R) Cortex(R)-M
 processors. To use CMSIS-NN optimized kernels instead of reference kernels, add
 `OPTIMIZED_KERNEL_DIR=cmsis_nn` to the make command line. See examples below.
 
 For more information about the optimizations, check out
-[CMSIS-NN documentation](https://github.com/ARM-software/CMSIS_5/blob/develop/CMSIS/NN/README.md).
+[CMSIS-NN documentation](https://github.com/ARM-software/CMSIS-NN/blob/main/README.md).
+
+# Specifying path to CMSIS-NN
 
 By default CMSIS-NN is built by code that is downloaded to the TFLM tree. It
 also possible to build CMSIS-NN code from an external path by specifying
@@ -14,7 +16,7 @@ CMSIS_PATH=<../path> and CMSIS_NN_PATH=<../path>. Note that both CMSIS_PATH and
 since CMSIS-NN has a dependency to CMSIS-Core. As a third option CMSIS-NN can
 be provided manually as an external library. The examples below will illustrate
 this.
 
-# Example - FVP based on Arm Corstone-300 software.
+## Example - FVP based on Arm Corstone-300 software.
 In this example, the kernel conv unit test is built. For more information about
 this specific target, check out the [Corstone-300 readme](https://github.com/tensorflow/tflite-micro/tree/main/tensorflow/lite/micro/cortex_m_corstone_300/README.md).
@@ -39,3 +41,22 @@ external CMSIS-NN library as different compiler options may have been used.
 Also note that if specifying CMSIS_NN_LIBS but not CMSIS_PATH and or
 CMSIS_NN_PATH, headers and system/startup code from the default downloaded path
 of CMSIS would be used. So CMSIS_NN_LIBS, CMSIS_NN_PATH and CMSIS_PATH should
 have the same base path and if not there will be a build error.
+
+# Build for speed or size
+It is possible to build for speed or size. The size option may be required for
+a large model on an embedded system with limited memory. Where applicable,
+building for size would result in higher latency paired with a smaller scratch
+buffer, whereas building for speed would result in lower latency with a larger
+scratch buffer. Currently only transpose conv supports this. See examples
+below.
+
+## Example - building a static library with CMSIS-NN optimized kernels
+More info on the target used in this example: https://github.com/tensorflow/tflite-micro/blob/main/tensorflow/lite/micro/cortex_m_generic/README.md
+
+Building for speed (default):
+Note that speed is the default, so leaving out OPTIMIZE_KERNELS_FOR completely
+gives the same result.
+```
+make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m55 OPTIMIZED_KERNEL_DIR=cmsis_nn OPTIMIZE_KERNELS_FOR=KERNELS_OPTIMIZED_FOR_SPEED microlite
+```
+
+Building for size:
+```
+make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_generic TARGET_ARCH=cortex-m55 OPTIMIZED_KERNEL_DIR=cmsis_nn OPTIMIZE_KERNELS_FOR=KERNELS_OPTIMIZED_FOR_SIZE microlite
+```
diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/conv.cc b/tensorflow/lite/micro/kernels/cmsis_nn/conv.cc
index 8b6928bbf38..4c35970d1ea 100644
--- a/tensorflow/lite/micro/kernels/cmsis_nn/conv.cc
+++ b/tensorflow/lite/micro/kernels/cmsis_nn/conv.cc
@@ -1,4 +1,4 @@
-/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2024 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -21,7 +21,6 @@ limitations under the License.
#include "tensorflow/lite/kernels/internal/common.h" #include "tensorflow/lite/kernels/internal/quantization_util.h" #include "tensorflow/lite/kernels/internal/reference/conv.h" -#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" #include "tensorflow/lite/kernels/padding.h" @@ -63,39 +62,56 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempOutputTensor(node, kConvBiasTensor); + TfLiteType bias_type = bias != nullptr ? bias->type : kTfLiteNoType; - RuntimeShape input_shape = GetTensorShape(input); - RuntimeShape output_shape = GetTensorShape(output); + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8) || + (input->type == kTfLiteInt8 && filter->type == kTfLiteInt4), + "Hybrid models are not supported on TFLite Micro."); - // Initialize cmsis_nn input dimensions + // Consistency check tensor dims + // Dimensionality + TF_LITE_ENSURE_EQ(context, input->dims->size, 4); + TF_LITE_ENSURE_EQ(context, filter->dims->size, 4); + TF_LITE_ENSURE_EQ(context, output->dims->size, 4); + // Equal batch size in input and output + TF_LITE_ENSURE_EQ(context, input->dims->data[0], output->dims->data[0]); + // Input channels should be an even multiple of filter channels + TF_LITE_ENSURE(context, filter->dims->data[3] > 0); + TF_LITE_ENSURE_EQ(context, input->dims->data[3] % filter->dims->data[3], 0); + // Output channels should be an even multiple of the number of groups + const int groups = input->dims->data[3] / filter->dims->data[3]; + TFLITE_DCHECK_EQ(output->dims->data[3] % groups, 0); + // Bias size equal to output channels + if (bias != nullptr) { + TF_LITE_ENSURE_EQ(context, bias->dims->size, 4); + const int bias_size = NumElements(bias->dims); + TFLITE_DCHECK_EQ(bias_size, output->dims->data[3]); + } + + // Initialize cmsis_nn dimensions cmsis_nn_dims input_dims; - input_dims.n = MatchingDim(input_shape, 0, output_shape, 0); + input_dims.n = input->dims->data[0]; input_dims.h = input->dims->data[1]; input_dims.w = input->dims->data[2]; - input_dims.c = input_shape.Dims(3); + input_dims.c = input->dims->data[3]; - // Initialize cmsis_nn filter dimensions cmsis_nn_dims filter_dims; - filter_dims.n = output_shape.Dims(3); + filter_dims.n = 1; filter_dims.h = filter->dims->data[1]; filter_dims.w = filter->dims->data[2]; - filter_dims.c = input_dims.c; + filter_dims.c = filter->dims->data[3]; - // Initialize cmsis_nn output dimensions cmsis_nn_dims output_dims; - output_dims.n = input_dims.n; + output_dims.n = output->dims->data[0]; output_dims.h = output->dims->data[1]; output_dims.w = output->dims->data[2]; - output_dims.c = output_shape.Dims(3); - - if (filter->type == kTfLiteInt4) { - int filter_size = - RuntimeShape(filter->dims->size, - reinterpret_cast(filter->dims->data)) - .FlatSize(); - context->RequestScratchBufferInArena( - context, filter_size, &data->reference_op_data.filter_buffer_index); - } + output_dims.c = output->dims->data[3]; if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { const int num_channels = filter->dims->data[kConvQuantizedDimension]; @@ -112,7 +128,10 @@ TfLiteStatus Prepare(TfLiteContext* context, 
@@ -112,7 +128,10 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
                        filter_dims.h, output_dims.w, output_dims.h,
                        input->type, &data->reference_op_data));
 
-  if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) {
+  // CMSIS_NN allows INT64 or nullptr bias data pointer
+  if (input->type == kTfLiteInt8 ||
+      (input->type == kTfLiteInt16 &&
+       (bias_type == kTfLiteInt64 || bias_type == kTfLiteNoType))) {
     // Initialize cmsis_nn convolution parameters
     cmsis_nn_conv_params conv_params;
     conv_params.input_offset = -input->params.zero_point;
@@ -147,10 +166,76 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
   micro_context->DeallocateTempTfLiteTensor(output);
   micro_context->DeallocateTempTfLiteTensor(input);
   micro_context->DeallocateTempTfLiteTensor(filter);
+  if (bias != nullptr) {
+    micro_context->DeallocateTempTfLiteTensor(bias);
+  }
 
   return kTfLiteOk;
 }
 
+template <typename ActType, typename BiasType, typename WeigthsType>
+arm_cmsis_nn_status convolve_wrapper(
+    const cmsis_nn_context* ctx, const cmsis_nn_conv_params* conv_params,
+    const cmsis_nn_per_channel_quant_params* quant_params,
+    const cmsis_nn_dims* input_dims, const ActType* input,
+    const cmsis_nn_dims* filter_dims, const int8_t* filter,
+    const cmsis_nn_dims* bias_dims, const BiasType* bias,
+    const cmsis_nn_dims* output_dims, ActType* output, WeigthsType weightsT) {
+  return ARM_CMSIS_NN_ARG_ERROR;
+}
+
+template <>
+arm_cmsis_nn_status convolve_wrapper(
+    const cmsis_nn_context* ctx, const cmsis_nn_conv_params* conv_params,
+    const cmsis_nn_per_channel_quant_params* quant_params,
+    const cmsis_nn_dims* input_dims, const int8_t* input,
+    const cmsis_nn_dims* filter_dims, const int8_t* filter,
+    const cmsis_nn_dims* bias_dims, const int32_t* bias,
+    const cmsis_nn_dims* output_dims, int8_t* output, TfLiteType weightsT) {
+  if (weightsT == kTfLiteInt8) {
+    return arm_convolve_wrapper_s8(ctx, conv_params, quant_params, input_dims,
+                                   input, filter_dims, filter, bias_dims, bias,
+                                   output_dims, output);
+  } else if (weightsT == kTfLiteInt4) {
+    return arm_convolve_wrapper_s4(ctx, conv_params, quant_params, input_dims,
+                                   input, filter_dims, filter, bias_dims, bias,
+                                   output_dims, output);
+  } else {
+    return ARM_CMSIS_NN_ARG_ERROR;
+  }
+}
+
+template <>
+arm_cmsis_nn_status convolve_wrapper(
+    const cmsis_nn_context* ctx, const cmsis_nn_conv_params* conv_params,
+    const cmsis_nn_per_channel_quant_params* quant_params,
+    const cmsis_nn_dims* input_dims, const int16_t* input,
+    const cmsis_nn_dims* filter_dims, const int8_t* filter,
+    const cmsis_nn_dims* bias_dims, const int64_t* bias,
+    const cmsis_nn_dims* output_dims, int16_t* output, TfLiteType weightsT) {
+  const cmsis_nn_bias_data bias_data = {bias, false};
+
+  return arm_convolve_wrapper_s16(ctx, conv_params, quant_params, input_dims,
+                                  input, filter_dims, filter, bias_dims,
+                                  &bias_data, output_dims, output);
+}
+
+template <>
+arm_cmsis_nn_status convolve_wrapper(
+    const cmsis_nn_context* ctx, const cmsis_nn_conv_params* conv_params,
+    const cmsis_nn_per_channel_quant_params* quant_params,
+    const cmsis_nn_dims* input_dims, const int16_t* input,
+    const cmsis_nn_dims* filter_dims, const int8_t* filter,
+    const cmsis_nn_dims* bias_dims, const int32_t* bias,
+    const cmsis_nn_dims* output_dims, int16_t* output, TfLiteType weightsT) {
+  const cmsis_nn_bias_data bias_data = {bias, true};
+
+  return arm_convolve_wrapper_s16(ctx, conv_params, quant_params, input_dims,
+                                  input, filter_dims, filter, bias_dims,
+                                  &bias_data, output_dims, output);
+}
+
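The wrapper above funnels every integer conv variant through a single templated entry point: the activation and bias types select a specialization at compile time, while a runtime TfLiteType tag picks the concrete CMSIS-NN kernel. A stripped-down sketch of the same dispatch shape, using toy names rather than the CMSIS-NN API:

```cpp
#include <cstdint>
#include <cstdio>

enum ToyType { kToyInt4, kToyInt8 };

// Primary template: unsupported type combinations fall through to an error,
// mirroring the ARM_CMSIS_NN_ARG_ERROR default above.
template <typename ActType, typename BiasType>
int ToyConvolve(const ActType* in, const BiasType* bias, ToyType weights) {
  return -1;
}

// Specialization for int8 activations with int32 bias; the runtime tag then
// selects between the "s8" and "s4" kernels, like the wrapper above.
template <>
int ToyConvolve<int8_t, int32_t>(const int8_t* in, const int32_t* bias,
                                 ToyType weights) {
  if (weights == kToyInt8) return std::printf("s8 kernel\n");
  if (weights == kToyInt4) return std::printf("s4 kernel\n");
  return -1;
}

int main() {
  const int8_t in[4] = {};
  const int32_t bias[1] = {};
  ToyConvolve<int8_t, int32_t>(in, bias, kToyInt4);  // prints "s4 kernel"
  return 0;
}
```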
+template <typename ActType, typename BiasType, TfLiteType type>
 TfLiteStatus EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node,
                                      const TfLiteConvParams& params,
                                      const OpData& data,
@@ -179,51 +264,31 @@ TfLiteStatus EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node,
   quant_params.shift =
       const_cast<int32_t*>(data.reference_op_data.per_channel_output_shift);
 
-  RuntimeShape filter_shape = tflite::micro::GetTensorShape(filter);
-  RuntimeShape input_shape = tflite::micro::GetTensorShape(input);
-  RuntimeShape output_shape = tflite::micro::GetTensorShape(output);
-  RuntimeShape bias_shape = tflite::micro::GetTensorShape(bias);
-
-  // Consistency check.
-  TFLITE_DCHECK_LE(conv_params.activation.min, conv_params.activation.max);
-  TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4);
-  TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4);
-  TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4);
-  const int batch_size = MatchingDim(input_shape, 0, output_shape, 0);
-  const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
-  const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
-  if (tflite::micro::GetOptionalTensorData<int32_t>(bias)) {
-    TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth);
-  }
-
-  // Initialize cmsis_nn dimensions
-  // Input
+  // Initialize cmsis_nn dimension structs, consistency is checked in the
+  // prepare stage
   cmsis_nn_dims input_dims;
-  input_dims.n = batch_size;
-  input_dims.h = input_shape.Dims(1);
-  input_dims.w = input_shape.Dims(2);
-  input_dims.c = input_depth;
+  input_dims.n = input->dims->data[0];
+  input_dims.h = input->dims->data[1];
+  input_dims.w = input->dims->data[2];
+  input_dims.c = input->dims->data[3];
 
-  // Filter
   cmsis_nn_dims filter_dims;
-  filter_dims.n = output_depth;
-  filter_dims.h = filter_shape.Dims(1);
-  filter_dims.w = filter_shape.Dims(2);
-  filter_dims.c = input_depth;
+  filter_dims.n = 1;
+  filter_dims.h = filter->dims->data[1];
+  filter_dims.w = filter->dims->data[2];
+  filter_dims.c = filter->dims->data[3];
 
-  // Bias
   cmsis_nn_dims bias_dims;
   bias_dims.n = 1;
   bias_dims.h = 1;
   bias_dims.w = 1;
-  bias_dims.c = output_depth;
+  bias_dims.c = output->dims->data[3];
 
-  // Output
   cmsis_nn_dims output_dims;
-  output_dims.n = batch_size;
-  output_dims.h = output_shape.Dims(1);
-  output_dims.w = output_shape.Dims(2);
-  output_dims.c = output_depth;
+  output_dims.n = output->dims->data[0];
+  output_dims.h = output->dims->data[1];
+  output_dims.w = output->dims->data[2];
+  output_dims.c = output->dims->data[3];
 
   // Initialize cmsis_nn context
   cmsis_nn_context ctx;
   ctx.buf = nullptr;
   ctx.size = 0;
 
@@ -233,118 +298,44 @@ TfLiteStatus EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node,
   if (data.buffer_idx > -1) {
     ctx.buf = context->GetScratchBuffer(context, data.buffer_idx);
     // Note: ctx.size is currently not used in cmsis_nn.
-    // The buffer should be allocated in the Prepare function through
-    // arm_convolve_wrapper_s8_get_buffer_size
+    // The buffer should be allocated in the prepare function through
+    // the corresponding arm_convolve_wrapper_[type]_get_buffer_size
   }
 
-  // arm_convolve_wrapper_s8 dispatches the optimized kernel accordingly with
-  // the parameters passed
+  // arm_convolve_wrapper_[type] dispatches the optimized kernel accordingly
+  // with the parameters passed
   TFLITE_DCHECK_EQ(
-      arm_convolve_wrapper_s8(
+      convolve_wrapper(
           &ctx, &conv_params, &quant_params, &input_dims,
-          tflite::micro::GetTensorData<int8_t>(input), &filter_dims,
+          tflite::micro::GetTensorData<ActType>(input), &filter_dims,
           tflite::micro::GetTensorData<int8_t>(filter), &bias_dims,
-          tflite::micro::GetOptionalTensorData<int32_t>(bias), &output_dims,
-          tflite::micro::GetTensorData<int8_t>(output)),
+          tflite::micro::GetOptionalTensorData<BiasType>(bias), &output_dims,
+          tflite::micro::GetTensorData<ActType>(output), type),
       ARM_CMSIS_NN_SUCCESS);
 
   return kTfLiteOk;
 }
 
-TfLiteStatus EvalQuantizedPerChannel16x8(
-    TfLiteContext* context, TfLiteNode* node, const TfLiteConvParams& params,
-    const OpData& data, const TfLiteEvalTensor* input,
-    const TfLiteEvalTensor* filter, const TfLiteEvalTensor* bias,
-    TfLiteEvalTensor* output) {
-  cmsis_nn_conv_params conv_params;
-  conv_params.dilation.h = params.dilation_height_factor;
-  conv_params.dilation.w = params.dilation_width_factor;
-
-  // Initialize cmsis_nn convolution parameters
-  conv_params.input_offset = -data.reference_op_data.input_zero_point;
-  conv_params.output_offset = data.reference_op_data.output_zero_point;
-  conv_params.stride.h = params.stride_height;
-  conv_params.stride.w = params.stride_width;
-  conv_params.padding.h = data.reference_op_data.padding.height;
-  conv_params.padding.w = data.reference_op_data.padding.width;
-  conv_params.activation.min = data.reference_op_data.output_activation_min;
-  conv_params.activation.max = data.reference_op_data.output_activation_max;
-
-  // Initialize cmsis_nn per channel quantization parameters
-  cmsis_nn_per_channel_quant_params quant_params;
-  quant_params.multiplier = const_cast<int32_t*>(
-      data.reference_op_data.per_channel_output_multiplier);
-  quant_params.shift =
-      const_cast<int32_t*>(data.reference_op_data.per_channel_output_shift);
-
-  RuntimeShape filter_shape = tflite::micro::GetTensorShape(filter);
-  RuntimeShape input_shape = tflite::micro::GetTensorShape(input);
-  RuntimeShape output_shape = tflite::micro::GetTensorShape(output);
-  RuntimeShape bias_shape = tflite::micro::GetTensorShape(bias);
-
-  // Consistency check.
-  TFLITE_DCHECK_LE(conv_params.activation.min, conv_params.activation.max);
-  TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4);
-  TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4);
-  TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4);
-  const int batch_size = MatchingDim(input_shape, 0, output_shape, 0);
-  const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
-  const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
-  if (tflite::micro::GetOptionalTensorData<int64_t>(bias)) {
-    TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth);
-  }
-
-  // Initialize cmsis_nn dimensions
-  // Input
-  cmsis_nn_dims input_dims;
-  input_dims.n = batch_size;
-  input_dims.h = input_shape.Dims(1);
-  input_dims.w = input_shape.Dims(2);
-  input_dims.c = input_depth;
-
-  // Filter
-  cmsis_nn_dims filter_dims;
-  filter_dims.n = output_depth;
-  filter_dims.h = filter_shape.Dims(1);
-  filter_dims.w = filter_shape.Dims(2);
-  filter_dims.c = input_depth;
-
-  // Bias
-  cmsis_nn_dims bias_dims;
-  bias_dims.n = 1;
-  bias_dims.h = 1;
-  bias_dims.w = 1;
-  bias_dims.c = output_depth;
-
-  // Output
-  cmsis_nn_dims output_dims;
-  output_dims.n = batch_size;
-  output_dims.h = output_shape.Dims(1);
-  output_dims.w = output_shape.Dims(2);
-  output_dims.c = output_depth;
-
-  // Initialize cmsis_nn context
-  cmsis_nn_context ctx;
-  ctx.buf = nullptr;
-  ctx.size = 0;
-
-  if (data.buffer_idx > -1) {
-    ctx.buf = context->GetScratchBuffer(context, data.buffer_idx);
-    // Note: ctx.size is currently not used in cmsis_nn.
-    // The buffer should be allocated in the Prepare function through
-    // arm_convolve_wrapper_s8_get_buffer_size
-  }
+TfLiteStatus EvalInt4(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kConvInputTensor);
+  const TfLiteEvalTensor* filter =
+      tflite::micro::GetEvalInput(context, node, kConvWeightsTensor);
+  const TfLiteEvalTensor* bias =
+      (NumInputs(node) == 3)
+          ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor)
+          : nullptr;
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kConvOutputTensor);
 
-  TFLITE_DCHECK_EQ(
-      arm_convolve_wrapper_s16(
-          &ctx, &conv_params, &quant_params, &input_dims,
-          tflite::micro::GetTensorData<int16_t>(input), &filter_dims,
-          tflite::micro::GetTensorData<int8_t>(filter), &bias_dims,
-          tflite::micro::GetOptionalTensorData<int64_t>(bias), &output_dims,
-          tflite::micro::GetTensorData<int16_t>(output)),
-      ARM_CMSIS_NN_SUCCESS);
+  TFLITE_DCHECK(node->builtin_data != nullptr);
+  const auto& params =
+      *(reinterpret_cast<TfLiteConvParams*>(node->builtin_data));
+  TFLITE_DCHECK(node->user_data != nullptr);
+  const OpData& data = *(static_cast<const OpData*>(node->user_data));
 
-  return kTfLiteOk;
+  return EvalQuantizedPerChannel<int8_t, int32_t, kTfLiteInt4>(
+      context, node, params, data, input, filter, bias, output);
 }
 
 TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) {
@@ -364,11 +355,9 @@ TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) {
       *(reinterpret_cast<TfLiteConvParams*>(node->builtin_data));
   TFLITE_DCHECK(node->user_data != nullptr);
   const OpData& data = *(static_cast<const OpData*>(node->user_data));
-  TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor(
-      context, data.reference_op_data.filter_buffer_index, filter);
 
-  return EvalQuantizedPerChannel(context, node, params, data, input,
-                                 &filter_int8, bias, output);
+  return EvalQuantizedPerChannel<int8_t, int32_t, kTfLiteInt8>(
+      context, node, params, data, input, filter, bias, output);
 }
 
 TfLiteStatus EvalInt16x8(TfLiteContext* context, TfLiteNode* node) {
@@ -389,8 +378,17 @@ TfLiteStatus EvalInt16x8(TfLiteContext* context, TfLiteNode* node) {
   TFLITE_DCHECK(node->user_data != nullptr);
   const OpData& data = *(static_cast<const OpData*>(node->user_data));
 
-  return EvalQuantizedPerChannel16x8(context, node, params, data, input,
-                                     filter, bias, output);
+  if (bias == nullptr || bias->type == kTfLiteInt32) {
+    return EvalQuantizedPerChannel<int16_t, int32_t, kTfLiteInt8>(
+        context, node, params, data, input, filter, bias, output);
+  } else if (bias->type == kTfLiteInt64) {
+    return EvalQuantizedPerChannel<int16_t, int64_t, kTfLiteInt8>(
+        context, node, params, data, input, filter, bias, output);
+  } else {
+    MicroPrintf("Bias type %s (%d) not supported.",
+                TfLiteTypeGetName(bias->type), bias->type);
+    return kTfLiteError;
+  }
 }
 
 TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
@@ -419,9 +417,6 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
           (input->type == kTfLiteInt8 && filter->type == kTfLiteInt4),
       "Hybrid models are not supported on TFLite Micro.");
 
-  TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor(
-      context, data.reference_op_data.filter_buffer_index, filter);
-
   switch (input->type) {  // Already know in/out types are same.
case kTfLiteFloat32: { tflite::reference_ops::Conv( @@ -437,30 +432,44 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetTensorShape(nullptr), nullptr); break; } - case kTfLiteInt8: - switch (filter_int8.type) { + case kTfLiteInt8: { + switch (filter->type) { + case kTfLiteInt4: { + return EvalQuantizedPerChannel( + context, node, params, data, input, filter, bias, output); + } case kTfLiteInt8: { - return EvalQuantizedPerChannel(context, node, params, data, input, - &filter_int8, bias, output); + return EvalQuantizedPerChannel( + context, node, params, data, input, filter, bias, output); } - default: { MicroPrintf("Filter type %s (%d) not supported.", TfLiteTypeGetName(filter->type), filter->type); return kTfLiteError; } } - break; - case kTfLiteInt16: - return EvalQuantizedPerChannel16x8(context, node, params, data, input, - filter, bias, output); + } + case kTfLiteInt16: { + if (bias == nullptr || bias->type == kTfLiteInt32) { + return EvalQuantizedPerChannel( + context, node, params, data, input, filter, bias, output); + } else if (bias->type == kTfLiteInt64) { + return EvalQuantizedPerChannel( + context, node, params, data, input, filter, bias, output); + } else { + MicroPrintf("Bias type %s (%d) not supported.", + TfLiteTypeGetName(bias->type), bias->type); + return kTfLiteError; + } break; + } default: MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), input->type); return kTfLiteError; } + return kTfLiteOk; } @@ -470,6 +479,10 @@ TFLMRegistration Register_CONV_2D() { return tflite::micro::RegisterOp(Init, Prepare, Eval); } +TFLMRegistration Register_CONV_2D_INT4() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt4); +} + TFLMRegistration Register_CONV_2D_INT8() { return tflite::micro::RegisterOp(Init, Prepare, EvalInt8); } diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/depthwise_conv.cc b/tensorflow/lite/micro/kernels/cmsis_nn/depthwise_conv.cc index 7b733b76afd..f30a9520831 100644 --- a/tensorflow/lite/micro/kernels/cmsis_nn/depthwise_conv.cc +++ b/tensorflow/lite/micro/kernels/cmsis_nn/depthwise_conv.cc @@ -1,4 +1,4 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -118,15 +118,6 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { context, num_channels * sizeof(int32_t))); } - if (filter->type == kTfLiteInt4) { - int filter_size = - RuntimeShape(filter->dims->size, - reinterpret_cast(filter->dims->data)) - .FlatSize(); - context->RequestScratchBufferInArena( - context, filter_size, &data->reference_op_data.filter_buffer_index); - } - TF_LITE_ENSURE_STATUS(CalculateOpDataDepthwiseConv( context, node, params, input_width, input_height, filter_width, filter_height, output_width, output_height, data_type, @@ -168,8 +159,18 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { dw_conv_params.dilation.h = params.dilation_height_factor; dw_conv_params.dilation.w = params.dilation_width_factor; - const int32_t buf_size = arm_depthwise_conv_wrapper_s8_get_buffer_size( - &dw_conv_params, &input_dims, &filter_dims, &output_dims); + int32_t buf_size = 0; + if (filter->type == kTfLiteInt8) { + buf_size = arm_depthwise_conv_wrapper_s8_get_buffer_size( + &dw_conv_params, &input_dims, &filter_dims, &output_dims); + } else if (filter->type == kTfLiteInt4) { + buf_size = arm_depthwise_conv_wrapper_s4_get_buffer_size( + &dw_conv_params, &input_dims, &filter_dims, &output_dims); + } else { + MicroPrintf("Filter type %s (%d) not supported.", + TfLiteTypeGetName(filter->type), filter->type); + return kTfLiteError; + } if (buf_size > 0) { TF_LITE_ENSURE_STATUS(context->RequestScratchBufferInArena( @@ -285,6 +286,43 @@ void EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, ARM_CMSIS_NN_SUCCESS); } +void EvalQuantizedPerChannelInt4(TfLiteContext* context, TfLiteNode* node, + const TfLiteDepthwiseConvParams& params, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + PopulateDwConvParams(&dw_conv_params, &quant_params, &input_dims, + &filter_dims, &bias_dims, &output_dims, params, data, + input, filter, bias, output); + + cmsis_nn_context ctx; + ctx.buf = nullptr; + /* 'size' is unused */ + ctx.size = 0; + + if (data.buffer_idx > -1) { + ctx.buf = context->GetScratchBuffer(context, data.buffer_idx); + } + + TFLITE_DCHECK_EQ( + arm_depthwise_conv_wrapper_s4( + &ctx, &dw_conv_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, + tflite::micro::GetOptionalTensorData(bias), &output_dims, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); +} + void EvalQuantizedPerChannel16x8(TfLiteContext* context, TfLiteNode* node, const TfLiteDepthwiseConvParams& params, const OpData& data, @@ -337,9 +375,6 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) : nullptr; - TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( - context, data.reference_op_data.filter_buffer_index, filter); - switch (input->type) { // Already know in/out types are same. 
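+    // As in conv.cc, float32 falls back to the reference kernel, while the
+    // int8 case below dispatches to CMSIS-NN for both int8 and int4 filters.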
case kTfLiteFloat32: { tflite::reference_ops::DepthwiseConv( @@ -355,10 +390,15 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { break; } case kTfLiteInt8: - switch (filter_int8.type) { + switch (filter->type) { case kTfLiteInt8: { - EvalQuantizedPerChannel(context, node, params, data, input, - &filter_int8, bias, output); + EvalQuantizedPerChannel(context, node, params, data, input, filter, + bias, output); + break; + } + case kTfLiteInt4: { + EvalQuantizedPerChannelInt4(context, node, params, data, input, + filter, bias, output); break; } default: { @@ -399,11 +439,8 @@ TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) : nullptr; - TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( - context, data.reference_op_data.filter_buffer_index, filter); - - EvalQuantizedPerChannel(context, node, params, data, input, &filter_int8, - bias, output); + EvalQuantizedPerChannel(context, node, params, data, input, filter, bias, + output); return kTfLiteOk; } @@ -431,6 +468,30 @@ TfLiteStatus EvalInt16x8(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } +TfLiteStatus EvalInt4(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + const auto& params = + *(reinterpret_cast(node->builtin_data)); + const OpData& data = *(static_cast(node->user_data)); + + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kDepthwiseConvOutputTensor); + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kDepthwiseConvWeightsTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 3) + ? tflite::micro::GetEvalInput(context, node, kDepthwiseConvBiasTensor) + : nullptr; + + EvalQuantizedPerChannelInt4(context, node, params, data, input, filter, bias, + output); + return kTfLiteOk; +} + } // namespace TFLMRegistration Register_DEPTHWISE_CONV_2D() { @@ -445,4 +506,8 @@ TFLMRegistration Register_DEPTHWISE_CONV_2D_INT16() { return tflite::micro::RegisterOp(Init, Prepare, EvalInt16x8); } +TFLMRegistration Register_DEPTHWISE_CONV_2D_INT4() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt4); +} + } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/fully_connected.cc b/tensorflow/lite/micro/kernels/cmsis_nn/fully_connected.cc index a7ab8f12ab3..dc7b78c8a07 100644 --- a/tensorflow/lite/micro/kernels/cmsis_nn/fully_connected.cc +++ b/tensorflow/lite/micro/kernels/cmsis_nn/fully_connected.cc @@ -1,4 +1,4 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -26,6 +26,7 @@ limitations under the License. #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" #include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_arena_constants.h" #include "tensorflow/lite/micro/micro_log.h" namespace tflite { @@ -42,6 +43,8 @@ struct OpData { // Index to buffer for optimizations if applicable. 
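+  // By the convention in these kernels, buffer_idx is -1 when no scratch
+  // buffer is in use; kernel_sums below caches weight sums precomputed in
+  // Prepare for the s8 fully-connected kernel.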
int buffer_idx; + int32_t* kernel_sums; + int32_t batches; int32_t accum_depth; int32_t output_depth; @@ -101,7 +104,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_EQ(context, input->params.zero_point, 0); TF_LITE_ENSURE_EQ(context, output->params.zero_point, 0); buf_size = arm_fully_connected_s16_get_buffer_size(&filter_dims); - } else if (input->type == kTfLiteInt8) { + } else if (input->type == kTfLiteInt8 && filter->type != kTfLiteInt4) { const RuntimeShape input_shape = GetTensorShape(input); TFLITE_DCHECK_GE(output_dim_count, 2); @@ -124,16 +127,21 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { buf_size = arm_convolve_1x1_s8_fast_get_buffer_size(&input_dims); } else { buf_size = arm_fully_connected_s8_get_buffer_size(&filter_dims); - } - } - if (filter->type == kTfLiteInt4) { - int filter_size = - RuntimeShape(filter->dims->size, - reinterpret_cast(filter->dims->data)) - .FlatSize(); - context->RequestScratchBufferInArena( - context, filter_size, &data->reference_op_data.filter_buffer_index); + int8_t* filter_data = GetTensorData(filter); + data->kernel_sums = nullptr; + + if (buf_size > 0 && filter_data != nullptr) { + data->kernel_sums = static_cast( + context->AllocatePersistentBuffer(context, buf_size)); + + arm_vector_sum_s8(data->kernel_sums, filter_dims.n, data->output_depth, + filter_data, 1, nullptr); + + // Do not request a scratch buffer since using persistent memory + buf_size = 0; + } + } } if (buf_size > 0) { @@ -188,6 +196,49 @@ void PopulateCommonParams(TfLiteContext* context, } } +TfLiteStatus EvalQuantizedInt4(TfLiteContext* context, TfLiteNode* node, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + const RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + const int output_dim_count = output_shape.DimensionsCount(); + TFLITE_DCHECK_GE(output_dim_count, 2); + TFLITE_DCHECK_LE(output_dim_count, 4); + + cmsis_nn_per_tensor_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + cmsis_nn_context ctx; + + PopulateCommonParams(context, &quant_params, &input_dims, &filter_dims, + &bias_dims, &output_dims, &ctx, data); + + const int32_t* bias_data = + tflite::micro::GetOptionalTensorData(bias); + + cmsis_nn_fc_params fc_params; + fc_params.input_offset = -data.reference_op_data.input_zero_point; + fc_params.output_offset = data.reference_op_data.output_zero_point; + fc_params.filter_offset = 0; + fc_params.activation.min = data.reference_op_data.output_activation_min; + fc_params.activation.max = data.reference_op_data.output_activation_max; + + TF_LITE_ENSURE_EQ( + context, + arm_fully_connected_s4( + &ctx, &fc_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, bias_data, + &output_dims, tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + + return kTfLiteOk; +} + TfLiteStatus EvalQuantizedInt8(TfLiteContext* context, TfLiteNode* node, const OpData& data, const TfLiteEvalTensor* input, @@ -247,11 +298,20 @@ TfLiteStatus EvalQuantizedInt8(TfLiteContext* context, TfLiteNode* node, } else { cmsis_nn_fc_params fc_params; fc_params.input_offset = -data.reference_op_data.input_zero_point; + fc_params.filter_offset = -data.reference_op_data.filter_zero_point; fc_params.output_offset = 
data.reference_op_data.output_zero_point;
-    fc_params.filter_offset = 0;
     fc_params.activation.min = data.reference_op_data.output_activation_min;
     fc_params.activation.max = data.reference_op_data.output_activation_max;
 
+    if (data.kernel_sums != nullptr) {
+      ctx.buf = data.kernel_sums;
+    } else if (ctx.buf != nullptr) {
+      // If behaving like batch matmul, we calculate kernel sums in eval.
+      arm_vector_sum_s8(
+          static_cast<int32_t*>(ctx.buf), filter_dims.n, data.output_depth,
+          tflite::micro::GetTensorData<int8_t>(filter), 1, nullptr);
+    }
+
     TF_LITE_ENSURE_EQ(
         context,
         arm_fully_connected_s8(
@@ -319,9 +379,6 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
   TFLITE_DCHECK(node->user_data != nullptr);
   const OpData& data = *(static_cast<const OpData*>(node->user_data));
 
-  TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor(
-      context, data.reference_op_data.filter_buffer_index, filter);
-
   // Checks in Prepare ensure input, output and filter types are all the same.
   switch (input->type) {
     case kTfLiteFloat32: {
@@ -339,10 +396,13 @@
       break;
     }
     case kTfLiteInt8: {
-      switch (filter_int8.type) {
+      switch (filter->type) {
+        case kTfLiteInt4:
+          return EvalQuantizedInt4(context, node, data, input, filter, bias,
+                                   output);
         case kTfLiteInt8:
-          return EvalQuantizedInt8(context, node, data, input, &filter_int8,
-                                   bias, output);
+          return EvalQuantizedInt8(context, node, data, input, filter, bias,
+                                   output);
         default:
           MicroPrintf("Filter Type %s (%d) not supported.",
                       TfLiteTypeGetName(filter->type), filter->type);
@@ -363,6 +423,29 @@
   return kTfLiteOk;
 }
 
+TfLiteStatus EvalInt4(TfLiteContext* context, TfLiteNode* node) {
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kFullyConnectedInputTensor);
+  const TfLiteEvalTensor* filter =
+      tflite::micro::GetEvalInput(context, node, kFullyConnectedWeightsTensor);
+  const TfLiteEvalTensor* bias =
+      tflite::micro::GetEvalInput(context, node, kFullyConnectedBiasTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kFullyConnectedOutputTensor);
+
+  TFLITE_DCHECK(node->user_data != nullptr);
+  const OpData& data = *(static_cast<const OpData*>(node->user_data));
+
+  // Checks in Prepare ensure input, output and filter types are all the same.
+  if (input->type != kTfLiteInt8 || filter->type != kTfLiteInt4) {
+    MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type),
+                input->type);
+    return kTfLiteError;
+  }
+
+  return EvalQuantizedInt4(context, node, data, input, filter, bias, output);
+}
+
 // Note that the current function names are not ideal at all (this EvalInt8
 // function internally calls EvalQuantizedInt8, and there is similar name
 // aliasing in the Eval function too).
We will be attempting to have a more @@ -389,11 +472,7 @@ TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { return kTfLiteError; } - TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( - context, data.reference_op_data.filter_buffer_index, filter); - - return EvalQuantizedInt8(context, node, data, input, &filter_int8, bias, - output); + return EvalQuantizedInt8(context, node, data, input, filter, bias, output); } TfLiteStatus EvalInt16(TfLiteContext* context, TfLiteNode* node) { @@ -425,6 +504,10 @@ TFLMRegistration Register_FULLY_CONNECTED() { return tflite::micro::RegisterOp(Init, Prepare, Eval); } +TFLMRegistration Register_FULLY_CONNECTED_INT4() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt4); +} + TFLMRegistration Register_FULLY_CONNECTED_INT8() { return tflite::micro::RegisterOp(Init, Prepare, EvalInt8); } @@ -433,4 +516,8 @@ TFLMRegistration Register_FULLY_CONNECTED_INT16() { return tflite::micro::RegisterOp(Init, Prepare, EvalInt16); } +TFLMInferenceRegistration RegisterInference_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(Eval); +} + } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/svdf.cc b/tensorflow/lite/micro/kernels/cmsis_nn/svdf.cc index 03dbaee9c85..bf64016b13f 100644 --- a/tensorflow/lite/micro/kernels/cmsis_nn/svdf.cc +++ b/tensorflow/lite/micro/kernels/cmsis_nn/svdf.cc @@ -1,4 +1,4 @@ -/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -31,9 +31,195 @@ limitations under the License. namespace tflite { namespace { +struct CmsisNnOpDataSvdf { + int32_t effective_scale_1_a; + int32_t effective_scale_2_a; + // b versions of each scale are kept at int since the numbers are just the + // shift value - typically between [-32, 32]. + int effective_scale_1_b; + int effective_scale_2_b; + int scratch_tensor_index; + int scratch_output_tensor_index; + + // Cached tensor zero point values for quantized operations. 
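+  // (kernel_sums is filled once in Prepare via arm_vector_sum_s8 and passed
+  // to arm_svdf_s8 through ctx.buf at eval time.)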
+ int input_zero_point; + int output_zero_point; + int activation_state_zero_point; + int32_t* kernel_sums; +}; + void* Init(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); - return context->AllocatePersistentBuffer(context, sizeof(OpDataSvdf)); + return context->AllocatePersistentBuffer(context, sizeof(CmsisNnOpDataSvdf)); +} + +TfLiteStatus CmsisNnPrepareSvdf(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->builtin_data != nullptr); + + const auto* params = static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + // Validate Tensor Inputs (dtype depends on quantization): + // [0] = Input, {2, batch_size, input_size} + // [1] = Weights Feature, {2, num_filters, input_size} + // [2] = Weights Time, {2, num_filters, memory_size} + // [3] = Bias (optional), {1, num_units} + // [4] = Activation State (variable), + // {2, batch_size, memory_size * num_filters} + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kSvdfInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* weights_feature = + micro_context->AllocateTempInputTensor(node, kSvdfWeightsFeatureTensor); + TF_LITE_ENSURE(context, weights_feature != nullptr); + TfLiteTensor* weights_time = + micro_context->AllocateTempInputTensor(node, kSvdfWeightsTimeTensor); + TF_LITE_ENSURE(context, weights_time != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kSvdfBiasTensor); + TfLiteTensor* activation_state = micro_context->AllocateTempInputTensor( + node, kSvdfInputActivationStateTensor); + TF_LITE_ENSURE(context, activation_state != nullptr); + + // Define input constants based on input tensor definition above: + const int rank = params->rank; + const int input_size = input->dims->data[1]; + const int batch_size = input->dims->data[0]; + const int num_filters = weights_feature->dims->data[0]; + TF_LITE_ENSURE_EQ(context, num_filters % rank, 0); + const int num_units = num_filters / rank; + const int memory_size = weights_time->dims->data[1]; + + // Validate Input Tensor: + TF_LITE_ENSURE(context, + input->type == kTfLiteFloat32 || input->type == kTfLiteInt8); + TF_LITE_ENSURE_EQ(context, NumDimensions(input), 2); + + // Validate Tensor Output: + // [0] = float/int8_t, {2, batch_size, num_units} + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kSvdfOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TF_LITE_ENSURE_EQ(context, NumDimensions(output), 2); + TF_LITE_ENSURE_EQ(context, output->dims->data[0], batch_size); + TF_LITE_ENSURE_EQ(context, output->dims->data[1], num_units); + + // Validate Weights Feature Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(weights_feature), 2); + TF_LITE_ENSURE_EQ(context, weights_feature->dims->data[1], input_size); + + // Validate Weights Time Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(weights_time), 2); + TF_LITE_ENSURE_EQ(context, weights_time->dims->data[0], num_filters); + TF_LITE_ENSURE_EQ(context, weights_time->dims->data[1], memory_size); + + // Validate Optional Bias Input Tensor: + if (bias != nullptr) { + TF_LITE_ENSURE_EQ(context, bias->dims->data[0], num_units); + } + + // Validate Activation State Input Tensor: + TF_LITE_ENSURE_EQ(context, NumDimensions(activation_state), 2); + TF_LITE_ENSURE_EQ(context, activation_state->dims->data[0], batch_size); + TF_LITE_ENSURE_EQ(context, 
activation_state->dims->data[1],
+                    memory_size * num_filters);
+  // Since is_variable is not part of TfLiteEvalTensor, check is_variable here.
+  TF_LITE_ENSURE_EQ(context, activation_state->is_variable, true);
+
+  TF_LITE_ENSURE_EQ(context, node->inputs->size, 5);
+
+  TFLITE_DCHECK(node->user_data != nullptr);
+  CmsisNnOpDataSvdf* data = static_cast<CmsisNnOpDataSvdf*>(node->user_data);
+
+  if (input->type == kTfLiteInt8) {
+    TF_LITE_ENSURE_EQ(context, weights_feature->type, kTfLiteInt8);
+    TF_LITE_ENSURE(context, (weights_time->type == kTfLiteInt16) ||
+                                (weights_time->type == kTfLiteInt8));
+    TF_LITE_ENSURE(context, (activation_state->type == kTfLiteInt16) ||
+                                (activation_state->type == kTfLiteInt8));
+    if (bias != nullptr) {
+      TF_LITE_ENSURE_EQ(context, bias->type, kTfLiteInt32);
+    }
+
+    TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteInt8);
+
+    const double effective_scale_1 = static_cast<double>(
+        input->params.scale * weights_feature->params.scale /
+        activation_state->params.scale);
+    const double effective_scale_2 =
+        static_cast<double>(activation_state->params.scale *
+                            weights_time->params.scale / output->params.scale);
+
+    // TODO(b/162018098): Use TF_LITE_ENSURE_NEAR when it is ready.
+    // TODO(#1751): account for optional bias tensor
+    TF_LITE_ENSURE(
+        context,
+        std::abs(static_cast<double>(bias->params.scale) -
+                 static_cast<double>(activation_state->params.scale *
+                                     weights_time->params.scale)) < 1e-5);
+
+    QuantizeMultiplier(effective_scale_1, &(data->effective_scale_1_a),
+                       &(data->effective_scale_1_b));
+    QuantizeMultiplier(effective_scale_2, &(data->effective_scale_2_a),
+                       &(data->effective_scale_2_b));
+
+    data->input_zero_point = input->params.zero_point;
+    data->output_zero_point = output->params.zero_point;
+    data->activation_state_zero_point = activation_state->params.zero_point;
+
+    TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr);
+
+    const TfLiteStatus scratch_status = context->RequestScratchBufferInArena(
+        context, batch_size * num_filters * sizeof(int32_t),
+        &(data->scratch_tensor_index));
+    TF_LITE_ENSURE_OK(context, scratch_status);
+
+    const TfLiteStatus scratch_output_status =
+        context->RequestScratchBufferInArena(
+            context, batch_size * num_units * sizeof(int32_t),
+            &(data->scratch_output_tensor_index));
+    TF_LITE_ENSURE_OK(context, scratch_output_status);
+
+    cmsis_nn_dims weights_feature_dims;
+    weights_feature_dims.n = num_filters;
+    weights_feature_dims.h = input_size;
+
+    const int32_t buf_size = arm_svdf_s8_get_buffer_size(&weights_feature_dims);
+
+    if (buf_size > 0) {
+      data->kernel_sums = static_cast<int32_t*>(
+          context->AllocatePersistentBuffer(context, buf_size));
+
+      arm_vector_sum_s8(data->kernel_sums, input_size, num_filters,
+                        GetTensorData<int8_t>(weights_feature), 1, nullptr);
+    }
+
+  } else {
+    TF_LITE_ENSURE_EQ(context, weights_feature->type, kTfLiteFloat32);
+    TF_LITE_ENSURE_EQ(context, weights_time->type, kTfLiteFloat32);
+    TF_LITE_ENSURE_EQ(context, activation_state->type, kTfLiteFloat32);
+    if (bias != nullptr) {
+      TF_LITE_ENSURE_EQ(context, bias->type, kTfLiteFloat32);
+    }
+    TF_LITE_ENSURE_TYPES_EQ(context, output->type, kTfLiteFloat32);
+
+    TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr);
+    const TfLiteStatus scratch_status = context->RequestScratchBufferInArena(
+        context, batch_size * num_filters * sizeof(float),
+        &(data->scratch_tensor_index));
+    TF_LITE_ENSURE_OK(context, scratch_status);
+  }
+
+  micro_context->DeallocateTempTfLiteTensor(input);
+  micro_context->DeallocateTempTfLiteTensor(weights_feature);
+ 
micro_context->DeallocateTempTfLiteTensor(weights_time); + micro_context->DeallocateTempTfLiteTensor(activation_state); + micro_context->DeallocateTempTfLiteTensor(output); + // TODO(#1751): account for optional bias tensor + micro_context->DeallocateTempTfLiteTensor(bias); + return kTfLiteOk; } TfLiteStatus EvalIntegerSVDF(TfLiteContext* context, TfLiteNode* node, @@ -44,7 +230,7 @@ TfLiteStatus EvalIntegerSVDF(TfLiteContext* context, TfLiteNode* node, const TfLiteSVDFParams* params, TfLiteEvalTensor* activation_state_tensor, TfLiteEvalTensor* output_tensor, - const OpDataSvdf& data) { + const CmsisNnOpDataSvdf& data) { cmsis_nn_dims input_dims; input_dims.n = input_tensor->dims->data[0]; input_dims.h = input_tensor->dims->data[1]; @@ -102,9 +288,12 @@ TfLiteStatus EvalIntegerSVDF(TfLiteContext* context, TfLiteNode* node, switch (weights_time_tensor->type) { case kTfLiteInt8: { + cmsis_nn_context ctx; + ctx.buf = data.kernel_sums; + arm_svdf_s8( - &scratch_ctx, &scratch_output_ctx, &svdf_params, &in_quant_params, - &out_quant_params, &input_dims, + &ctx, &scratch_ctx, &scratch_output_ctx, &svdf_params, + &in_quant_params, &out_quant_params, &input_dims, tflite::micro::GetTensorData(input_tensor), &state_dims, tflite::micro::GetTensorData(activation_state_tensor), &weights_feature_dims, @@ -141,7 +330,8 @@ TfLiteStatus EvalIntegerSVDF(TfLiteContext* context, TfLiteNode* node, TfLiteStatus EvalSvdf(TfLiteContext* context, TfLiteNode* node) { auto* params = reinterpret_cast(node->builtin_data); TFLITE_DCHECK(node->user_data != nullptr); - const OpDataSvdf& data = *(static_cast(node->user_data)); + const CmsisNnOpDataSvdf& data = + *(static_cast(node->user_data)); const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); @@ -184,7 +374,8 @@ TfLiteStatus EvalSvdf(TfLiteContext* context, TfLiteNode* node) { TfLiteStatus EvalSvdfInt8(TfLiteContext* context, TfLiteNode* node) { auto* params = reinterpret_cast(node->builtin_data); TFLITE_DCHECK(node->user_data != nullptr); - const OpDataSvdf& data = *(static_cast(node->user_data)); + const CmsisNnOpDataSvdf& data = + *(static_cast(node->user_data)); const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kSvdfInputTensor); @@ -213,11 +404,11 @@ TfLiteStatus EvalSvdfInt8(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_SVDF() { - return tflite::micro::RegisterOp(Init, PrepareSvdf, EvalSvdf); + return tflite::micro::RegisterOp(Init, CmsisNnPrepareSvdf, EvalSvdf); } TFLMRegistration Register_SVDF_INT8() { - return tflite::micro::RegisterOp(Init, PrepareSvdf, EvalSvdfInt8); + return tflite::micro::RegisterOp(Init, CmsisNnPrepareSvdf, EvalSvdfInt8); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/transpose_conv.cc b/tensorflow/lite/micro/kernels/cmsis_nn/transpose_conv.cc new file mode 100644 index 00000000000..06305bc3faf --- /dev/null +++ b/tensorflow/lite/micro/kernels/cmsis_nn/transpose_conv.cc @@ -0,0 +1,549 @@ +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/kernels/transpose_conv.h" + +#include "Include/arm_nnfunctions.h" +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/quantization_util.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/transpose_conv.h" +#include "tensorflow/lite/kernels/internal/reference/transpose_conv.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/padding.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { +namespace { + +// For the TfLite transpose_conv implementation, input tensor 0 corresponds to +// the OutputShapeTensor. However, since TFLM does not support dynamic tensors, +// the TFLM implementation ignores input tensor 0 and the only inputs we care +// about are kFilterTensor, kInputTensor and kBiasTensor. +constexpr int kFilterTensor = 1; +constexpr int kInputTensor = 2; +constexpr int kBiasTensor = 3; +constexpr int kOutputTensor = 0; + +// Conv is quantized along dimension 0: +// https://www.tensorflow.org/lite/performance/quantization_spec +constexpr int kConvQuantizedDimension = 0; + +struct OpData { + ConvParams params; + + // Scratch buffers are required for quantized implementations. + int scratch_buffer_index; + int scratch_buffer_output_index; + + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. + int bias_converted_buffer_index; + + // Multiplier and shift arrays are required for the int8 implementation. + int32_t* per_channel_output_multiplier; + int32_t* per_channel_output_shift; +}; + +inline PaddingType RuntimePaddingType(TfLitePadding padding) { + switch (padding) { + case TfLitePadding::kTfLitePaddingSame: + return PaddingType::kSame; + case TfLitePadding::kTfLitePaddingValid: + return PaddingType::kValid; + case TfLitePadding::kTfLitePaddingUnknown: + default: + return PaddingType::kNone; + } +} + +TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, + const TfLiteTransposeConvParams* params, int width, + int height, int filter_width, int filter_height, + const TfLiteType data_type, OpData* data) { + bool has_bias = node->inputs->size == 4; + // Check number of inputs/outputs + TF_LITE_ENSURE(context, has_bias || node->inputs->size == 3); + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + + // Matching GetWindowedOutputSize in TensorFlow. + auto padding = params->padding; + int pad_output_width; + int pad_output_height; + + TfLitePaddingValues padding_values = ComputePaddingHeightWidth( + params->stride_height, params->stride_width, 1, + 1, // Dilation height and width are always 1 for transpose_conv. 
+ height, width, filter_height, filter_width, padding, &pad_output_height, + &pad_output_width); + + data->params.padding_type = RuntimePaddingType(padding); + data->params.padding_values.width = padding_values.width; + data->params.padding_values.height = padding_values.height; + data->params.padding_values.width_offset = + padding_values.width_offset + padding_values.width; + data->params.padding_values.height_offset = + padding_values.height_offset + padding_values.height; + + // Note that quantized inference requires that all tensors have their + // parameters set. This is usually done during quantized training. + if (data_type != kTfLiteFloat32) { + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kBiasTensor); + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + int output_channels = filter->dims->data[kConvQuantizedDimension]; + + TF_LITE_ENSURE_STATUS(tflite::PopulateConvolutionQuantizationParams( + context, input, filter, bias, output, kTfLiteActNone, + &data->params.output_multiplier, &data->params.output_shift, + &data->params.quantized_activation_min, + &data->params.quantized_activation_max, + data->per_channel_output_multiplier, data->per_channel_output_shift, + output_channels)); + + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. + if (input->type == kTfLiteInt16) { + TFLITE_DCHECK(filter->type == kTfLiteInt8); + TFLITE_DCHECK(output->type == kTfLiteInt16); + if (bias->type == kTfLiteInt16) { + TFLITE_DCHECK( + context->RequestScratchBufferInArena( + context, GetTensorShape(bias).FlatSize() * sizeof(std::int64_t), + &(data->bias_converted_buffer_index)) == kTfLiteOk); + } + } + + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(output); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + } + return kTfLiteOk; +} + +void* Init(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpData)); +} + +TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + TFLITE_DCHECK(node->builtin_data != nullptr); + + OpData* data = static_cast(node->user_data); + const auto params = + static_cast(node->builtin_data); + + MicroContext* micro_context = GetMicroContext(context); + + TfLiteTensor* output = + micro_context->AllocateTempOutputTensor(node, kOutputTensor); + TF_LITE_ENSURE(context, output != nullptr); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kInputTensor); + TF_LITE_ENSURE(context, input != nullptr); + TfLiteTensor* filter = + micro_context->AllocateTempInputTensor(node, kFilterTensor); + TF_LITE_ENSURE(context, filter != nullptr); + + TF_LITE_ENSURE_MSG( + context, + input->type == filter->type || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8), + "Hybrid models are not supported on TFLite Micro."); + + // Get height and width of the output. 
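+  // (Output is NHWC, so dim 1 is height and dim 2 is width.)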
+ const int width = SizeOfDimension(output, 2); + const int height = SizeOfDimension(output, 1); + const int filter_width = SizeOfDimension(filter, 2); + const int filter_height = SizeOfDimension(filter, 1); + + // Dynamically allocate per-channel quantization parameters. + const int num_channels = filter->dims->data[kConvQuantizedDimension]; + data->per_channel_output_multiplier = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + data->per_channel_output_shift = + static_cast(context->AllocatePersistentBuffer( + context, num_channels * sizeof(int32_t))); + + if (input->type == kTfLiteInt8) { + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + + RuntimeShape input_shape = GetTensorShape(input); + RuntimeShape output_shape = GetTensorShape(output); + RuntimeShape filter_shape = GetTensorShape(filter); + + const int batch_size = MatchingDim(input_shape, 0, output_shape, 0); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + + cmsis_nn_dims output_dims; + output_dims.n = batch_size; + output_dims.h = output_shape.Dims(1); + output_dims.w = output_shape.Dims(2); + output_dims.c = output_depth; + +#if defined(KERNELS_OPTIMIZED_FOR_SPEED) + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + + cmsis_nn_dims input_dims; + input_dims.n = batch_size; + input_dims.h = input_shape.Dims(1); + input_dims.w = input_shape.Dims(2); + input_dims.c = input_depth; + + cmsis_nn_dims filter_dims; + filter_dims.n = output_depth; + filter_dims.h = filter_shape.Dims(1); + filter_dims.w = filter_shape.Dims(2); + filter_dims.c = input_depth; + + const size_t buf_size = arm_transpose_conv_s8_get_buffer_size( + &input_dims, &filter_dims, &output_dims); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, buf_size, &(data->scratch_buffer_index)) == + kTfLiteOk); +#endif + + // Quantized 8-bit kernels use an int32 scratch buffer. + TFLITE_DCHECK( + context->RequestScratchBufferInArena( + context, + output_dims.h * output_dims.w * output_dims.c * sizeof(int32_t), + &(data->scratch_buffer_output_index)) == kTfLiteOk); + } + + // Quantized 16x8 kernels use an int64 scratch buffer. + if (input->type == kTfLiteInt16) { + TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, + GetTensorShape(output).FlatSize() * sizeof(std::int64_t), + &(data->scratch_buffer_index)) == kTfLiteOk); + } + + // All per-channel quantized tensors need valid zero point and scale arrays. 
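+  // (The filter scale array must either hold a single per-tensor value or
+  // one value per output channel; both constraints are checked below.)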
+ if (input->type == kTfLiteInt8 || input->type == kTfLiteInt16) { + TF_LITE_ENSURE_EQ(context, filter->quantization.type, + kTfLiteAffineQuantization); + + const auto* affine_quantization = + static_cast(filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE(context, affine_quantization->zero_point); + + TF_LITE_ENSURE(context, + affine_quantization->scale->size == 1 || + affine_quantization->scale->size == + filter->dims->data[kConvQuantizedDimension]); + TF_LITE_ENSURE_EQ(context, affine_quantization->scale->size, + affine_quantization->zero_point->size); + } + + TF_LITE_ENSURE_STATUS(CalculateOpData(context, node, params, width, height, + filter_width, filter_height, + input->type, data)); + + // Offsets (zero points) + data->params.input_offset = -input->params.zero_point; + data->params.weights_offset = -filter->params.zero_point; + data->params.output_offset = output->params.zero_point; + + // Stride + data->params.stride_width = params->stride_width; + data->params.stride_height = params->stride_height; + + micro_context->DeallocateTempTfLiteTensor(output); + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + return kTfLiteOk; +} + +#if defined(KERNELS_OPTIMIZED_FOR_SPEED) +TfLiteStatus EvalQuantizedPerChannel(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const OpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { + cmsis_nn_transpose_conv_params conv_params; + conv_params.dilation.h = 1; + conv_params.dilation.w = 1; + + // Initialize cmsis_nn convolution parameters + conv_params.input_offset = data.params.input_offset; + conv_params.output_offset = data.params.output_offset; + conv_params.stride.h = params.stride_height; + conv_params.stride.w = params.stride_width; + conv_params.padding.h = data.params.padding_values.height; + conv_params.padding.w = data.params.padding_values.width; + conv_params.padding_offsets.h = data.params.padding_values.height_offset; + conv_params.padding_offsets.w = data.params.padding_values.width_offset; + conv_params.activation.min = data.params.quantized_activation_min; + conv_params.activation.max = data.params.quantized_activation_max; + + // Initialize cmsis_nn per channel quantization parameters + cmsis_nn_per_channel_quant_params quant_params; + quant_params.multiplier = + const_cast(data.per_channel_output_multiplier); + quant_params.shift = const_cast(data.per_channel_output_shift); + + RuntimeShape filter_shape = tflite::micro::GetTensorShape(filter); + RuntimeShape input_shape = tflite::micro::GetTensorShape(input); + RuntimeShape output_shape = tflite::micro::GetTensorShape(output); + RuntimeShape bias_shape = tflite::micro::GetTensorShape(bias); + + // Consistency check. 
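+  // (Input, filter and output must be 4-D; MatchingDim verifies that their
+  // batch and channel dimensions agree.)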
+ TFLITE_DCHECK_LE(conv_params.activation.min, conv_params.activation.max); + TFLITE_DCHECK_EQ(input_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(filter_shape.DimensionsCount(), 4); + TFLITE_DCHECK_EQ(output_shape.DimensionsCount(), 4); + const int batch_size = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + if (tflite::micro::GetOptionalTensorData(bias)) { + TFLITE_DCHECK_EQ(bias_shape.FlatSize(), output_depth); + } + + cmsis_nn_dims input_dims; + input_dims.n = batch_size; + input_dims.h = input_shape.Dims(1); + input_dims.w = input_shape.Dims(2); + input_dims.c = input_depth; + + cmsis_nn_dims filter_dims; + filter_dims.n = output_depth; + filter_dims.h = filter_shape.Dims(1); + filter_dims.w = filter_shape.Dims(2); + filter_dims.c = input_depth; + + cmsis_nn_dims bias_dims; + bias_dims.n = 1; + bias_dims.h = 1; + bias_dims.w = 1; + bias_dims.c = output_depth; + + cmsis_nn_dims output_dims; + output_dims.n = batch_size; + output_dims.h = output_shape.Dims(1); + output_dims.w = output_shape.Dims(2); + output_dims.c = output_depth; + + cmsis_nn_context ctx; + ctx.size = 0; // Note: ctx.size is currently not used in cmsis_nn. + ctx.buf = context->GetScratchBuffer(context, data.scratch_buffer_index); + + cmsis_nn_context scratch_output_ctx; + scratch_output_ctx.size = + 0; // Note: ctx.size is currently not used in cmsis_nn. + scratch_output_ctx.buf = + context->GetScratchBuffer(context, data.scratch_buffer_output_index); + + TFLITE_DCHECK_EQ( + arm_transpose_conv_s8( + &ctx, &scratch_output_ctx, &conv_params, &quant_params, &input_dims, + tflite::micro::GetTensorData(input), &filter_dims, + tflite::micro::GetTensorData(filter), &bias_dims, + tflite::micro::GetOptionalTensorData(bias), &output_dims, + tflite::micro::GetTensorData(output)), + ARM_CMSIS_NN_SUCCESS); + + return kTfLiteOk; +} +#endif + +TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFilterTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 4) + ? tflite::micro::GetEvalInput(context, node, kBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + + TF_LITE_ENSURE_EQ(context, input->type, output->type); + const auto& params = + *(reinterpret_cast(node->builtin_data)); + + switch (input->type) { // Already know in/out types are same. 
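+    // float32 always uses the reference TransposeConv; the int8 case picks
+    // the reference or CMSIS-NN kernel depending on whether
+    // KERNELS_OPTIMIZED_FOR_SIZE or KERNELS_OPTIMIZED_FOR_SPEED is defined.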
+ case kTfLiteFloat32: { + ConvParams op_params = data.params; + CalculateActivationRange(params.activation, + &op_params.float_activation_min, + &op_params.float_activation_max); + + reference_ops::TransposeConv( + op_params, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr); + break; + } + case kTfLiteInt8: { +#if defined(KERNELS_OPTIMIZED_FOR_SIZE) + int32_t* scratch_buffer = static_cast( + context->GetScratchBuffer(context, data.scratch_buffer_index)); + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); +#elif defined(KERNELS_OPTIMIZED_FOR_SPEED) + return EvalQuantizedPerChannel(context, node, params, data, input, filter, + bias, output); +#else + MicroPrintf( + "Either KERNELS_OPTIMIZED_FOR_SIZE or KERNELS_OPTIMIZED_FOR_SPEED " + "must be defined"); + return kTfLiteError; +#endif + break; + } + case kTfLiteInt16: { + std::int64_t* scratch_buffer = static_cast( + context->GetScratchBuffer(context, data.scratch_buffer_index)); + // TODO(b/192090531): Remove this once all 8x16 transpose conv models use + // 64-bit biases. 
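+      // (An int16 bias is widened element-wise into the int64 scratch
+      // buffer requested in Prepare before calling the reference kernel.)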
+ if (bias != nullptr && bias->type == kTfLiteInt16) { + std::int64_t* bias_converted_buffer = + static_cast(context->GetScratchBuffer( + context, data.bias_converted_buffer_index)); + for (int i = 0; i < tflite::micro::GetTensorShape(bias).FlatSize(); + i++) { + bias_converted_buffer[i] = bias->data.i16[i]; + } + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), bias_converted_buffer, + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + } else { + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + } + break; + } + default: + MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), + input->type); + return kTfLiteError; + } + return kTfLiteOk; +} + +TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { + const TfLiteEvalTensor* input = + tflite::micro::GetEvalInput(context, node, kInputTensor); + const TfLiteEvalTensor* filter = + tflite::micro::GetEvalInput(context, node, kFilterTensor); + const TfLiteEvalTensor* bias = + (NumInputs(node) == 4) + ? 
tflite::micro::GetEvalInput(context, node, kBiasTensor) + : nullptr; + TfLiteEvalTensor* output = + tflite::micro::GetEvalOutput(context, node, kOutputTensor); + + TFLITE_DCHECK(node->user_data != nullptr); + const OpData& data = *(static_cast(node->user_data)); + +#if defined(KERNELS_OPTIMIZED_FOR_SIZE) + int32_t* scratch_buffer = static_cast( + context->GetScratchBuffer(context, data.scratch_buffer_index)); + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); +#elif defined(KERNELS_OPTIMIZED_FOR_SPEED) + const auto& params = + *(reinterpret_cast(node->builtin_data)); + + return EvalQuantizedPerChannel(context, node, params, data, input, filter, + bias, output); +#else + MicroPrintf( + "Either KERNELS_OPTIMIZED_FOR_SIZE or KERNELS_OPTIMIZED_FOR_SPEED must " + "be defined"); + return kTfLiteError; +#endif + return kTfLiteOk; +} + +} // namespace + +TFLMRegistration Register_TRANSPOSE_CONV() { + return tflite::micro::RegisterOp(Init, Prepare, Eval); +} + +TFLMRegistration Register_TRANSPOSE_CONV_INT8() { + return tflite::micro::RegisterOp(Init, Prepare, EvalInt8); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/cmsis_nn/unidirectional_sequence_lstm.cc b/tensorflow/lite/micro/kernels/cmsis_nn/unidirectional_sequence_lstm.cc index f66ce80431c..49da4d916d0 100644 --- a/tensorflow/lite/micro/kernels/cmsis_nn/unidirectional_sequence_lstm.cc +++ b/tensorflow/lite/micro/kernels/cmsis_nn/unidirectional_sequence_lstm.cc @@ -1,4 +1,4 @@ -/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -28,477 +28,300 @@ limitations under the License. 
#include "tensorflow/lite/micro/kernels/lstm_eval.h" #include "tensorflow/lite/micro/kernels/lstm_shared.h" #include "tensorflow/lite/micro/kernels/micro_tensor_utils.h" - namespace tflite { namespace { struct OpData { - OpDataLSTM params_ref; - cmsis_nn_lstm_params params_cmsis_nn; + OpDataLSTM params_ref; // Used for fallback implementation + cmsis_nn_lstm_params params_cmsis_nn; // Used for CMSIS-NN implementation }; -/*Helper Functions*/ -TfLiteStatus PrecomputeZeroPointTimesWeightWithBias( - TfLiteContext* context, int32_t zero_point, - const TfLiteTensor* weight_tensor, const TfLiteTensor* bias_tensor, - int32_t** output) { - if (weight_tensor == nullptr) { - return kTfLiteOk; - } - - const RuntimeShape& weight_shape = GetTensorShape(weight_tensor); - TF_LITE_ENSURE_EQ(context, weight_shape.DimensionsCount(), 2); - const int row = weight_shape.Dims(0); - const int col = weight_shape.Dims(1); - TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); - *output = static_cast( - context->AllocatePersistentBuffer(context, row * sizeof(int32_t))); - - if (bias_tensor == nullptr) { - memset(*output, 0, row * sizeof(int32_t)); - } else { - const int32_t* bias = GetTensorData(bias_tensor); - memcpy(*output, bias, row * sizeof(int32_t)); - } - - if (zero_point != 0) { - const int8_t* weight = GetTensorData(weight_tensor); - tflite::tensor_utils::MatrixScalarMultiplyAccumulate(weight, zero_point, - row, col, *output); - } - return kTfLiteOk; +LSTMBuffers CMSIS_NN_CreateLSTMBuffers(TfLiteContext* context, + const int* buffer_indices) { + LSTMBuffers buffers; + buffers.buffer0 = reinterpret_cast( + context->GetScratchBuffer(context, buffer_indices[0])); + buffers.buffer1 = reinterpret_cast( + context->GetScratchBuffer(context, buffer_indices[1])); + buffers.buffer2 = reinterpret_cast( + context->GetScratchBuffer(context, buffer_indices[2])); + + return buffers; } -TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, - const LstmTensors& lstm_tensors, OpData* op_data) { - const TfLiteTensor* input = lstm_tensors.GetInternalTensor(kLstmInputTensor); - const TfLiteTensor* output_state = - lstm_tensors.GetInternalTensor(tflite::kLstmOutputStateTensor); - - TF_LITE_ENSURE(context, input->type == kTfLiteInt8); - - op_data->params_cmsis_nn.output_state_offset = - output_state->params.zero_point; - - const TfLiteTensor* input_to_forget_weights = - lstm_tensors.GetInternalTensor(kLstmInputToForgetWeightsTensor); - const TfLiteTensor* input_to_input_weights = - lstm_tensors.GetInternalTensor(kLstmInputToInputWeightsTensor); - const TfLiteTensor* input_to_output_weights = - lstm_tensors.GetInternalTensor(kLstmInputToOutputWeightsTensor); - const TfLiteTensor* input_to_cell_weights = - lstm_tensors.GetInternalTensor(kLstmInputToCellWeightsTensor); - const TfLiteTensor* forget_gate_bias = - lstm_tensors.GetInternalTensor(kLstmForgetGateBiasTensor); - const TfLiteTensor* cell_state = - lstm_tensors.GetInternalTensor(kLstmCellStateTensor); - - const TfLiteTensor* cell_gate_bias = - lstm_tensors.GetInternalTensor(kLstmCellGateBiasTensor); - const TfLiteTensor* output_gate_bias = - lstm_tensors.GetInternalTensor(kLstmOutputGateBiasTensor); - const TfLiteTensor* input_gate_bias = - lstm_tensors.GetInternalTensor(kLstmInputGateBiasTensor); - const TfLiteTensor* recurrent_to_forget_weights = - lstm_tensors.GetInternalTensor(kLstmRecurrentToForgetWeightsTensor); - const TfLiteTensor* recurrent_to_cell_weights = - lstm_tensors.GetInternalTensor(kLstmRecurrentToCellWeightsTensor); - const 
TfLiteTensor* recurrent_to_output_weights = - lstm_tensors.GetInternalTensor(kLstmRecurrentToOutputWeightsTensor); - const TfLiteTensor* recurrent_to_input_weights = - lstm_tensors.GetInternalTensor(kLstmRecurrentToInputWeightsTensor); - const TfLiteTensor* cell_to_output_weights = - lstm_tensors.GetInternalTensor(kLstmCellToOutputWeightsTensor); - const TfLiteTensor* forget_layer_norm_coefficients = - lstm_tensors.GetInternalTensor(kLstmForgetLayerNormCoefficientsTensor); - const TfLiteTensor* projection_weights = - lstm_tensors.GetInternalTensor(kLstmProjectionWeightsTensor); - - const bool use_layer_norm = (forget_layer_norm_coefficients != nullptr); - const bool use_peephole = (cell_to_output_weights != nullptr); - const bool use_projection = (projection_weights != nullptr); - const bool use_cifg = (input_to_input_weights == nullptr); - const bool lstm_unsupported_config = - use_layer_norm || use_peephole || use_projection || use_cifg; - TFLITE_DCHECK(!lstm_unsupported_config); - - // Pre-calculate bias + zero_point * weight. - int32_t* input_to_forget_effective_bias = nullptr; - int32_t* recurrent_to_forget_effective_bias = nullptr; - int32_t* input_to_cell_effective_bias = nullptr; - int32_t* recurrent_to_cell_effective_bias = nullptr; - int32_t* input_to_output_effective_bias = nullptr; - int32_t* recurrent_to_output_effective_bias = nullptr; - int32_t* input_to_input_effective_bias = nullptr; - int32_t* recurrent_to_input_effective_bias = nullptr; - - const int32_t output_state_zero_point = - -op_data->params_cmsis_nn.output_state_offset; - const int32_t input_zero_point = -input->params.zero_point; - - TF_LITE_ENSURE_OK(context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_forget_weights, - forget_gate_bias, &input_to_forget_effective_bias)); - - TF_LITE_ENSURE_OK(context, PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, - recurrent_to_forget_weights, nullptr, - &recurrent_to_forget_effective_bias)); - - // Modulation gate. - TF_LITE_ENSURE_OK(context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_cell_weights, - cell_gate_bias, &input_to_cell_effective_bias)); - TF_LITE_ENSURE_OK( - context, PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, recurrent_to_cell_weights, - nullptr, &recurrent_to_cell_effective_bias)); - - // Output gate. - TF_LITE_ENSURE_OK(context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_output_weights, - output_gate_bias, &input_to_output_effective_bias)); - - TF_LITE_ENSURE_OK(context, PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, - recurrent_to_output_weights, nullptr, - &recurrent_to_output_effective_bias)); - - // Input gate. The calculation is only meaningful for non-cifg case. 
- TF_LITE_ENSURE_OK(context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_input_weights, - input_gate_bias, &input_to_input_effective_bias)); - TF_LITE_ENSURE_OK( - context, PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, recurrent_to_input_weights, - nullptr, &recurrent_to_input_effective_bias)); - - op_data->params_cmsis_nn.i2f_effective_bias = input_to_forget_effective_bias; - op_data->params_cmsis_nn.r2f_effective_bias = - recurrent_to_forget_effective_bias; - op_data->params_cmsis_nn.i2c_effective_bias = input_to_cell_effective_bias; - op_data->params_cmsis_nn.r2c_effective_bias = - recurrent_to_cell_effective_bias; - op_data->params_cmsis_nn.i2o_effective_bias = input_to_output_effective_bias; - op_data->params_cmsis_nn.r2o_effective_bias = - recurrent_to_output_effective_bias; - op_data->params_cmsis_nn.i2i_effective_bias = input_to_input_effective_bias; - op_data->params_cmsis_nn.r2i_effective_bias = - recurrent_to_input_effective_bias; - - // Get intermediate scales and zero points. - float intermediate_scale[5]; - int32_t intermediate_zp[5]; - for (int i = 0; i < 4; ++i) { - // Q3.12 for activation functions. - intermediate_scale[i] = std::pow(2.0f, -12.0f); - intermediate_zp[i] = 0; - } +void CMSIS_NN_VectorSum(int32_t* kernel_sum, const int32_t size1, + const int32_t size2, const int8_t* weights, + const int32_t offset, const int32_t* biases) { + arm_vector_sum_s8(kernel_sum, size1, size2, weights, offset, biases); +} - MicroContext* micro_context = GetMicroContext(context); - // In the absence of projection, hidden becomes otuput and this intermediate - // is ignored. - TfLiteTensor* hidden = micro_context->AllocateTempIntermediateTensor(node, 4); - TF_LITE_ENSURE(context, hidden->quantization.type != kTfLiteNoQuantization); - auto* hidden_params = - static_cast(hidden->quantization.params); - intermediate_scale[4] = hidden_params->scale->data[0]; - intermediate_zp[4] = hidden_params->zero_point->data[0]; - if (hidden != nullptr) { - micro_context->DeallocateTempTfLiteTensor(hidden); - } +void CMSIS_NN_VectorSum(int64_t* kernel_sum, const int32_t size1, + const int32_t size2, const int8_t* weights, + const int32_t offset, const int64_t* biases) { + arm_vector_sum_s8_s64(kernel_sum, size1, size2, weights, offset, biases); +} - // Scales. - const float default_scale = 1.0; - float input_scale = default_scale; - float input_to_input_weight_scale = default_scale; - float recurrent_to_input_weight_scale = default_scale; - float input_to_forget_weight_scale = default_scale; - float recurrent_to_forget_weight_scale = default_scale; - float input_to_cell_weight_scale = default_scale; - float recurrent_to_cell_weight_scale = default_scale; - float input_to_output_weight_scale = default_scale; - float recurrent_to_output_weight_scale = default_scale; - float output_state_scale = default_scale; - int cell_scale = 1; - - // Effective scales. 
- float effective_input_to_input_scale = default_scale; - float effective_recurrent_to_input_scale = default_scale; - float effective_cell_to_input_scale = default_scale; - float effective_input_to_forget_scale = default_scale; - float effective_recurrent_to_forget_scale = default_scale; - float effective_cell_to_forget_scale = default_scale; - float effective_input_to_cell_scale = default_scale; - float effective_recurrent_to_cell_scale = default_scale; - float effective_input_to_output_scale = default_scale; - float effective_recurrent_to_output_scale = default_scale; - float effective_cell_to_output_scale = default_scale; - float effective_hidden_scale = default_scale; - - // Populate scales. - input_to_input_weight_scale = input_to_input_weights->params.scale; - recurrent_to_input_weight_scale = recurrent_to_input_weights->params.scale; - - output_state_scale = output_state->params.scale; - - input_to_forget_weight_scale = input_to_forget_weights->params.scale; - input_to_cell_weight_scale = input_to_cell_weights->params.scale; - input_to_output_weight_scale = input_to_output_weights->params.scale; - recurrent_to_forget_weight_scale = recurrent_to_forget_weights->params.scale; - recurrent_to_cell_weight_scale = recurrent_to_cell_weights->params.scale; - recurrent_to_output_weight_scale = recurrent_to_output_weights->params.scale; - - // Check cell state (already used above) - TF_LITE_ENSURE(context, CheckedLog2(cell_state->params.scale, &cell_scale)); - TF_LITE_ENSURE(context, cell_scale <= -9); - - op_data->params_cmsis_nn.cell_state_shift = cell_scale; - input_scale = input->params.scale; - - // Calculate effective scales. - effective_input_to_input_scale = - input_to_input_weight_scale * input_scale / intermediate_scale[0]; - effective_recurrent_to_input_scale = recurrent_to_input_weight_scale * - output_state_scale / - intermediate_scale[0]; - - effective_input_to_forget_scale = - input_to_forget_weight_scale * input_scale / intermediate_scale[1]; - effective_recurrent_to_forget_scale = recurrent_to_forget_weight_scale * - output_state_scale / - intermediate_scale[1]; - - effective_input_to_cell_scale = - input_to_cell_weight_scale * input_scale / intermediate_scale[2]; - effective_recurrent_to_cell_scale = recurrent_to_cell_weight_scale * - output_state_scale / - intermediate_scale[2]; - - effective_input_to_output_scale = - input_to_output_weight_scale * input_scale / intermediate_scale[3]; - effective_recurrent_to_output_scale = recurrent_to_output_weight_scale * - output_state_scale / - intermediate_scale[3]; - - effective_hidden_scale = - std::pow(2.0f, -15.0f) / intermediate_scale[4] * std::pow(2.0f, -15.0f); - - // Decompose scales. 
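The deleted block that follows feeds every effective scale through QuantizeMultiplier. For reference, a sketch of the decomposition that utility performs (assumed semantics, per TFLite's usual fixed-point convention scale ~= multiplier * 2^(shift - 31)):

  #include <cmath>
  #include <cstdint>

  void DecomposeScale(double scale, int32_t* multiplier, int* shift) {
    if (scale == 0.0) { *multiplier = 0; *shift = 0; return; }
    const double q = std::frexp(scale, shift);  // q is in [0.5, 1)
    int64_t q_fixed = static_cast<int64_t>(std::round(q * (1LL << 31)));
    if (q_fixed == (1LL << 31)) {  // rounding pushed q up to exactly 1.0
      q_fixed /= 2;
      ++(*shift);
    }
    *multiplier = static_cast<int32_t>(q_fixed);  // Q31 mantissa
  }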
-  int shift_output;
-  QuantizeMultiplier(
-      static_cast<double>(effective_input_to_input_scale),
-      &op_data->params_cmsis_nn.input_to_input_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.input_to_input_scaling.shift =
-      static_cast<int32_t>(shift_output);
-
-  QuantizeMultiplier(
-      static_cast<double>(effective_recurrent_to_input_scale),
-      &op_data->params_cmsis_nn.recurrent_to_input_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.recurrent_to_input_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(static_cast<double>(effective_cell_to_input_scale),
-                     &op_data->params_cmsis_nn.cell_to_input_scaling.multiplier,
-                     &shift_output);
-  op_data->params_cmsis_nn.cell_to_input_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(
-      static_cast<double>(effective_input_to_forget_scale),
-      &op_data->params_cmsis_nn.input_to_forget_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.input_to_forget_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(
-      static_cast<double>(effective_recurrent_to_forget_scale),
-      &op_data->params_cmsis_nn.recurrent_to_forget_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.recurrent_to_forget_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(
-      static_cast<double>(effective_cell_to_forget_scale),
-      &op_data->params_cmsis_nn.cell_to_forget_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.cell_to_forget_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(static_cast<double>(effective_input_to_cell_scale),
-                     &op_data->params_cmsis_nn.input_to_cell_scaling.multiplier,
-                     &shift_output);
-  op_data->params_cmsis_nn.input_to_cell_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(
-      static_cast<double>(effective_recurrent_to_cell_scale),
-      &op_data->params_cmsis_nn.recurrent_to_cell_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.recurrent_to_cell_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(
-      static_cast<double>(effective_input_to_output_scale),
-      &op_data->params_cmsis_nn.input_to_output_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.input_to_output_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(
-      static_cast<double>(effective_recurrent_to_output_scale),
-      &op_data->params_cmsis_nn.recurrent_to_output_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.recurrent_to_output_scaling.shift =
-      static_cast<int32_t>(shift_output);
-  QuantizeMultiplier(
-      static_cast<double>(effective_cell_to_output_scale),
-      &op_data->params_cmsis_nn.cell_to_output_scaling.multiplier,
-      &shift_output);
-  op_data->params_cmsis_nn.cell_to_output_scaling.shift =
-      static_cast<int32_t>(shift_output);
-
-  op_data->params_cmsis_nn.projection_scaling.shift =
-      static_cast<int32_t>(shift_output);
-
-  QuantizeMultiplier(static_cast<double>(effective_hidden_scale),
-                     &op_data->params_cmsis_nn.hidden_scaling.multiplier,
-                     &shift_output);
-  op_data->params_cmsis_nn.hidden_scaling.shift =
-      static_cast<int32_t>(shift_output);
-
-  op_data->params_cmsis_nn.hidden_offset = intermediate_zp[4];
-
-  op_data->params_cmsis_nn.activation.min = std::numeric_limits<int16_t>::min();
-  op_data->params_cmsis_nn.activation.max = std::numeric_limits<int16_t>::max();
+template <typename BiasType>
+TfLiteStatus CMSIS_NN_PortOpData(TfLiteContext* context, OpDataLSTM* params_ref,
+                                 const LSTMKernelContents& kernel_content,
+                                 cmsis_nn_lstm_params* params_cmsis_nn) {
+  // Unwrap pointers
+  const BiasType* input_gate_bias =
+      tflite::micro::GetOptionalTensorData<BiasType>(
+          kernel_content.GetInternalTensor(tflite::kLstmInputGateBiasTensor));
+  const BiasType* forget_gate_bias =
+      tflite::micro::GetOptionalTensorData<BiasType>(
+          kernel_content.GetInternalTensor(tflite::kLstmForgetGateBiasTensor));
+  const BiasType* cell_gate_bias =
+      tflite::micro::GetOptionalTensorData<BiasType>(
+          kernel_content.GetInternalTensor(tflite::kLstmCellGateBiasTensor));
+  const BiasType* output_gate_bias =
+      tflite::micro::GetOptionalTensorData<BiasType>(
+          kernel_content.GetInternalTensor(tflite::kLstmOutputGateBiasTensor));
+
+  const int8_t* input_to_input_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmInputToInputWeightsTensor));
+  const int8_t* input_to_forget_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmInputToForgetWeightsTensor));
+  const int8_t* input_to_cell_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmInputToCellWeightsTensor));
+  const int8_t* input_to_output_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmInputToOutputWeightsTensor));
+
+  const int8_t* recurrent_to_input_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmRecurrentToInputWeightsTensor));
+  const int8_t* recurrent_to_forget_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmRecurrentToForgetWeightsTensor));
+  const int8_t* recurrent_to_cell_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmRecurrentToCellWeightsTensor));
+  const int8_t* recurrent_to_output_weights =
+      tflite::micro::GetOptionalTensorData<int8_t>(
+          kernel_content.GetInternalTensor(
+              tflite::kLstmRecurrentToOutputWeightsTensor));
+
+  int32_t size_data = params_ref->size_info.input_dimension;
+  int32_t size_hidden = params_ref->size_info.state_dimension;
+
+  BiasType* input_data_kernel_sum{
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+  BiasType* forget_data_kernel_sum{
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+  BiasType* cell_data_kernel_sum{
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+  BiasType* output_data_kernel_sum{
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+
+  BiasType* input_hidden_kernel_sum{
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+  BiasType* forget_hidden_kernel_sum{
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+  BiasType* cell_hidden_kernel_sum = {
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+  BiasType* output_hidden_kernel_sum = {
+      static_cast<BiasType*>(context->AllocatePersistentBuffer(
+          context, size_hidden * sizeof(BiasType)))};
+
+  // Compute effective biases
+  CMSIS_NN_VectorSum(
+      input_data_kernel_sum, size_data, size_hidden, input_to_input_weights,
+      params_ref->input_gate_parameters.input_fc_params.input_offset,
+      input_gate_bias);
+
+  CMSIS_NN_VectorSum(
+      forget_data_kernel_sum, size_data, size_hidden, input_to_forget_weights,
+      params_ref->forget_gate_parameters.input_fc_params.input_offset,
+      forget_gate_bias);
+
+  CMSIS_NN_VectorSum(
+      cell_data_kernel_sum, size_data, size_hidden, input_to_cell_weights,
+      params_ref->cell_gate_parameters.input_fc_params.input_offset,
+      cell_gate_bias);
+
+  CMSIS_NN_VectorSum(
output_data_kernel_sum, size_data, size_hidden, input_to_output_weights, + params_ref->output_gate_parameters.input_fc_params.input_offset, + output_gate_bias); + + CMSIS_NN_VectorSum( + input_hidden_kernel_sum, size_hidden, size_hidden, + recurrent_to_input_weights, + -params_ref->inter_gate_parameters.output_mul_params.output_offset, + nullptr); + + CMSIS_NN_VectorSum( + forget_hidden_kernel_sum, size_hidden, size_hidden, + recurrent_to_forget_weights, + -params_ref->inter_gate_parameters.output_mul_params.output_offset, + nullptr); + + CMSIS_NN_VectorSum( + cell_hidden_kernel_sum, size_hidden, size_hidden, + recurrent_to_cell_weights, + -params_ref->inter_gate_parameters.output_mul_params.output_offset, + nullptr); + + CMSIS_NN_VectorSum( + output_hidden_kernel_sum, size_hidden, size_hidden, + recurrent_to_output_weights, + -params_ref->inter_gate_parameters.output_mul_params.output_offset, + nullptr); + + // Create input gate parameters + cmsis_nn_lstm_gate gate_input{ + params_ref->input_gate_parameters.input_fc_params.output_multiplier, + params_ref->input_gate_parameters.input_fc_params.output_shift, + input_to_input_weights, + input_data_kernel_sum, + params_ref->input_gate_parameters.recurrent_fc_params.output_multiplier, + params_ref->input_gate_parameters.recurrent_fc_params.output_shift, + recurrent_to_input_weights, + input_hidden_kernel_sum, + input_gate_bias, + ARM_SIGMOID}; + + // Create forget gate parameters + cmsis_nn_lstm_gate gate_forget{ + params_ref->forget_gate_parameters.input_fc_params.output_multiplier, + params_ref->forget_gate_parameters.input_fc_params.output_shift, + input_to_forget_weights, + forget_data_kernel_sum, + params_ref->forget_gate_parameters.recurrent_fc_params.output_multiplier, + params_ref->forget_gate_parameters.recurrent_fc_params.output_shift, + recurrent_to_forget_weights, + forget_hidden_kernel_sum, + forget_gate_bias, + ARM_SIGMOID}; + + auto cell_gate_nonlinear_type = + (params_ref->cell_gate_nonlinear_type == kTfLiteActTanh) ? 
ARM_TANH + : ARM_SIGMOID; + // Create cell gate parameters + cmsis_nn_lstm_gate gate_cell{ + params_ref->cell_gate_parameters.input_fc_params.output_multiplier, + params_ref->cell_gate_parameters.input_fc_params.output_shift, + input_to_cell_weights, + cell_data_kernel_sum, + params_ref->cell_gate_parameters.recurrent_fc_params.output_multiplier, + params_ref->cell_gate_parameters.recurrent_fc_params.output_shift, + recurrent_to_cell_weights, + cell_hidden_kernel_sum, + cell_gate_bias, + cell_gate_nonlinear_type}; + + // Create output gate parameters + cmsis_nn_lstm_gate gate_output{ + params_ref->output_gate_parameters.input_fc_params.output_multiplier, + params_ref->output_gate_parameters.input_fc_params.output_shift, + input_to_output_weights, + output_data_kernel_sum, + params_ref->output_gate_parameters.recurrent_fc_params.output_multiplier, + params_ref->output_gate_parameters.recurrent_fc_params.output_shift, + recurrent_to_output_weights, + output_hidden_kernel_sum, + output_gate_bias, + ARM_SIGMOID}; + + // Create the complete lstm data struct + *params_cmsis_nn = { + params_ref->size_info.time_major, + params_ref->size_info.batch_size, + params_ref->size_info.time_steps, + params_ref->size_info.input_dimension, + params_ref->size_info.state_dimension, + params_ref->forget_gate_parameters.input_fc_params.input_offset, + params_ref->inter_gate_parameters.forget_cell_mul_params + .output_multiplier, + params_ref->inter_gate_parameters.forget_cell_mul_params.output_shift, + params_ref->inter_gate_parameters.input_mul_params.output_multiplier, + params_ref->inter_gate_parameters.input_mul_params.output_shift, + params_ref->cell_state_info.quantized_cell_clip, + params_ref->cell_state_info.cell_state_scale_power, + params_ref->inter_gate_parameters.output_mul_params.output_multiplier, + params_ref->inter_gate_parameters.output_mul_params.output_shift, + params_ref->inter_gate_parameters.output_mul_params.output_offset, + gate_forget, + gate_input, + gate_cell, + gate_output}; return kTfLiteOk; } -template TfLiteStatus CMSIS_NN_EvalInteger8x8_16Lstm( const OpData& op_data, const LSTMKernelContents& kernel_content, - const LSTMBuffers& buffers) { - const OpDataLSTM& op_data_lstm = op_data.params_ref; - const TfLiteEvalTensor* input = - kernel_content.GetInternalTensor(tflite::kLstmInputTensor); - const TfLiteEvalTensor* input_gate_bias = - kernel_content.GetInternalTensor(tflite::kLstmInputGateBiasTensor); - const TfLiteEvalTensor* forget_gate_bias = - kernel_content.GetInternalTensor(tflite::kLstmForgetGateBiasTensor); - const TfLiteEvalTensor* cell_gate_bias = - kernel_content.GetInternalTensor(tflite::kLstmCellGateBiasTensor); - const TfLiteEvalTensor* output_gate_bias = - kernel_content.GetInternalTensor(tflite::kLstmOutputGateBiasTensor); - const TfLiteEvalTensor* input_to_output_weights = - kernel_content.GetInternalTensor(tflite::kLstmInputToOutputWeightsTensor); - const TfLiteEvalTensor* recurrent_to_output_weights = - kernel_content.GetInternalTensor( - tflite::kLstmRecurrentToOutputWeightsTensor); - const TfLiteEvalTensor* input_to_input_weights = - kernel_content.GetInternalTensor(tflite::kLstmInputToInputWeightsTensor); - const TfLiteEvalTensor* input_to_forget_weights = - kernel_content.GetInternalTensor(tflite::kLstmInputToForgetWeightsTensor); - const TfLiteEvalTensor* input_to_cell_weights = - kernel_content.GetInternalTensor(tflite::kLstmInputToCellWeightsTensor); - const TfLiteEvalTensor* recurrent_to_input_weights = - kernel_content.GetInternalTensor( - 
tflite::kLstmRecurrentToInputWeightsTensor); - const TfLiteEvalTensor* recurrent_to_forget_weights = - kernel_content.GetInternalTensor( - tflite::kLstmRecurrentToForgetWeightsTensor); - const TfLiteEvalTensor* recurrent_to_cell_weights = - kernel_content.GetInternalTensor( - tflite::kLstmRecurrentToCellWeightsTensor); - const TfLiteEvalTensor* cell_to_input_weights = - kernel_content.GetInternalTensor(tflite::kLstmCellToInputWeightsTensor); - const TfLiteEvalTensor* cell_to_forget_weights = - kernel_content.GetInternalTensor(tflite::kLstmCellToForgetWeightsTensor); - const TfLiteEvalTensor* cell_to_output_weights = - kernel_content.GetInternalTensor(tflite::kLstmCellToOutputWeightsTensor); - const TfLiteEvalTensor* cell_state = - kernel_content.GetInternalTensor(tflite::kLstmCellStateTensor); - const TfLiteEvalTensor* output_state = - kernel_content.GetInternalTensor(tflite::kLstmOutputStateTensor); - const TfLiteEvalTensor* output = kernel_content.output_tensor; - - TFLITE_DCHECK(input->dims->size >= 2 && input->dims->size <= 3); - - cmsis_nn_lstm_context scratch_buffers; - scratch_buffers.input_gate = reinterpret_cast(buffers.buffer0); - scratch_buffers.forget_gate = reinterpret_cast(buffers.buffer1); - scratch_buffers.cell_gate = reinterpret_cast(buffers.buffer2); - scratch_buffers.output_gate = reinterpret_cast(buffers.buffer3); - - cmsis_nn_lstm_params cmsis_lstm_params = op_data.params_cmsis_nn; - cmsis_lstm_params.time_major = op_data_lstm.size_info.time_major; - cmsis_lstm_params.clip.cell = - op_data_lstm.cell_state_info.quantized_cell_clip; - - cmsis_lstm_params.input_gate_bias = const_cast( - tflite::micro::GetOptionalTensorData(input_gate_bias)); - cmsis_lstm_params.forget_gate_bias = const_cast( - tflite::micro::GetOptionalTensorData(forget_gate_bias)); - cmsis_lstm_params.cell_gate_bias = const_cast( - tflite::micro::GetOptionalTensorData(cell_gate_bias)); - cmsis_lstm_params.output_gate_bias = const_cast( - tflite::micro::GetOptionalTensorData(output_gate_bias)); - - const bool time_major = op_data_lstm.size_info.time_major; - const int n_input = input->dims->data[input->dims->size - 1]; - const int n_output = recurrent_to_output_weights->dims->data[1]; - - int max_time, n_batch; - if (input->dims->size == 2) { - max_time = 1; - n_batch = input->dims->data[0]; - } else { - max_time = (time_major) ? input->dims->data[0] : input->dims->data[1]; - n_batch = (time_major) ? 
input->dims->data[1] : input->dims->data[0];
-  }
+    const LSTMBuffers<int16_t>& buffers) {
+  TFLITE_DCHECK(
+      kernel_content.GetInternalTensor(tflite::kLstmInputTensor)->dims->size >=
+          2 &&
+      kernel_content.GetInternalTensor(tflite::kLstmInputTensor)->dims->size <=
+          3);
+
+  const int8_t* input = tflite::micro::GetOptionalTensorData<int8_t>(
+      kernel_content.GetInternalTensor(tflite::kLstmInputTensor));
+  int8_t* output =
+      tflite::micro::GetTensorData<int8_t>(kernel_content.output_tensor);
+
+  // Create lstm buffer struct
+  cmsis_nn_lstm_context cmsis_buffers;
+  cmsis_buffers.temp1 = reinterpret_cast<int16_t*>(buffers.buffer0);
+  cmsis_buffers.temp2 = reinterpret_cast<int16_t*>(buffers.buffer1);
+  cmsis_buffers.cell_state = reinterpret_cast<int16_t*>(buffers.buffer2);
+
+  arm_lstm_unidirectional_s8(input, output, &op_data.params_cmsis_nn,
+                             &cmsis_buffers);
+
+  return kTfLiteOk;
+}
-  cmsis_nn_lstm_dims lstm_dims;
-  lstm_dims.num_inputs = n_input;
-  lstm_dims.num_outputs = n_output;
-  lstm_dims.num_batches = n_batch;
-  lstm_dims.max_time = max_time;
-
-  arm_lstm_unidirectional_s16_s8(
-      &scratch_buffers,
-      const_cast<int8_t*>(tflite::micro::GetTensorData<int8_t>(input)),
-      &lstm_dims,
-      const_cast<int8_t*>(
-          tflite::micro::GetOptionalTensorData<int8_t>(input_to_input_weights)),
-      const_cast<int8_t*>(tflite::micro::GetOptionalTensorData<int8_t>(
-          input_to_forget_weights)),
-      const_cast<int8_t*>(
-          tflite::micro::GetOptionalTensorData<int8_t>(input_to_cell_weights)),
-      const_cast<int8_t*>(tflite::micro::GetOptionalTensorData<int8_t>(
-          input_to_output_weights)),
-      const_cast<int8_t*>(tflite::micro::GetOptionalTensorData<int8_t>(
-          recurrent_to_input_weights)),
-      const_cast<int8_t*>(tflite::micro::GetOptionalTensorData<int8_t>(
-          recurrent_to_forget_weights)),
-      const_cast<int8_t*>(tflite::micro::GetOptionalTensorData<int8_t>(
-          recurrent_to_cell_weights)),
-      const_cast<int8_t*>(tflite::micro::GetOptionalTensorData<int8_t>(
-          recurrent_to_output_weights)),
-      const_cast<int16_t*>(
-          tflite::micro::GetOptionalTensorData<int16_t>(cell_to_input_weights)),
-      const_cast<int16_t*>(tflite::micro::GetOptionalTensorData<int16_t>(
-          cell_to_forget_weights)),
-      const_cast<int16_t*>(tflite::micro::GetOptionalTensorData<int16_t>(
-          cell_to_output_weights)),
-      nullptr, &cmsis_lstm_params,
-      const_cast<int8_t*>(tflite::micro::GetTensorData<int8_t>(output_state)),
-      const_cast<int16_t*>(tflite::micro::GetTensorData<int16_t>(cell_state)),
-      const_cast<int8_t*>(tflite::micro::GetTensorData<int8_t>(output)));
+TfLiteStatus CMSIS_NN_EvalInteger16x8_16Lstm(
+    const OpData& op_data, const LSTMKernelContents& kernel_content,
+    const LSTMBuffers<int16_t>& buffers) {
+  TFLITE_DCHECK(
+      kernel_content.GetInternalTensor(tflite::kLstmInputTensor)->dims->size >=
+          2 &&
+      kernel_content.GetInternalTensor(tflite::kLstmInputTensor)->dims->size <=
+          3);
+
+  const int16_t* input = tflite::micro::GetOptionalTensorData<int16_t>(
+      kernel_content.GetInternalTensor(tflite::kLstmInputTensor));
+  int16_t* output =
+      tflite::micro::GetTensorData<int16_t>(kernel_content.output_tensor);
+
+  // Create lstm buffer struct
+  cmsis_nn_lstm_context cmsis_buffers;
+  cmsis_buffers.temp1 = reinterpret_cast<int16_t*>(buffers.buffer0);
+  cmsis_buffers.temp2 = reinterpret_cast<int16_t*>(buffers.buffer1);
+  cmsis_buffers.cell_state = reinterpret_cast<int16_t*>(buffers.buffer2);
+
+  arm_lstm_unidirectional_s16(input, output, &op_data.params_cmsis_nn,
+                              &cmsis_buffers);
 
   return kTfLiteOk;
 }
 
 /*Kernel functions*/
-
 void* UnidirectionalSequenceLstmInit(TfLiteContext* context, const char* buffer,
                                      size_t length) {
   TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
@@ -531,15 +354,9 @@ TfLiteStatus UnidirectionalSequenceLstmPrepare(TfLiteContext* context,
   const TfLiteTensor* input = lstm_tensors.GetInternalTensor(kLstmInputTensor);
   const auto activation_type = input->type;
 
-  if (kTfLiteInt8 == activation_type) {
-    TF_LITE_ENSURE_STATUS(
-        CalculateOpData(context, node, lstm_tensors, op_data));
-  }
-
   TF_LITE_ENSURE_OK(context, ValidateTensorSize(context, lstm_tensors,
                                                 op_data_lstm->size_info));
-
   // Create cell state information and gate parameters (Fully Connected and Mul)
   auto cell_state_type =
       lstm_tensors.GetInternalTensor(kLstmCellStateTensor)->type;
   if (cell_state_type == kTfLiteFloat32) {
@@ -559,8 +376,24 @@ TfLiteStatus UnidirectionalSequenceLstmPrepare(TfLiteContext* context,
                 TfLiteTypeGetName(cell_state_type), cell_state_type);
     return kTfLiteError;
   }
-  // request buffers (four buffers)
-  for (size_t i = 0; i < 4; i++) {
+
+  size_t number_of_buffers;
+  if (activation_type == kTfLiteInt8 && cell_state_type == kTfLiteInt16) {
+    auto kernel_content = CreateLSTMKernelContent(context, node);
+    number_of_buffers = 3;
+    CMSIS_NN_PortOpData<int32_t>(context, op_data_lstm, kernel_content,
+                                 &op_data->params_cmsis_nn);
+  } else if (activation_type == kTfLiteInt16 &&
+             cell_state_type == kTfLiteInt16) {
+    auto kernel_content = CreateLSTMKernelContent(context, node);
+    number_of_buffers = 3;
+    CMSIS_NN_PortOpData<int64_t>(context, op_data_lstm, kernel_content,
+                                 &op_data->params_cmsis_nn);
+  } else {
+    number_of_buffers = 4;
+  }
+
+  for (size_t i = 0; i < number_of_buffers; i++) {
     TF_LITE_ENSURE_OK(context, context->RequestScratchBufferInArena(
                                    context, op_data_lstm->size_info.batch_size *
@@ -598,9 +431,8 @@ TfLiteStatus UnidirectionalSequenceLstmEval(TfLiteContext* context,
       case kTfLiteInt8: {
         // 8(activation)x8(weight)->16(cell) LSTM with 32 bits bias
         LSTMBuffers<int16_t> buffers =
-            CreateLSTMBuffers<int16_t>(context, op_data_lstm.buffer_indices);
-        return CMSIS_NN_EvalInteger8x8_16Lstm(
-            op_data, kernel_content, buffers);
+            CMSIS_NN_CreateLSTMBuffers(context, op_data_lstm.buffer_indices);
+        CMSIS_NN_EvalInteger8x8_16Lstm(op_data, kernel_content, buffers);
         break;
       }
       default: {
@@ -616,9 +448,8 @@ TfLiteStatus UnidirectionalSequenceLstmEval(TfLiteContext* context,
       case kTfLiteInt8: {
         // 16(activation)x8(weight)->16(cell) LSTM with 64 bits bias
         LSTMBuffers<int16_t> buffers =
-            CreateLSTMBuffers<int16_t>(context, op_data_lstm.buffer_indices);
-        EvalLstm<int16_t, int8_t, int16_t, int64_t>(op_data_lstm,
-                                                    kernel_content, buffers);
+            CMSIS_NN_CreateLSTMBuffers(context, op_data_lstm.buffer_indices);
+        CMSIS_NN_EvalInteger16x8_16Lstm(op_data, kernel_content, buffers);
         break;
       }
       default: {
@@ -654,10 +485,36 @@ TfLiteStatus UnidirectionalSequenceLstmEvalInt8(TfLiteContext* context,
 
   if (activation_type == kTfLiteInt8) {
     LSTMBuffers<int16_t> buffers =
-        CreateLSTMBuffers<int16_t>(context, op_data_lstm.buffer_indices);
+        CMSIS_NN_CreateLSTMBuffers(context, op_data_lstm.buffer_indices);
 
-    return CMSIS_NN_EvalInteger8x8_16Lstm(op_data, kernel_content,
-                                          buffers);
+    return CMSIS_NN_EvalInteger8x8_16Lstm(op_data, kernel_content, buffers);
+  } else {
+    MicroPrintf("Input type %s (%d) not supported.",
+                TfLiteTypeGetName(activation_type), activation_type);
+    return kTfLiteError;
+  }
+  return kTfLiteOk;
+}
+
+TfLiteStatus UnidirectionalSequenceLstmEvalInt16(TfLiteContext* context,
+                                                 TfLiteNode* node) {
+  TFLITE_DCHECK(node->user_data != nullptr);
+  const OpData& op_data = *reinterpret_cast<OpData*>(node->user_data);
+  const OpDataLSTM& op_data_lstm = op_data.params_ref;
+  auto kernel_content = CreateLSTMKernelContent(context, node);
+  const auto activation_type =
+      kernel_content.internal_tensors[kLstmInputTensor]->type;
+  const auto weight_type =
+      kernel_content.internal_tensors[kLstmInputToInputWeightsTensor]->type;
+
+  TFLITE_DCHECK(weight_type == kTfLiteInt8 &&
+                "Only int8 filter type supported.");
+
+  if (activation_type == kTfLiteInt16) {
+    LSTMBuffers<int16_t> buffers =
+        CMSIS_NN_CreateLSTMBuffers(context, op_data_lstm.buffer_indices);
+
+    return CMSIS_NN_EvalInteger16x8_16Lstm(op_data, kernel_content, buffers);
   } else {
     MicroPrintf("Input type %s (%d) not supported.",
                 TfLiteTypeGetName(activation_type), activation_type);
@@ -680,4 +537,10 @@ TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT8() {
                                    UnidirectionalSequenceLstmEvalInt8);
 }
 
+TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT16() {
+  return tflite::micro::RegisterOp(UnidirectionalSequenceLstmInit,
+                                   UnidirectionalSequenceLstmPrepare,
+                                   UnidirectionalSequenceLstmEvalInt16);
+}
+
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/comparisons.cc b/tensorflow/lite/micro/kernels/comparisons.cc
index 4056316f145..69b3c61c32d 100644
--- a/tensorflow/lite/micro/kernels/comparisons.cc
+++ b/tensorflow/lite/micro/kernels/comparisons.cc
@@ -533,7 +533,7 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) {
   return context->AllocatePersistentBuffer(context, sizeof(OpData));
 }
 
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus ComparisonsPrepare(TfLiteContext* context, TfLiteNode* node) {
   TFLITE_DCHECK(node->user_data != nullptr);
   OpData* data = static_cast<OpData*>(node->user_data);
 
@@ -580,27 +580,27 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
 }  // namespace
 
 TFLMRegistration Register_EQUAL() {
-  return tflite::micro::RegisterOp(Init, Prepare, EqualEval);
+  return tflite::micro::RegisterOp(Init, ComparisonsPrepare, EqualEval);
 }
 
 TFLMRegistration Register_NOT_EQUAL() {
-  return tflite::micro::RegisterOp(Init, Prepare, NotEqualEval);
+  return tflite::micro::RegisterOp(Init, ComparisonsPrepare, NotEqualEval);
 }
 
 TFLMRegistration Register_GREATER() {
-  return tflite::micro::RegisterOp(Init, Prepare, GreaterEval);
+  return tflite::micro::RegisterOp(Init, ComparisonsPrepare, GreaterEval);
 }
 
 TFLMRegistration Register_GREATER_EQUAL() {
-  return tflite::micro::RegisterOp(Init, Prepare, GreaterEqualEval);
+  return tflite::micro::RegisterOp(Init, ComparisonsPrepare, GreaterEqualEval);
 }
 
 TFLMRegistration Register_LESS() {
-  return tflite::micro::RegisterOp(Init, Prepare, LessEval);
+  return tflite::micro::RegisterOp(Init, ComparisonsPrepare, LessEval);
 }
 
 TFLMRegistration Register_LESS_EQUAL() {
-  return tflite::micro::RegisterOp(Init, Prepare, LessEqualEval);
+  return tflite::micro::RegisterOp(Init, ComparisonsPrepare, LessEqualEval);
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/concatenation.cc b/tensorflow/lite/micro/kernels/concatenation.cc
index b4a838f72ac..57d63a916a1 100644
--- a/tensorflow/lite/micro/kernels/concatenation.cc
+++ b/tensorflow/lite/micro/kernels/concatenation.cc
@@ -103,12 +103,13 @@ void EvalUnquantized(TfLiteContext* context, TfLiteNode* node) {
       tflite::micro::GetTensorData(output));
 }
 
-void* Init(TfLiteContext* context, const char* buffer, size_t length) {
+void* ConcatenationInit(TfLiteContext* context, const char* buffer,
+                        size_t length) {
   TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
   return context->AllocatePersistentBuffer(context, sizeof(OpData));
 }
 
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus ConcatenationPrepare(TfLiteContext* context, TfLiteNode* node) {
   // This function only checks the types. Additional shape validations are
   // performed in the reference implementation called during Eval().
const TfLiteConcatenationParams* params = @@ -214,7 +215,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ConcatenationEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* output_tensor = tflite::micro::GetEvalOutput(context, node, kOutputTensor); TF_LITE_ENSURE(context, output_tensor != nullptr); @@ -252,7 +253,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_CONCATENATION() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(ConcatenationInit, ConcatenationPrepare, + ConcatenationEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/conv.cc b/tensorflow/lite/micro/kernels/conv.cc index 550f5b06aaf..0df35fce4eb 100644 --- a/tensorflow/lite/micro/kernels/conv.cc +++ b/tensorflow/lite/micro/kernels/conv.cc @@ -1,4 +1,4 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -27,12 +27,7 @@ limitations under the License. namespace tflite { namespace { -void* Init(TfLiteContext* context, const char* buffer, size_t length) { - TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); - return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); -} - -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ConvEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kConvInputTensor); const TfLiteEvalTensor* filter = @@ -50,14 +45,6 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); const auto& data = *(static_cast(node->user_data)); - TF_LITE_ENSURE_EQ(context, input->type, output->type); - TF_LITE_ENSURE_MSG( - context, - input->type == filter->type || - (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8) || - (input->type == kTfLiteInt8 && filter->type == kTfLiteInt4), - "Hybrid models are not supported on TFLite Micro."); - switch (input->type) { // Already know in/out types are same. 
      case kTfLiteFloat32: {
        tflite::reference_ops::Conv(
@@ -73,39 +60,34 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
       break;
     }
     case kTfLiteInt16: {
-      switch (bias->type) {
-        case kTfLiteInt32: {
-          reference_integer_ops::ConvPerChannel(
-              ConvParamsQuantized(params, data),
-              data.per_channel_output_multiplier, data.per_channel_output_shift,
-              tflite::micro::GetTensorShape(input),
-              tflite::micro::GetTensorData<int16_t>(input),
-              tflite::micro::GetTensorShape(filter),
-              tflite::micro::GetTensorData<int8_t>(filter),
-              tflite::micro::GetTensorShape(bias),
-              tflite::micro::GetOptionalTensorData<std::int32_t>(bias),
-              tflite::micro::GetTensorShape(output),
-              tflite::micro::GetTensorData<int16_t>(output));
-          break;
-        }
-        case kTfLiteInt64: {
-          reference_integer_ops::ConvPerChannel(
-              ConvParamsQuantized(params, data),
-              data.per_channel_output_multiplier, data.per_channel_output_shift,
-              tflite::micro::GetTensorShape(input),
-              tflite::micro::GetTensorData<int16_t>(input),
-              tflite::micro::GetTensorShape(filter),
-              tflite::micro::GetTensorData<int8_t>(filter),
-              tflite::micro::GetTensorShape(bias),
-              tflite::micro::GetOptionalTensorData<std::int64_t>(bias),
-              tflite::micro::GetTensorShape(output),
-              tflite::micro::GetTensorData<int16_t>(output));
-          break;
-        }
-        default:
-          MicroPrintf("Bias type %s (%d) not supported.",
-                      TfLiteTypeGetName(bias->type), bias->type);
-          return kTfLiteError;
+      if (bias == nullptr || bias->type == kTfLiteInt32) {
+        reference_integer_ops::ConvPerChannel(
+            ConvParamsQuantized(params, data),
+            data.per_channel_output_multiplier, data.per_channel_output_shift,
+            tflite::micro::GetTensorShape(input),
+            tflite::micro::GetTensorData<int16_t>(input),
+            tflite::micro::GetTensorShape(filter),
+            tflite::micro::GetTensorData<int8_t>(filter),
+            tflite::micro::GetTensorShape(bias),
+            tflite::micro::GetOptionalTensorData<std::int32_t>(bias),
+            tflite::micro::GetTensorShape(output),
+            tflite::micro::GetTensorData<int16_t>(output));
+      } else if (bias->type == kTfLiteInt64) {
+        reference_integer_ops::ConvPerChannel(
+            ConvParamsQuantized(params, data),
+            data.per_channel_output_multiplier, data.per_channel_output_shift,
+            tflite::micro::GetTensorShape(input),
+            tflite::micro::GetTensorData<int16_t>(input),
+            tflite::micro::GetTensorShape(filter),
+            tflite::micro::GetTensorData<int8_t>(filter),
+            tflite::micro::GetTensorShape(bias),
+            tflite::micro::GetOptionalTensorData<std::int64_t>(bias),
+            tflite::micro::GetTensorShape(output),
+            tflite::micro::GetTensorData<int16_t>(output));
+      } else {
+        MicroPrintf("Bias type %s (%d) not supported.",
+                    TfLiteTypeGetName(bias->type), bias->type);
+        return kTfLiteError;
       }
       break;
     }
@@ -162,7 +144,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
 }  // namespace
 
 TFLMRegistration Register_CONV_2D() {
-  return tflite::micro::RegisterOp(Init, ConvPrepare, Eval);
+  return tflite::micro::RegisterOp(ConvInit, ConvPrepare, ConvEval);
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/conv.h b/tensorflow/lite/micro/kernels/conv.h
index 3b122ad1c43..0c8073f48f0 100644
--- a/tensorflow/lite/micro/kernels/conv.h
+++ b/tensorflow/lite/micro/kernels/conv.h
@@ -1,4 +1,4 @@
-/* Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
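A note on why the int16 conv path above accepts two bias widths: with 16-bit activations and 8-bit weights each multiply-accumulate can contribute up to 2^15 * 2^7 = 2^22, so a 32-bit accumulator is only guaranteed overflow-free for roughly 2^31 / 2^22 = 512 accumulations per output. Converters therefore typically emit 64-bit biases (and accumulation) for 16x8 models, while small filters can use the int32 variant handled by the first branch.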
@@ -74,6 +74,8 @@ TfLiteStatus CalculateOpDataConv(TfLiteContext* context, TfLiteNode* node, int out_height, const TfLiteType data_type, OpDataConv* data); +void* ConvInit(TfLiteContext* context, const char* buffer, size_t length); + TfLiteStatus ConvPrepare(TfLiteContext* context, TfLiteNode* node); // This is the most generic TFLMRegistration. The actual supported types @@ -86,14 +88,24 @@ TFLMRegistration Register_CONV_2D(); // int8 activations and int8 weights and always calls the reference // implementation. TFLMRegistration Register_CONV_2D_INT8REF(); + #else inline TFLMRegistration Register_CONV_2D_INT8REF() { return Register_CONV_2D(); } -#endif +#endif // defined(XTENSA) #if defined(CMSIS_NN) // Returns a TFLMRegistration struct for kernel variant that only supports +// int8 activations and int4 weights and uses the latency optimized +// implementations. +TFLMRegistration Register_CONV_2D_INT4(); +#else +inline TFLMRegistration Register_CONV_2D_INT4() { return Register_CONV_2D(); } +#endif // defined(CMSIS_NN) + +#if defined(CMSIS_NN) || defined(XTENSA) +// Returns a TFLMRegistration struct for kernel variant that only supports // int8 activations and int8 weights and uses the latency optimized // implementations. TFLMRegistration Register_CONV_2D_INT8(); @@ -107,7 +119,7 @@ TFLMRegistration Register_CONV_2D_INT16(); inline TFLMRegistration Register_CONV_2D_INT8() { return Register_CONV_2D(); } inline TFLMRegistration Register_CONV_2D_INT16() { return Register_CONV_2D(); } -#endif +#endif // defined(CMSIS_NN) || defined(XTENSA) } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/conv_common.cc b/tensorflow/lite/micro/kernels/conv_common.cc index c548c932c27..51c7a6ff2d6 100644 --- a/tensorflow/lite/micro/kernels/conv_common.cc +++ b/tensorflow/lite/micro/kernels/conv_common.cc @@ -1,4 +1,4 @@ -/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
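For orientation, this is how the per-variant registrations declared above are typically consumed; a hypothetical sketch assuming MicroMutableOpResolver's optional-registration overload (the usual TFLM pattern, not something introduced by this patch):

  #include "tensorflow/lite/micro/micro_mutable_op_resolver.h"

  void RegisterOps(tflite::MicroMutableOpResolver<1>& op_resolver) {
    // Pick the int8-only CMSIS-NN kernel instead of the generic
    // Register_CONV_2D(); Register_CONV_2D_INT4() would select the
    // int4-weight variant declared in this change.
    op_resolver.AddConv2D(tflite::Register_CONV_2D_INT8());
  }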
@@ -71,6 +71,11 @@ ConvParams ConvParamsQuantized(const TfLiteConvParams& params, return op_params; } +void* ConvInit(TfLiteContext* context, const char* buffer, size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); +} + TfLiteStatus CalculateOpDataConv(TfLiteContext* context, TfLiteNode* node, const TfLiteConvParams& params, int width, int height, int filter_width, @@ -120,10 +125,12 @@ TfLiteStatus CalculateOpDataConv(TfLiteContext* context, TfLiteNode* node, data->filter_zero_point = filter->params.zero_point; data->output_zero_point = output->params.zero_point; + micro_context->DeallocateTempTfLiteTensor(output); micro_context->DeallocateTempTfLiteTensor(input); micro_context->DeallocateTempTfLiteTensor(filter); - micro_context->DeallocateTempTfLiteTensor(output); - micro_context->DeallocateTempTfLiteTensor(bias); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } return kTfLiteOk; } @@ -147,6 +154,15 @@ TfLiteStatus ConvPrepare(TfLiteContext* context, TfLiteNode* node) { micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); TF_LITE_ENSURE(context, filter != nullptr); + TF_LITE_ENSURE_EQ(context, input->type, output->type); + TF_LITE_ENSURE_MSG( + context, + (input->type == kTfLiteFloat32 && filter->type == kTfLiteFloat32) || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8) || + (input->type == kTfLiteInt8 && + (filter->type == kTfLiteInt4 || filter->type == kTfLiteInt8)), + "Hybrid models are not supported on TFLite Micro."); + const int input_width = input->dims->data[2]; const int input_height = input->dims->data[1]; const int filter_width = filter->dims->data[2]; @@ -196,7 +212,6 @@ TfLiteStatus ConvPrepare(TfLiteContext* context, TfLiteNode* node) { micro_context->DeallocateTempTfLiteTensor(filter); micro_context->DeallocateTempTfLiteTensor(input); micro_context->DeallocateTempTfLiteTensor(output); - return kTfLiteOk; } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/conv_test.cc b/tensorflow/lite/micro/kernels/conv_test.cc index 98c2615dbf6..0fb9411a3f0 100644 --- a/tensorflow/lite/micro/kernels/conv_test.cc +++ b/tensorflow/lite/micro/kernels/conv_test.cc @@ -1,4 +1,4 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -53,6 +53,7 @@ static TfLiteConvParams common_conv_params = { kTfLiteActNone, // activation 1, // dilation_width_factor 1, // dilation_height_factor + kTfLiteNoType // quantized_bias_type }; } // namespace @@ -61,10 +62,6 @@ static TfLiteConvParams common_conv_params = { TF_LITE_MICRO_TESTS_BEGIN -#if !defined(VISION_P6) // TODO(b/270720625): disabled int8 and int4 test for -// conv for fully connected vision p6 kernels, because vision p6 conv doesn't -// work with per channel quantization - TF_LITE_MICRO_TEST(SimpleTestQuantized4bitPerChannel) { const int output_dims_count = 12; int8_t output_data[output_dims_count]; @@ -125,12 +122,6 @@ TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannel) { output_data)); } -#endif // !defined(VISION_P6) - -#if !defined(XTENSA) // TODO(b/170321206): xtensa kernels are less general than - // reference kernels and we ifdef out test cases that are - // currently known to fail. 
- TF_LITE_MICRO_TEST(SimpleTestFloat) { float output_data[tflite::testing::kOutputElements]; @@ -255,7 +246,6 @@ TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannel64bBias) { output_data)); } -#if !defined(CMSIS_NN) TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannel32bBias) { const int output_dims_count = 12; int16_t output_data[output_dims_count]; @@ -285,7 +275,6 @@ TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannel32bBias) { &tflite::testing::common_conv_params, tflite::Register_CONV_2D(), output_data)); } -#endif TF_LITE_MICRO_TEST(SimpleTestDilatedQuantizedPerChannel) { const int output_dims_count = 24; @@ -396,7 +385,6 @@ TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannelRelu664bBias) { &conv_params, tflite::Register_CONV_2D(), output_data)); } -#if !defined(CMSIS_NN) TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannelRelu632bBias) { const int output_dims_count = 12; int16_t output_data[output_dims_count]; @@ -429,13 +417,12 @@ TF_LITE_MICRO_TEST(SimpleTestQuantized16x8PerChannelRelu632bBias) { golden_data, golden_quantized, output_scale, output_zero_point, &conv_params, tflite::Register_CONV_2D(), output_data)); } -#endif TF_LITE_MICRO_TEST(Kernel1x1QuantizedPerChannel) { // conv params: // padding, stride_, activation, dilation_ - TfLiteConvParams conv_params = {kTfLitePaddingValid, 1, 1, - kTfLiteActNone, 1, 1}; + TfLiteConvParams conv_params = { + kTfLitePaddingValid, 1, 1, kTfLiteActNone, 1, 1, kTfLiteNoType}; int input_shape[] = {4, 1, 2, 2, 4}; // [len,N,H,W,C] constexpr int input_elements = @@ -487,8 +474,8 @@ TF_LITE_MICRO_TEST(Kernel1x1QuantizedPerChannel) { TF_LITE_MICRO_TEST(Kernel1x1QuantizedPerChannelRelu6) { // conv params: // padding, stride_, activation, dilation_ - TfLiteConvParams conv_params = {kTfLitePaddingValid, 1, 1, - kTfLiteActRelu6, 1, 1}; + TfLiteConvParams conv_params = { + kTfLitePaddingValid, 1, 1, kTfLiteActRelu6, 1, 1, kTfLiteNoType}; int input_shape[] = {4, 1, 2, 2, 4}; // [len,N,H,W,C] constexpr int input_elements = @@ -540,8 +527,8 @@ TF_LITE_MICRO_TEST(Kernel1x1QuantizedPerChannelRelu6) { TF_LITE_MICRO_TEST(Kernel1x1Quantized16x8PerChannelRelu6) { // conv params: // padding, stride_, activation, dilation_ - TfLiteConvParams conv_params = {kTfLitePaddingValid, 1, 1, - kTfLiteActRelu6, 1, 1}; + TfLiteConvParams conv_params = { + kTfLitePaddingValid, 1, 1, kTfLiteActRelu6, 1, 1, kTfLiteNoType}; int input_shape[] = {4, 1, 2, 2, 4}; // [len,N,H,W,C] const int input_elements = 1 * 2 * 2 * 4; @@ -673,8 +660,6 @@ TF_LITE_MICRO_TEST(BroadcastPerLayerQuantizationToPerChannelShouldMatchGolden) { tflite::Register_CONV_2D(), output_data)); } -#endif // !defined(XTENSA) - TF_LITE_MICRO_TEST(Int8Filter1x3x3x1ShouldMatchGoldenEvenInputPaddingSame) { using tflite::ElementCount; using tflite::kConvFilter1x3x3x1; diff --git a/tensorflow/lite/micro/kernels/conv_test.h b/tensorflow/lite/micro/kernels/conv_test.h index 39d3fa7a2ba..c655f043bcc 100644 --- a/tensorflow/lite/micro/kernels/conv_test.h +++ b/tensorflow/lite/micro/kernels/conv_test.h @@ -34,10 +34,6 @@ TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, int output_length, TfLiteConvParams* conv_params, TFLMRegistration registration, int8_t* output_data); -TfLiteStatus InvokeConv(TfLiteTensor* tensors, int tensors_size, - int output_length, TfLiteConvParams* conv_params, - TFLMRegistration registration, uint8_t* output_data); - TfLiteStatus ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, const float* expected_output_data, int output_length, @@ -52,13 +48,6 @@ TfLiteStatus 
ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, TFLMRegistration registration, int8_t* output_data, float tolerance = 1e-5); -TfLiteStatus ValidateConvGoldens(TfLiteTensor* tensors, int tensors_size, - const uint8_t* expected_output_data, - int output_length, - TfLiteConvParams* conv_params, - TFLMRegistration registration, - uint8_t* output_data, float tolerance = 1e-5); - TfLiteStatus TestConvFloat(int* input_dims_data, const float* input_data, int* filter_dims_data, const float* filter_data, int* bias_dims_data, const float* bias_data, @@ -67,15 +56,6 @@ TfLiteStatus TestConvFloat(int* input_dims_data, const float* input_data, TfLiteConvParams* conv_params, TFLMRegistration registration, float* output_data); -TfLiteStatus TestConvQuantizedPerLayer( - int* input_dims_data, const float* input_data, uint8_t* input_quantized, - float input_scale, int* filter_dims_data, const float* filter_data, - uint8_t* filter_quantized, float filter_scale, int* bias_dims_data, - const float* bias_data, int32_t* bias_quantized, int* output_dims_data, - const float* expected_output_data, uint8_t* expected_output_quantized, - float output_scale, TfLiteConvParams* conv_params, - TFLMRegistration registration, uint8_t* output_data); - TfLiteStatus TestConvQuantizedPerChannel( int* input_dims_data, const float* input_data, int8_t* input_quantized, float input_scale, int input_zero_point, int* filter_dims_data, diff --git a/tensorflow/lite/micro/kernels/conv_test_common.cc b/tensorflow/lite/micro/kernels/conv_test_common.cc index bdc9466baa0..a0f733b8e42 100644 --- a/tensorflow/lite/micro/kernels/conv_test_common.cc +++ b/tensorflow/lite/micro/kernels/conv_test_common.cc @@ -1,4 +1,4 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
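The hunk below corrects which argument of CreateSymmetricPerChannelQuantizedTensor is the quantized dimension. For orientation, a sketch of the symmetric per-channel scheme the test helpers assume (channel = dimension 0, zero point fixed at 0; the helper name and layout here are illustrative):

  #include <algorithm>
  #include <cmath>
  #include <cstdint>

  void QuantizeSymmetricPerChannel(const float* w, int channels,
                                   int per_channel, int8_t* q, float* scales) {
    for (int c = 0; c < channels; ++c) {
      float max_abs = 0.0f;
      for (int i = 0; i < per_channel; ++i) {
        max_abs = std::max(max_abs, std::fabs(w[c * per_channel + i]));
      }
      scales[c] = (max_abs > 0.0f) ? max_abs / 127.0f : 1.0f;
      for (int i = 0; i < per_channel; ++i) {
        q[c * per_channel + i] = static_cast<int8_t>(
            std::round(w[c * per_channel + i] / scales[c]));
      }
    }
  }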
@@ -145,8 +145,8 @@ TfLiteStatus TestConvQuantizedPerChannel( input_data, input_quantized, input_dims, input_scale, input_zero_point); TfLiteTensor filter_tensor = CreateSymmetricPerChannelQuantizedTensor( filter_data, filter_data_quantized, filter_dims, filter_scales, - filter_zero_points, &filter_quant, 0, false, - tensor_weight_type /* quantized dimension */); + filter_zero_points, &filter_quant, 0 /* quantized dimension */, false, + tensor_weight_type); TfLiteTensor bias_tensor = CreatePerChannelQuantizedBiasTensor( bias_data, bias_data_quantized, bias_dims, input_scale, &filter_scales[1], bias_scales, bias_zero_points, &bias_quant, 0 /* quantized dimension */); diff --git a/tensorflow/lite/micro/kernels/cumsum.cc b/tensorflow/lite/micro/kernels/cumsum.cc index f62f2a51fff..258cf8d208b 100644 --- a/tensorflow/lite/micro/kernels/cumsum.cc +++ b/tensorflow/lite/micro/kernels/cumsum.cc @@ -104,11 +104,11 @@ TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus CumSumPrepare(TfLiteContext* context, TfLiteNode* node) { return CalculateOpData(context, node); } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus CumSumEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); const TfLiteEvalTensor* axis_tensor = @@ -169,7 +169,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_CUMSUM() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, CumSumPrepare, CumSumEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/depth_to_space.cc b/tensorflow/lite/micro/kernels/depth_to_space.cc index 7e0a8fa5007..d4faf7cd59a 100644 --- a/tensorflow/lite/micro/kernels/depth_to_space.cc +++ b/tensorflow/lite/micro/kernels/depth_to_space.cc @@ -93,11 +93,11 @@ TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus DepthToSpacePrepare(TfLiteContext* context, TfLiteNode* node) { return CalculateOpData(context, node); } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus DepthToSpaceEval(TfLiteContext* context, TfLiteNode* node) { auto* params = reinterpret_cast(node->builtin_data); @@ -136,7 +136,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_DEPTH_TO_SPACE() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, DepthToSpacePrepare, + DepthToSpaceEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/depthwise_conv.cc b/tensorflow/lite/micro/kernels/depthwise_conv.cc index 9290c2d89fc..fa55a705606 100644 --- a/tensorflow/lite/micro/kernels/depthwise_conv.cc +++ b/tensorflow/lite/micro/kernels/depthwise_conv.cc @@ -27,12 +27,13 @@ limitations under the License. 
namespace tflite { namespace { -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* DepthwiseConvInit(TfLiteContext* context, const char* buffer, + size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus DepthwiseConvEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); TFLITE_DCHECK(node->builtin_data != nullptr); @@ -101,8 +102,33 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { break; } default: - MicroPrintf("Filter type %s (%d) not supported.", - TfLiteTypeGetName(filter->type), filter->type); + MicroPrintf("Filter type %s (%d) for input type %s not supported.", + TfLiteTypeGetName(filter->type), filter->type, + TfLiteTypeGetName(input->type)); + return kTfLiteError; + } + break; + } + case kTfLiteInt16: { + switch (filter->type) { + case kTfLiteInt8: { + reference_integer_ops::DepthwiseConvPerChannel( + DepthwiseConvParamsQuantized(params, data), + data.per_channel_output_multiplier, data.per_channel_output_shift, + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output)); + break; + } + default: + MicroPrintf("Filter type %s (%d) for input type %s not supported.", + TfLiteTypeGetName(filter->type), filter->type, + TfLiteTypeGetName(input->type)); return kTfLiteError; } break; @@ -118,7 +144,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_DEPTHWISE_CONV_2D() { - return tflite::micro::RegisterOp(Init, DepthwiseConvPrepare, Eval); + return tflite::micro::RegisterOp(DepthwiseConvInit, DepthwiseConvPrepare, + DepthwiseConvEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/depthwise_conv.h b/tensorflow/lite/micro/kernels/depthwise_conv.h index d8cc78db6ab..5f2d87efe28 100644 --- a/tensorflow/lite/micro/kernels/depthwise_conv.h +++ b/tensorflow/lite/micro/kernels/depthwise_conv.h @@ -1,4 +1,4 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -65,6 +65,11 @@ TFLMRegistration Register_DEPTHWISE_CONV_2D_INT8(); // implementations. TFLMRegistration Register_DEPTHWISE_CONV_2D_INT16(); +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8 activations and int4 weights and uses the latency optimized +// implementations. 
+TFLMRegistration Register_DEPTHWISE_CONV_2D_INT4(); + #else inline TFLMRegistration Register_DEPTHWISE_CONV_2D_INT8() { return Register_DEPTHWISE_CONV_2D(); @@ -73,6 +78,11 @@ inline TFLMRegistration Register_DEPTHWISE_CONV_2D_INT8() { inline TFLMRegistration Register_DEPTHWISE_CONV_2D_INT16() { return Register_DEPTHWISE_CONV_2D(); } + +inline TFLMRegistration Register_DEPTHWISE_CONV_2D_INT4() { + return Register_DEPTHWISE_CONV_2D(); +} + #endif } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/depthwise_conv_common.cc b/tensorflow/lite/micro/kernels/depthwise_conv_common.cc index 6d5f6c27113..52804de3315 100644 --- a/tensorflow/lite/micro/kernels/depthwise_conv_common.cc +++ b/tensorflow/lite/micro/kernels/depthwise_conv_common.cc @@ -192,7 +192,8 @@ TfLiteStatus DepthwiseConvPrepare(TfLiteContext* context, TfLiteNode* node) { context, input->type == filter->type || (input->type == kTfLiteInt8 && - (filter->type == kTfLiteInt4 || filter->type == kTfLiteInt8)), + (filter->type == kTfLiteInt4 || filter->type == kTfLiteInt8)) || + (input->type == kTfLiteInt16 && filter->type == kTfLiteInt8), "Hybrid models are not supported on TFLite Micro."); if (filter->type == kTfLiteInt4) { diff --git a/tensorflow/lite/micro/kernels/depthwise_conv_test.cc b/tensorflow/lite/micro/kernels/depthwise_conv_test.cc index c3b916ca0da..b50b40ae6d6 100644 --- a/tensorflow/lite/micro/kernels/depthwise_conv_test.cc +++ b/tensorflow/lite/micro/kernels/depthwise_conv_test.cc @@ -78,15 +78,15 @@ TfLiteStatus ValidateDepthwiseConvGoldens( return kTfLiteOk; } +template void TestDepthwiseConvQuantizedPerChannel( - int* input_dims_data, const float* input_data, int8_t* input_quantized, + int* input_dims_data, const float* input_data, T* input_quantized, float input_scale, int input_zero_point, int* filter_dims_data, const float* filter_data, int8_t* filter_data_quantized, - int* bias_dims_data, const float* bias_data, int32_t* bias_data_quantized, + int* bias_dims_data, const float* bias_data, BiasT* bias_data_quantized, int* output_dims_data, const float* expected_output_data, - int8_t* expected_output_data_quantized, int8_t* output_data, - float output_scale, int output_zero_point, - TfLiteDepthwiseConvParams* conv_params, + T* expected_output_data_quantized, T* output_data, float output_scale, + int output_zero_point, TfLiteDepthwiseConvParams* conv_params, TfLiteType filter_packed_type = kTfLiteNoType) { TfLiteIntArray* input_dims = IntArrayFromInts(input_dims_data); TfLiteIntArray* filter_dims = IntArrayFromInts(filter_dims_data); @@ -147,6 +147,42 @@ void TestDepthwiseConvQuantizedPerChannel( 1.0, tensors_size, tensors)); } +void TestDepthwiseConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int8_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, int32_t* bias_data_quantized, + int* output_dims_data, const float* expected_output_data, + int8_t* expected_output_data_quantized, int8_t* output_data, + float output_scale, int output_zero_point, + TfLiteDepthwiseConvParams* conv_params, + TfLiteType filter_packed_type = kTfLiteNoType) { + return TestDepthwiseConvQuantizedPerChannel( + input_dims_data, input_data, input_quantized, input_scale, + input_zero_point, filter_dims_data, filter_data, filter_data_quantized, + bias_dims_data, bias_data, bias_data_quantized, output_dims_data, + expected_output_data, expected_output_data_quantized, 
output_data, + output_scale, output_zero_point, conv_params, filter_packed_type); +} + +void TestDepthwiseConvQuantizedPerChannel( + int* input_dims_data, const float* input_data, int16_t* input_quantized, + float input_scale, int input_zero_point, int* filter_dims_data, + const float* filter_data, int8_t* filter_data_quantized, + int* bias_dims_data, const float* bias_data, int64_t* bias_data_quantized, + int* output_dims_data, const float* expected_output_data, + int16_t* expected_output_data_quantized, int16_t* output_data, + float output_scale, int output_zero_point, + TfLiteDepthwiseConvParams* conv_params, + TfLiteType filter_packed_type = kTfLiteNoType) { + return TestDepthwiseConvQuantizedPerChannel( + input_dims_data, input_data, input_quantized, input_scale, + input_zero_point, filter_dims_data, filter_data, filter_data_quantized, + bias_dims_data, bias_data, bias_data_quantized, output_dims_data, + expected_output_data, expected_output_data_quantized, output_data, + output_scale, output_zero_point, conv_params, filter_packed_type); +} + // Xtensa kernels do not support float activations., and the corresponding tests // are disabled. As a result, helper functions that are only needed for float // kernel tests also need to be ifdef'd out to avoid build errors due to unused @@ -989,4 +1025,47 @@ TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannel) { output_scale, output_zero_point, &conv_params); } +TF_LITE_MICRO_TEST(SimpleTestQuantizedPerChannelInt16InputInt8Filter) { + const int input_elements = 12; + int input_shape[] = {4, 1, 3, 2, 2}; + const float input_values[] = {-547, 108, -682, 540, -161, -539, 9, -482, + -859, 84, 153, -726, 523, 702, -172, -936}; + const int filter_elements = 16; + int filter_shape[] = {4, 1, 2, 2, 4}; + const float filter_values[] = {1, 2, 3, 4, -9, 10, -11, 12, + 5, 6, 7, 8, 13, -14, 15, -16}; + const int bias_elements = 4; + int bias_shape[] = {4, 1, 1, 1, 4}; + const int output_elements = 8; + const float bias_values[] = {1, 2, 3, 4}; + const float golden[] = { + 4894, -9009, -16596, 10268, -2564, -7483, -6599, 4356, + }; + int output_shape[] = {4, 1, 2, 1, 4}; + const int output_dims_count = 8; + int16_t output_data[output_dims_count]; + + const float input_scale = 0.5; + const float output_scale = 1.0f; + const int input_zero_point = 0; + const int output_zero_point = 0; + + int16_t input_quantized[input_elements]; + int8_t filter_quantized[filter_elements]; + int64_t bias_quantized[bias_elements]; + int16_t golden_quantized[output_elements]; + + TfLiteDepthwiseConvParams conv_params; + conv_params.activation = kTfLiteActNone; + conv_params.dilation_width_factor = 1; + conv_params.dilation_height_factor = 1; + conv_params.stride_height = 1; + conv_params.stride_width = 1; + + tflite::testing::TestDepthwiseConvQuantizedPerChannel( + input_shape, input_values, input_quantized, input_scale, input_zero_point, + filter_shape, filter_values, filter_quantized, bias_shape, bias_values, + bias_quantized, output_shape, golden, golden_quantized, output_data, + output_scale, output_zero_point, &conv_params); +} TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/detection_postprocess.cc b/tensorflow/lite/micro/kernels/detection_postprocess.cc index e807f35d430..fa2d4cabdb7 100644 --- a/tensorflow/lite/micro/kernels/detection_postprocess.cc +++ b/tensorflow/lite/micro/kernels/detection_postprocess.cc @@ -117,7 +117,8 @@ struct OpData { TfLiteQuantizationParams input_anchors; }; -void* Init(TfLiteContext* context, const char* buffer, size_t length) 
{ +void* DetectionPostProcessInit(TfLiteContext* context, const char* buffer, + size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); OpData* op_data = nullptr; @@ -149,7 +150,8 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) { return op_data; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus DetectionPostProcessPrepare(TfLiteContext* context, + TfLiteNode* node) { auto* op_data = static_cast<OpData*>(node->user_data); MicroContext* micro_context = GetMicroContext(context); @@ -774,7 +776,8 @@ TfLiteStatus NonMaxSuppressionMultiClass(TfLiteContext* context, return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus DetectionPostProcessEval(TfLiteContext* context, + TfLiteNode* node) { TF_LITE_ENSURE(context, (kBatchSize == 1)); auto* op_data = static_cast<OpData*>(node->user_data); @@ -800,7 +803,9 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration* Register_DETECTION_POSTPROCESS() { - static TFLMRegistration r = tflite::micro::RegisterOp(Init, Prepare, Eval); + static TFLMRegistration r = tflite::micro::RegisterOp( + DetectionPostProcessInit, DetectionPostProcessPrepare, + DetectionPostProcessEval); return &r; } diff --git a/tensorflow/lite/micro/kernels/div.cc b/tensorflow/lite/micro/kernels/div.cc index cc90e224706..a80b3f2006c 100644 --- a/tensorflow/lite/micro/kernels/div.cc +++ b/tensorflow/lite/micro/kernels/div.cc @@ -1,4 +1,4 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -65,12 +65,12 @@ TfLiteStatus CalculateOpDataDiv(TfLiteContext* context, TfLiteTensor* input1, return kTfLiteOk; } -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* DivInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpDataDiv)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus DivPrepare(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); TFLITE_DCHECK(node->builtin_data != nullptr); @@ -91,12 +91,21 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_STATUS( CalculateOpDataDiv(context, input1, input2, output, params, data)); + if (output->type == kTfLiteInt32) { + // Only support int32 unquantized DIV for now.
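+ // If either input carries affine quantization parameters, the checks + // below reject the node at prepare time rather than letting an int32 + // DIV silently produce incorrect results.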
+ TF_LITE_ENSURE_EQ(context, input1->quantization.type, + kTfLiteNoQuantization); + TF_LITE_ENSURE_EQ(context, input2->quantization.type, + kTfLiteNoQuantization); + } + micro_context->DeallocateTempTfLiteTensor(input1); micro_context->DeallocateTempTfLiteTensor(input2); micro_context->DeallocateTempTfLiteTensor(output); return kTfLiteOk; } +template <typename T> void EvalDiv(TfLiteContext* context, TfLiteNode* node, TfLiteDivParams* params, const OpDataDiv* data, const TfLiteEvalTensor* input1, const TfLiteEvalTensor* input2, TfLiteEvalTensor* output) { @@ -120,9 +129,9 @@ void EvalDiv(TfLiteContext* context, TfLiteNode* node, TfLiteDivParams* params, tflite::micro::GetTensorShape(input2), &op_params); if (requires_broadcast) { - TF_LITE_DIV(reference_ops, BroadcastDivSlow, float); + TF_LITE_DIV(reference_ops, BroadcastDivSlow, T); } else { - TF_LITE_DIV(reference_ops, Div, float); + TF_LITE_DIV(reference_ops, Div, T); } #undef TF_LITE_DIV } @@ -170,7 +179,7 @@ TfLiteStatus EvalQuantized(TfLiteContext* context, TfLiteNode* node, return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus DivEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->builtin_data != nullptr); auto* params = static_cast<TfLiteDivParams*>(node->builtin_data); TFLITE_DCHECK(node->user_data != nullptr); @@ -184,13 +193,15 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetEvalOutput(context, node, kOutputTensor); if (output->type == kTfLiteFloat32) { - EvalDiv(context, node, params, data, input1, input2, output); + EvalDiv<float>(context, node, params, data, input1, input2, output); + } else if (output->type == kTfLiteInt32) { + EvalDiv<int32_t>(context, node, params, data, input1, input2, output); } else if (output->type == kTfLiteInt8) { TF_LITE_ENSURE_OK(context, EvalQuantized(context, node, params, data, input1, input2, output)); } else { MicroPrintf( - "DIV only supports FLOAT32, quantized INT8 " + "DIV only supports FLOAT32, INT32, quantized INT8 " "now, got type %s (%d).", TfLiteTypeGetName(output->type), output->type); return kTfLiteError; @@ -202,7 +213,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_DIV() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(DivInit, DivPrepare, DivEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/div_test.cc b/tensorflow/lite/micro/kernels/div_test.cc index e0202557bb2..ef35d0c0676 100644 --- a/tensorflow/lite/micro/kernels/div_test.cc +++ b/tensorflow/lite/micro/kernels/div_test.cc @@ -1,4 +1,4 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
@@ -374,4 +374,65 @@ TF_LITE_MICRO_TEST(QuantizedDivOpTestBroadcast) { kTfLiteActNone, &params); } +TF_LITE_MICRO_TEST(IntegerDivOpTestNoActivation) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr int32_t kInput1[] = {-2, 2, -15, 8}; + constexpr int32_t kInput2[] = {5, -2, -3, 5}; + constexpr int32_t kExpect[] = {0, -1, 5, 1}; + constexpr int kOutputCount = std::extent<decltype(kExpect)>::value; + int32_t output_data[kOutputCount]; + + tflite::testing::TestDiv(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(IntegerDivOpTestActivationRELU_N1_TO_1) { + int kDims[] = {4, 1, 2, 2, 1}; + constexpr int32_t kInput1[] = {-2, 2, -12, 8}; + constexpr int32_t kInput2[] = {1, 2, -15, 5}; + constexpr int32_t kExpect[] = {-1, 1, 0, 1}; + constexpr int kOutputCount = std::extent<decltype(kExpect)>::value; + int32_t output_data[kOutputCount]; + + tflite::testing::TestDiv(kDims, kInput1, kDims, kInput2, kDims, kExpect, + output_data, kTfLiteActReluN1To1); +} + +TF_LITE_MICRO_TEST(IntegerDivOpTestVariousInputShapes) { + int kShape1[] = {1, 6}; + int kShape2[] = {2, 2, 3}; + int kShape3[] = {3, 2, 1, 3}; + int kShape4[] = {4, 1, 3, 1, 2}; + int* kDims[] = {kShape1, kShape2, kShape3, kShape4}; + constexpr int kDimsCount = std::extent<decltype(kDims)>::value; + + constexpr int32_t kInput1[] = {-20, 2, 3, 8, 11, -20}; + constexpr int32_t kInput2[] = {1, 2, 6, 5, -11, -1}; + constexpr int32_t kExpect[] = {-20, 1, 0, 1, -1, 20}; + constexpr int kOutputCount = std::extent<decltype(kExpect)>::value; + int32_t output_data[kOutputCount]; + + tflite::testing::TestDivMultiShape(kDims, kDimsCount, kInput1, kInput2, + kExpect, output_data, kTfLiteActNone); +} + +TF_LITE_MICRO_TEST(IntegerDivOpTestWithBroadcast) { + int kShape1[] = {1, 8}; + int kShape2[] = {2, 2, 4}; + int kShape3[] = {3, 2, 1, 4}; + int kShape4[] = {4, 1, 4, 1, 2}; + int kShape5[] = {5, 1, 2, 1, 2, 2}; + int* kDims[] = {kShape1, kShape2, kShape3, kShape4, kShape5}; + constexpr int kDimsCount = std::extent<decltype(kDims)>::value; + + constexpr int32_t kInput1[] = {-20, 21, 7, 8, 11, -123, -42, -48}; + constexpr int32_t kInput2[] = {3}; + constexpr int32_t kExpect[] = {-6, 7, 2, 2, 3, -41, -14, -16}; + constexpr int kOutputCount = std::extent<decltype(kExpect)>::value; + int32_t output_data[kOutputCount]; + + tflite::testing::TestDivMultiBroadcast(kDims, kDimsCount, kInput1, kInput2, + kExpect, output_data, kTfLiteActNone); +} + TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/embedding_lookup.cc b/tensorflow/lite/micro/kernels/embedding_lookup.cc index 77ac0e0c8e4..6a4be877dca 100644 --- a/tensorflow/lite/micro/kernels/embedding_lookup.cc +++ b/tensorflow/lite/micro/kernels/embedding_lookup.cc @@ -65,7 +65,7 @@ TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, return kTfLiteOk; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus EmbeddingLookUpPrepare(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); @@ -178,7 +178,7 @@ TfLiteStatus EvalHybrid(const OpData& op_data, const TfLiteEvalTensor* lookup, return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus EmbeddingLookUpEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* lookup = tflite::micro::GetEvalInput(context, node, kInputTensor_0); const TfLiteEvalTensor* value = @@ -207,7 +207,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_EMBEDDING_LOOKUP() { - return
tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, EmbeddingLookUpPrepare, + EmbeddingLookUpEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/exp.cc b/tensorflow/lite/micro/kernels/exp.cc index 1a2e00cafc7..8d1da8faf00 100644 --- a/tensorflow/lite/micro/kernels/exp.cc +++ b/tensorflow/lite/micro/kernels/exp.cc @@ -27,7 +27,7 @@ namespace { constexpr int kInputTensor = 0; constexpr int kOutputTensor = 0; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ExpPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); @@ -51,7 +51,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ExpEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); TfLiteEvalTensor* output = @@ -73,7 +73,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_EXP() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, ExpPrepare, ExpEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/expand_dims.cc b/tensorflow/lite/micro/kernels/expand_dims.cc index 0c4c6ff3c5b..d47b42cbe0c 100644 --- a/tensorflow/lite/micro/kernels/expand_dims.cc +++ b/tensorflow/lite/micro/kernels/expand_dims.cc @@ -13,6 +13,8 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ +#include <stdint.h> + #include "tensorflow/lite/c/common.h" #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" @@ -82,7 +84,7 @@ TfLiteStatus VerifyTensorDim(TfLiteContext* context, const TfLiteTensor* input, return kTfLiteOk; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ExpandDimsPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); @@ -116,7 +118,7 @@ void memCopyN(T* out, const T* in, const int num_elements) { } } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ExpandDimsEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); TfLiteEvalTensor* output = @@ -128,13 +130,18 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { memCopyN(tflite::micro::GetTensorData<float>(output), tflite::micro::GetTensorData<float>(input), flat_size); } break; + case kTfLiteInt16: { + memCopyN(tflite::micro::GetTensorData<int16_t>(output), + tflite::micro::GetTensorData<int16_t>(input), flat_size); + } break; case kTfLiteInt8: { memCopyN(tflite::micro::GetTensorData<int8_t>(output), tflite::micro::GetTensorData<int8_t>(input), flat_size); } break; default: MicroPrintf( - "Expand_Dims only currently supports int8 and float32, got %d.", + "Expand_Dims only currently supports int8, int16 and float32, got " + "%d.", input->type); return kTfLiteError; } @@ -143,7 +150,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_EXPAND_DIMS() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr,
ExpandDimsPrepare, ExpandDimsEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/expand_dims_test.cc b/tensorflow/lite/micro/kernels/expand_dims_test.cc index d8e217e588b..39a83b57471 100644 --- a/tensorflow/lite/micro/kernels/expand_dims_test.cc +++ b/tensorflow/lite/micro/kernels/expand_dims_test.cc @@ -13,6 +13,8 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ +#include <stdint.h> + #include "tensorflow/lite/c/builtin_op_data.h" #include "tensorflow/lite/c/common.h" #include "tensorflow/lite/micro/kernels/kernel_runner.h" @@ -138,6 +140,20 @@ TF_LITE_MICRO_TEST(ExpandDimsPositiveAxisTest2) { golden_data, output_data); } +TF_LITE_MICRO_TEST(ExpandDimsPositiveAxisTest3) { + int16_t output_data[6]; + int input_dims[] = {3, 3, 1, 2}; + const int16_t input_data[] = {-1, 1, 2, -2, 0, 3}; + const int16_t golden_data[] = {-1, 1, 2, -2, 0, 3}; + int axis_dims[] = {1, 1}; + const int32_t axis_data[] = {3}; + int golden_dims[] = {1, 3, 1, 2}; + int output_dims[] = {4, 3, 1, 2, 1}; + tflite::testing::TestExpandDims(input_dims, input_data, axis_dims, + axis_data, golden_dims, output_dims, + golden_data, output_data); +} + TF_LITE_MICRO_TEST(ExpandDimsNegativeAxisTest4) { int8_t output_data[6]; int input_dims[] = {3, 3, 1, 2}; diff --git a/tensorflow/lite/micro/kernels/fill.cc b/tensorflow/lite/micro/kernels/fill.cc index b1b366eb128..1486fcb8ee7 100644 --- a/tensorflow/lite/micro/kernels/fill.cc +++ b/tensorflow/lite/micro/kernels/fill.cc @@ -64,7 +64,7 @@ constexpr int kDimsTensor = 0; constexpr int kValueTensor = 1; constexpr int kOutputTensor = 0; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FillPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); // Ensure inputs and outputs exist.
@@ -107,7 +107,7 @@ void FillImpl(const TfLiteEvalTensor* value, TfLiteEvalTensor* output) { micro::GetTensorShape(output), micro::GetTensorData<T>(output)); } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FillEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* value = micro::GetEvalInput(context, node, kValueTensor); TfLiteEvalTensor* output = micro::GetEvalOutput(context, node, kOutputTensor); @@ -134,7 +134,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_FILL() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, FillPrepare, FillEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/floor.cc b/tensorflow/lite/micro/kernels/floor.cc index 094c8b55c32..f92b7e02db9 100644 --- a/tensorflow/lite/micro/kernels/floor.cc +++ b/tensorflow/lite/micro/kernels/floor.cc @@ -26,7 +26,7 @@ namespace { constexpr int kInputTensor = 0; constexpr int kOutputTensor = 0; -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FloorEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); TF_LITE_ENSURE_TYPES_EQ(context, input->type, kTfLiteFloat32); @@ -42,7 +42,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_FLOOR() { - return tflite::micro::RegisterOp(nullptr, nullptr, Eval); + return tflite::micro::RegisterOp(nullptr, nullptr, FloorEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/floor_div.cc b/tensorflow/lite/micro/kernels/floor_div.cc index 5c008085319..9adf61423c5 100644 --- a/tensorflow/lite/micro/kernels/floor_div.cc +++ b/tensorflow/lite/micro/kernels/floor_div.cc @@ -57,11 +57,11 @@ TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* FloorDivInit(TfLiteContext* context, const char* buffer, size_t length) { return nullptr; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FloorDivPrepare(TfLiteContext* context, TfLiteNode* node) { return CalculateOpData(context, node); } @@ -101,7 +101,7 @@ TfLiteStatus EvalFloorDiv(TfLiteContext* context, return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FloorDivEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input1 = tflite::micro::GetEvalInput(context, node, kInputTensor1); const TfLiteEvalTensor* input2 = @@ -124,7 +124,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_FLOOR_DIV() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(FloorDivInit, FloorDivPrepare, FloorDivEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/floor_mod.cc b/tensorflow/lite/micro/kernels/floor_mod.cc index f45989206fc..da2a7c92817 100644 --- a/tensorflow/lite/micro/kernels/floor_mod.cc +++ b/tensorflow/lite/micro/kernels/floor_mod.cc @@ -62,11 +62,11 @@ TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* FloorModInit(TfLiteContext* context, const char* buffer, size_t length) { return nullptr; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
+TfLiteStatus FloorModPrepare(TfLiteContext* context, TfLiteNode* node) { return CalculateOpData(context, node); } @@ -96,7 +96,7 @@ TfLiteStatus EvalFloorMod(TfLiteContext* context, bool requires_broadcast, return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FloorModEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input1 = tflite::micro::GetEvalInput(context, node, kInputTensor1); const TfLiteEvalTensor* input2 = @@ -122,7 +122,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_FLOOR_MOD() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(FloorModInit, FloorModPrepare, FloorModEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/fully_connected.cc b/tensorflow/lite/micro/kernels/fully_connected.cc index f732b2935a0..65c83792e87 100644 --- a/tensorflow/lite/micro/kernels/fully_connected.cc +++ b/tensorflow/lite/micro/kernels/fully_connected.cc @@ -26,13 +26,14 @@ limitations under the License. namespace tflite { namespace { -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* FullyConnectedInit(TfLiteContext* context, const char* buffer, + size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpDataFullyConnected)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FullyConnectedPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TFLITE_DCHECK(node->user_data != nullptr); @@ -87,7 +88,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus FullyConnectedEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->builtin_data != nullptr); const auto* params = static_cast<const TfLiteFullyConnectedParams*>(node->builtin_data); @@ -200,7 +201,12 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_FULLY_CONNECTED() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(FullyConnectedInit, FullyConnectedPrepare, + FullyConnectedEval); +} + +TFLMInferenceRegistration RegisterInference_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(FullyConnectedEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/fully_connected.h b/tensorflow/lite/micro/kernels/fully_connected.h index 3fa6060c74a..8308838ec6d 100644 --- a/tensorflow/lite/micro/kernels/fully_connected.h +++ b/tensorflow/lite/micro/kernels/fully_connected.h @@ -1,4 +1,4 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -95,6 +95,10 @@ inline TFLMRegistration Register_FULLY_CONNECTED_INT8() { // int16. TFLMRegistration Register_FULLY_CONNECTED_INT16(); +// Returns a TFLMRegistration struct for a kernel variant that only supports +// int8 and int4 packed kernels.
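+// As with the INT8/INT16 variants above, this lets a resolver pull in just +// the int4 path when that is the only variant a model needs.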
+TFLMRegistration Register_FULLY_CONNECTED_INT4(); + #else // Note that while this block gets used for both reference and optimized kernels // that do not have any specialized implementations, the only goal here is to @@ -105,6 +109,10 @@ inline TFLMRegistration Register_FULLY_CONNECTED_INT16() { return Register_FULLY_CONNECTED(); } +inline TFLMRegistration Register_FULLY_CONNECTED_INT4() { + return Register_FULLY_CONNECTED(); +} + #endif } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/fully_connected_common.cc b/tensorflow/lite/micro/kernels/fully_connected_common.cc index 5a8d312defe..66f8787b4af 100644 --- a/tensorflow/lite/micro/kernels/fully_connected_common.cc +++ b/tensorflow/lite/micro/kernels/fully_connected_common.cc @@ -57,6 +57,24 @@ TfLiteStatus CalculateOpDataFullyConnected( TfLiteType data_type, const TfLiteTensor* input, const TfLiteTensor* filter, const TfLiteTensor* bias, TfLiteTensor* output, OpDataFullyConnected* data) { + // TODO(b/324385802): Support per-channel quantization for FullyConnected. + // If you have hit this failure message, you will need to disable this + // behavior. This can be done by setting the following flag to true: + // TfLiteConverter._experimental_disable_per_channel_quantization_for_dense_layers + // https://github.com/tensorflow/tensorflow/blob/377f47694fa790e98db6665b9adecde00b5e0d68/tensorflow/lite/python/lite.py#L674 + if (filter->quantization.type == kTfLiteAffineQuantization && + filter->quantization.params != nullptr) { + TfLiteAffineQuantization* affine_quantization = + reinterpret_cast<TfLiteAffineQuantization*>( + filter->quantization.params); + TF_LITE_ENSURE(context, affine_quantization->scale); + TF_LITE_ENSURE_MSG( + context, affine_quantization->scale->size == 1, + "FullyConnected per-channel quantization not yet supported. Please set " + "converter._experimental_disable_per_channel_quantization_for_dense_" + "layers = True."); + } + if (data_type != kTfLiteFloat32) { double real_multiplier = 0.0; TF_LITE_ENSURE_STATUS(GetQuantizedConvolutionMultipler( diff --git a/tensorflow/lite/micro/kernels/fully_connected_test.cc b/tensorflow/lite/micro/kernels/fully_connected_test.cc index 2e9206a20f0..2ad132055b8 100644 --- a/tensorflow/lite/micro/kernels/fully_connected_test.cc +++ b/tensorflow/lite/micro/kernels/fully_connected_test.cc @@ -247,7 +247,8 @@ TfLiteStatus ValidateFullyConnectedGoldens( const TfLiteFusedActivation activation, const float tolerance, const int output_len, const T* golden, T* output_data) { TfLiteFullyConnectedParams builtin_data = { - activation, kTfLiteFullyConnectedWeightsFormatDefault, false, false}; + activation, kTfLiteFullyConnectedWeightsFormatDefault, false, false, + kTfLiteNoType}; // Avoid variable length array warning.
constexpr int inputs_array_len = 4; diff --git a/tensorflow/lite/micro/kernels/gather.cc b/tensorflow/lite/micro/kernels/gather.cc index 99556011301..a0af4c0edda 100644 --- a/tensorflow/lite/micro/kernels/gather.cc +++ b/tensorflow/lite/micro/kernels/gather.cc @@ -97,7 +97,7 @@ TfLiteStatus Gather(const TfLiteGatherParams* params, return kTfLiteOk; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus GatherPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); @@ -188,7 +188,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus GatherEval(TfLiteContext* context, TfLiteNode* node) { const auto* params = reinterpret_cast<const TfLiteGatherParams*>(node->builtin_data); const TfLiteEvalTensor* input = @@ -218,7 +218,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_GATHER() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, GatherPrepare, GatherEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/gather_nd.cc b/tensorflow/lite/micro/kernels/gather_nd.cc index 3774dddb581..d01af7c6784 100644 --- a/tensorflow/lite/micro/kernels/gather_nd.cc +++ b/tensorflow/lite/micro/kernels/gather_nd.cc @@ -28,7 +28,7 @@ constexpr int kIndices = 1; constexpr int kOutputTensor = 0; constexpr int MAX_INDICES_ND = 5; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus GatherNdPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); @@ -185,7 +185,7 @@ TfLiteStatus EvalGatherNd(TfLiteContext* context, return status; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus GatherNdEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* params = tflite::micro::GetEvalInput(context, node, kParams); const TfLiteEvalTensor* indices = @@ -206,7 +206,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_GATHER_ND() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, GatherNdPrepare, GatherNdEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/if.cc b/tensorflow/lite/micro/kernels/if.cc index 9143c9c6d28..029846b2115 100644 --- a/tensorflow/lite/micro/kernels/if.cc +++ b/tensorflow/lite/micro/kernels/if.cc @@ -38,12 +38,12 @@ struct OpData { int else_subgraph_index; }; -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* IfInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpData)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus IfPrepare(TfLiteContext* context, TfLiteNode* node) { OpData* op_data = reinterpret_cast<OpData*>(node->user_data); const auto* params = reinterpret_cast<const TfLiteIfParams*>(node->builtin_data); @@ -67,7 +67,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { // passed to the branch subgraphs. Therefore, the number of subgraph inputs // will be the number of node inputs - 1.
size_t num_inputs = node->inputs->size - 1; - size_t num_outputs = node->outputs->size; + size_t num_outputs = NumOutputs(node); MicroGraph& graph_info = micro_context->graph(); @@ -85,7 +85,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus IfEval(TfLiteContext* context, TfLiteNode* node) { const OpData* op_data = reinterpret_cast<const OpData*>(node->user_data); tflite::MicroContext* micro_context = tflite::GetMicroContext(context); @@ -117,7 +117,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace. TFLMRegistration Register_IF() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(IfInit, IfPrepare, IfEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/kernel_runner.cc b/tensorflow/lite/micro/kernels/kernel_runner.cc index d5112a1ec69..602778d7c50 100644 --- a/tensorflow/lite/micro/kernels/kernel_runner.cc +++ b/tensorflow/lite/micro/kernels/kernel_runner.cc @@ -37,7 +37,8 @@ void ClearBufferApi(TfLiteContext* context_) { KernelRunner::KernelRunner(const TFLMRegistration& registration, TfLiteTensor* tensors, int tensors_size, TfLiteIntArray* inputs, TfLiteIntArray* outputs, - void* builtin_data, TfLiteIntArray* intermediates) + const void* builtin_data, + TfLiteIntArray* intermediates) : registration_(registration), allocator_(SingleArenaBufferAllocator::Create(kKernelRunnerBuffer_, kKernelRunnerBufferSize_)), @@ -57,7 +58,7 @@ KernelRunner::KernelRunner(const TFLMRegistration& registration, // Prepare TfLiteNode: node_.inputs = inputs; node_.outputs = outputs; - node_.builtin_data = builtin_data; + node_.builtin_data = const_cast<void*>(builtin_data); node_.intermediates = intermediates; } diff --git a/tensorflow/lite/micro/kernels/kernel_runner.h b/tensorflow/lite/micro/kernels/kernel_runner.h index d617c449b25..25b97c11302 100644 --- a/tensorflow/lite/micro/kernels/kernel_runner.h +++ b/tensorflow/lite/micro/kernels/kernel_runner.h @@ -35,7 +35,7 @@ class KernelRunner { public: KernelRunner(const TFLMRegistration& registration, TfLiteTensor* tensors, int tensors_size, TfLiteIntArray* inputs, - TfLiteIntArray* outputs, void* builtin_data, + TfLiteIntArray* outputs, const void* builtin_data, TfLiteIntArray* intermediates = nullptr); // Calls init and prepare on the kernel (i.e. TFLMRegistration) struct. diff --git a/tensorflow/lite/micro/kernels/kernel_util.cc b/tensorflow/lite/micro/kernels/kernel_util.cc index ffffa084d51..a509f5d6fd8 100644 --- a/tensorflow/lite/micro/kernels/kernel_util.cc +++ b/tensorflow/lite/micro/kernels/kernel_util.cc @@ -53,6 +53,15 @@ TFLMRegistration RegisterOp( /*custom_name=*/nullptr}; } +TFLMInferenceRegistration RegisterOp( + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node), + void (*reset)(TfLiteContext* context, void* buffer)) { + return { + /*invoke=*/invoke, + /*reset=*/reset, + }; +} + // Returns a mutable tensor for a given input index. is_variable must be checked // during prepare when the full TfLiteTensor is available.
TfLiteEvalTensor* GetMutableEvalInput(const TfLiteContext* context, @@ -243,6 +252,7 @@ TfLiteStatus CopySubgraphOutputsToOpOutputs(TfLiteContext* context, TfLiteNode* node, MicroGraph* graph_info, int subgraph_idx) { + if (graph_info->NumSubgraphOutputs(subgraph_idx) == 0) return kTfLiteOk; TF_LITE_ENSURE(context, static_cast<size_t>(node->outputs->size) == graph_info->NumSubgraphOutputs(subgraph_idx)); for (int i = 0; i < node->outputs->size; i++) { diff --git a/tensorflow/lite/micro/kernels/kernel_util.h b/tensorflow/lite/micro/kernels/kernel_util.h index 080a0b3f361..f14c927133d 100644 --- a/tensorflow/lite/micro/kernels/kernel_util.h +++ b/tensorflow/lite/micro/kernels/kernel_util.h @@ -35,6 +35,10 @@ TFLMRegistration RegisterOp( void (*free)(TfLiteContext* context, void* buffer) = nullptr, void (*reset)(TfLiteContext* context, void* buffer) = nullptr); +TFLMInferenceRegistration RegisterOp( + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node), + void (*reset)(TfLiteContext* context, void* buffer) = nullptr); + // Prints out n bytes in a int8_t buffer as hex void PrintNBytes(const int8_t* tensor_data, int n_bytes, const char* prefix = nullptr); diff --git a/tensorflow/lite/micro/kernels/l2norm.cc b/tensorflow/lite/micro/kernels/l2norm.cc index fa3601bf5b1..bde38de8eb2 100644 --- a/tensorflow/lite/micro/kernels/l2norm.cc +++ b/tensorflow/lite/micro/kernels/l2norm.cc @@ -33,7 +33,7 @@ enum KernelType { constexpr int kInputTensor = 0; constexpr int kOutputTensor = 0; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus L2NormPrepare(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); TFLITE_DCHECK(node->builtin_data != nullptr); @@ -72,13 +72,13 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* L2NormInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(L2NormalizationParams)); } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus L2NormEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); const L2NormalizationParams& data = *(static_cast<const L2NormalizationParams*>(node->user_data)); @@ -132,7 +132,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_L2NORM_REF() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(L2NormInit, L2NormPrepare, L2NormEval); } TFLMRegistration Register_L2_NORMALIZATION() { return Register_L2NORM_REF(); } diff --git a/tensorflow/lite/micro/kernels/lstm_eval_test.cc b/tensorflow/lite/micro/kernels/lstm_eval_test.cc index 53c0d7c4dac..eaba2c4ac2f 100644 --- a/tensorflow/lite/micro/kernels/lstm_eval_test.cc +++ b/tensorflow/lite/micro/kernels/lstm_eval_test.cc @@ -454,6 +454,6 @@ TF_LITE_MICRO_TEST(TestLSTMEvalInt16) { cell_state_tolerance, int16_node_contents); } - #endif // !defined(XTENSA) + TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/micro_ops.h b/tensorflow/lite/micro/kernels/micro_ops.h index 9b21d87fe53..2e33a6730bd 100644 --- a/tensorflow/lite/micro/kernels/micro_ops.h +++ b/tensorflow/lite/micro/kernels/micro_ops.h @@ -15,6 +15,7 @@ limitations under the License.
#ifndef TENSORFLOW_LITE_MICRO_KERNELS_MICRO_OPS_H_ #define TENSORFLOW_LITE_MICRO_KERNELS_MICRO_OPS_H_ +#include "signal/micro/kernels/irfft.h" #include "signal/micro/kernels/rfft.h" #include "tensorflow/lite/c/common.h" @@ -39,6 +40,7 @@ TFLMRegistration Register_ARG_MAX(); TFLMRegistration Register_ARG_MIN(); TFLMRegistration Register_ASSIGN_VARIABLE(); TFLMRegistration Register_AVERAGE_POOL_2D(); +TFLMRegistration Register_BATCH_MATMUL(); TFLMRegistration Register_BATCH_TO_SPACE_ND(); TFLMRegistration Register_BROADCAST_ARGS(); TFLMRegistration Register_BROADCAST_TO(); @@ -100,8 +102,10 @@ TFLMRegistration Register_READ_VARIABLE(); TFLMRegistration Register_REDUCE_MAX(); TFLMRegistration Register_RELU(); TFLMRegistration Register_RELU6(); +TFLMRegistration Register_RESHAPE(); TFLMRegistration Register_RESIZE_BILINEAR(); TFLMRegistration Register_RESIZE_NEAREST_NEIGHBOR(); +TFLMRegistration Register_ROUND(); TFLMRegistration Register_RSQRT(); TFLMRegistration Register_SELECT_V2(); TFLMRegistration Register_SHAPE(); @@ -130,17 +134,25 @@ TFLMRegistration Register_VAR_HANDLE(); TFLMRegistration Register_WHILE(); TFLMRegistration Register_ZEROS_LIKE(); +// TODO(b/295174388): Add the rest of inference only registration functions. +TFLMInferenceRegistration RegisterInference_FULLY_CONNECTED(); + // TODO(b/160234179): Change custom OPs to also return by value. namespace tflm_signal { +TFLMRegistration* Register_DELAY(); +TFLMRegistration* Register_FFT_AUTO_SCALE(); +TFLMRegistration* Register_FILTER_BANK(); +TFLMRegistration* Register_FILTER_BANK_LOG(); +TFLMRegistration* Register_FILTER_BANK_SPECTRAL_SUBTRACTION(); +TFLMRegistration* Register_FILTER_BANK_SQUARE_ROOT(); +TFLMRegistration* Register_ENERGY(); +TFLMRegistration* Register_FRAMER(); +TFLMRegistration* Register_OVERLAP_ADD(); +TFLMRegistration* Register_PCAN(); +TFLMRegistration* Register_STACKER(); TFLMRegistration* Register_WINDOW(); } // namespace tflm_signal -namespace ops { -namespace micro { -TFLMRegistration Register_RESHAPE(); -TFLMRegistration Register_ROUND(); -} // namespace micro -} // namespace ops } // namespace tflite #endif // TENSORFLOW_LITE_MICRO_KERNELS_MICRO_OPS_H_ diff --git a/tensorflow/lite/micro/kernels/mirror_pad.cc b/tensorflow/lite/micro/kernels/mirror_pad.cc index 4cbaf52f5f3..aa94e1b3686 100644 --- a/tensorflow/lite/micro/kernels/mirror_pad.cc +++ b/tensorflow/lite/micro/kernels/mirror_pad.cc @@ -100,7 +100,7 @@ void MirrorPad(const TfLiteEvalTensor* padding_matrix, } } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus MirrorPadEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); TfLiteStatus status = kTfLiteOk; const OpDataMirrorPad* data = @@ -161,12 +161,12 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { return status; } -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* MirrorPadInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpDataMirrorPad)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus MirrorPadPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TFLITE_DCHECK(node->user_data != nullptr); @@ -209,7 +209,8 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_MIRROR_PAD() { - return 
tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(MirrorPadInit, MirrorPadPrepare, + MirrorPadEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/neg.cc b/tensorflow/lite/micro/kernels/neg.cc index c80a8093838..a76ac019b15 100644 --- a/tensorflow/lite/micro/kernels/neg.cc +++ b/tensorflow/lite/micro/kernels/neg.cc @@ -27,7 +27,7 @@ namespace { constexpr int kInputTensor = 0; constexpr int kOutputTensor = 0; -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus NegEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); TfLiteEvalTensor* output = @@ -51,7 +51,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_NEG() { - return tflite::micro::RegisterOp(nullptr, nullptr, Eval); + return tflite::micro::RegisterOp(nullptr, nullptr, NegEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/pack.cc b/tensorflow/lite/micro/kernels/pack.cc index 7b4aeef2197..f254329e889 100644 --- a/tensorflow/lite/micro/kernels/pack.cc +++ b/tensorflow/lite/micro/kernels/pack.cc @@ -69,7 +69,7 @@ TfLiteStatus PackImpl(TfLiteContext* context, TfLiteNode* node, return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus PackEval(TfLiteContext* context, TfLiteNode* node) { const TfLitePackParams* data = reinterpret_cast<const TfLitePackParams*>(node->builtin_data); @@ -106,7 +106,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_PACK() { - return tflite::micro::RegisterOp(nullptr, nullptr, Eval); + return tflite::micro::RegisterOp(nullptr, nullptr, PackEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/pad.cc b/tensorflow/lite/micro/kernels/pad.cc index f8d40ade96d..29f08faa534 100644 --- a/tensorflow/lite/micro/kernels/pad.cc +++ b/tensorflow/lite/micro/kernels/pad.cc @@ -32,12 +32,12 @@ struct OpData { int32_t output_zero_point; }; -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* PadInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpData)); } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus PadEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); const OpData* data = static_cast<const OpData*>(node->user_data); @@ -218,12 +218,12 @@ TfLiteStatus PadPrepare(TfLiteContext* context, TfLiteNode* node) { } TFLMRegistration Register_PAD() { - return tflite::micro::RegisterOp(Init, PadPrepare, Eval); + return tflite::micro::RegisterOp(PadInit, PadPrepare, PadEval); } // Also register Pad as PadV2.
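// (PADV2 additionally accepts an explicit constant-value input, which this kernel already handles, so the same Init/Prepare/Eval trio serves both ops.)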
TFLMRegistration Register_PADV2() { - return tflite::micro::RegisterOp(Init, PadPrepare, Eval); + return tflite::micro::RegisterOp(PadInit, PadPrepare, PadEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/pooling.cc b/tensorflow/lite/micro/kernels/pooling.cc index e03f72ec715..c178d9b1356 100644 --- a/tensorflow/lite/micro/kernels/pooling.cc +++ b/tensorflow/lite/micro/kernels/pooling.cc @@ -91,7 +91,7 @@ TfLiteStatus MaxEval(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* PoolInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpDataPooling)); } @@ -99,11 +99,11 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) { } // namespace TFLMRegistration Register_AVERAGE_POOL_2D() { - return tflite::micro::RegisterOp(Init, PoolingPrepare, AverageEval); + return tflite::micro::RegisterOp(PoolInit, PoolingPrepare, AverageEval); } TFLMRegistration Register_MAX_POOL_2D() { - return tflite::micro::RegisterOp(Init, PoolingPrepare, MaxEval); + return tflite::micro::RegisterOp(PoolInit, PoolingPrepare, MaxEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/quantize.cc b/tensorflow/lite/micro/kernels/quantize.cc index 1ac694246ae..ba11f19b6ae 100644 --- a/tensorflow/lite/micro/kernels/quantize.cc +++ b/tensorflow/lite/micro/kernels/quantize.cc @@ -25,7 +25,8 @@ limitations under the License. namespace tflite { namespace { -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* InitQuantizeReference(TfLiteContext* context, const char* buffer, + size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpDataQuantizeReference)); @@ -34,8 +35,8 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) { } // namespace TFLMRegistration Register_QUANTIZE() { - return tflite::micro::RegisterOp(Init, PrepareQuantizeReference, - EvalQuantizeReference); + return tflite::micro::RegisterOp( + InitQuantizeReference, PrepareQuantizeReference, EvalQuantizeReference); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/reduce.cc b/tensorflow/lite/micro/kernels/reduce.cc index ab24a82ddb5..a689d3b934b 100644 --- a/tensorflow/lite/micro/kernels/reduce.cc +++ b/tensorflow/lite/micro/kernels/reduce.cc @@ -29,7 +29,9 @@ limitations under the License. 
namespace tflite { void* InitReduce(TfLiteContext* context, const char* buffer, size_t length) { - return context->AllocatePersistentBuffer(context, sizeof(OpDataReduce)); + void* op_data = + context->AllocatePersistentBuffer(context, sizeof(OpDataReduce)); + return new (op_data) OpDataReduce(); } TfLiteStatus PrepareMax(TfLiteContext* context, TfLiteNode* node) { diff --git a/tensorflow/lite/micro/kernels/reduce_common.cc b/tensorflow/lite/micro/kernels/reduce_common.cc index 0dab49c2de2..2c1a92a5062 100644 --- a/tensorflow/lite/micro/kernels/reduce_common.cc +++ b/tensorflow/lite/micro/kernels/reduce_common.cc @@ -74,7 +74,9 @@ TfLiteStatus PrepareMaxHelper(TfLiteContext* context, TfLiteNode* node, TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 1); + op_data->input_zp = input->params.zero_point; op_data->input_scale = input->params.scale; + op_data->output_zp = output->params.zero_point; op_data->output_scale = output->params.scale; op_data->num_output_elements = NumElements(output); diff --git a/tensorflow/lite/micro/kernels/reshape.cc b/tensorflow/lite/micro/kernels/reshape.cc index 0720c9e6e83..5527798b842 100644 --- a/tensorflow/lite/micro/kernels/reshape.cc +++ b/tensorflow/lite/micro/kernels/reshape.cc @@ -27,11 +27,9 @@ limitations under the License. #include "tensorflow/lite/micro/micro_utils.h" namespace tflite { -namespace ops { -namespace micro { -namespace reshape { +namespace { -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus EvalReshapeReference(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kReshapeInputTensor); TfLiteEvalTensor* output = @@ -51,13 +49,11 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -} // namespace reshape +} // namespace TFLMRegistration Register_RESHAPE() { - return tflite::micro::RegisterOp(nullptr, reshape::PrepareReshapeReference, - reshape::Eval); + return tflite::micro::RegisterOp(nullptr, PrepareReshapeReference, + EvalReshapeReference); } -} // namespace micro -} // namespace ops } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/reshape.h b/tensorflow/lite/micro/kernels/reshape.h index d7d4277db27..02bda32ad7e 100644 --- a/tensorflow/lite/micro/kernels/reshape.h +++ b/tensorflow/lite/micro/kernels/reshape.h @@ -17,16 +17,10 @@ limitations under the License. #include "tensorflow/lite/c/common.h" namespace tflite { -namespace ops { -namespace micro { -namespace reshape { constexpr int kReshapeInputTensor = 0; constexpr int kReshapeOutputTensor = 0; TfLiteStatus PrepareReshapeReference(TfLiteContext* context, TfLiteNode* node); -} // namespace reshape -} // namespace micro -} // namespace ops } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/reshape_common.cc b/tensorflow/lite/micro/kernels/reshape_common.cc index bc7ba706b29..b86e2be05b4 100644 --- a/tensorflow/lite/micro/kernels/reshape_common.cc +++ b/tensorflow/lite/micro/kernels/reshape_common.cc @@ -26,9 +26,6 @@ limitations under the License. 
#include "tensorflow/lite/micro/micro_utils.h" namespace tflite { -namespace ops { -namespace micro { -namespace reshape { namespace { @@ -94,7 +91,4 @@ TfLiteStatus PrepareReshapeReference(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -} // namespace reshape -} // namespace micro -} // namespace ops } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/reshape_test.cc b/tensorflow/lite/micro/kernels/reshape_test.cc index 6f1cfc93056..d78d9fa9124 100644 --- a/tensorflow/lite/micro/kernels/reshape_test.cc +++ b/tensorflow/lite/micro/kernels/reshape_test.cc @@ -38,7 +38,7 @@ void ValidateReshapeGoldens(TfLiteTensor* tensors, int tensors_size, const size_t expected_output_len, int* expected_dims, const size_t expected_dims_len, bool expect_failure) { - const TFLMRegistration registration = tflite::ops::micro::Register_RESHAPE(); + const TFLMRegistration registration = tflite::Register_RESHAPE(); micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, outputs_array, /*builtin_data=*/nullptr); diff --git a/tensorflow/lite/micro/kernels/resize_bilinear.cc b/tensorflow/lite/micro/kernels/resize_bilinear.cc index e701e03b9c8..ab54e8114ac 100644 --- a/tensorflow/lite/micro/kernels/resize_bilinear.cc +++ b/tensorflow/lite/micro/kernels/resize_bilinear.cc @@ -30,7 +30,7 @@ constexpr int kInputTensor = 0; constexpr int kSizeTensor = 1; constexpr int kOutputTensor = 0; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ResizeBilinearPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); @@ -66,7 +66,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ResizeBilinearEval(TfLiteContext* context, TfLiteNode* node) { auto* params = reinterpret_cast(node->builtin_data); @@ -110,7 +110,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_RESIZE_BILINEAR() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, ResizeBilinearPrepare, + ResizeBilinearEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc b/tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc index 46b6ea16254..ef2d35de52e 100644 --- a/tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc +++ b/tensorflow/lite/micro/kernels/resize_nearest_neighbor.cc @@ -31,7 +31,8 @@ constexpr int kInputTensor = 0; constexpr int kSizeTensor = 1; constexpr int kOutputTensor = 0; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ResizeNearestNeighborPrepare(TfLiteContext* context, + TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); @@ -65,7 +66,8 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ResizeNearestNeighborEval(TfLiteContext* context, + TfLiteNode* node) { auto* params = reinterpret_cast(node->builtin_data); @@ -117,7 +119,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_RESIZE_NEAREST_NEIGHBOR() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, ResizeNearestNeighborPrepare, + 
ResizeNearestNeighborEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/round.cc b/tensorflow/lite/micro/kernels/round.cc index 7a9458bb40a..ae8e3543b27 100644 --- a/tensorflow/lite/micro/kernels/round.cc +++ b/tensorflow/lite/micro/kernels/round.cc @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -21,14 +21,12 @@ limitations under the License. #include "tensorflow/lite/micro/kernels/kernel_util.h" namespace tflite { -namespace ops { -namespace micro { -namespace round { +namespace { constexpr int kInputTensor = 0; constexpr int kOutputTensor = 0; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus RoundPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TfLiteTensor* input = @@ -52,7 +50,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus RoundEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); TfLiteEvalTensor* output = @@ -65,12 +63,10 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -} // namespace round +} // namespace TFLMRegistration Register_ROUND() { - return tflite::micro::RegisterOp(nullptr, round::Prepare, round::Eval); + return tflite::micro::RegisterOp(nullptr, RoundPrepare, RoundEval); } -} // namespace micro -} // namespace ops } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/round_test.cc b/tensorflow/lite/micro/kernels/round_test.cc index 1edb6093288..fa09a9fc07f 100644 --- a/tensorflow/lite/micro/kernels/round_test.cc +++ b/tensorflow/lite/micro/kernels/round_test.cc @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -41,7 +41,7 @@ void TestRound(int* input_dims_data, const float* input_data, int outputs_array_data[] = {1, 1}; TfLiteIntArray* outputs_array = IntArrayFromInts(outputs_array_data); - const TFLMRegistration registration = tflite::ops::micro::Register_ROUND(); + const TFLMRegistration registration = tflite::Register_ROUND(); micro::KernelRunner runner(registration, tensors, tensors_size, inputs_array, outputs_array, nullptr); diff --git a/tensorflow/lite/micro/kernels/shape.cc b/tensorflow/lite/micro/kernels/shape.cc index a39bfc0e13a..d95e450f683 100644 --- a/tensorflow/lite/micro/kernels/shape.cc +++ b/tensorflow/lite/micro/kernels/shape.cc @@ -35,14 +35,14 @@ void ExtractShape(const TfLiteEvalTensor* input, int32_t* output_data) { } } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ShapePrepare(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ShapeEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); TfLiteEvalTensor* output = @@ -61,7 +61,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_SHAPE() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, ShapePrepare, ShapeEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/slice.cc b/tensorflow/lite/micro/kernels/slice.cc index 973da182a0b..79634258f5c 100644 --- a/tensorflow/lite/micro/kernels/slice.cc +++ b/tensorflow/lite/micro/kernels/slice.cc @@ -44,7 +44,7 @@ void GetBeginAndSizeVectors(int dimensions, const TfLiteEvalTensor* begin, } } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SlicePrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); @@ -81,7 +81,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SliceEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); const TfLiteEvalTensor* begin = @@ -158,7 +158,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_SLICE() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, SlicePrepare, SliceEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/space_to_batch_nd.cc b/tensorflow/lite/micro/kernels/space_to_batch_nd.cc index 6b536eed2fc..f8df1496cfc 100644 --- a/tensorflow/lite/micro/kernels/space_to_batch_nd.cc +++ b/tensorflow/lite/micro/kernels/space_to_batch_nd.cc @@ -39,12 +39,13 @@ constexpr int kOutputTensor = 0; const int kInputOutputMinDimensionNum = 3; const int kInputOutputMaxDimensionNum = 4; -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* SpaceToBatchNDInit(TfLiteContext* context, const char* buffer, + size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(SpaceToBatchParams)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus 
SpaceToBatchNDPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); @@ -67,7 +68,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SpaceToBatchNDEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); const SpaceToBatchParams& params = *(static_cast<const SpaceToBatchParams*>(node->user_data)); @@ -115,7 +116,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace. TFLMRegistration Register_SPACE_TO_BATCH_ND() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(SpaceToBatchNDInit, SpaceToBatchNDPrepare, + SpaceToBatchNDEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/split.cc b/tensorflow/lite/micro/kernels/split.cc index aa877201211..cae7074b15e 100644 --- a/tensorflow/lite/micro/kernels/split.cc +++ b/tensorflow/lite/micro/kernels/split.cc @@ -67,7 +67,7 @@ TfLiteStatus SplitImpl(TfLiteContext* context, TfLiteNode* node, return kTfLiteOk; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SplitPrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 0); TF_LITE_ENSURE(context, axis != nullptr); @@ -82,7 +82,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SplitEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 0); const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 1); @@ -119,7 +119,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_SPLIT() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, SplitPrepare, SplitEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/split_v.cc b/tensorflow/lite/micro/kernels/split_v.cc index 6aed6f7ff82..ad96a20d51d 100644 --- a/tensorflow/lite/micro/kernels/split_v.cc +++ b/tensorflow/lite/micro/kernels/split_v.cc @@ -71,7 +71,7 @@ TfLiteStatus SplitImpl(TfLiteContext* context, TfLiteNode* node, return kTfLiteOk; } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SplitVPrepare(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_EQ(context, NumInputs(node), 3); MicroContext* micro_context = GetMicroContext(context); @@ -85,7 +85,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SplitVEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); const TfLiteEvalTensor* axis = tflite::micro::GetEvalInput(context, node, 2); @@ -121,7 +121,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_SPLIT_V() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, SplitVPrepare, SplitVEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/squeeze.cc b/tensorflow/lite/micro/kernels/squeeze.cc index e52ccabe90f..3df1363f746 100644 ---
a/tensorflow/lite/micro/kernels/squeeze.cc +++ b/tensorflow/lite/micro/kernels/squeeze.cc @@ -44,7 +44,7 @@ struct SqueezeContext { TfLiteTensor* output; }; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SqueezePrepare(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); @@ -55,7 +55,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { // Determines number of dimensions of output tensor after squeeze. const TfLiteIntArray* input_dims = op_context.input->dims; const TfLiteIntArray* output_dims = op_context.output->dims; - const int* squeeze_dims = op_context.params->squeeze_dims; + const int32_t* squeeze_dims = op_context.params->squeeze_dims; constexpr int max_squeeze_dims = 8; TF_LITE_ENSURE(context, input_num_dims <= max_squeeze_dims); @@ -87,7 +87,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus SqueezeEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); if (input->type == kTfLiteString) { @@ -112,7 +112,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_SQUEEZE() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, SqueezePrepare, SqueezeEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/strided_slice.cc b/tensorflow/lite/micro/kernels/strided_slice.cc index 4e60e6beea8..78507a7dd8d 100644 --- a/tensorflow/lite/micro/kernels/strided_slice.cc +++ b/tensorflow/lite/micro/kernels/strided_slice.cc @@ -14,146 +14,30 @@ limitations under the License. 
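Editorial aside on the recurring pattern in the kernel files above and below: each file's internal Init/Prepare/Eval functions are renamed to kernel-specific entry points before being handed to tflite::micro::RegisterOp. A minimal sketch of the resulting shape, assuming the three-callback (init, prepare, invoke) form of RegisterOp used throughout this patch; MY_OP and its callbacks are hypothetical stand-ins, not code from this change:

// Sketch only: the naming pattern these diffs apply. Unique, kernel-prefixed
// names keep symbols distinguishable in debuggers, map files, and profilers
// when every kernel is linked into one library, since internal-linkage
// functions all named plain Prepare/Eval collide on the same display name.
namespace tflite {
namespace {

TfLiteStatus MyOpPrepare(TfLiteContext* context, TfLiteNode* node) {
  return kTfLiteOk;  // shape and type validation would go here
}

TfLiteStatus MyOpEval(TfLiteContext* context, TfLiteNode* node) {
  return kTfLiteOk;  // the per-invoke computation would go here
}

}  // namespace

TFLMRegistration Register_MY_OP() {
  // nullptr init: this hypothetical op keeps no persistent user_data.
  return tflite::micro::RegisterOp(nullptr, MyOpPrepare, MyOpEval);
}

}  // namespace tflite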
==============================================================================*/ #include "tensorflow/lite/kernels/internal/reference/strided_slice.h" -#include +#include #include -#include "tensorflow/lite/c/builtin_op_data.h" #include "tensorflow/lite/c/common.h" #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" #include "tensorflow/lite/kernels/op_macros.h" #include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/strided_slice.h" #include "tensorflow/lite/micro/micro_log.h" namespace tflite { namespace { -constexpr int kInputTensor = 0; -constexpr int kBeginTensor = 1; -constexpr int kEndTensor = 2; -constexpr int kStridesTensor = 3; -constexpr int kOutputTensor = 0; - -struct StridedSliceContext { - StridedSliceContext(TfLiteContext* context, TfLiteNode* node) { - params = reinterpret_cast<TfLiteStridedSliceParams*>(node->builtin_data); - micro_context = GetMicroContext(context); - input = micro_context->AllocateTempInputTensor(node, kInputTensor); - begin = micro_context->AllocateTempInputTensor(node, kBeginTensor); - end = micro_context->AllocateTempInputTensor(node, kEndTensor); - strides = micro_context->AllocateTempInputTensor(node, kStridesTensor); - output = micro_context->AllocateTempOutputTensor(node, kOutputTensor); - dims = NumDimensions(input); - } - ~StridedSliceContext() { - micro_context->DeallocateTempTfLiteTensor(input); - micro_context->DeallocateTempTfLiteTensor(begin); - micro_context->DeallocateTempTfLiteTensor(end); - micro_context->DeallocateTempTfLiteTensor(strides); - micro_context->DeallocateTempTfLiteTensor(output); - } - const TfLiteStridedSliceParams* params; - MicroContext* micro_context; - TfLiteTensor* input; - TfLiteTensor* begin; - TfLiteTensor* end; - TfLiteTensor* strides; - TfLiteTensor* output; - int dims; -}; - -// This Op only supports 1-4D cases and since we use the reference 4D -// implementation, the 1-3D tensors are mapped to 4D. -const int kMaxDim = 4; - -tflite::StridedSliceParams BuildStridedSliceParams( - StridedSliceContext* op_context) { - tflite::StridedSliceParams op_params{}; - op_params.start_indices_count = op_context->dims; - op_params.stop_indices_count = op_context->dims; - op_params.strides_count = op_context->dims; - - for (int i = 0; i < op_context->dims; ++i) { - op_params.start_indices[i] = GetTensorData<int32_t>(op_context->begin)[i]; - op_params.stop_indices[i] = GetTensorData<int32_t>(op_context->end)[i]; - op_params.strides[i] = GetTensorData<int32_t>(op_context->strides)[i]; - } - - op_params.begin_mask = op_context->params->begin_mask; - op_params.ellipsis_mask = 0; - op_params.end_mask = op_context->params->end_mask; - op_params.new_axis_mask = 0; - op_params.shrink_axis_mask = op_context->params->shrink_axis_mask; - return op_params; -} - -// Processes the indexing tensors (begin, end and strides) to resize the -// output tensor. This function is callable from both Prepare() and Eval() as -// long as the caller ensures the indexing tensors are present.
-TfLiteStatus CheckOutputSize(TfLiteContext* context, - StridedSliceContext* op_context) { - using ::tflite::strided_slice::StartForAxis; - using ::tflite::strided_slice::StopForAxis; - TfLiteIntArray* output_shape = op_context->output->dims; - int shape_size = 0; - auto op_params = BuildStridedSliceParams(op_context); - auto input_shape = GetTensorShape(op_context->input); - for (int idx = 0; idx < op_context->dims; ++idx) { - int32_t stride = GetTensorData<int32_t>(op_context->strides)[idx]; - TF_LITE_ENSURE_MSG(context, stride != 0, "stride value has to be non-zero"); - int32_t begin = StartForAxis(op_params, input_shape, idx); - int32_t end = StopForAxis(op_params, input_shape, idx, begin); - - // When shrinking an axis, the end position does not matter (and can be - // incorrect when negative indexing is used, see Issue #19260). Always use - // begin + 1 to generate a length 1 slice, since begin has - // already been adjusted for negative indices by StartForAxis. - const bool shrink_axis = op_context->params->shrink_axis_mask & (1 << idx); - if (shrink_axis) { - end = begin + 1; - } - - // This is valid for both positive and negative strides - int32_t dim_shape = std::ceil((end - begin) / static_cast<float>(stride)); - dim_shape = dim_shape < 0 ? 0 : dim_shape; - if (!shrink_axis) { - TF_LITE_ENSURE_EQ(context, output_shape->data[shape_size], dim_shape); - shape_size++; - } - } - TF_LITE_ENSURE_EQ(context, output_shape->size, shape_size); - return kTfLiteOk; -} - -void* Init(TfLiteContext* context, const char* buffer, size_t length) { - TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); - return context->AllocatePersistentBuffer(context, sizeof(StridedSliceParams)); -} - -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { - TFLITE_DCHECK(node->user_data != nullptr); - StridedSliceParams* op_params = - static_cast<StridedSliceParams*>(node->user_data); - TF_LITE_ENSURE_EQ(context, NumInputs(node), 4); - TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); - StridedSliceContext op_context(context, node); - TF_LITE_ENSURE_MSG(context, op_context.dims <= kMaxDim, - "input dim should not exceed 4"); - auto params = BuildStridedSliceParams(&op_context); - memcpy(op_params, &params, sizeof(StridedSliceParams)); - return CheckOutputSize(context, &op_context); -} - -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus StridedSliceEval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); const StridedSliceParams& op_params = *(static_cast<const StridedSliceParams*>(node->user_data)); const TfLiteEvalTensor* input = - tflite::micro::GetEvalInput(context, node, kInputTensor); + tflite::micro::GetEvalInput(context, node, kStridedSliceInputTensor); TfLiteEvalTensor* output = - tflite::micro::GetEvalOutput(context, node, kOutputTensor); + tflite::micro::GetEvalOutput(context, node, kStridedSliceOutputTensor); switch (output->type) { case kTfLiteFloat32: reference_ops::StridedSlice(op_params, @@ -201,7 +85,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_STRIDED_SLICE() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(StridedSliceInit, StridedSlicePrepare, + StridedSliceEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/strided_slice.h b/tensorflow/lite/micro/kernels/strided_slice.h new file mode 100644 index 00000000000..ea9413f36ff --- /dev/null +++ b/tensorflow/lite/micro/kernels/strided_slice.h @@ -0,0 +1,40 @@ +/* Copyright 2023 The TensorFlow Authors.
All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_STRIDED_SLICE_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_STRIDED_SLICE_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +constexpr int kStridedSliceInputTensor = 0; +constexpr int kStridedSliceBeginTensor = 1; +constexpr int kStridedSliceEndTensor = 2; +constexpr int kStridedSliceStridesTensor = 3; +constexpr int kStridedSliceOutputTensor = 0; + +void* StridedSliceInit(TfLiteContext* context, const char* buffer, + size_t length); + +TfLiteStatus StridedSlicePrepare(TfLiteContext* context, TfLiteNode* node); + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_STRIDED_SLICE_H_ diff --git a/tensorflow/lite/micro/kernels/strided_slice_common.cc b/tensorflow/lite/micro/kernels/strided_slice_common.cc new file mode 100644 index 00000000000..165e1f39039 --- /dev/null +++ b/tensorflow/lite/micro/kernels/strided_slice_common.cc @@ -0,0 +1,149 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
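The point of the new strided_slice.h (together with strided_slice_common.cc below, and mirrored later for transpose_conv and the Xtensa conv files) is that StridedSliceInit and StridedSlicePrepare become shared, externally visible entry points instead of file-local functions. A port can then reuse all the validation and parameter packing and swap in only its own Eval. A hedged sketch of what such a port-side registration could look like; PortStridedSliceEval is a hypothetical optimized kernel, not part of this patch:

// Hypothetical port file, assuming only the declarations shown in
// strided_slice.h above: reuse the shared Init/Prepare, supply a
// target-specific Eval.
#include "tensorflow/lite/micro/kernels/kernel_util.h"
#include "tensorflow/lite/micro/kernels/strided_slice.h"

namespace tflite {
namespace {

TfLiteStatus PortStridedSliceEval(TfLiteContext* context, TfLiteNode* node) {
  // ... accelerated slicing of kStridedSliceInputTensor would go here ...
  return kTfLiteOk;
}

}  // namespace

TFLMRegistration Register_STRIDED_SLICE() {
  return tflite::micro::RegisterOp(StridedSliceInit, StridedSlicePrepare,
                                   PortStridedSliceEval);
}

}  // namespace tflite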
+==============================================================================*/ +#include <cmath> +#include <cstring> + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/reference/strided_slice.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/kernels/op_macros.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/strided_slice.h" +#include "tensorflow/lite/micro/micro_log.h" + +namespace tflite { + +namespace { + +struct StridedSliceContext { + StridedSliceContext(TfLiteContext* context, TfLiteNode* node) { + params = reinterpret_cast<TfLiteStridedSliceParams*>(node->builtin_data); + micro_context = GetMicroContext(context); + input = + micro_context->AllocateTempInputTensor(node, kStridedSliceInputTensor); + begin = + micro_context->AllocateTempInputTensor(node, kStridedSliceBeginTensor); + end = micro_context->AllocateTempInputTensor(node, kStridedSliceEndTensor); + strides = micro_context->AllocateTempInputTensor( + node, kStridedSliceStridesTensor); + output = micro_context->AllocateTempOutputTensor(node, + kStridedSliceOutputTensor); + dims = NumDimensions(input); + } + ~StridedSliceContext() { + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(begin); + micro_context->DeallocateTempTfLiteTensor(end); + micro_context->DeallocateTempTfLiteTensor(strides); + micro_context->DeallocateTempTfLiteTensor(output); + } + const TfLiteStridedSliceParams* params; + MicroContext* micro_context; + TfLiteTensor* input; + TfLiteTensor* begin; + TfLiteTensor* end; + TfLiteTensor* strides; + TfLiteTensor* output; + int dims; +}; + +// This Op only supports 1-4D cases and since we use the reference 4D +// implementation, the 1-3D tensors are mapped to 4D. +const int kMaxDim = 4; + +tflite::StridedSliceParams BuildStridedSliceParams( + StridedSliceContext* op_context) { + tflite::StridedSliceParams op_params{}; + op_params.start_indices_count = op_context->dims; + op_params.stop_indices_count = op_context->dims; + op_params.strides_count = op_context->dims; + + for (int i = 0; i < op_context->dims; ++i) { + op_params.start_indices[i] = GetTensorData<int32_t>(op_context->begin)[i]; + op_params.stop_indices[i] = GetTensorData<int32_t>(op_context->end)[i]; + op_params.strides[i] = GetTensorData<int32_t>(op_context->strides)[i]; + } + + op_params.begin_mask = op_context->params->begin_mask; + op_params.ellipsis_mask = 0; + op_params.end_mask = op_context->params->end_mask; + op_params.new_axis_mask = 0; + op_params.shrink_axis_mask = op_context->params->shrink_axis_mask; + return op_params; +} + +// Processes the indexing tensors (begin, end and strides) to resize the +// output tensor. This function is callable from both Prepare() and Eval() as +// long as the caller ensures the indexing tensors are present.
+TfLiteStatus CheckOutputSize(TfLiteContext* context, + StridedSliceContext* op_context) { + using ::tflite::strided_slice::StartForAxis; + using ::tflite::strided_slice::StopForAxis; + TfLiteIntArray* output_shape = op_context->output->dims; + int shape_size = 0; + auto op_params = BuildStridedSliceParams(op_context); + auto input_shape = GetTensorShape(op_context->input); + for (int idx = 0; idx < op_context->dims; ++idx) { + int32_t stride = GetTensorData<int32_t>(op_context->strides)[idx]; + TF_LITE_ENSURE_MSG(context, stride != 0, "stride value has to be non-zero"); + int32_t begin = StartForAxis(op_params, input_shape, idx); + int32_t end = StopForAxis(op_params, input_shape, idx, begin); + + // When shrinking an axis, the end position does not matter (and can be + // incorrect when negative indexing is used, see Issue #19260). Always use + // begin + 1 to generate a length 1 slice, since begin has + // already been adjusted for negative indices by StartForAxis. + const bool shrink_axis = op_context->params->shrink_axis_mask & (1 << idx); + if (shrink_axis) { + end = begin + 1; + } + + // This is valid for both positive and negative strides + int32_t dim_shape = std::ceil((end - begin) / static_cast<float>(stride)); + dim_shape = dim_shape < 0 ? 0 : dim_shape; + if (!shrink_axis) { + TF_LITE_ENSURE_EQ(context, output_shape->data[shape_size], dim_shape); + shape_size++; + } + } + TF_LITE_ENSURE_EQ(context, output_shape->size, shape_size); + return kTfLiteOk; +} + +} // namespace + +void* StridedSliceInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(StridedSliceParams)); +} + +TfLiteStatus StridedSlicePrepare(TfLiteContext* context, TfLiteNode* node) { + TFLITE_DCHECK(node->user_data != nullptr); + StridedSliceParams* op_params = + static_cast<StridedSliceParams*>(node->user_data); + TF_LITE_ENSURE_EQ(context, NumInputs(node), 4); + TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); + StridedSliceContext op_context(context, node); + TF_LITE_ENSURE_MSG(context, op_context.dims <= kMaxDim, + "input dim should not exceed 4"); + auto params = BuildStridedSliceParams(&op_context); + memcpy(op_params, &params, sizeof(StridedSliceParams)); + return CheckOutputSize(context, &op_context); +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/svdf.cc b/tensorflow/lite/micro/kernels/svdf.cc index 0ffb4b07734..9e85c6b902f 100644 --- a/tensorflow/lite/micro/kernels/svdf.cc +++ b/tensorflow/lite/micro/kernels/svdf.cc @@ -32,12 +32,12 @@ limitations under the License.
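To make the arithmetic in CheckOutputSize above concrete: each axis that is not shrunk contributes ceil((end - begin) / stride) elements, clamped below at zero, with begin and end already normalized by StartForAxis/StopForAxis. A small standalone worked example (the values are invented for illustration and are not from this patch):

// Slicing an axis of length 10 with begin=1, end=8, stride=3:
//   dim = ceil((8 - 1) / 3.0) = ceil(2.33) = 3   -> elements 1, 4, 7.
// With a shrink_axis bit set, end is forced to begin + 1, so the axis
// yields exactly one element and is dropped from the output shape.
#include <cmath>
#include <cstdio>

int main() {
  const int begin = 1, end = 8, stride = 3;
  const int dim = static_cast<int>(
      std::ceil((end - begin) / static_cast<float>(stride)));
  std::printf("dim = %d\n", dim);  // prints: dim = 3
  return 0;
}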
namespace tflite { namespace { -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* InitSvdf(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpDataSvdf)); } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus EvalSvdf(TfLiteContext* context, TfLiteNode* node) { auto* params = reinterpret_cast<TfLiteSVDFParams*>(node->builtin_data); TFLITE_DCHECK(node->user_data != nullptr); const OpDataSvdf& data = *(static_cast<const OpDataSvdf*>(node->user_data)); @@ -99,7 +99,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_SVDF() { - return tflite::micro::RegisterOp(Init, PrepareSvdf, Eval); + return tflite::micro::RegisterOp(InitSvdf, PrepareSvdf, EvalSvdf); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/testdata/BUILD b/tensorflow/lite/micro/kernels/testdata/BUILD index a20337ac434..0c7822d3dd6 100644 --- a/tensorflow/lite/micro/kernels/testdata/BUILD +++ b/tensorflow/lite/micro/kernels/testdata/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_test") load("@tflm_pip_deps//:requirements.bzl", "requirement") package( @@ -44,7 +45,7 @@ py_binary( deps = [ "@absl_py//absl:app", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc index 4d7d9d9edcb..0fc010bca1e 100644 --- a/tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.cc @@ -251,11 +251,12 @@ NodeQuantizationParameters Get2X2Int16LstmQuantizationSettings() { // state quantization parameters quantization_settings.input = {/*scale=*/3.0518044e-5, /*zp=*/0, - /*symmetry=*/false}; - quantization_settings.output = {/*scale=*/1.8310826e-5, /*zp=*/-5461, - /*symmetry=*/false}; - quantization_settings.hidden_state = {/*scale=*/1.8310826e-5, /*zp=*/-5461, - /*symmetry=*/false}; + /*symmetry=*/true}; + quantization_settings.output = {/*scale=*/2.1362956633198035e-05, /*zp=*/0, + /*symmetry=*/true}; + quantization_settings.hidden_state = {/*scale=*/2.1362956633198035e-05, + /*zp=*/0, + /*symmetry=*/true}; quantization_settings.cell_state = {/*scale=*/0.00024414062, /*zp=*/0, /*symmetry=*/true}; diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data.h b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.h index 3edf4200aa0..932b83203ef 100644 --- a/tensorflow/lite/micro/kernels/testdata/lstm_test_data.h +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data.h @@ -390,9 +390,9 @@ class LstmNodeContent { int state_size_[3] = {2, batch_size, state_dimension}; // see lstm_shared.h for tensor names, the last tensor is the output tensor - TfLiteTensor tensors_[24 + 1]; + TfLiteTensor tensors_[24 + 1] = {}; // Use for internal kernel testing - TfLiteEvalTensor eval_tensors_[24 + 1]; + TfLiteEvalTensor eval_tensors_[24 + 1] = {}; // indices for the tensors inside the node (required by kernel runner) int input_tensor_indices_[1 + 24] = {}; // single output (last in the tensors array) diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator.py b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator.py index 97c8798ef44..c6553fe2e4f 100644 --- a/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator.py +++
b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_generator.py @@ -17,15 +17,15 @@ 2. Print the intermediate step outputs inside the LSTM for a single step LSTM invocation (Get2X2GateOutputCheckData in .cc) 3. Print the outputs for multi-step LSTM invocation (Get2X2LstmEvalCheckData in .cc) -Every invocation gives three types information: -1. Quantized output: kernel output in integer +Every invocation gives three types of information: +1. Quantized output: kernel output in integer 2. Dequantized output: Quantized output in floating point representation 3. Float output: output from the floating point computation (i.e., float kernel) -Note: +Note: 1. Change quantization settings in _KERNEL_CONFIG to see the outcomes from various quantization schema (e.g., 8x8 Vs. 16x8) 2. Only single batch inference is supported here. Change _GATE_TEST_DATA or _MULTISTEP_TEST_DATA to see kernel outputs on different input data -3. The quantization computation here is not the exact as the c++ implementation. The integer calculation is mimiced here using floating point. +3. The quantization computation here is not exactly the same as the C++ implementation. The integer calculation is emulated here using floating point. No fixed point math is implemented here. The purpose is to illustrate the computation procedure and possible quantization error accumulation, not for bit exactness. """ from absl import app @@ -88,7 +88,7 @@ _MULTISTEP_TEST_DATA = { 'init_hidden_state_vals': [0, 0], 'init_cell_state_vals': [0, 0], - 'input_data': [0.2, 0.3, 0.2, 0.3, 0.2, 0.3], # three time steps + 'input_data': [0.2, 0.3, 0.2, 0.3, 0.2, 0.3], # three time steps 'hidden_state_range': (-0.5, 0.7), 'cell_state_range': [-8, 8], 'input_data_range': [-1, 1] diff --git a/tensorflow/lite/micro/kernels/testdata/lstm_test_data_utils.py b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_utils.py index 345b143fad5..142a58ca8ae 100644 --- a/tensorflow/lite/micro/kernels/testdata/lstm_test_data_utils.py +++ b/tensorflow/lite/micro/kernels/testdata/lstm_test_data_utils.py @@ -92,14 +92,14 @@ def sigmoid(x): def quantized_sigmoid(input, input_scale, output_scale, num_bits=16): - """Sigmoid (interger)""" + """Sigmoid (integer)""" float_input = input * input_scale float_result = sigmoid(float_input) return quantize_data(float_result, output_scale, bit_width=num_bits) def quantized_tanh(input, input_scale, output_scale, num_bits=16): - """Tanh (interger)""" + """Tanh (integer)""" float_input = input * input_scale float_result = np.tanh(float_input) return quantize_data(float_result, output_scale, bit_width=num_bits) diff --git a/tensorflow/lite/micro/kernels/transpose.cc b/tensorflow/lite/micro/kernels/transpose.cc index 710bfca40d3..fd17e893937 100644 --- a/tensorflow/lite/micro/kernels/transpose.cc +++ b/tensorflow/lite/micro/kernels/transpose.cc @@ -46,7 +46,7 @@ struct TransposeContext { TfLiteTensor* output; }; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus TransposePrepare(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_EQ(context, NumInputs(node), 2); TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); @@ -72,7 +72,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus TransposeEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* perm_tensor = tflite::micro::GetEvalInput(context, node, kPermTensor); const int32_t* perm_data = perm_tensor->data.i32; @@ -117,6 +117,6 @@
TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_TRANSPOSE() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, TransposePrepare, TransposeEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/transpose_conv.cc b/tensorflow/lite/micro/kernels/transpose_conv.cc index a2ac2b463e9..ea0efae0607 100644 --- a/tensorflow/lite/micro/kernels/transpose_conv.cc +++ b/tensorflow/lite/micro/kernels/transpose_conv.cc @@ -15,6 +15,9 @@ limitations under the License. #include "tensorflow/lite/kernels/internal/reference/transpose_conv.h" +#include +#include + #include "tensorflow/lite/c/builtin_op_data.h" #include "tensorflow/lite/c/common.h" #include "tensorflow/lite/kernels/internal/common.h" @@ -48,8 +51,9 @@ struct OpData { // A scratch buffer is required for quantized implementations. int scratch_buffer_index; - // TODO(b/192090531): Remove this once all 8x16 transpose conv models use - // 64-bit biases. + // Index to the converted 64-bit bias buffer from 16-bit bias. This is + // required to handle 16x8 transpose convolutions where a 16-bit bias is + // provided, whereas the kernel expects 64-bit biases. int bias_converted_buffer_index; // Multiplier and shift arrays are required for the int8 implementation. @@ -123,7 +127,9 @@ TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, if (input->type == kTfLiteInt16) { TFLITE_DCHECK(filter->type == kTfLiteInt8); TFLITE_DCHECK(output->type == kTfLiteInt16); - if (bias->type == kTfLiteInt16) { + // Handle the case where the bias is 16 bits for 16x8 transpose + // convolution where the kernel actually expects 64-bit biases. + if (bias != nullptr && bias->type == kTfLiteInt16) { TFLITE_DCHECK( context->RequestScratchBufferInArena( context, GetTensorShape(bias).FlatSize() * sizeof(std::int64_t), @@ -141,12 +147,13 @@ TfLiteStatus CalculateOpData(TfLiteContext* context, TfLiteNode* node, return kTfLiteOk; } -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* TransposeConvInit(TfLiteContext* context, const char* buffer, + size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpData)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus TransposeConvPrepare(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); TFLITE_DCHECK(node->builtin_data != nullptr); @@ -243,7 +250,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus TransposeConvEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); const TfLiteEvalTensor* filter = @@ -298,12 +305,10 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { break; } case kTfLiteInt16: { - std::int64_t* scratch_buffer = static_cast<std::int64_t*>( + auto* scratch_buffer = static_cast<std::int64_t*>( context->GetScratchBuffer(context, data.scratch_buffer_index)); - // TODO(b/192090531): Remove this once all 8x16 transpose conv models use - // 64-bit biases.
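For context on bias_converted_buffer_index above: the 16x8 transpose-conv kernel consumes 64-bit biases, so a model that ships int16 biases needs a widening copy into scratch memory before the reference kernel runs, which is what the loop just below performs. A reduced sketch of only the widening step (bias16, bias64, and count are illustrative names, not the kernel's own):

// Widening int16 biases into the int64 layout the 16x8 kernel expects.
#include <cstdint>

void WidenBias(const int16_t* bias16, std::int64_t* bias64, int count) {
  for (int i = 0; i < count; ++i) {
    bias64[i] = static_cast<std::int64_t>(bias16[i]);  // sign-extending copy
  }
}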
if (bias != nullptr && bias->type == kTfLiteInt16) { - std::int64_t* bias_converted_buffer = + auto* bias_converted_buffer = static_cast<std::int64_t*>(context->GetScratchBuffer( context, data.bias_converted_buffer_index)); for (int i = 0; i < tflite::micro::GetTensorShape(bias).FlatSize(); @@ -346,7 +351,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_TRANSPOSE_CONV() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(TransposeConvInit, TransposeConvPrepare, + TransposeConvEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/transpose_conv.h b/tensorflow/lite/micro/kernels/transpose_conv.h new file mode 100644 index 00000000000..3a99ccbf847 --- /dev/null +++ b/tensorflow/lite/micro/kernels/transpose_conv.h @@ -0,0 +1,50 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_TRANSPOSE_CONV_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_TRANSPOSE_CONV_H_ + +#include + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/kernels/internal/types.h" +#include "tensorflow/lite/micro/micro_common.h" + +namespace tflite { + +// This is the most generic TFLMRegistration. The actual supported types +// may still be target dependent. The only requirement is that every +// implementation (reference or optimized) must define this function. +TFLMRegistration Register_TRANSPOSE_CONV(); + +#if defined(CMSIS_NN) +// Returns a TFLMRegistration struct for kernel variant that only supports +// int8. +TFLMRegistration Register_TRANSPOSE_CONV_INT8(); + +#else +// Note that while this block gets used for both reference and optimized kernels +// that do not have any specialized implementations, the only goal here is to +// define fallback implementations that allow reference kernels to still be used +// from applications that call a more specific kernel variant.
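The practical effect of this header is that application code can always link against the more specific symbol: on CMSIS-NN builds Register_TRANSPOSE_CONV_INT8() resolves to the optimized int8 kernel, while elsewhere the inline fallback below simply forwards to the generic registration. A sketch of caller-side usage (how the registration is then wired into an op resolver is left out, since that detail is not part of this patch):

// Illustrative use: request the int8-only variant; the header guarantees
// this compiles everywhere and silently falls back to the generic kernel
// on targets without CMSIS-NN.
#include "tensorflow/lite/micro/kernels/transpose_conv.h"

const TFLMRegistration transpose_conv_registration =
    tflite::Register_TRANSPOSE_CONV_INT8();
// ... hand transpose_conv_registration to the op resolver / interpreter ...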
+ +inline TFLMRegistration Register_TRANSPOSE_CONV_INT8() { + return Register_TRANSPOSE_CONV(); +} + +#endif + +} // namespace tflite + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_TRANSPOSE_CONV_H_ diff --git a/tensorflow/lite/micro/kernels/transpose_conv_test.cc b/tensorflow/lite/micro/kernels/transpose_conv_test.cc index 0ddb3b2071f..49d2c90f439 100644 --- a/tensorflow/lite/micro/kernels/transpose_conv_test.cc +++ b/tensorflow/lite/micro/kernels/transpose_conv_test.cc @@ -53,7 +53,8 @@ static TfLiteConvParams common_conv_params = {kTfLitePaddingSame, // padding 1, // stride_height kTfLiteActNone, 1, - 1}; + 1, + kTfLiteNoType}; template <typename T> TfLiteStatus InvokeTransposeConv(TfLiteTensor* tensors, int tensors_size, @@ -253,7 +254,8 @@ TF_LITE_MICRO_TEST(fusedRELUTest) { 1, // stride_height kTfLiteActRelu, 1, - 1}; + 1, + kTfLiteNoType}; TF_LITE_MICRO_EXPECT_EQ( kTfLiteOk, tflite::testing::TestTransposeConvFloat( @@ -276,7 +278,8 @@ TF_LITE_MICRO_TEST(AccuracyWithFusedActivationTest) { 3, // stride_height kTfLiteActRelu, 1, - 1}; + 1, + kTfLiteNoType}; TF_LITE_MICRO_EXPECT_EQ( kTfLiteOk, tflite::testing::TestTransposeConvFloat( @@ -304,7 +307,8 @@ TF_LITE_MICRO_TEST(MultiChannelBiasWithFusedActivationTest) { 2, // stride_height kTfLiteActRelu, 1, - 1}; + 1, + kTfLiteNoType}; TF_LITE_MICRO_EXPECT_EQ( kTfLiteOk, diff --git a/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.h b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.h index 16aa23b9163..46f6b2d4923 100644 --- a/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.h +++ b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm.h @@ -1,4 +1,4 @@ -/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -36,10 +36,19 @@ TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); // implementations. TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT8(); +// Returns a TFLMRegistration struct for kernel variant that only supports +// int16 activations and int8 weights and uses the latency optimized +// implementations. +TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT16(); + #else inline TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT8() { return Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); } + +inline TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM_INT16() { + return Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); +} #endif } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc index c85e56fe667..1e5a86808f4 100644 --- a/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc +++ b/tensorflow/lite/micro/kernels/unidirectional_sequence_lstm_test.cc @@ -1,4 +1,4 @@ -/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
@@ -28,7 +28,6 @@ namespace testing { namespace { constexpr int kLstmMaxNumInputOutputTensors = 24 + 1; -constexpr int kLstmIntermediateTensorBase = kLstmMaxNumInputOutputTensors + 1; // Validate the output result array with golden values template @@ -50,42 +49,20 @@ void TestUnidirectionalLSTMInteger( LstmNodeContent& node_contents) { - TfLiteTensor tensors[kLstmMaxNumInputOutputTensors + 1 + 5]; - memcpy(tensors, node_contents.GetTensors(), - kLstmMaxNumInputOutputTensors * sizeof(TfLiteTensor)); - - // Provide also intermediate tensors needed by older LSTM implementations - int intermediate_array_data[6] = {5, - kLstmIntermediateTensorBase, - kLstmIntermediateTensorBase + 1, - kLstmIntermediateTensorBase + 2, - kLstmIntermediateTensorBase + 3, - kLstmIntermediateTensorBase + 4}; - int input_zero_points[2] = {1, -21}; - float input_scales[2] = {1, 0.004705882165580988}; - TfLiteAffineQuantization input_quant = { - tflite::testing::FloatArrayFromFloats(input_scales), - tflite::testing::IntArrayFromInts(input_zero_points), 0}; - int intermediate_dim[2] = {1, 0}; - for (int i = 0; i < 5; ++i) { - tensors[kLstmIntermediateTensorBase + i] = - CreateTensor(nullptr, IntArrayFromInts(intermediate_dim)); - tensors[kLstmIntermediateTensorBase + i].quantization = { - kTfLiteAffineQuantization, &input_quant}; - } - const TFLMRegistration registration = Register_UNIDIRECTIONAL_SEQUENCE_LSTM(); auto buildin_data = node_contents.BuiltinData(); micro::KernelRunner runner( - registration, tensors, kLstmMaxNumInputOutputTensors + 1 + 5, + registration, node_contents.GetTensors(), kLstmMaxNumInputOutputTensors, node_contents.KernelInputs(), node_contents.KernelOutputs(), - reinterpret_cast<void*>(&buildin_data), - IntArrayFromInts(intermediate_array_data)); + reinterpret_cast<void*>(&buildin_data)); TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.InitAndPrepare()); TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk, runner.Invoke()); const auto& quantization_settings = node_contents.QuantizationSettings(); +// CMSIS-NN does not use the hidden state and cell state tensors so these tests +// fail.
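These tests validate kernels in float space: quantized outputs are mapped back through the affine rule real = scale * (q - zero_point) before ValidateResultGoldens applies its tolerance. A tiny standalone instance using the new int16 hidden-state parameters from lstm_test_data.cc above (scale 2.1362956633198035e-05, zero point 0); the sample value q is invented for illustration:

// Affine dequantization as used by the tests' Dequantize() checks.
// For q = 23405:  real = 2.1362956633198035e-05 * (23405 - 0) ≈ 0.5
#include <cstdio>

int main() {
  const float scale = 2.1362956633198035e-05f;
  const int zero_point = 0;
  const int q = 23405;
  std::printf("%f\n", scale * (q - zero_point));  // ≈ 0.500000
  return 0;
}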
+#if !defined(CMSIS_NN) float dequantized_hidden_state[batch_size * state_dimension] = {}; Dequantize(node_contents.GetHiddenStateData(), batch_size * state_dimension, quantization_settings.hidden_state.scale, @@ -104,6 +81,7 @@ void TestUnidirectionalLSTMInteger( ValidateResultGoldens(eval_check_data.expected_cell_state, dequantized_cell_state, batch_size * state_dimension, cell_state_tolerance); +#endif float dequantized_output[batch_size * state_dimension * time_steps] = {}; Dequantize(node_contents.GetOutputData(), @@ -150,7 +128,6 @@ void TestUnidirectionalLSTMFloat( TF_LITE_MICRO_TESTS_BEGIN // TODO(b/230666079) enable below tests for xtensa when the xtensa // kernel is reconciled with reference kernel -#if !defined(XTENSA) TF_LITE_MICRO_TEST(TestUnidirectionalLSTMFloat) { const tflite::testing::LstmEvalCheckData<12, 4, 12> kernel_eval_data = tflite::testing::Get2X2LstmEvalCheckData(); @@ -193,5 +170,4 @@ TF_LITE_MICRO_TEST(TestUnidirectionalLSTMInt16) { kernel_eval_data, hidden_state_tolerance, cell_state_tolerance, int16_node_contents); } -#endif // !defined(XTENSA) TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/kernels/unpack.cc b/tensorflow/lite/micro/kernels/unpack.cc index 3ce4c33f3ad..9ce168384a4 100644 --- a/tensorflow/lite/micro/kernels/unpack.cc +++ b/tensorflow/lite/micro/kernels/unpack.cc @@ -72,7 +72,7 @@ TfLiteStatus UnpackImpl(TfLiteContext* context, TfLiteNode* node, return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus UnpackEval(TfLiteContext* context, TfLiteNode* node) { TfLiteUnpackParams* data = reinterpret_cast<TfLiteUnpackParams*>(node->builtin_data); @@ -102,7 +102,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_UNPACK() { - return tflite::micro::RegisterOp(nullptr, nullptr, Eval); + return tflite::micro::RegisterOp(nullptr, nullptr, UnpackEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/var_handle.cc b/tensorflow/lite/micro/kernels/var_handle.cc index 06087f79875..0efb28ccc58 100644 --- a/tensorflow/lite/micro/kernels/var_handle.cc +++ b/tensorflow/lite/micro/kernels/var_handle.cc @@ -36,12 +36,12 @@ struct OpData { int32_t resource_id; }; -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* VarHandleInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpData)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus VarHandlePrepare(TfLiteContext* context, TfLiteNode* node) { OpData* op_data = reinterpret_cast<OpData*>(node->user_data); const auto* params = reinterpret_cast<TfLiteVarHandleParams*>(node->builtin_data); @@ -72,7 +72,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus VarHandleEval(TfLiteContext* context, TfLiteNode* node) { OpData* op_data = reinterpret_cast<OpData*>(node->user_data); TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0); @@ -87,7 +87,8 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace.
TFLMRegistration Register_VAR_HANDLE() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(VarHandleInit, VarHandlePrepare, + VarHandleEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/while.cc b/tensorflow/lite/micro/kernels/while.cc index 097a3421867..a11adebd4df 100644 --- a/tensorflow/lite/micro/kernels/while.cc +++ b/tensorflow/lite/micro/kernels/while.cc @@ -36,12 +36,12 @@ struct OpData { int body_subgraph_index; }; -void* Init(TfLiteContext* context, const char* buffer, size_t length) { +void* WhileInit(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(OpData)); } -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus WhilePrepare(TfLiteContext* context, TfLiteNode* node) { OpData* op_data = reinterpret_cast<OpData*>(node->user_data); const auto* params = reinterpret_cast<TfLiteWhileParams*>(node->builtin_data); @@ -74,7 +74,7 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus WhileEval(TfLiteContext* context, TfLiteNode* node) { const OpData* op_data = reinterpret_cast<OpData*>(node->user_data); tflite::MicroContext* micro_context = tflite::GetMicroContext(context); @@ -127,7 +127,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace. TFLMRegistration Register_WHILE() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(WhileInit, WhilePrepare, WhileEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/add.cc b/tensorflow/lite/micro/kernels/xtensa/add.cc index 4e4f805ca98..d3c23713cfb 100644 --- a/tensorflow/lite/micro/kernels/xtensa/add.cc +++ b/tensorflow/lite/micro/kernels/xtensa/add.cc @@ -113,11 +113,11 @@ TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, op_params.output_shift = data->output_shift; SetActivationParams(data->output_activation_min, data->output_activation_max, &op_params); -#if !(defined(HIFI4)) +#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) bool need_broadcast = reference_ops::ProcessBroadcastShapes( tflite::micro::GetTensorShape(input1), tflite::micro::GetTensorShape(input2), &op_params); -#endif // !defined(HIFI4) +#endif // !defined(HIFI3) && !defined(HIFI4) && !defined(HIFI5) switch (output->type) { case kTfLiteInt8: { @@ -126,7 +126,7 @@ TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, *(reinterpret_cast(node->user_data)); AddEvalQuantizedVision(context, node, *params, op_data, input1, input2, output); -#elif defined(HIFI4) // defined(VISION_P6) +#elif defined(HIFI3) || defined(HIFI4) || defined(HIFI5) // defined(VISION_P6) int err; const RuntimeShape extended_input1_shape = RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(input1)); @@ -150,7 +150,7 @@ TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, op_params.left_shift); TF_LITE_ENSURE(context, err == 0); -#else // defined(VISION_P6) if (need_broadcast) { reference_integer_ops::BroadcastAdd4DSlow( op_params, tflite::micro::GetTensorShape(input1), tflite::micro::GetTensorData<int8_t>(input1), tflite::micro::GetTensorShape(input2), tflite::micro::GetTensorData<int8_t>(input2), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData<int8_t>(output)); } else { reference_integer_ops::Add( op_params, tflite::micro::GetTensorShape(input1), tflite::micro::GetTensorData<int8_t>(input1), tflite::micro::GetTensorShape(input2), tflite::micro::GetTensorData<int8_t>(input2), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData<int8_t>(output)); } -#endif // defined(VISION_P6) +#endif // defined(VISION_P6) break; } case
kTfLiteInt16: { -#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) int err; const RuntimeShape extended_input1_shape = RuntimeShape::ExtendedShape(4, tflite::micro::GetTensorShape(input1)); @@ -196,7 +196,7 @@ TfLiteStatus EvalAddQuantized(TfLiteContext* context, TfLiteNode* node, op_params.left_shift); TF_LITE_ENSURE(context, err == 0); -#else // defined(HIFI4) +#else // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) if (need_broadcast) { reference_ops::BroadcastAdd4DSlow( op_params, tflite::micro::GetTensorShape(input1), tflite::micro::GetTensorData<int16_t>(input1), tflite::micro::GetTensorShape(input2), tflite::micro::GetTensorData<int16_t>(input2), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData<int16_t>(output)); } else { reference_ops::Add( op_params, tflite::micro::GetTensorShape(input1), tflite::micro::GetTensorData<int16_t>(input1), tflite::micro::GetTensorShape(input2), tflite::micro::GetTensorData<int16_t>(input2), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData<int16_t>(output), false); } -#endif // defined(HIFI4) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) break; } default: diff --git a/tensorflow/lite/micro/kernels/xtensa/conv.cc b/tensorflow/lite/micro/kernels/xtensa/conv.cc index 59e576c9fc5..384dba9f7ac 100644 --- a/tensorflow/lite/micro/kernels/xtensa/conv.cc +++ b/tensorflow/lite/micro/kernels/xtensa/conv.cc @@ -1,4 +1,4 @@ -/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -32,31 +32,6 @@ limitations under the License. namespace tflite { namespace { -void* Init(TfLiteContext* context, const char* buffer, size_t length) { - TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); - void* data = - context->AllocatePersistentBuffer(context, sizeof(XtensaConvOpData)); -#if defined(VISION_P6) - if (InitXtensaContext()) { - return nullptr; - } -#endif // defined(VISION_P6) - - return data; -} - -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { - TF_LITE_ENSURE_OK(context, ConvPrepare(context, node)); - -#if defined(HIFI4) || defined(HIFI5) - TF_LITE_ENSURE_OK(context, ConvPrepareHifi(context, node)); -#endif -#if defined(VISION_P6) - TF_LITE_ENSURE_OK(context, ConvPrepareVision(context, node)); -#endif // VISION_P6 - return kTfLiteOk; -} - TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); TFLITE_DCHECK(node->builtin_data != nullptr); @@ -73,68 +48,76 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* filter = tflite::micro::GetEvalInput(context, node, kConvWeightsTensor); const TfLiteEvalTensor* bias = - (NumInputs(node) == 3) - ?
tflite::micro::GetEvalInput(context, node, kConvBiasTensor) - : nullptr; - - TfLiteEvalTensor filter_int8 = tflite::micro::MakeUnpackedInt4Tensor( - context, op_data.reference_op_data.filter_buffer_index, filter); + tflite::micro::GetEvalInput(context, node, kConvBiasTensor); switch (input->type) { + case kTfLiteFloat32: { + tflite::reference_ops::Conv( + ConvParamsFloat(params, op_data.reference_op_data), + tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData<float>(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData<float>(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetOptionalTensorData<float>(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData<float>(output), + tflite::micro::GetTensorShape(nullptr), nullptr); + break; + } case kTfLiteInt8: { - switch (filter_int8.type) { - case kTfLiteInt8: { -#if defined(HIFI4) || defined(HIFI5) - ConvEvalHifi(context, node, params, op_data, input, &filter_int8, - bias, output); +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + if (params.dilation_width_factor == 1 && + params.dilation_height_factor == 1) { + return ConvEvalHifiInt8(context, node, params, op_data, input, filter, + bias, output); + } else { + return ConvReferenceEvalInt8(context, node); + } #elif defined(VISION_P6) - return ConvEvalVision(context, node, params, op_data, input, - &filter_int8, bias, output); + // At this time the optimized implementation is failing the unit tests in + // ways that are not entirely clear. For now, we have identified some + // of the problem cases and are manually inserting a reference fallback. + // See http://b/270720625 for more details. + if (op_data.is_per_channel_quantized || + input->dims->data[1] != input->dims->data[2]) { + return ConvReferenceEvalInt8(context, node); + } else { + return ConvEvalVision(context, node, params, op_data, input, filter, + bias, output); + } #else - reference_integer_ops::ConvPerChannel( - ConvParamsQuantized(params, op_data.reference_op_data), - op_data.reference_op_data.per_channel_output_multiplier, - op_data.reference_op_data.per_channel_output_shift, - tflite::micro::GetTensorShape(input), - tflite::micro::GetTensorData<int8_t>(input), - tflite::micro::GetTensorShape(filter), - tflite::micro::GetTensorData<int8_t>(&filter_int8), - tflite::micro::GetTensorShape(bias), - tflite::micro::GetOptionalTensorData<int32_t>(bias), - tflite::micro::GetTensorShape(output), - tflite::micro::GetTensorData<int8_t>(output)); - return kTfLiteOk; + return ConvReferenceEvalInt8(context, node); #endif - break; - } - - default: - MicroPrintf("Filter type %s (%d) not supported.", - TfLiteTypeGetName(filter->type), filter->type); - return kTfLiteError; - } - return kTfLiteOk; } case kTfLiteInt16: { -#if defined(HIFI4) - ConvEvalHifi16(context, node, params, op_data, input, filter, bias, - output); +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + // Note that int32 bias is not widely supported and might be risky (e.g. + // http://b/262003750). As such, while we have a fallback to the reference + // implementation, production use-cases should only have int64 bias.
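A back-of-the-envelope check on why the comment above steers 16x8 convolutions toward int64 biases: each int16 × int8 product can reach roughly 32768 * 128 ≈ 4.2 million, so an int32 accumulator can overflow after only a few hundred multiply-accumulates, before any bias is even added. A small sketch making the orders of magnitude concrete (illustrative only, not kernel code):

// Rough headroom estimate for 16x8 conv accumulation.
#include <cstdint>
#include <cstdio>

int main() {
  const std::int64_t max_product = 32768LL * 128;  // |int16 * int8| worst case
  const std::int64_t int32_max = 2147483647LL;
  // MACs an int32 accumulator survives in the worst case: ~511, i.e. even a
  // single 8x8x8 filter volume (512 taps) can already exceed it.
  std::printf("%lld\n", static_cast<long long>(int32_max / max_product));
  return 0;
}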
+ if (bias->type == kTfLiteInt32) { + return ConvReferenceEvalInt16(context, node); + } else { + return ConvEvalHifiInt16(context, node, params, op_data, input, filter, + bias, output); + } #else return ConvReferenceEvalInt16(context, node); -#endif // defined(HIFI4) - break; +#endif } default: MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), input->type); return kTfLiteError; } + return kTfLiteOk; } + } // namespace TFLMRegistration Register_CONV_2D() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(ConvInitXtensa, ConvPrepareXtensa, Eval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_common_xtensa.cc b/tensorflow/lite/micro/kernels/xtensa/conv_common_xtensa.cc new file mode 100644 index 00000000000..3063e77744d --- /dev/null +++ b/tensorflow/lite/micro/kernels/xtensa/conv_common_xtensa.cc @@ -0,0 +1,56 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/c/builtin_op_data.h" +#include "tensorflow/lite/c/common.h" +#include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" +#include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/conv.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h" + +namespace tflite { + +void* ConvInitXtensa(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + void* data = + context->AllocatePersistentBuffer(context, sizeof(XtensaConvOpData)); +#if defined(VISION_P6) + if (InitXtensaContext()) { + return nullptr; + } +#endif // defined(VISION_P6) + + return data; +} + +TfLiteStatus ConvPrepareXtensa(TfLiteContext* context, TfLiteNode* node) { + TF_LITE_ENSURE_OK(context, ConvPrepare(context, node)); + +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + TF_LITE_ENSURE_OK(context, ConvPrepareHifi(context, node)); +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + +#if defined(VISION_P6) + TF_LITE_ENSURE_OK(context, ConvPrepareVision(context, node)); +#endif // defined(VISION_P6) + + return kTfLiteOk; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc b/tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc index 487c84aec43..1d2d7ec253e 100644 --- a/tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc +++ b/tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc @@ -1,4 +1,4 @@ -/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -13,14 +13,14 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #include #include "tensorflow/lite/c/builtin_op_data.h" #include "tensorflow/lite/c/common.h" #include "tensorflow/lite/kernels/internal/common.h" -#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" #include "tensorflow/lite/micro/kernels/conv.h" @@ -39,17 +39,43 @@ TfLiteStatus ConvPrepareHifi(TfLiteContext* context, TfLiteNode* node) { // Calculate scratch memory requirements and request scratch buffer TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); - TF_LITE_ENSURE(context, output != nullptr); TfLiteTensor* input = micro_context->AllocateTempInputTensor(node, kConvInputTensor); - TF_LITE_ENSURE(context, input != nullptr); TfLiteTensor* filter = micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); - TF_LITE_ENSURE(context, filter != nullptr); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kConvBiasTensor); const RuntimeShape& input_shape = GetTensorShape(input); const RuntimeShape& filter_shape = GetTensorShape(filter); const RuntimeShape& output_shape = GetTensorShape(output); + + // Check if the Xtensa optimized code can be used + // HIFI4 and HIFI5 do not allow bias data pointer to be nullptr + /* TODO(b/277112516): Dilation is currently not supported on HiFi 4 NN Library + */ + bool inputs_and_bias_ok = bias != nullptr; +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + inputs_and_bias_ok = + inputs_and_bias_ok && + (input->type == kTfLiteInt8 || + (input->type == kTfLiteInt16 && bias->type == kTfLiteInt64)); +#else + inputs_and_bias_ok = inputs_and_bias_ok && (input->type == kTfLiteInt8); +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + if (!(inputs_and_bias_ok && params->dilation_width_factor == 1 && + params->dilation_height_factor == 1 && + input_shape.Dims(1) >= filter_shape.Dims(1) && + input_shape.Dims(2) >= filter_shape.Dims(2))) { + micro_context->DeallocateTempTfLiteTensor(input); + micro_context->DeallocateTempTfLiteTensor(filter); + micro_context->DeallocateTempTfLiteTensor(output); + if (bias != nullptr) { + micro_context->DeallocateTempTfLiteTensor(bias); + } + return kTfLiteOk; + } + const int input_height = input_shape.Dims(1); const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); const int filter_height = filter_shape.Dims(1); @@ -60,7 +86,7 @@ TfLiteStatus ConvPrepareHifi(TfLiteContext* context, TfLiteNode* node) { const int pad_height = data->reference_op_data.padding.height; int required_scratch = 0; - // Dilation is currently not supported on HiFi 4 NN Library + // TODO(b/277112516): Dilation is currently not supported on HiFi 4 NN Library if ((params->dilation_width_factor == 1) && (params->dilation_height_factor == 1)) { if (input->type == kTfLiteInt8) { @@ -83,245 +109,220 @@ TfLiteStatus ConvPrepareHifi(TfLiteContext* context, TfLiteNode* node) { micro_context->DeallocateTempTfLiteTensor(input); micro_context->DeallocateTempTfLiteTensor(filter); micro_context->DeallocateTempTfLiteTensor(output); + if (bias != nullptr) { + 
micro_context->DeallocateTempTfLiteTensor(bias); + } return kTfLiteOk; } -#if defined(HIFI4) -TfLiteStatus ConvEvalHifi16(TfLiteContext* context, TfLiteNode* node, - const TfLiteConvParams& params, - const XtensaConvOpData& data, - const TfLiteEvalTensor* input, - const TfLiteEvalTensor* filter, - const TfLiteEvalTensor* bias, - TfLiteEvalTensor* output) { +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) +TfLiteStatus ConvEvalHifiInt16(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output) { const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter); - /* TODO(b/277112516):Dilation is currently not supported on HiFi 4 NN Library - */ - if ((params.dilation_width_factor == 1) && - (params.dilation_height_factor == 1) && - input_shape.Dims(1) >= filter_shape.Dims(1) && - input_shape.Dims(2) >= filter_shape.Dims(2)) { - const int stride_width = params.stride_width; - const int stride_height = params.stride_height; - const int pad_width = data.reference_op_data.padding.width; - const int pad_height = data.reference_op_data.padding.height; - const int32_t output_activation_min = - data.reference_op_data.output_activation_min; - const int32_t output_activation_max = - data.reference_op_data.output_activation_max; - - const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); - const int batches = MatchingDim(input_shape, 0, output_shape, 0); - const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); - const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); - const int input_height = input_shape.Dims(1); - const int input_width = input_shape.Dims(2); - const int filter_height = filter_shape.Dims(1); - const int filter_width = filter_shape.Dims(2); - const int output_height = output_shape.Dims(1); - const int output_width = output_shape.Dims(2); - - const int16_t* input_data = tflite::micro::GetTensorData(input); - const int8_t* filter_data = tflite::micro::GetTensorData(filter); - const int64_t* bias_data = tflite::micro::GetTensorData(bias); - int16_t* output_data = tflite::micro::GetTensorData(output); - - int output_data_format = 0; - int out_length = output_height * output_width * output_depth; - if (filter_height == 1 && filter_width == 1) { - for (int batch = 0; batch < batches; ++batch) { - int16_t* p_out_temp; - p_out_temp = &output_data[batch * out_length]; + const int stride_width = params.stride_width; + const int stride_height = params.stride_height; + const int pad_width = data.reference_op_data.padding.width; + const int pad_height = data.reference_op_data.padding.height; + const int32_t output_activation_min = + data.reference_op_data.output_activation_min; + const int32_t output_activation_max = + data.reference_op_data.output_activation_max; + + const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output); + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + 
+  const int output_width = output_shape.Dims(2);
+
+  const int16_t* input_data = tflite::micro::GetTensorData<int16_t>(input);
+  const int8_t* filter_data = tflite::micro::GetTensorData<int8_t>(filter);
+  const int64_t* bias_data = tflite::micro::GetTensorData<int64_t>(bias);
+  int16_t* output_data = tflite::micro::GetTensorData<int16_t>(output);
+
+  int output_data_format = 0;
+  int out_length = output_height * output_width * output_depth;
+  if (filter_height == 1 && filter_width == 1) {
+    for (int batch = 0; batch < batches; ++batch) {
+      int16_t* p_out_temp;
+      p_out_temp = &output_data[batch * out_length];
+
+      TF_LITE_ENSURE_EQ(
+          context,
+          xa_nn_conv2d_pointwise_per_chan_sym8sxsym16s(
+              p_out_temp, const_cast<WORD8*>(filter_data),
+              const_cast<WORD16*>(&input_data[batch * input_height *
+                                              input_width * input_depth]),
+              const_cast<WORD64*>(bias_data), input_height, input_width,
+              input_depth, output_depth, 0,
+              data.reference_op_data.per_channel_output_multiplier,
+              data.reference_op_data.per_channel_output_shift, 0,
+              output_data_format),
+          0);
+
+      TF_LITE_ENSURE_EQ(context,
+                        xa_nn_vec_activation_min_max_16_16(
+                            p_out_temp, p_out_temp, output_activation_min,
+                            output_activation_max, out_length),
+                        0);
+    }
+  } else {
+    void* p_scratch = static_cast<void*>(
+        context->GetScratchBuffer(context, data.scratch_tensor_index));
+
+    for (int batch = 0; batch < batches; ++batch) {
+      int16_t* p_out_temp;
+      p_out_temp = &output_data[batch * out_length];
+
+      {
         TF_LITE_ENSURE_EQ(
             context,
-            xa_nn_conv2d_pointwise_per_chan_sym8sxsym16s(
-                p_out_temp, const_cast<WORD8*>(filter_data),
-                const_cast<WORD16*>(&input_data[batch * input_height *
-                                                input_width * input_depth]),
-                const_cast<WORD64*>(bias_data), input_height, input_width,
-                input_depth, output_depth, 0,
+            xa_nn_conv2d_std_per_chan_sym8sxsym16s(
+                p_out_temp,
+                &input_data[batch * input_height * input_width * input_depth],
+                const_cast<WORD8*>(filter_data),  // filter_data,
+                bias_data, input_height, input_width, input_depth,
+                filter_height, filter_width, output_depth, stride_width,
+                stride_height, pad_width, pad_height, output_height,
+                output_width, 0,
                 data.reference_op_data.per_channel_output_multiplier,
                 data.reference_op_data.per_channel_output_shift, 0,
-                output_data_format),
+                output_data_format, static_cast<void*>(p_scratch)),
             0);
-
-        TF_LITE_ENSURE_EQ(context,
-                          xa_nn_vec_activation_min_max_16_16(
-                              p_out_temp, p_out_temp, output_activation_min,
-                              output_activation_max, out_length),
-                          0);
-      }
-    } else {
-      void* p_scratch = static_cast<void*>(
-          context->GetScratchBuffer(context, data.scratch_tensor_index));
-
-      for (int batch = 0; batch < batches; ++batch) {
-        int16_t* p_out_temp;
-        p_out_temp = &output_data[batch * out_length];
-
-        {
-          TF_LITE_ENSURE_EQ(
-              context,
-              xa_nn_conv2d_std_per_chan_sym8sxsym16s(
-                  p_out_temp,
-                  &input_data[batch * input_height * input_width * input_depth],
-                  const_cast<WORD8*>(filter_data),  // filter_data,
-                  bias_data, input_height, input_width, input_depth,
-                  filter_height, filter_width, output_depth, stride_width,
-                  stride_height, pad_width, pad_height, output_height,
-                  output_width, 0,
-                  data.reference_op_data.per_channel_output_multiplier,
-                  data.reference_op_data.per_channel_output_shift, 0,
-                  output_data_format, static_cast<void*>(p_scratch)),
-              0);
-        }
-        TF_LITE_ENSURE_EQ(context,
-                          xa_nn_vec_activation_min_max_16_16(
-                              p_out_temp, p_out_temp, output_activation_min,
-                              output_activation_max, out_length),
-                          0);
       }
+      TF_LITE_ENSURE_EQ(context,
+                        xa_nn_vec_activation_min_max_16_16(
+                            p_out_temp, p_out_temp, output_activation_min,
+                            output_activation_max, out_length),
+                        0);
     }
-    return kTfLiteOk;
   }
-  reference_integer_ops::ConvPerChannel(
-      ConvParamsQuantized(params, data.reference_op_data),
-      data.reference_op_data.per_channel_output_multiplier,
-      data.reference_op_data.per_channel_output_shift,
-      tflite::micro::GetTensorShape(input),
-      tflite::micro::GetTensorData<int16_t>(input),
-      tflite::micro::GetTensorShape(filter),
-      tflite::micro::GetTensorData<int8_t>(filter),
-      tflite::micro::GetTensorShape(bias),
-      tflite::micro::GetTensorData<int64_t>(bias),
-      tflite::micro::GetTensorShape(output),
-      tflite::micro::GetTensorData<int16_t>(output));
+  return kTfLiteOk;
 }
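Both NNLib branches of ConvEvalHifiInt16 above finish by running xa_nn_vec_activation_min_max_16_16 over the batch's output block, which applies the fused activation as a plain range clamp. As an illustrative stand-in rather than the NNLib implementation, this portable sketch shows what that call computes; operating in place as the kernel does is safe because each element is read before it is written:

#include <algorithm>
#include <cstdint>

// Clamp every element of `in` to [act_min, act_max] and write it to `out`.
// For int16 conv, act_min/act_max are derived from the fused activation
// (e.g. kTfLiteActRelu6) during Prepare.
void ActivationMinMax16(int16_t* out, const int16_t* in, int32_t act_min,
                        int32_t act_max, int length) {
  for (int i = 0; i < length; ++i) {
    const int32_t v = in[i];
    out[i] = static_cast<int16_t>(std::min(act_max, std::max(act_min, v)));
  }
}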
-#endif  // defined(HIFI4)
-
-TfLiteStatus ConvEvalHifi(TfLiteContext* context, TfLiteNode* node,
-                          const TfLiteConvParams& params,
-                          const XtensaConvOpData& data,
-                          const TfLiteEvalTensor* input,
-                          const TfLiteEvalTensor* filter,
-                          const TfLiteEvalTensor* bias,
-                          TfLiteEvalTensor* output) {
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+
+TfLiteStatus ConvEvalHifiInt8(TfLiteContext* context, TfLiteNode* node,
+                              const TfLiteConvParams& params,
+                              const XtensaConvOpData& data,
+                              const TfLiteEvalTensor* input,
+                              const TfLiteEvalTensor* filter,
+                              const TfLiteEvalTensor* bias,
+                              TfLiteEvalTensor* output) {
   const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input);
   const RuntimeShape& filter_shape = tflite::micro::GetTensorShape(filter);
-  /* TODO(b/277112516):Dilation is currently not supported on HiFi 4 NN
-     Library */
-  if ((params.dilation_width_factor == 1) &&
-      (params.dilation_height_factor == 1) &&
-      input_shape.Dims(1) >= filter_shape.Dims(1) &&
-      input_shape.Dims(2) >= filter_shape.Dims(2)) {
-    const int32_t input_offset = -data.reference_op_data.input_zero_point;
-    const int32_t output_offset = data.reference_op_data.output_zero_point;
-    const int stride_width = params.stride_width;
-    const int stride_height = params.stride_height;
-    const int pad_width = data.reference_op_data.padding.width;
-    const int pad_height = data.reference_op_data.padding.height;
-    const int32_t output_activation_min =
-        data.reference_op_data.output_activation_min;
-    const int32_t output_activation_max =
-        data.reference_op_data.output_activation_max;
-
-    const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output);
-    const int batches = MatchingDim(input_shape, 0, output_shape, 0);
-    const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
-    const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
-    const int input_height = input_shape.Dims(1);
-    const int input_width = input_shape.Dims(2);
-    const int filter_height = filter_shape.Dims(1);
-    const int filter_width = filter_shape.Dims(2);
-    const int output_height = output_shape.Dims(1);
-    const int output_width = output_shape.Dims(2);
-
-    const int8_t* input_data = tflite::micro::GetTensorData<int8_t>(input);
-    const int8_t* filter_data = tflite::micro::GetTensorData<int8_t>(filter);
-    const int32_t* bias_data = tflite::micro::GetTensorData<int32_t>(bias);
-    int8_t* output_data = tflite::micro::GetTensorData<int8_t>(output);
-
-    int output_data_format = 0;
-    int out_length = output_height * output_width * output_depth;
-
-    if (filter_height == 1 && filter_width == 1) {
-      for (int batch = 0; batch < batches; ++batch) {
-        int8_t* p_out_temp;
-        p_out_temp = &output_data[batch * out_length];
+  const int32_t input_offset = -data.reference_op_data.input_zero_point;
+  const int32_t output_offset = data.reference_op_data.output_zero_point;
+  const int stride_width = params.stride_width;
+  const int stride_height = params.stride_height;
+  const int pad_width = data.reference_op_data.padding.width;
+  const int pad_height = data.reference_op_data.padding.height;
+  const int32_t output_activation_min =
+      data.reference_op_data.output_activation_min;
+  const int32_t output_activation_max =
+      data.reference_op_data.output_activation_max;
+
+  const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output);
+  const int batches = MatchingDim(input_shape, 0, output_shape, 0);
+  const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3);
+  const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3);
+  const int input_height = input_shape.Dims(1);
+  const int input_width = input_shape.Dims(2);
+  const int filter_height = filter_shape.Dims(1);
+  const int filter_width = filter_shape.Dims(2);
+  const int output_height = output_shape.Dims(1);
+  const int output_width = output_shape.Dims(2);
+
+  const int8_t* input_data = tflite::micro::GetTensorData<int8_t>(input);
+  const int32_t* bias_data = tflite::micro::GetTensorData<int32_t>(bias);
+  int8_t* output_data = tflite::micro::GetTensorData<int8_t>(output);
+
+  const int8_t* filter_data;
+  if (filter->type == kTfLiteInt4) {
+    int8_t* unpacked_filter_data =
+        static_cast<int8_t*>(context->GetScratchBuffer(
+            context, data.reference_op_data.filter_buffer_index));
+    tflite::tensor_utils::UnpackDenseInt4IntoInt8(
+        tflite::micro::GetTensorData<int8_t>(filter),
+        tflite::micro::GetTensorShape(filter).FlatSize(),
+        unpacked_filter_data);
+    filter_data = unpacked_filter_data;
+  } else {
+    filter_data = tflite::micro::GetTensorData<int8_t>(filter);
+  }
+  int output_data_format = 0;
+  int out_length = output_height * output_width * output_depth;
+
+  if (filter_height == 1 && filter_width == 1) {
+    for (int batch = 0; batch < batches; ++batch) {
+      int8_t* p_out_temp;
+      p_out_temp = &output_data[batch * out_length];
+
+      TF_LITE_ENSURE_EQ(
+          context,
+
+          xa_nn_conv2d_pointwise_per_chan_sym8sxasym8s(
+              p_out_temp, const_cast<WORD8*>(filter_data),
+              const_cast<WORD8*>(&input_data[batch * input_height *
+                                             input_width * input_depth]),
+              const_cast<WORD32*>(bias_data), input_height, input_width,
+              input_depth, output_depth, input_offset,
+              data.reference_op_data.per_channel_output_multiplier,
+              data.reference_op_data.per_channel_output_shift, output_offset,
+              output_data_format),
+          0);
+
+      TF_LITE_ENSURE_EQ(context,
+                        xa_nn_vec_activation_min_max_8_8(
+                            p_out_temp, p_out_temp, output_activation_min,
+                            output_activation_max, out_length),
+                        0);
+    }
+  } else {
+    void* p_scratch = static_cast<void*>(
+        context->GetScratchBuffer(context, data.scratch_tensor_index));
+
+    for (int batch = 0; batch < batches; ++batch) {
+      int8_t* p_out_temp;
+      p_out_temp = &output_data[batch * out_length];
+
+      {
         TF_LITE_ENSURE_EQ(
             context,
-
-            xa_nn_conv2d_pointwise_per_chan_sym8sxasym8s(
-                p_out_temp, const_cast<WORD8*>(filter_data),
-                const_cast<WORD8*>(&input_data[batch * input_height *
-                                               input_width * input_depth]),
-                const_cast<WORD32*>(bias_data), input_height, input_width,
-                input_depth, output_depth, input_offset,
+            xa_nn_conv2d_std_per_chan_sym8sxasym8s(
+                p_out_temp,
+                &input_data[batch * input_height * input_width * input_depth],
+                const_cast<WORD8*>(filter_data),  // filter_data,
+                bias_data, input_height, input_width, input_depth,
+                filter_height, filter_width, output_depth, stride_width,
+                stride_height, pad_width, pad_height, output_height,
+                output_width, input_offset,
                 data.reference_op_data.per_channel_output_multiplier,
                 data.reference_op_data.per_channel_output_shift, output_offset,
-                output_data_format),
+                output_data_format, static_cast<void*>(p_scratch)),
             0);
-
-        TF_LITE_ENSURE_EQ(context,
-                          xa_nn_vec_activation_min_max_8_8(
-                              p_out_temp, p_out_temp, output_activation_min,
-                              output_activation_max, out_length),
-                          0);
-      }
-    } else {
-      void* p_scratch = static_cast<void*>(
-          context->GetScratchBuffer(context, data.scratch_tensor_index));
-
-      for (int batch = 0; batch < batches; ++batch) {
-        int8_t* p_out_temp;
-        p_out_temp = &output_data[batch * out_length];
-
-        {
-          TF_LITE_ENSURE_EQ(
-              context,
-              xa_nn_conv2d_std_per_chan_sym8sxasym8s(
-                  p_out_temp,
-                  &input_data[batch * input_height * input_width * input_depth],
-                  const_cast<WORD8*>(filter_data),  // filter_data,
-                  bias_data, input_height, input_width, input_depth,
-                  filter_height, filter_width, output_depth, stride_width,
-                  stride_height, pad_width, pad_height, output_height,
-                  output_width, input_offset,
-                  data.reference_op_data.per_channel_output_multiplier,
-                  data.reference_op_data.per_channel_output_shift,
-                  output_offset, output_data_format,
-                  static_cast<void*>(p_scratch)),
-              0);
-        }
-
-        TF_LITE_ENSURE_EQ(context,
-                          xa_nn_vec_activation_min_max_8_8(
-                              p_out_temp, p_out_temp, output_activation_min,
-                              output_activation_max, out_length),
-                          0);
       }
+
+      TF_LITE_ENSURE_EQ(context,
+                        xa_nn_vec_activation_min_max_8_8(
+                            p_out_temp, p_out_temp, output_activation_min,
+                            output_activation_max, out_length),
+                        0);
     }
-    return kTfLiteOk;
   }
-  reference_integer_ops::ConvPerChannel(
-      ConvParamsQuantized(params, data.reference_op_data),
-      data.reference_op_data.per_channel_output_multiplier,
-      data.reference_op_data.per_channel_output_shift,
-      tflite::micro::GetTensorShape(input),
-      tflite::micro::GetTensorData<int8_t>(input),
-      tflite::micro::GetTensorShape(filter),
-      tflite::micro::GetTensorData<int8_t>(filter),
-      tflite::micro::GetTensorShape(bias),
-      tflite::micro::GetTensorData<int32_t>(bias),
-      tflite::micro::GetTensorShape(output),
-      tflite::micro::GetTensorData<int8_t>(output));
   return kTfLiteOk;
 }
 
 }  // namespace tflite
-#endif  // defined(HIFI4) || defined(HIFI5)
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc b/tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc
index 0d3c4a3d97f..2492d4b348b 100644
--- a/tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc
@@ -1,4 +1,4 @@
-/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -23,16 +23,9 @@ limitations under the License.
 #include "tensorflow/lite/kernels/padding.h"
 #include "tensorflow/lite/micro/kernels/conv.h"
 #include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_log.h"
 
 namespace tflite {
-namespace {
-
-void* Init(TfLiteContext* context, const char* buffer, size_t length) {
-  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
-  return context->AllocatePersistentBuffer(context, sizeof(OpDataConv));
-}
-
-}  // namespace.
 
 TfLiteStatus ConvReferenceEvalInt16(TfLiteContext* context, TfLiteNode* node) {
   TFLITE_DCHECK(node->user_data != nullptr);
@@ -52,25 +45,37 @@ TfLiteStatus ConvReferenceEvalInt16(TfLiteContext* context, TfLiteNode* node) {
           ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor)
          : nullptr;
 
-  reference_integer_ops::ConvPerChannel(
-      ConvParamsQuantized(params, op_data),
-      op_data.per_channel_output_multiplier, op_data.per_channel_output_shift,
-      tflite::micro::GetTensorShape(input),
-      tflite::micro::GetTensorData<int16_t>(input),
-      tflite::micro::GetTensorShape(filter),
-      tflite::micro::GetTensorData<int8_t>(filter),
-      tflite::micro::GetTensorShape(bias),
-      tflite::micro::GetTensorData<int64_t>(bias),
-      tflite::micro::GetTensorShape(output),
-      tflite::micro::GetTensorData<int16_t>(output));
-  return kTfLiteOk;
-}
+  if (bias == nullptr || bias->type == kTfLiteInt32) {
+    reference_integer_ops::ConvPerChannel(
+        ConvParamsQuantized(params, op_data),
+        op_data.per_channel_output_multiplier, op_data.per_channel_output_shift,
+        tflite::micro::GetTensorShape(input),
+        tflite::micro::GetTensorData<int16_t>(input),
+        tflite::micro::GetTensorShape(filter),
+        tflite::micro::GetTensorData<int8_t>(filter),
+        tflite::micro::GetTensorShape(bias),
+        tflite::micro::GetOptionalTensorData<int32_t>(bias),
+        tflite::micro::GetTensorShape(output),
+        tflite::micro::GetTensorData<int16_t>(output));
+  } else if (bias->type == kTfLiteInt64) {
+    reference_integer_ops::ConvPerChannel(
+        ConvParamsQuantized(params, op_data),
+        op_data.per_channel_output_multiplier, op_data.per_channel_output_shift,
+        tflite::micro::GetTensorShape(input),
+        tflite::micro::GetTensorData<int16_t>(input),
+        tflite::micro::GetTensorShape(filter),
+        tflite::micro::GetTensorData<int8_t>(filter),
+        tflite::micro::GetTensorShape(bias),
+        tflite::micro::GetOptionalTensorData<int64_t>(bias),
+        tflite::micro::GetTensorShape(output),
+        tflite::micro::GetTensorData<int16_t>(output));
+  } else {
+    MicroPrintf("Bias type %s (%d) not supported.",
+                TfLiteTypeGetName(bias->type), bias->type);
+    return kTfLiteError;
+  }
 
-// TODO(b/189981943): This variant can be used for a smaller binary
-// since the optimized conv implementation currently adds a lot to
-// the binary size (~30KB to text section).
-TFLMRegistration Register_CONV_2D_INT16REF() {
-  return tflite::micro::RegisterOp(Init, ConvPrepare, ConvReferenceEvalInt16);
+  return kTfLiteOk;
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_int8_int16.cc b/tensorflow/lite/micro/kernels/xtensa/conv_int8_int16.cc
new file mode 100644
index 00000000000..ed64f01bc2b
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/xtensa/conv_int8_int16.cc
@@ -0,0 +1,89 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/common.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h"
+#include "tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h"
+
+namespace tflite {
+namespace {
+
+TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) {
+#if defined(HIFIMINI)
+  return ConvReferenceEvalInt8(context, node);
+#else
+  const auto& op_data = *(reinterpret_cast<XtensaConvOpData*>(node->user_data));
+  const auto& params =
+      *(reinterpret_cast<TfLiteConvParams*>(node->builtin_data));
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kConvInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kConvOutputTensor);
+  const TfLiteEvalTensor* filter =
+      tflite::micro::GetEvalInput(context, node, kConvWeightsTensor);
+  const TfLiteEvalTensor* bias =
+      tflite::micro::GetEvalInput(context, node, kConvBiasTensor);
+
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+  return ConvEvalHifiInt8(context, node, params, op_data, input, filter, bias,
+                          output);
+#elif defined(VISION_P6)
+  return ConvEvalVision(context, node, params, op_data, input, filter, bias,
+                        output);
+#endif
+
+#endif  // defined(HIFIMINI)
+}
+
+TfLiteStatus EvalInt16(TfLiteContext* context, TfLiteNode* node) {
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+  const auto& op_data = *(reinterpret_cast<XtensaConvOpData*>(node->user_data));
+  const auto& params =
+      *(reinterpret_cast<TfLiteConvParams*>(node->builtin_data));
+
+  const TfLiteEvalTensor* input =
+      tflite::micro::GetEvalInput(context, node, kConvInputTensor);
+  TfLiteEvalTensor* output =
+      tflite::micro::GetEvalOutput(context, node, kConvOutputTensor);
+  const TfLiteEvalTensor* filter =
+      tflite::micro::GetEvalInput(context, node, kConvWeightsTensor);
+  const TfLiteEvalTensor* bias =
+      tflite::micro::GetEvalInput(context, node, kConvBiasTensor);
+
+  return ConvEvalHifiInt16(context, node, params, op_data, input, filter, bias,
+                           output);
+#else
+  return ConvReferenceEvalInt16(context, node);
+#endif
+}
+
+}  // namespace
+
+TFLMRegistration Register_CONV_2D_INT8() {
+  return tflite::micro::RegisterOp(ConvInitXtensa, ConvPrepareXtensa, EvalInt8);
+}
+
+TFLMRegistration Register_CONV_2D_INT16() {
+  return tflite::micro::RegisterOp(ConvInitXtensa, ConvPrepareXtensa,
+                                   EvalInt16);
+}
+
+}  // namespace tflite
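The new conv_int8_int16.cc above exposes type-specialized conv registrations alongside the generic one. As a minimal usage sketch, with the resolver size and surrounding setup as illustrative assumptions rather than part of this change, an application whose model only contains int8 conv can pass the specialized registration to MicroMutableOpResolver::AddConv2D:

#include "tensorflow/lite/micro/micro_mutable_op_resolver.h"

// Hypothetical application setup: register only the int8-specialized conv
// kernel instead of the generic Register_CONV_2D(), trimming unused eval
// paths from the binary.
TfLiteStatus RegisterOps(tflite::MicroMutableOpResolver<1>& resolver) {
  return resolver.AddConv2D(tflite::Register_CONV_2D_INT8());
}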
#include "tensorflow/lite/c/builtin_op_data.h" #include "tensorflow/lite/c/common.h" #include "tensorflow/lite/kernels/internal/common.h" +#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" #include "tensorflow/lite/kernels/internal/quantization_util.h" #include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" @@ -25,14 +26,6 @@ limitations under the License. #include "tensorflow/lite/micro/kernels/kernel_util.h" namespace tflite { -namespace { - -void* Init(TfLiteContext* context, const char* buffer, size_t length) { - TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); - return context->AllocatePersistentBuffer(context, sizeof(OpDataConv)); -} - -} // namespace. TfLiteStatus ConvReferenceEvalInt8(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); @@ -52,17 +45,29 @@ TfLiteStatus ConvReferenceEvalInt8(TfLiteContext* context, TfLiteNode* node) { ? tflite::micro::GetEvalInput(context, node, kConvBiasTensor) : nullptr; + const int8_t* filter_data; + if (filter->type == kTfLiteInt4) { + int8_t* unpacked_filter_data = static_cast( + context->GetScratchBuffer(context, op_data.filter_buffer_index)); + tflite::tensor_utils::UnpackDenseInt4IntoInt8( + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(filter).FlatSize(), unpacked_filter_data); + filter_data = unpacked_filter_data; + } else { + filter_data = tflite::micro::GetTensorData(filter); + } + reference_integer_ops::ConvPerChannel( ConvParamsQuantized(params, op_data), op_data.per_channel_output_multiplier, op_data.per_channel_output_shift, tflite::micro::GetTensorShape(input), tflite::micro::GetTensorData(input), - tflite::micro::GetTensorShape(filter), - tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(filter), filter_data, tflite::micro::GetTensorShape(bias), - tflite::micro::GetTensorData(bias), + tflite::micro::GetOptionalTensorData(bias), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); + return kTfLiteOk; } @@ -70,7 +75,8 @@ TfLiteStatus ConvReferenceEvalInt8(TfLiteContext* context, TfLiteNode* node) { // since the optimized conv implementation currently adds a lot to // the binary size (~30KB to text section). TFLMRegistration Register_CONV_2D_INT8REF() { - return tflite::micro::RegisterOp(Init, ConvPrepare, ConvReferenceEvalInt8); + return tflite::micro::RegisterOp(ConvInit, ConvPrepare, + ConvReferenceEvalInt8); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/conv_vision.cc b/tensorflow/lite/micro/kernels/xtensa/conv_vision.cc index e4f0d49af8e..812ab60ebf2 100644 --- a/tensorflow/lite/micro/kernels/xtensa/conv_vision.cc +++ b/tensorflow/lite/micro/kernels/xtensa/conv_vision.cc @@ -1,4 +1,4 @@ -/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -21,6 +21,7 @@ limitations under the License. 
#include "tensorflow/lite/c/common.h" #include "tensorflow/lite/kernels/internal/common.h" #include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" +#include "tensorflow/lite/kernels/internal/reference/integer_ops/conv.h" #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" #include "tensorflow/lite/micro/kernels/conv.h" @@ -32,29 +33,22 @@ limitations under the License. namespace tflite { TfLiteStatus ConvPrepareVision(TfLiteContext* context, TfLiteNode* node) { - TFLITE_DCHECK(node->user_data != nullptr); - TFLITE_DCHECK(node->builtin_data != nullptr); + MicroContext* micro_context = GetMicroContext(context); + TfLiteTensor* input = + micro_context->AllocateTempInputTensor(node, kConvInputTensor); + TfLiteTensor* bias = + micro_context->AllocateTempInputTensor(node, kConvBiasTensor); + const uint32_t input_height = SizeOfDimension(input, 1); + const uint32_t input_width = SizeOfDimension(input, 2); XtensaConvOpData* data = reinterpret_cast(node->user_data); const auto& params = *(reinterpret_cast(node->builtin_data)); - MicroContext* micro_context = GetMicroContext(context); TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, kConvOutputTensor); - TF_LITE_ENSURE(context, output != nullptr); - TfLiteTensor* input = - micro_context->AllocateTempInputTensor(node, kConvInputTensor); - TF_LITE_ENSURE(context, input != nullptr); TfLiteTensor* filter = micro_context->AllocateTempInputTensor(node, kConvWeightsTensor); - TF_LITE_ENSURE(context, filter != nullptr); - TfLiteTensor* bias = - micro_context->AllocateTempInputTensor(node, kConvBiasTensor); - TF_LITE_ENSURE(context, bias != nullptr); - - const uint32_t input_height = SizeOfDimension(input, 1); - const uint32_t input_width = SizeOfDimension(input, 2); const uint32_t output_height = SizeOfDimension(output, 1); const uint32_t output_width = SizeOfDimension(output, 2); @@ -62,6 +56,15 @@ TfLiteStatus ConvPrepareVision(TfLiteContext* context, TfLiteNode* node) { const uint32_t filter_height = SizeOfDimension(filter, 1); const uint32_t filter_width = SizeOfDimension(filter, 2); + // At this time it is unclear if per channel quantization is correctly + // supported by the optimized vision P6 implementation or not. For now, we are + // manually adding a flag to switch to the reference implementation for + // per-channel conv. + // See http://b/270720625 for more details. + data->is_per_channel_quantized = + reinterpret_cast(filter->quantization.params) + ->scale->size > 1; + // Dynamically allocate per-channel quantization parameters. 
   const int num_channels = SizeOfDimension(filter, kConvQuantizedDimension);
   data->per_channel_output_shift_int8 = static_cast<int8_t*>(
@@ -97,7 +100,6 @@ TfLiteStatus ConvPrepareVision(TfLiteContext* context, TfLiteNode* node) {
     tflite::tensor_utils::UnpackDenseInt4IntoInt8(
         GetTensorData<int8_t>(filter), GetTensorShape(filter).FlatSize(),
         GetTensorData<int8_t>(&filter_int8));
-
   } else {
     filter_int8 = *filter;
   }
@@ -142,14 +144,17 @@ TfLiteStatus ConvPrepareVision(TfLiteContext* context, TfLiteNode* node) {
   if (status) {
     return kTfLiteError;
   }
+
   if (filter->type == kTfLiteInt4) {
     micro_context->DeallocateTempBuffer(GetTensorData<uint8_t>(&filter_int8));
   }
+
   micro_context->DeallocateTempTfLiteTensor(output);
   micro_context->DeallocateTempTfLiteTensor(input);
   micro_context->DeallocateTempTfLiteTensor(filter);
-  micro_context->DeallocateTempTfLiteTensor(bias);
-
+  if (bias != nullptr) {
+    micro_context->DeallocateTempTfLiteTensor(bias);
+  }
   return kTfLiteOk;
 }
 
@@ -170,7 +175,9 @@ TfLiteStatus ConvEvalVision(TfLiteContext* context, TfLiteNode* node,
       data.reorder_coefficient_bias, data.reorder_coefficient_bias_size,
       data.reference_op_data.per_channel_output_multiplier,
       data.per_channel_output_shift_int8, num_channels);
+
   return kTfLiteOk;
 }
+
 }  // namespace tflite
 #endif  // defined(VISION_P6)
diff --git a/tensorflow/lite/micro/kernels/xtensa/depthwise_conv.cc b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv.cc
index 02ea8717cb9..8536ff79507 100644
--- a/tensorflow/lite/micro/kernels/xtensa/depthwise_conv.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv.cc
@@ -48,10 +48,22 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) {
 
 TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
   TF_LITE_ENSURE_OK(context, DepthwiseConvPrepare(context, node));
 
+  MicroContext* micro_context = GetMicroContext(context);
+  TfLiteTensor* input =
+      micro_context->AllocateTempInputTensor(node, kConvInputTensor);
+  TF_LITE_ENSURE(context, input != nullptr);
+
+  // For int16 input, only the reference-kernel fallback is used, so there
+  // is no need to prepare the Hifi/Vision kernel.
+  if (input->type == kTfLiteInt16) {
+    micro_context->DeallocateTempTfLiteTensor(input);
+    return kTfLiteOk;
+  }
+
+  micro_context->DeallocateTempTfLiteTensor(input);
-#if defined(HIFI4) || defined(HIFI5)
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
   TF_LITE_ENSURE_OK(context, DepthwiseConvPrepareHifi(context, node));
-#endif  // defined(HIFI4) || defined(HIFI5)
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
 
 #if defined(VISION_P6)
   TF_LITE_ENSURE_OK(context, DepthwiseConvPrepareVision(context, node));
@@ -85,7 +97,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
     case kTfLiteInt8: {
       switch (filter_int8.type) {
         case kTfLiteInt8: {
-#if defined(HIFI4) || defined(HIFI5)
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
           DepthwiseConvEvalHifi(context, node, params, op_data, input,
                                 &filter_int8, bias, output);
 #elif defined(VISION_P6)
@@ -104,7 +116,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
               tflite::micro::GetOptionalTensorData<int32_t>(bias),
               tflite::micro::GetTensorShape(output),
               tflite::micro::GetTensorData<int8_t>(output));
-#endif  // defined(HIFI4) || defined(HIFI5)
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
           break;
         }
         default:
@@ -114,6 +126,31 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) {
       }
       break;
     }
+    case kTfLiteInt16: {
+      switch (filter->type) {
+        case kTfLiteInt8: {
+          reference_integer_ops::DepthwiseConvPerChannel(
+              DepthwiseConvParamsQuantized(params, op_data.reference_op_data),
+              op_data.reference_op_data.per_channel_output_multiplier,
+              op_data.reference_op_data.per_channel_output_shift,
+              tflite::micro::GetTensorShape(input),
+              tflite::micro::GetTensorData<int16_t>(input),
+              tflite::micro::GetTensorShape(filter),
+              tflite::micro::GetTensorData<int8_t>(&filter_int8),
+              tflite::micro::GetTensorShape(bias),
+              tflite::micro::GetOptionalTensorData<int64_t>(bias),
+              tflite::micro::GetTensorShape(output),
+              tflite::micro::GetTensorData<int16_t>(output));
+          break;
+        }
+        default:
+          MicroPrintf("Filter type %s (%d) for input type %s not supported.",
+                      TfLiteTypeGetName(filter->type), filter->type,
+                      TfLiteTypeGetName(input->type));
+          return kTfLiteError;
+      }
+      break;
+    }
     default:
       MicroPrintf("Type %s (%d) not supported.",
                   TfLiteTypeGetName(input->type), input->type);
diff --git a/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc
index 05dab485c7e..8c2052b23e7 100644
--- a/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc
@@ -28,7 +28,7 @@ limitations under the License.
 #include "tensorflow/lite/micro/kernels/xtensa/xtensa.h"
 #include "tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h"
 
-#if defined(HIFI4) || defined(HIFI5)
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
 namespace tflite {
 TfLiteStatus DepthwiseConvPrepareHifi(TfLiteContext* context,
                                       TfLiteNode* node) {
@@ -187,4 +187,4 @@ TfLiteStatus DepthwiseConvEvalHifi(TfLiteContext* context, TfLiteNode* node,
   return kTfLiteOk;
 }
 }  // namespace tflite
-#endif  // defined(HIFI4) || defined(HIFI5)
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
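The new dequantize.cc that follows routes int8 and int16 inputs to NNLib's xa_nn_elm_dequantize_asym8s_f32 / xa_nn_elm_dequantize_asym16s_f32 when the core has a vector FPU (HIFI_VFPU) and otherwise falls back to reference_ops::Dequantize. Per element, both paths compute the same affine mapping; a one-line illustrative helper (not part of the patch) shows the arithmetic:

#include <cstdint>

// real_value = scale * (quantized_value - zero_point)
inline float DequantizeOne(int32_t q, int32_t zero_point, float scale) {
  return scale * static_cast<float>(q - zero_point);
}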
diff --git a/tensorflow/lite/micro/kernels/xtensa/dequantize.cc b/tensorflow/lite/micro/kernels/xtensa/dequantize.cc
new file mode 100644
index 00000000000..f2f4a7d9cb7
--- /dev/null
+++ b/tensorflow/lite/micro/kernels/xtensa/dequantize.cc
@@ -0,0 +1,118 @@
+/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/lite/kernels/internal/reference/dequantize.h"
+
+#include "tensorflow/lite/c/builtin_op_data.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/internal/quantization_util.h"
+#include "tensorflow/lite/kernels/internal/reference/quantize.h"
+#include "tensorflow/lite/kernels/internal/reference/requantize.h"
+#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
+#include "tensorflow/lite/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/dequantize.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h"
+#include "tensorflow/lite/micro/micro_log.h"
+
+namespace tflite {
+
+void* DequantizeInit(TfLiteContext* context, const char* buffer,
+                     size_t length) {
+  TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr);
+  return context->AllocatePersistentBuffer(context, sizeof(DequantizeOpData));
+}
+
+TfLiteStatus DequantizeEval(TfLiteContext* context, TfLiteNode* node) {
+  TFLITE_DCHECK(node->user_data != nullptr);
+  DequantizeOpData* data = static_cast<DequantizeOpData*>(node->user_data);
+
+  const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0);
+  TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(context, node, 0);
+
+  // Output type ensured to be kTfLiteFloat32 at the Prepare stage
+  TFLITE_DCHECK(output->type == kTfLiteFloat32);
+
+  switch (input->type) {
+    case kTfLiteInt8: {
+#if HIFI_VFPU && (defined(HIFI5) || defined(HIFI4) || defined(HIFI3))
+      int err;
+      const int8_t* input_data_ptr;
+      float* output_data_ptr;
+      const int flat_size =
+          MatchingFlatSize(tflite::micro::GetTensorShape(input),
+                           tflite::micro::GetTensorShape(output));
+      input_data_ptr = tflite::micro::GetTensorData<int8_t>(input);
+      output_data_ptr = tflite::micro::GetTensorData<float>(output);
+
+      err = xa_nn_elm_dequantize_asym8s_f32(
+          output_data_ptr, input_data_ptr, data->quantization_params.zero_point,
+          data->quantization_params.scale, flat_size);
+      TF_LITE_ENSURE(context, (err == 0));
+#else   // HIFI_VFPU && (defined(HIFI5) || defined(HIFI4) || defined(HIFI3))
+      reference_ops::Dequantize(data->quantization_params,
+                                tflite::micro::GetTensorShape(input),
+                                tflite::micro::GetTensorData<int8_t>(input),
+                                tflite::micro::GetTensorShape(output),
+                                tflite::micro::GetTensorData<float>(output));
+#endif  // HIFI_VFPU && (defined(HIFI5) || defined(HIFI4) || defined(HIFI3))
+      break;
+    }
+    case kTfLiteInt16: {
+#if HIFI_VFPU && (defined(HIFI5) || defined(HIFI4) || defined(HIFI3))
+      int err;
+      const int16_t* input_data_ptr;
+      float* output_data_ptr;
+      const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input);
+      const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output);
+      const int flat_size = MatchingFlatSize(input_shape, output_shape);
+      input_data_ptr = tflite::micro::GetTensorData<int16_t>(input);
+      output_data_ptr = tflite::micro::GetTensorData<float>(output);
+
+      err = xa_nn_elm_dequantize_asym16s_f32(
+          output_data_ptr, input_data_ptr, data->quantization_params.zero_point,
+          data->quantization_params.scale, flat_size);
+      TF_LITE_ENSURE(context, (err == 0));
+#else   // HIFI_VFPU && (defined(HIFI5) || defined(HIFI4) || defined(HIFI3))
+      reference_ops::Dequantize(data->quantization_params,
+                                tflite::micro::GetTensorShape(input),
+                                tflite::micro::GetTensorData<int16_t>(input),
+                                tflite::micro::GetTensorShape(output),
+                                tflite::micro::GetTensorData<float>(output));
+#endif  // HIFI_VFPU && (defined(HIFI5) || defined(HIFI4) || defined(HIFI3))
+      break;
+    }
+    case kTfLiteUInt8:
+      reference_ops::Dequantize(data->quantization_params,
+                                tflite::micro::GetTensorShape(input),
+                                tflite::micro::GetTensorData<uint8_t>(input),
+                                tflite::micro::GetTensorShape(output),
+                                tflite::micro::GetTensorData<float>(output));
+      break;
+    default:
+      MicroPrintf("Input %s, output %s not supported.",
+                  TfLiteTypeGetName(input->type),
+                  TfLiteTypeGetName(output->type));
+      return kTfLiteError;
+  }
+
+  return kTfLiteOk;
+}
+
+TFLMRegistration Register_DEQUANTIZE() {
+  return tflite::micro::RegisterOp(DequantizeInit, DequantizePrepare,
+                                   DequantizeEval);
+}
+
+}  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/xtensa/fully_connected.cc b/tensorflow/lite/micro/kernels/xtensa/fully_connected.cc
index 1395fc39645..df5458001b7 100644
--- a/tensorflow/lite/micro/kernels/xtensa/fully_connected.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/fully_connected.cc
@@ -125,4 +125,8 @@ TFLMRegistration Register_FULLY_CONNECTED() {
                                    XtensaPrepareFullyConnected, Eval);
 }
 
+TFLMInferenceRegistration RegisterInference_FULLY_CONNECTED() {
+  return tflite::micro::RegisterOp(Eval);
+}
+
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc b/tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc
index b53afa42819..f850c0c0fca 100644
--- a/tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/fully_connected_int8.cc
@@ -57,7 +57,7 @@ TfLiteStatus XtensaEvalFullyConnectedQuantizedInt8(
       tflite::micro::GetTensorShape(bias), bias_data,
       tflite::micro::GetTensorShape(output),
       tflite::micro::GetTensorData<int8_t>(output));
-#elif defined(HIFI4) || defined(HIFI5)
+#elif defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
   const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output);
   const int num_batches =
       FlatSizeSkipDim(output_shape, output_shape.DimensionsCount() - 1);
@@ -103,7 +103,7 @@ TfLiteStatus XtensaEvalFullyConnectedQuantizedInt8(
       tflite::micro::GetTensorShape(bias), bias_data,
       tflite::micro::GetTensorShape(output),
       tflite::micro::GetTensorData<int8_t>(output));
-#endif  // defined(HIFI4) || defined(HIFI5)
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
   return kTfLiteOk;
 }
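The per_channel_output_multiplier and per_channel_output_shift arrays threaded through the conv, depthwise-conv, and fully-connected kernels in this patch encode each output channel's rescale factor as a Q31 fixed-point multiplier plus a power-of-two shift. The following is a deliberately simplified sketch of the output stage they drive; the real kernels use a saturating rounding-doubling high multiply, which is omitted here, and the helper name is hypothetical:

#include <algorithm>
#include <cstdint>

// Approximate per-channel requantization of an int32 accumulator:
// scaled = acc * multiplier * 2^shift / 2^31, then add the output offset
// and clamp to the activation range. Rounding is intentionally omitted.
int8_t RequantizeChannel(int32_t acc, int32_t multiplier, int shift,
                         int32_t output_offset, int32_t act_min,
                         int32_t act_max) {
  int64_t scaled = static_cast<int64_t>(acc) * multiplier;  // Q31 product
  scaled >>= (31 - shift);  // shift is usually <= 0, widening the divide
  scaled += output_offset;
  scaled = std::min<int64_t>(act_max, std::max<int64_t>(act_min, scaled));
  return static_cast<int8_t>(scaled);
}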
diff --git a/tensorflow/lite/micro/kernels/xtensa/leaky_relu.cc b/tensorflow/lite/micro/kernels/xtensa/leaky_relu.cc
index 857a488fe86..c1ed1d6b9bd 100644
--- a/tensorflow/lite/micro/kernels/xtensa/leaky_relu.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/leaky_relu.cc
@@ -76,7 +76,7 @@ TfLiteStatus LeakyReluEval(TfLiteContext* context, TfLiteNode* node) {
       return kTfLiteOk;
     } break;
     case kTfLiteInt16: {
-#if defined(HIFI4)
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
       const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input);
       const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output);
       const int flat_size = MatchingFlatSize(input_shape, output_shape);
@@ -89,7 +89,7 @@ TfLiteStatus LeakyReluEval(TfLiteContext* context, TfLiteNode* node) {
       if (err != 0) return kTfLiteError;
 #else
       QuantizeLeakyRelu<int16_t>(data, input, output);
-#endif  // defined(HIFI4)
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
       return kTfLiteOk;
     } break;
     default:
diff --git a/tensorflow/lite/micro/kernels/xtensa/logistic.cc b/tensorflow/lite/micro/kernels/xtensa/logistic.cc
index 41e6f3da779..2ddf82eff50 100644
--- a/tensorflow/lite/micro/kernels/xtensa/logistic.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/logistic.cc
@@ -54,7 +54,7 @@ TfLiteStatus LogisticEval(TfLiteContext* context, TfLiteNode* node) {
 
   switch (input->type) {
     case kTfLiteFloat32: {
-#if HIFI_VFPU && (defined(HIFI4) || defined(HIFI5))
+#if HIFI_VFPU && (defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
       const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input);
       const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output);
       const int flat_size = MatchingFlatSize(input_shape, output_shape);
@@ -70,11 +70,11 @@ TfLiteStatus LogisticEval(TfLiteContext* context, TfLiteNode* node) {
                              tflite::micro::GetTensorData<float>(input),
                              tflite::micro::GetTensorShape(output),
                              tflite::micro::GetTensorData<float>(output));
-#endif  // HIFI_VFPU && (defined(HIFI4) || defined(HIFI5))
+#endif  // HIFI_VFPU && (defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
       break;
     }
     case kTfLiteInt8: {
-#if defined(HIFI4) || defined(HIFI5)
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
       const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input);
       const RuntimeShape& output_shape = tflite::micro::GetTensorShape(output);
       const int flat_size = MatchingFlatSize(input_shape, output_shape);
@@ -96,7 +96,7 @@ TfLiteStatus LogisticEval(TfLiteContext* context, TfLiteNode* node) {
           data->input_multiplier, data->input_left_shift,
           NumElements(input->dims), tflite::micro::GetTensorData<int8_t>(input),
          tflite::micro::GetTensorData<int8_t>(output));
-#endif  // defined(HIFI4) || defined(HIFI5)
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
       break;
     }
     case kTfLiteInt16: {
diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc
index 9065388e8ee..94e76a1cb19 100644
--- a/tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc
+++ b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.cc
@@ -1,4 +1,4 @@
-/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -14,1204 +14,469 @@ limitations under the License.
 ==============================================================================*/
 #include "tensorflow/lite/micro/kernels/xtensa/lstm_eval.h"
 
-#include <math.h>
-#include <string.h>
-
-#include <algorithm>
-#include <cstdint>
-#include <memory>
-#include <vector>
-
-#include "tensorflow/lite/c/builtin_op_data.h"
-#include "tensorflow/lite/c/common.h"
-#include "tensorflow/lite/kernels/internal/compatibility.h"
-#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h"
-#include "tensorflow/lite/kernels/internal/tensor_ctypes.h"
-#include "tensorflow/lite/kernels/op_macros.h"
+#include <limits>
+
+#include "tensorflow/lite/kernels/internal/reference/fully_connected.h"
+#include "tensorflow/lite/kernels/internal/reference/integer_ops/fully_connected.h"
+#include "tensorflow/lite/kernels/internal/reference/integer_ops/logistic.h"
+#include "tensorflow/lite/kernels/internal/reference/integer_ops/mul.h"
+#include "tensorflow/lite/kernels/internal/reference/integer_ops/tanh.h"
+#include "tensorflow/lite/kernels/internal/reference/logistic.h"
+#include "tensorflow/lite/kernels/internal/reference/mul.h"
+#include "tensorflow/lite/kernels/internal/reference/tanh.h"
+#include "tensorflow/lite/kernels/internal/types.h"
 #include "tensorflow/lite/micro/kernels/xtensa/xtensa.h"
 
 namespace tflite {
-namespace ops {
-namespace micro {
-namespace lstm_eval {
-namespace {
-
-// Calculates a single LSTM gate, int8x8_16 version.
-// Implements the same functionality as CalculateLstmGateFloat.
-void CalculateLstmGateInteger8x8_16(
-    // Input and weights
-    const int8_t* input, const int8_t* input_to_gate_weights,
-    const int32_t* input_to_gate_bias, const int32_t input_to_gate_scale_a,
-    const int32_t input_to_gate_scale_b,
-    // Output state and weights
-    const int8_t* output_state, const int8_t* recurrent_to_gate_weights,
-    const int32_t* recurrent_to_gate_bias,
-    const int32_t recurrent_to_gate_scale_a,
-    const int32_t recurrent_to_gate_scale_b,
-    // Cell state and weights
-    const int16_t* cell_state, const int16_t* cell_to_gate_weights,
-    const int32_t cell_to_gate_scale_a, const int32_t cell_to_gate_scale_b,
-    // Layer normalization parameters (layer norm LSTM)
-    const int16_t* layer_norm_coefficients, const int32_t* layer_norm_bias,
-    const int32_t layer_norm_input_scale_a,
-    const int32_t layer_norm_input_scale_b,
-    const int32_t layer_norm_variance_guard,
-    // Array sizes
-    const int n_batch, const int n_input, const int n_output, const int n_cell,
-    const TfLiteFusedActivation activation,
-    // Output
-    int16_t* gate,
-    // Parameters for performance optimizations
-    // CpuBackendContext* context,
-    // Scratch arrays
-    int32_t* scratch5) {
-  const bool use_peephole = (cell_to_gate_weights != nullptr);
-  const bool use_layer_norm = (layer_norm_coefficients != nullptr);
-  // Initialize scratch buffers with zeros. Note that unlike float and hybrid
-  // versions, bias is only used in layer normalization.
-  std::fill_n(gate, n_batch * n_cell, 0);
-#if !defined(HIFI5)
-  // For each batch and cell: compute input_weight * input.
-  tensor_utils::PortableMatrixBatchVectorMultiplyAccumulate(
-      input, input_to_gate_bias, input_to_gate_weights, input_to_gate_scale_a,
-      input_to_gate_scale_b, n_batch, n_input, n_cell, 0, scratch5, gate, NULL);
-#else
-  {
-    xa_nn_matXvec_acc_batch_sym8sx8_asym16s(
-        gate, input_to_gate_weights, input, input_to_gate_bias, n_cell, n_input,
-        n_input, input_to_gate_scale_a, input_to_gate_scale_b, 0, n_batch);
+LstmTensors::LstmTensors(TfLiteContext* context, TfLiteNode* node) {
+  micro_context_ = GetMicroContext(context);
+  // 24 internal tensors.
see lstm_shared.h for tensor names + for (size_t i = 0; i < 24; i++) { + internal_tensors_[i] = micro_context_->AllocateTempInputTensor(node, i); } -#endif // !defined(HIFI5) -// Note: no aux_input. + output_tensor_ = + micro_context_->AllocateTempOutputTensor(node, kLstmOutputTensor); +} -// For each batch and cell: compute recurrent_weight * output_state. -#if !defined(HIFI5) - tensor_utils::PortableMatrixBatchVectorMultiplyAccumulate( - output_state, recurrent_to_gate_bias, recurrent_to_gate_weights, - recurrent_to_gate_scale_a, recurrent_to_gate_scale_b, n_batch, n_output, - n_cell, 0, scratch5, gate, NULL); -#else - { - xa_nn_matXvec_acc_batch_sym8sx8_asym16s( - gate, recurrent_to_gate_weights, output_state, recurrent_to_gate_bias, - n_cell, n_output, n_output, recurrent_to_gate_scale_a, - recurrent_to_gate_scale_b, 0, n_batch); - } -#endif // !defined(HIFI5) - // For each batch and cell: compute cell_weight * cell_state (peephole LSTM) - if (use_peephole) { - tensor_utils::PortableVectorBatchVectorCwiseProductAccumulate( - cell_to_gate_weights, n_output, cell_state, n_batch, - cell_to_gate_scale_a, cell_to_gate_scale_b, gate); - } - // Do layer normalization (if layer norm LSTM) - if (use_layer_norm) { - tensor_utils::PortableApplyLayerNorm( - gate, layer_norm_coefficients, layer_norm_bias, - layer_norm_input_scale_a, layer_norm_input_scale_b, - layer_norm_variance_guard, n_batch, n_cell, gate); - } - // Apply activation - switch (activation) { - case kTfLiteActSigmoid: -#if !defined(HIFI5) - tensor_utils::PortableApplySigmoid(gate, n_batch, n_cell, gate); -#else - xa_nn_vec_sigmoid_16_16(gate, gate, n_batch * n_cell); -#endif // !defined(HIFI5) - break; - case kTfLiteActTanh: -#if !defined(HIFI5) - tensor_utils::PortableApplyTanh(3, gate, n_batch, n_cell, gate); -#else - xa_nn_vec_tanh_16_16(gate, gate, 3, n_batch * n_cell); -#endif // !defined(HIFI5) - break; - default: - // Only Sigmoid or Tanh is used. - TFLITE_ASSERT_FALSE; +LstmTensors::~LstmTensors() { + for (size_t i = 0; i < 24; i++) { + if (internal_tensors_[i] != nullptr) { + micro_context_->DeallocateTempTfLiteTensor(internal_tensors_[i]); + } } + micro_context_->DeallocateTempTfLiteTensor(output_tensor_); } -// Updates the LSTM cell state, used by both integer LSTM versions. -// Also see UpdateLstmCellFloat. -// -// Parameters: -// - n_batch, n_cell: sizes of vectors -// - cell_state: input/output vector, size n_batch*n_cell -// - cell_state_scale: scaling factor of cell state. -// - input_gate: input vector, size n_batch*n_cell. -// - forget_gate: input/scratch vector, size n_batch*n_cell, always modified. -// - cell_gate: input vector, size n_batch*n_cell. -// - use_cifg: use 1-forget_gate instead of input_gate. -// - clip: if > 0, clip the resulting cell state to [-clip, +clip]. -void UpdateLstmCellInteger(int n_batch, int n_cell, int16_t* cell_state, - int32_t cell_state_scale, const int16_t* input_gate, - int16_t* forget_gate, const int16_t* cell_gate, - bool use_cifg, int16_t clip) { -#if !defined(HIFI5) - // Use the forget_gate array as scratch, as input_gate array is not allocated - // in CIFG case. (Be careful not to write to the scratch before reading the - // forget gate data.) 
-  int16_t* scratch = forget_gate;
-
-  tensor_utils::PortableCwiseMul(forget_gate, cell_state, n_batch, n_cell, 15,
-                                 cell_state);
-  if (use_cifg) {
-    tensor_utils::PortableSub1Vector(forget_gate, n_batch * n_cell, scratch);
-    tensor_utils::PortableCwiseMul(scratch, cell_gate, n_batch, n_cell,
-                                   30 + cell_state_scale, scratch);
-  } else {
-    tensor_utils::PortableCwiseMul(input_gate, cell_gate, n_batch, n_cell,
-                                   30 + cell_state_scale, scratch);
+// Verify the LSTM internal tensor properties (e.g., type checks)
+// Input/output/states/fc weights tensors are required for kernel evaluation.
+// The state tensors should be variables. Variants of the standard LSTM
+// are not supported here, therefore their corresponding tensors should be
+// invalid.
+TfLiteStatus LstmTensors::ValidateTensorStatus(TfLiteContext* context) const {
+  // Verify certain tensor properties
+  // input tensor
+  TF_LITE_ENSURE(context, internal_tensors_[kLstmInputTensor] != nullptr);
+  // hidden state
+  TF_LITE_ENSURE(context, internal_tensors_[kLstmOutputStateTensor] != nullptr);
+  TF_LITE_ENSURE(context,
+                 internal_tensors_[kLstmOutputStateTensor]->is_variable);
+  // hidden state becomes input so they must have the same type
+  TF_LITE_ENSURE_EQ(context, internal_tensors_[kLstmOutputStateTensor]->type,
+                    internal_tensors_[kLstmInputTensor]->type);
+  // cell state
+  TF_LITE_ENSURE(context, internal_tensors_[kLstmCellStateTensor] != nullptr);
+  TF_LITE_ENSURE(context, internal_tensors_[kLstmCellStateTensor]->is_variable);
+  // output
+  TF_LITE_ENSURE(context, output_tensor_ != nullptr);
+  // output type is the same as the input type (activations)
+  TF_LITE_ENSURE_EQ(context, output_tensor_->type,
+                    internal_tensors_[kLstmInputTensor]->type);
+
+  // weight tensors (1-9, see lstm_shared for index definition)
+  const auto weight_type =
+      internal_tensors_[kLstmInputToForgetWeightsTensor]->type;
+  for (size_t i = 1; i < 9; i++) {
+    TF_LITE_ENSURE(context, internal_tensors_[i] != nullptr);
+    TF_LITE_ENSURE_EQ(context, internal_tensors_[i]->type, weight_type);
   }
-  tensor_utils::PortableCwiseAdd(cell_state, scratch, n_batch, n_cell,
-                                 cell_state);
-  if (clip > 0) {
-    tensor_utils::PortableCwiseClipping(cell_state, n_batch * n_cell, clip);
+  // bias tensors (12-15, see lstm_shared for index definition)
+  const auto bias_type = internal_tensors_[kLstmForgetGateBiasTensor]->type;
+  for (size_t i = 12; i < 16; i++) {
+    TF_LITE_ENSURE(context, internal_tensors_[i] != nullptr);
+    TF_LITE_ENSURE_EQ(context, internal_tensors_[i]->type, bias_type);
   }
-#else
-  if (use_cifg) {
-    calc_cell_state_with_cifg(cell_state, forget_gate, cell_gate, 15,
-                              30 + cell_state_scale, clip, n_batch * n_cell);
-  } else {
-    calc_cell_state_without_cifg(cell_state, forget_gate, cell_gate, input_gate,
-                                 15, 30 + cell_state_scale, clip,
-                                 n_batch * n_cell);
+  // Tensors from LSTM variants are invalid
+  // No peephole
+  for (size_t i = 9; i < 12; i++) {
+    TF_LITE_ENSURE(context, internal_tensors_[i] == nullptr);
   }
-
-#endif  // !defined(HIFI5)
+  // No projection
+  for (size_t i = 16; i < 18; i++) {
+    TF_LITE_ENSURE(context, internal_tensors_[i] == nullptr);
+  }
+  // No internal layer norm
+  for (size_t i = 20; i < 24; i++) {
+    TF_LITE_ENSURE(context, internal_tensors_[i] == nullptr);
+  }
+  return kTfLiteOk;
 }
 
-// Calculates the output state tensor of an LSTM step. See Float and hybrid
-// versions as well.
-//
-// Parameters:
-//  - n_batch: batches: the number of distinct vectors in each array.
-//  - n_cell, n_output: sizes of vectors.
-//  - cell_state, output_gate: input vectors, size n_batch*n_cell.
-//  - cell_state_scale: scaling of cell_state.
-//  - hidden_scale_[a|b]: effective scale of cell_state.*output_gate
-//  - hidden_zp: zero_point for cell_state.*output_gate
-//  - projection_weights, proj_scale_[a|b], projection_bias:
-//      constant inputs, describing projection matrix and bias.
-//  - output_state_zp: zero point of output_state. (Input, calibrated value.)
-//  - quantized_proj_clip: if > 0, clip the output of the projection.
-//  - output_state: output vector, size n_batch*n_output. Must be contiguous.
-//  - context: data for optimized MatrixBatchVectorMultiplyAccumulate.
-//  - scratch0: scratch area of size n_batch*n_cell
-//  - scratch1: scratch area of size n_batch*n_cell
-//  - scratch2: scratch area used by MatrixBatchVectorMultiplyAccumulate
-void CalculateLstmOutputInteger8x8_16(
-    int n_batch, int n_cell, int n_output, const int16_t* cell_state,
-    int32_t cell_state_scale, const int16_t* output_gate,
-    int32_t hidden_scale_a, int32_t hidden_scale_b, int32_t hidden_zp,
-    const int8_t* projection_weights, int32_t proj_scale_a,
-    int32_t proj_scale_b, const int32_t* projection_bias,
-    int32_t output_state_zp, int8_t quantized_proj_clip, int8_t* output_state,
-    int16_t* scratch0, int8_t* scratch1, int32_t* scratch2) {
-// Note: unlike float/hybrid, the activation is always Tanh.
-#if !defined(HIFI5)
-  tensor_utils::PortableApplyTanh(15 + cell_state_scale, cell_state, n_batch,
-                                  n_cell, scratch0);
-#else
-  xa_nn_vec_tanh_16_16(scratch0, cell_state, (15 + cell_state_scale),
-                       n_batch * n_cell);
-#endif  // !defined(HIFI5)
-
-#if !defined(HIFI5)
-  tensor_utils::PortableCwiseMul(output_gate, scratch0, hidden_scale_a,
-                                 hidden_scale_b, n_batch, n_cell, hidden_zp,
-                                 scratch1);
-#else
-  xa_nn_elm_mul_16x16_asym8s(scratch1, output_gate, scratch0, hidden_scale_a,
-                             hidden_scale_b, hidden_zp, n_batch * n_cell);
-#endif  // !defined(HIFI5)
-
-  const bool use_projection = (projection_weights != nullptr);
-
-  if (use_projection) {
-    // Note: no bias like in float/hybrid
-    std::fill_n(output_state, n_batch * n_output, 0);
-    tensor_utils::PortableMatrixBatchVectorMultiplyAccumulate(
-        scratch1, projection_bias, projection_weights, proj_scale_a,
-        proj_scale_b, n_batch, n_cell, n_output, output_state_zp, scratch2,
-        output_state, NULL);
-    if (quantized_proj_clip > 0) {
-      tensor_utils::PortableCwiseClipping(output_state, n_batch * n_output,
-                                          quantized_proj_clip);
+namespace lstm_internal {
+
+#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+const int32_t kInt16Max = std::numeric_limits<int16_t>::max();
+const int32_t kInt16Min = std::numeric_limits<int16_t>::min();
+#endif
+
+void AddElementWise(const int16_t* input_1, const int16_t* input_2, int n_batch,
+                    int n_input, int16_t* output) {
+#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+  for (int batch = 0; batch < n_batch; ++batch) {
+    for (int i = 0; i < n_input; ++i) {
+      const int index = batch * n_input + i;
+      int32_t sum = input_1[index] + input_2[index];
+      const int32_t sum_clamped = std::min(kInt16Max, std::max(kInt16Min, sum));
+      output[index] = static_cast<int16_t>(sum_clamped);
     }
-  } else {
-    std::copy_n(scratch1, n_batch * n_output, output_state);
   }
+#else
+  xa_nn_elm_add_16x16_16(output, input_1, input_2, n_batch * n_input);
+#endif
 }
 
-// Calculates a single LSTM gate, int8x8_8 version.
-// Implements the same functionality as CalculateLstmGateFloat.
-void CalculateLstmGateInteger8x8_8(
-    // Inputs and weights
-    const int8_t* input, int32_t input_zp, const int8_t* input_to_gate_weight,
-    const int32_t input_to_gate_scale_a, const int32_t input_to_gate_scale_b,
-    const int32_t input_times_weights_scale_a,
-    const int32_t input_times_weights_scale_b,
-    const int32_t input_times_weights_zp,
-    // Output state and weights
-    const int8_t* output_state, const int32_t output_state_zp,
-    const int8_t* recurrent_to_gate_weight,
-    const int32_t recurrent_to_gate_scale_a,
-    const int32_t recurrent_to_gate_scale_b,
-    const int32_t output_state_times_weights_scale_a,
-    const int32_t output_state_times_weights_scale_b,
-    const int32_t output_state_times_weights_zp,
-    // Layer normalization parameters (layer norm LSTM)
-    const int16_t* layer_norm_gate_weight,
-    const int32_t layer_norm_gate_scale_a,
-    const int32_t layer_norm_gate_scale_b, const int32_t* gate_bias,
-    // Array sizes
-    const int n_batch, const int n_input, const int n_output, const int n_cell,
-    const TfLiteFusedActivation activation,
-    // Output
-    int16_t* gate,
-    // Scratch arrays, both sized n_batch*n_cell
-    int8_t* scratch0, int8_t* scratch1) {
-  // Multiply input * input_weights => scratch0
-  tensor_utils::PortableMatrixBatchVectorMultiply(
-      input, input_zp, input_to_gate_weight, input_to_gate_scale_a,
-      input_to_gate_scale_b, n_batch, n_input, n_cell, scratch0,
-      input_times_weights_zp);
-  // Multiply output_state * recurrent_weights => scratch1
-  tensor_utils::PortableMatrixBatchVectorMultiply(
-      output_state, output_state_zp, recurrent_to_gate_weight,
-      recurrent_to_gate_scale_a, recurrent_to_gate_scale_b, n_batch, n_output,
-      n_cell, scratch1, output_state_times_weights_zp);
-  // Add scratch0 + scratch1 => gate
-  tensor_utils::PortableTwoGateSaturatingAdd(
-      scratch0, input_times_weights_zp, scratch1, output_state_times_weights_zp,
-      input_times_weights_scale_a, input_times_weights_scale_b,
-      output_state_times_weights_scale_a, output_state_times_weights_scale_b,
-      n_batch, n_cell, gate);
-  // Apply layer normalization.
-  tensor_utils::PortableApplyLayerNormFloat(
-      gate, layer_norm_gate_weight, layer_norm_gate_scale_a,
-      layer_norm_gate_scale_b, gate_bias, n_batch, n_cell, gate);
-  // Apply activation.
-  switch (activation) {
-    case kTfLiteActSigmoid:
-      tensor_utils::PortableApplySigmoidFloat(gate, n_batch, n_cell, gate);
-      break;
-    case kTfLiteActTanh:
-      tensor_utils::PortableApplyTanhFloat(gate, n_batch, n_cell, -12, gate);
-      break;
-    default:
-      // Only Sigmoid or Tanh is used.
-      TFLITE_ASSERT_FALSE;
+void AddElementWise(const float* input_1, const float* input_2, int n_batch,
+                    int n_input, float* output) {
+  for (int batch = 0; batch < n_batch; ++batch) {
+    for (int i = 0; i < n_input; ++i) {
+      const int index = batch * n_input + i;
+      output[index] = input_1[index] + input_2[index];
+    }
   }
 }
 
+#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+void Sigmoid(const RuntimeShape& data_shape, int16_t* data) {
+  reference_integer_ops::Logistic(
+      0 /*data->input_multiplier*/, 0 /*data->input_left_shift */,
+      data_shape.FlatSize() /*NumElements(input->dims)*/,
+      data /* tflite::micro::GetTensorData<int16_t>(input) */,
+      data /*tflite::micro::GetTensorData<int16_t>(output) */);
+}
+
-// Calculates the output state tensor of an LSTM step. See Float and hybrid
-// versions as well.
-//
-// Parameters:
-//  - n_batch: batches: the number of distinct vectors in each array.
-//  - n_cell, n_output: sizes of vectors.
-//  - cell_state, output_gate: input vectors, size n_batch*n_cell.
-//  - projection_weights, proj_scale_[a|b], projection_bias:
-//      constant inputs, describing projection matrix and bias.
-//  - output_state_zp: zero point of the output state.
-//  - quantized_proj_clip: if > 0, clip the output of the projection.
-//  - output_state: output vector, size n_batch*n_output. Must be contiguous.
-// - scratch: scratch area of size n_batch*n_cell
-void CalculateLstmOutputInteger8x8_8(
-    int n_batch, int n_cell, int n_output, const int16_t* cell_state,
-    const int16_t* output_gate, const int8_t* projection_weights,
-    int32_t proj_scale_a, int32_t proj_scale_b, const int32_t* projection_bias,
-    int32_t output_state_zp, int32_t quantized_proj_clip, int8_t* output_state,
-    int16_t* scratch) {
-  // Note: unlike float/hybrid, the activation is always Tanh.
-  tensor_utils::PortableApplyTanhFloat(cell_state, n_batch, n_cell, -15,
-                                       scratch);
-  tensor_utils::PortableCwiseMul(output_gate, scratch, n_batch, n_cell,
-                                 15 + 15 - 15, scratch);
-  // Note: no bias like in float/hybrid
-  tensor_utils::PortableMatrixBatchVectorMultiply(
-      scratch, projection_weights, proj_scale_a, proj_scale_b, projection_bias,
-      n_batch, n_cell, n_output, output_state_zp, output_state);
-  if (quantized_proj_clip > 0) {
-    tensor_utils::PortableCwiseClipping(output_state, n_batch * n_output,
-                                        (int8_t)quantized_proj_clip);
-  }
+#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+void Sigmoid(const RuntimeShape& data_shape, int16_t* data) {
+  reference_integer_ops::Logistic(
+      0 /*data->input_multiplier*/, 0 /*data->input_left_shift */,
+      data_shape.FlatSize() /*NumElements(input->dims)*/,
+      data /* tflite::micro::GetTensorData<int16_t>(input) */,
+      data /*tflite::micro::GetTensorData<int16_t>(output) */);
 }
-// Fully quantized lstm kernel for 16 bit gate matmul output.
-//
-// Input tensor of size n_batch * n_input:
-//   input_ptr
-//
-// LSTM weights:
-// Quantized input weights of size 'n_cell * n_input':
-//   input_to_input_weight_ptr            - optional
-//   input_to_forget_weight_ptr           - optional
-//   input_to_cell_weight_ptr             - optional
-//   input_to_output_weight_ptr           - optional
-//
-// Quantized recurrent weights of size 'n_cell * n_output':
-//   recurrent_to_input_weight_ptr        - optional
-//   recurrent_to_forget_weights_ptr
-//   recurrent_to_cell_weights_ptr
-//   recurrent_to_input_weights_ptr
-//
-// Quantized peephole weights of size 'n_cell', representing diagonal matrices.
-//   cell_to_input_weights                - optional
-//   cell_to_cell_weights                 - optional
-//   cell_to_output_weights               - optional
-//
-// Quantized projection weights of size 'n_output * n_cell'
-//   projection_weight_ptr                - optional
-//
-// Weight scales (scalars) for each of the weights above.
-//   effective_input_to_input_scale_a     - optional
-//   effective_input_to_input_scale_b     - optional
-//   effective_input_to_forget_scale_a
-//   effective_input_to_forget_scale_b
-//   effective_input_to_cell_scale_a
-//   effective_input_to_cell_scale_b
-//   effective_input_to_output_scale_a
-//   effective_input_to_output_scale_b
-//   effective_recurrent_to_input_scale_a - optional
-//   effective_recurrent_to_input_scale_b - optional
-//   effective_recurrent_to_forget_scale_a
-//   effective_recurrent_to_forget_scale_b
-//   effective_recurrent_to_cell_scale_a
-//   effective_recurrent_to_cell_scale_b
-//   effective_recurrent_to_output_scale_a
-//   effective_recurrent_to_output_scale_b
-//   effective_proj_scale_a               - optional
-//   effective_proj_scale_b               - optional
-//
-// Gate biases of size 'n_cell':
-//   input_gate_bias_ptr                  - optional
-//   forget_gate_bias_ptr
-//   cell_gate_bias_ptr
-//   output_gate_bias_ptr
-//
-// Layer norm coefficients of size 'n_cell', representing diagonal matrices.
-//   layer_norm_input_weight_ptr          - optional
-//   layer_norm_forget_weight_ptr         - optional
-//   layer_norm_cell_weight_ptr           - optional
-//   layer_norm_output_weight_ptr         - optional
-//
-// Layer norm scales of size 'n_cell'.
-// layer_norm_input_scale_a - optional -// layer_norm_input_scale_b - optional -// layer_norm_forget_scale_a - optional -// layer_norm_forget_scale_b - optional -// layer_norm_cell_scale_a - optional -// layer_norm_cell_scale_b - optional -// layer_norm_output_scale_a - optional -// layer_norm_output_scale_b - optional -// -// Scalar values: -// quantized_cell_clip: quantized clip value for cell. -// quantized_proj_clip: quantized clip value for projection. -// cell_state_scale: the power of two scale for cell state. -// -// Zero points: -// output_state_zp: zero point of output state -// hidden_zp: zero point for hidden state. -// -// Temporary pre-allocated storage for the calculation. Each is of size n_cell * -// n_batch. -// scratch0 -// scratch1 -// scratch2 -// scratch3 -// scratch4 -// scratch5: this scratch buffer is created purely for optimizing the -// MatrixBatchVectorMultiplyAccumulate. -// -// Outputs: -// output_state_ptr - size 'n_batch * n_output' -// cell_state_ptr - size 'n_batch * n_cell' -// output_ptr - size 'n_batch * n_output' -// TODO(b/159947023): scratch0 is not used if (!cifg). Don't allocate then. -inline void LstmStepInteger8x8_16( - const int8_t* input_ptr, const int8_t* input_to_input_weight_ptr, - int32_t effective_input_to_input_scale_a, - int32_t effective_input_to_input_scale_b, - const int8_t* input_to_forget_weight_ptr, - int32_t effective_input_to_forget_scale_a, - int32_t effective_input_to_forget_scale_b, - const int8_t* input_to_cell_weight_ptr, - int32_t effective_input_to_cell_scale_a, - int32_t effective_input_to_cell_scale_b, - const int8_t* input_to_output_weight_ptr, - int32_t effective_input_to_output_scale_a, - int32_t effective_input_to_output_scale_b, - const int8_t* recurrent_to_input_weight_ptr, - int32_t effective_recurrent_to_input_scale_a, - int32_t effective_recurrent_to_input_scale_b, - const int8_t* recurrent_to_forget_weight_ptr, - int32_t effective_recurrent_to_forget_scale_a, - int32_t effective_recurrent_to_forget_scale_b, - const int8_t* recurrent_to_cell_weight_ptr, - int32_t effective_recurrent_to_cell_scale_a, - int32_t effective_recurrent_to_cell_scale_b, - const int8_t* recurrent_to_output_weight_ptr, - int32_t effective_recurrent_to_output_scale_a, - int32_t effective_recurrent_to_output_scale_b, - const int16_t* cell_to_input_weight_ptr, - int32_t effective_cell_to_input_scale_a, - int32_t effective_cell_to_input_scale_b, - const int16_t* cell_to_forget_weight_ptr, - int32_t effective_cell_to_forget_scale_a, - int32_t effective_cell_to_forget_scale_b, - const int16_t* cell_to_output_weight_ptr, - int32_t effective_cell_to_output_scale_a, - int32_t effective_cell_to_output_scale_b, - const int8_t* projection_weight_ptr, int32_t effective_proj_scale_a, - int32_t effective_proj_scale_b, int32_t hidden_zp, - int32_t effective_hidden_scale_a, int32_t effective_hidden_scale_b, - const int16_t* layer_norm_input_weight_ptr, - int32_t layer_norm_input_scale_a, int32_t layer_norm_input_scale_b, - const int16_t* layer_norm_forget_weight_ptr, - int32_t layer_norm_forget_scale_a, int32_t layer_norm_forget_scale_b, - const int16_t* layer_norm_cell_weight_ptr, int32_t layer_norm_cell_scale_a, - int32_t layer_norm_cell_scale_b, - const int16_t* layer_norm_output_weight_ptr, - int32_t layer_norm_output_scale_a, int32_t layer_norm_output_scale_b, - const int32_t* input_gate_bias_ptr, const int32_t* forget_gate_bias_ptr, - const int32_t* cell_gate_bias_ptr, const int32_t* output_gate_bias_ptr, - int16_t quantized_cell_clip, int8_t 
quantized_proj_clip, - int32_t cell_state_scale, int32_t input_variance_guard, - int32_t forget_variance_guard, int32_t cell_variance_guard, - int32_t output_variance_guard, - const int32_t* input_to_forget_effective_bias, - const int32_t* recurrent_to_forget_effective_bias, - const int32_t* input_to_cell_effective_bias, - const int32_t* recurrent_to_cell_effective_bias, - const int32_t* input_to_output_effective_bias, - const int32_t* recurrent_to_output_effective_bias, - const int32_t* input_to_input_effective_bias, - const int32_t* recurrent_to_input_effective_bias, - const int32_t* projection_effective_bias, int n_batch, int n_cell, - int n_input, int n_output, int8_t* output_state_ptr, - int32_t output_state_zp, int16_t* cell_state_ptr, int8_t* output_ptr, - int16_t* scratch0, int16_t* scratch1, int16_t* scratch2, int16_t* scratch3, - int8_t* scratch4, int32_t* scratch5) { - // ruy::profiler::ScopeLabel label("LstmStepInteger8x8_16"); - // Make named scratch buffers for the different gates. - int16_t* input_gate_scratch = scratch0; - int16_t* forget_gate_scratch = scratch1; - int16_t* cell_gate_scratch = scratch2; - int16_t* output_gate_scratch = scratch3; - - // Since we have already checked that weights are all there or none, we - // can check the existence of only one to the get the condition. - const bool use_cifg = (input_to_input_weight_ptr == nullptr); +void Sigmoid(const RuntimeShape& data_shape, float* data) { + reference_ops::Logistic(data_shape, data, data_shape, data); +} - // Check for nullptrs. - TFLITE_DCHECK(input_to_forget_effective_bias); - TFLITE_DCHECK(recurrent_to_forget_effective_bias); - TFLITE_DCHECK(input_to_cell_effective_bias); - TFLITE_DCHECK(recurrent_to_cell_effective_bias); - TFLITE_DCHECK(input_to_output_effective_bias); - TFLITE_DCHECK(recurrent_to_output_effective_bias); - if (!use_cifg) { - TFLITE_DCHECK(input_to_input_effective_bias); - TFLITE_DCHECK(recurrent_to_input_effective_bias); - } - const bool use_projection = (projection_weight_ptr != nullptr); - if (use_projection) { - TFLITE_DCHECK(projection_effective_bias); - } - if (!use_cifg) { - // Calculate the input gate. (If not CIFG.) - CalculateLstmGateInteger8x8_16( - input_ptr, input_to_input_weight_ptr, input_to_input_effective_bias, - effective_input_to_input_scale_a, effective_input_to_input_scale_b, - output_state_ptr, recurrent_to_input_weight_ptr, - recurrent_to_input_effective_bias, effective_recurrent_to_input_scale_a, - effective_recurrent_to_input_scale_b, cell_state_ptr, - cell_to_input_weight_ptr, effective_cell_to_input_scale_a, - effective_cell_to_input_scale_b, layer_norm_input_weight_ptr, - input_gate_bias_ptr, layer_norm_input_scale_a, layer_norm_input_scale_b, - input_variance_guard, n_batch, n_input, n_output, n_cell, - kTfLiteActSigmoid, input_gate_scratch, scratch5); +void Tanh(int32_t cell_state_scale_power, const RuntimeShape& input_data_shape, + int16_t* input_data, const RuntimeShape& output_data_shape, + int16_t* output_data) { + int32_t tanh_input_left_shift = (15 + cell_state_scale_power) - 3; + int32_t input_multiplier = 0; + if (tanh_input_left_shift < 0) /* handling negative shift value */ + { + tanh_input_left_shift = -tanh_input_left_shift; + input_multiplier = 3; } - // Calculate the forget gate. 
- CalculateLstmGateInteger8x8_16( - input_ptr, input_to_forget_weight_ptr, input_to_forget_effective_bias, - effective_input_to_forget_scale_a, effective_input_to_forget_scale_b, - output_state_ptr, recurrent_to_forget_weight_ptr, - recurrent_to_forget_effective_bias, effective_recurrent_to_forget_scale_a, - effective_recurrent_to_forget_scale_b, cell_state_ptr, - cell_to_forget_weight_ptr, effective_cell_to_forget_scale_a, - effective_cell_to_forget_scale_b, layer_norm_forget_weight_ptr, - forget_gate_bias_ptr, layer_norm_forget_scale_a, - layer_norm_forget_scale_b, forget_variance_guard, n_batch, n_input, - n_output, n_cell, kTfLiteActSigmoid, forget_gate_scratch, scratch5); - // Calculate the cell update gate. - CalculateLstmGateInteger8x8_16( - input_ptr, input_to_cell_weight_ptr, input_to_cell_effective_bias, - effective_input_to_cell_scale_a, effective_input_to_cell_scale_b, - output_state_ptr, recurrent_to_cell_weight_ptr, - recurrent_to_cell_effective_bias, effective_recurrent_to_cell_scale_a, - effective_recurrent_to_cell_scale_b, cell_state_ptr, - /*cell_to_gate_weights=*/nullptr, /*cell_to_gate_scale_a=*/0, - /*cell_to_gate_scale_b=*/0, layer_norm_cell_weight_ptr, - cell_gate_bias_ptr, layer_norm_cell_scale_a, layer_norm_cell_scale_b, - cell_variance_guard, n_batch, n_input, n_output, n_cell, kTfLiteActTanh, - cell_gate_scratch, scratch5); - // Update the cell state. - UpdateLstmCellInteger(n_batch, n_cell, cell_state_ptr, cell_state_scale, - input_gate_scratch, forget_gate_scratch, - cell_gate_scratch, use_cifg, quantized_cell_clip); - // Calculate the output gate. - CalculateLstmGateInteger8x8_16( - input_ptr, input_to_output_weight_ptr, input_to_output_effective_bias, - effective_input_to_output_scale_a, effective_input_to_output_scale_b, - output_state_ptr, recurrent_to_output_weight_ptr, - recurrent_to_output_effective_bias, effective_recurrent_to_output_scale_a, - effective_recurrent_to_output_scale_b, cell_state_ptr, - cell_to_output_weight_ptr, effective_cell_to_output_scale_a, - effective_cell_to_output_scale_b, layer_norm_output_weight_ptr, - output_gate_bias_ptr, layer_norm_output_scale_a, - layer_norm_output_scale_b, output_variance_guard, n_batch, n_input, - n_output, n_cell, kTfLiteActSigmoid, output_gate_scratch, scratch5); - // Update the output state. - CalculateLstmOutputInteger8x8_16( - n_batch, n_cell, n_output, cell_state_ptr, cell_state_scale, - output_gate_scratch, effective_hidden_scale_a, effective_hidden_scale_b, - hidden_zp, projection_weight_ptr, effective_proj_scale_a, - effective_proj_scale_b, projection_effective_bias, output_state_zp, - quantized_proj_clip, output_state_ptr, scratch0, scratch4, scratch5); - // Copy output state to the output. Note that unlike float or hybrid, output - // is always contiguous. - std::copy_n(output_state_ptr, n_batch * n_output, output_ptr); + reference_integer_ops::Tanh(input_multiplier, tanh_input_left_shift, + input_data_shape, input_data, output_data_shape, + output_data); } -// Fully quantized lstm kernel for 8 bit gate matmul output. 
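The negative-shift handling in the new Tanh wrapper above is easier to see with numbers. The cell state is an int16 with scale 2^cell_state_scale_power, and the constant 3 reflects the integer bits the integer tanh kernel reserves in its fixed-point input format; when (15 + cell_state_scale_power) - 3 comes out negative, the wrapper negates the shift and passes a nonzero input_multiplier so the kernel scales the input down rather than up. A minimal worked sketch of that arithmetic (assumes the Q-format reading just described; not code from the patch):

#include <cstdint>
#include <cstdio>

int main() {
  // Cell state quantized with scale 2^-11, i.e. 11 fractional bits:
  int32_t cell_state_scale_power = -11;
  int32_t left_shift = (15 + cell_state_scale_power) - 3;  // (15 - 11) - 3 = 1
  std::printf("left shift = %d\n", left_shift);

  // With scale 2^-15 the result is (15 - 15) - 3 = -3; the wrapper flips the
  // sign and sets input_multiplier so the kernel divides instead of shifting.
  cell_state_scale_power = -15;
  std::printf("left shift = %d\n", (15 + cell_state_scale_power) - 3);
  return 0;
}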
-// -// Input tensor of size n_batch * n_input: -// input_ptr -// -// LSTM weights: -// Quantized input weights of size 'n_cell * n_input': -// input_to_input_weight_ptr - optional -// input_to_forget_weight_ptr - optional -// input_to_cell_weight_ptr - optional -// input_to_output_weight_ptr - optional -// -// Quantized recurrent weights of size 'n_cell * n_output': -// recurrent_to_input_weight_ptr - optional -// recurrent_to_forget_weights_ptr -// recurrent_to_cell_weights_ptr -// recurrent_to_input_weights_ptr -// -// Quantized peephole weights of size 'n_cell', representing diagonal matrices. -// cell_to_input_weights - optional -// cell_to_cell_weights - optional -// cell_to_output_weights - optional -// -// Quantized projection weights of size 'n_output * n_cell' -// projection_weight_ptr - optional -// -// Weight scales (scalars) for each of the weights above. -// effective_input_to_input_scale_a - optional -// effective_input_to_input_scale_b - optional -// effective_input_to_forget_scale_a -// effective_input_to_forget_scale_b -// effective_input_to_cell_scale_a -// effective_input_to_cell_scale_b -// effective_input_to_output_scale_a -// effective_input_to_output_scale_b -// effective_recurrent_to_input_scale_a - optional -// effective_recurrent_to_input_scale_b - optional -// effective_recurrent_to_forget_scale_a -// effective_recurrent_to_forget_scale_b -// effective_recurrent_to_cell_scale_a -// effective_recurrent_to_cell_scale_b -// effective_recurrent_to_output_scale_a -// effective_recurrent_to_output_scale_b -// effective_proj_scale_a - optional -// effective_proj_scale_b - optional -// -// Gate biases of size 'n_cell': -// input_gate_bias_ptr - optional -// forget_gate_bias_ptr -// cell_gate_bias_ptr -// output_gate_bias_ptr -// -// Layer norm coefficients of size 'n_cell', representing diagonal matrices. -// layer_norm_input_weight_ptr - optional -// layer_norm_forget_weight_ptr - optional -// layer_norm_cell_weight_ptr - optional -// layer_norm_output_weight_ptr - optional -// -// Layer norm scales of size 'n_cell'. -// layer_norm_input_scale_a - optional -// layer_norm_input_scale_b - optional -// layer_norm_forget_scale_a - optional -// layer_norm_forget_scale_b - optional -// layer_norm_cell_scale_a - optional -// layer_norm_cell_scale_b - optional -// layer_norm_output_scale_a - optional -// layer_norm_output_scale_b - optional -// -// Scalar values: -// quantized_cell_clip: quantized clip value for cell. -// quantized_proj_clip: quantized clip value for projection. -// cell_state_scale: the power of two scale for cell state. -// -// Zero points: -// output_state_zp: zero point of output state. -// hidden_zp: zero point for hidden state. -// -// Temporary pre-allocated storage for the calculation. Each is of size n_cell * -// n_batch. -// scratch0 -// scratch1 -// scratch2 -// scratch3 -// scratch4 -// scratch5 -// scratch6 -// scratch7 -// -// Outputs: -// output_state_ptr - size 'n_batch * n_output' -// cell_state_ptr - size 'n_batch * n_cell' -// output_ptr - size 'n_batch * n_output' -// TODO(b/148688698): Move zero point calculation into Prepare(). -// TODO(b/159947023): scratch5 is unused, remove. 
-inline void LstmStepInteger8x8_8( - const int8_t* input_ptr, int32_t input_zp, - const int8_t* input_to_input_weight_ptr, - int32_t effective_input_to_input_scale_a, - int32_t effective_input_to_input_scale_b, - const int8_t* input_to_forget_weight_ptr, - int32_t effective_input_to_forget_scale_a, - int32_t effective_input_to_forget_scale_b, - const int8_t* input_to_cell_weight_ptr, - int32_t effective_input_to_cell_scale_a, - int32_t effective_input_to_cell_scale_b, - const int8_t* input_to_output_weight_ptr, - int32_t effective_input_to_output_scale_a, - int32_t effective_input_to_output_scale_b, - const int8_t* recurrent_to_input_weight_ptr, - int32_t effective_recurrent_to_input_scale_a, - int32_t effective_recurrent_to_input_scale_b, - const int8_t* recurrent_to_forget_weight_ptr, - int32_t effective_recurrent_to_forget_scale_a, - int32_t effective_recurrent_to_forget_scale_b, - const int8_t* recurrent_to_cell_weight_ptr, - int32_t effective_recurrent_to_cell_scale_a, - int32_t effective_recurrent_to_cell_scale_b, - const int8_t* recurrent_to_output_weight_ptr, - int32_t effective_recurrent_to_output_scale_a, - int32_t effective_recurrent_to_output_scale_b, - const int8_t* cell_to_input_weight_ptr, - int32_t effective_cell_to_input_scale_a, - int32_t effective_cell_to_input_scale_b, - const int8_t* cell_to_forget_weight_ptr, - int32_t effective_cell_to_forget_scale_a, - int32_t effective_cell_to_forget_scale_b, - const int8_t* cell_to_output_weight_ptr, - int32_t effective_cell_to_output_scale_a, - int32_t effective_cell_to_output_scale_b, - const int8_t* projection_weight_ptr, int32_t effective_proj_scale_a, - int32_t effective_proj_scale_b, const int16_t* layer_norm_input_weight_ptr, - int32_t layer_norm_input_scale_a, int32_t layer_norm_input_scale_b, - const int16_t* layer_norm_forget_weight_ptr, - int32_t layer_norm_forget_scale_a, int32_t layer_norm_forget_scale_b, - const int16_t* layer_norm_cell_weight_ptr, int32_t layer_norm_cell_scale_a, - int32_t layer_norm_cell_scale_b, - const int16_t* layer_norm_output_weight_ptr, - int32_t layer_norm_output_scale_a, int32_t layer_norm_output_scale_b, - const int32_t* input_gate_bias_ptr, const int32_t* forget_gate_bias_ptr, - const int32_t* cell_gate_bias_ptr, const int32_t* output_gate_bias_ptr, - const int32_t* projection_bias_ptr, const TfLiteLSTMParams* params, - const int32_t* intermediate_scale_a, const int32_t* intermediate_scale_b, - const int32_t* intermediate_zp, int16_t quantized_cell_clip, - int8_t quantized_proj_clip, int n_batch, int n_cell, int n_input, - int n_output, int output_batch_leading_dim, int8_t* output_state_ptr, - int32_t output_state_zp, int16_t* cell_state_ptr, int8_t* output_ptr, - int8_t* scratch0, int8_t* scratch1, int16_t* scratch2, int16_t* scratch3, - int16_t* scratch4, int16_t* scratch5, int16_t* scratch6, - int16_t* scratch7) { - // TODO(b/159066113): scratch5 is unused, remove. - - // ruy::profiler::ScopeLabel label("LstmStepInteger8x8_8"); - // Make named scratch buffers for the different gates. - int16_t* forget_gate_scratch = scratch2; - int16_t* cell_gate_scratch = scratch3; - int16_t* output_gate_scratch = scratch4; - // no-CIFG is not supported here - - // Calculate the forget gate. 
- CalculateLstmGateInteger8x8_8( - input_ptr, input_zp, input_to_forget_weight_ptr, - effective_input_to_forget_scale_a, effective_input_to_forget_scale_b, - intermediate_scale_a[2], intermediate_scale_b[2], intermediate_zp[4], - output_state_ptr, output_state_zp, recurrent_to_forget_weight_ptr, - effective_recurrent_to_forget_scale_a, - effective_recurrent_to_forget_scale_b, intermediate_scale_a[3], - intermediate_scale_b[3], intermediate_zp[5], layer_norm_forget_weight_ptr, - layer_norm_forget_scale_a, layer_norm_forget_scale_b, - forget_gate_bias_ptr, n_batch, n_input, n_output, n_cell, - kTfLiteActSigmoid, forget_gate_scratch, scratch0, scratch1); - // Calculate the cell update gate. - CalculateLstmGateInteger8x8_8( - input_ptr, input_zp, input_to_cell_weight_ptr, - effective_input_to_cell_scale_a, effective_input_to_cell_scale_b, - intermediate_scale_a[4], intermediate_scale_b[4], intermediate_zp[7], - output_state_ptr, output_state_zp, recurrent_to_cell_weight_ptr, - effective_recurrent_to_cell_scale_a, effective_recurrent_to_cell_scale_b, - intermediate_scale_a[5], intermediate_scale_b[5], intermediate_zp[8], - layer_norm_cell_weight_ptr, layer_norm_cell_scale_a, - layer_norm_cell_scale_b, cell_gate_bias_ptr, n_batch, n_input, n_output, - n_cell, kTfLiteActTanh, cell_gate_scratch, scratch0, scratch1); - // Update the cell state. - UpdateLstmCellInteger(n_batch, n_cell, cell_state_ptr, - /*cell_state_scale=*/-15, /*input_gate=*/nullptr, - forget_gate_scratch, cell_gate_scratch, - /*use_cifg=*/true, quantized_cell_clip); - // Calculate the output gate. - CalculateLstmGateInteger8x8_8( - input_ptr, input_zp, input_to_output_weight_ptr, - effective_input_to_output_scale_a, effective_input_to_output_scale_b, - intermediate_scale_a[6], intermediate_scale_b[6], intermediate_zp[10], - output_state_ptr, output_state_zp, recurrent_to_output_weight_ptr, - effective_recurrent_to_output_scale_a, - effective_recurrent_to_output_scale_b, intermediate_scale_a[11], - intermediate_scale_b[7], intermediate_zp[7], layer_norm_output_weight_ptr, - layer_norm_output_scale_a, layer_norm_output_scale_b, - output_gate_bias_ptr, n_batch, n_input, n_output, n_cell, - kTfLiteActSigmoid, output_gate_scratch, scratch0, scratch1); - // Update the output state. - CalculateLstmOutputInteger8x8_8( - n_batch, n_cell, n_output, cell_state_ptr, output_gate_scratch, - projection_weight_ptr, effective_proj_scale_a, effective_proj_scale_b, - projection_bias_ptr, output_state_zp, quantized_proj_clip, - output_state_ptr, scratch2); - // Copy output state to the output. Note that unlike float or hybrid, output - // is always contiguous. 
- std::copy_n(output_state_ptr, n_batch * n_output, output_ptr); +void Tanh(int32_t cell_state_scale_power, const RuntimeShape& input_data_shape, + float* input_data, const RuntimeShape& output_data_shape, + float* output_data) { + reference_ops::Tanh(input_data_shape, input_data, output_data_shape, + output_data); } -} // namespace - -// LINT.ThenChange(//tensorflow/lite/tools/optimize/calibration/builtin_logging_ops/lstm.cc) -TfLiteStatus EvalInteger8x8_16( - TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input, - const TfLiteEvalTensor* input_to_input_weights, - const TfLiteEvalTensor* input_to_forget_weights, - const TfLiteEvalTensor* input_to_cell_weights, - const TfLiteEvalTensor* input_to_output_weights, - const TfLiteEvalTensor* recurrent_to_input_weights, - const TfLiteEvalTensor* recurrent_to_forget_weights, - const TfLiteEvalTensor* recurrent_to_cell_weights, - const TfLiteEvalTensor* recurrent_to_output_weights, - const TfLiteEvalTensor* cell_to_input_weights, - const TfLiteEvalTensor* cell_to_forget_weights, - const TfLiteEvalTensor* cell_to_output_weights, - const TfLiteEvalTensor* input_layer_norm_coefficients, - const TfLiteEvalTensor* forget_layer_norm_coefficients, - const TfLiteEvalTensor* cell_layer_norm_coefficients, - const TfLiteEvalTensor* output_layer_norm_coefficients, - const TfLiteEvalTensor* input_gate_bias, - const TfLiteEvalTensor* forget_gate_bias, - const TfLiteEvalTensor* cell_gate_bias, - const TfLiteEvalTensor* output_gate_bias, - const TfLiteEvalTensor* projection_weights, - const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params, - bool forward_sequence, bool time_major, - const lstm_eval::IntegerLstmParameter* integer_lstm_param, - TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state, - TfLiteEvalTensor* output, TfLiteEvalTensor* scratch0, - TfLiteEvalTensor* scratch1, TfLiteEvalTensor* scratch2, - TfLiteEvalTensor* scratch3, TfLiteEvalTensor* scratch4, - TfLiteEvalTensor* scratch5) { - TFLITE_DCHECK(input->dims->size >= 2 && input->dims->size <= 3); - const int n_input = input->dims->data[input->dims->size - 1]; - int max_time, n_batch; - if (input->dims->size == 2) { - max_time = 1; - n_batch = input->dims->data[0]; - } else { - max_time = (time_major) ? input->dims->data[0] : input->dims->data[1]; - n_batch = (time_major) ? input->dims->data[1] : input->dims->data[0]; - } - - // n_cell and n_output will be the same size when there is no projection. - const int n_cell = input_to_output_weights->dims->data[0]; - const int n_output = recurrent_to_output_weights->dims->data[1]; +// Input and output have the same shape in LSTM +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const int16_t* input1_data, const int16_t* input2_data, + int8_t* output_data) { + return reference_integer_ops::MulElementwise( + shape.FlatSize(), params, input1_data, input2_data, output_data); +} - // Activation zero point - // TODO@is data.output_zero_point equal to output_state->params.zero_point - // int output_state_zp = output_state->params.zero_point; - int output_state_zp = 0; +// Input and output have the same shape in LSTM +void Mul(const RuntimeShape& shape, const ArithmeticParams& params, + const int16_t* input1_data, const int16_t* input2_data, + int16_t* output_data) { + return reference_integer_ops::MulElementwise( + shape.FlatSize(), params, input1_data, input2_data, output_data); +} - // Get params for time/batch/sequence. 
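Of the Mul overloads introduced here, the int8-output variant serves the hidden-state update: both factors (the sigmoided output gate and tanh of the cell state) are symmetric int16, while the result is the asymmetric int8 hidden state, so the ArithmeticParams carry an output zero point. A hedged usage sketch follows; the multiplier, shift, and offset values are placeholders, and in practice they come from the CreateInterGateMulParams helper declared in the header later in this patch:

#include <cstdint>
#include <limits>

#include "tensorflow/lite/micro/kernels/xtensa/lstm_eval.h"

// Sketch only: forms h = sigmoid(output_gate) * tanh(cell_state) in int8.
void HiddenStateMulSketch(const int16_t* sigmoid_og, const int16_t* tanh_cs,
                          int8_t* hidden_state, int32_t num_elements) {
  tflite::ArithmeticParams params = {};
  params.output_offset = 0;            // hidden-state zero point (assumed)
  params.output_multiplier = 1 << 30;  // placeholder effective scale
  params.output_shift = -15;           // placeholder shift
  params.quantized_activation_min = std::numeric_limits<int8_t>::min();
  params.quantized_activation_max = std::numeric_limits<int8_t>::max();
  const int32_t dims[1] = {num_elements};
  tflite::RuntimeShape shape(1, dims);
  // Portable path shown; the HIFI build routes this through an xa_nn kernel.
  tflite::lstm_internal::Mul(shape, params, sigmoid_og, tanh_cs, hidden_state);
}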
-  const int output_batch_leading_dim =
-      output->dims->data[output->dims->size - 1];
+// Input and output have the same shape in LSTM
+void Mul(const RuntimeShape& shape, const ArithmeticParams& params,
+         const float* input1_data, const float* input2_data,
+         float* output_data) {
+  return reference_ops::Mul(params, shape, input1_data, shape, input2_data,
+                            shape, output_data);
+}
 
-  if (time_major) {
-    const int input_step = n_batch * n_input;
-    const int output_step = n_batch * output_batch_leading_dim;
-    for (int t = 0; t < max_time; t++) {
-      const int t_rel = t;
-      int8_t* output_ptr =
-          tflite::micro::GetTensorData<int8_t>(output) + t_rel * output_step;
-      const int8_t* input_ptr =
-          tflite::micro::GetTensorData<int8_t>(input) + t_rel * input_step;
-      LstmStepInteger8x8_16(
-          input_ptr,
-          tflite::micro::GetTensorData<int8_t>(input_to_input_weights),
-          integer_lstm_param->effective_input_to_input_scale_a,
-          integer_lstm_param->effective_input_to_input_scale_b,
-          tflite::micro::GetTensorData<int8_t>(input_to_forget_weights),
-          integer_lstm_param->effective_input_to_forget_scale_a,
-          integer_lstm_param->effective_input_to_forget_scale_b,
-          tflite::micro::GetTensorData<int8_t>(input_to_cell_weights),
-          integer_lstm_param->effective_input_to_cell_scale_a,
-          integer_lstm_param->effective_input_to_cell_scale_b,
-          tflite::micro::GetTensorData<int8_t>(input_to_output_weights),
-          integer_lstm_param->effective_input_to_output_scale_a,
-          integer_lstm_param->effective_input_to_output_scale_b,
-          tflite::micro::GetTensorData<int8_t>(recurrent_to_input_weights),
-          integer_lstm_param->effective_recurrent_to_input_scale_a,
-          integer_lstm_param->effective_recurrent_to_input_scale_b,
-          tflite::micro::GetTensorData<int8_t>(recurrent_to_forget_weights),
-          integer_lstm_param->effective_recurrent_to_forget_scale_a,
-          integer_lstm_param->effective_recurrent_to_forget_scale_b,
-          tflite::micro::GetTensorData<int8_t>(recurrent_to_cell_weights),
-          integer_lstm_param->effective_recurrent_to_cell_scale_a,
-          integer_lstm_param->effective_recurrent_to_cell_scale_b,
-          tflite::micro::GetTensorData<int8_t>(recurrent_to_output_weights),
-          integer_lstm_param->effective_recurrent_to_output_scale_a,
-          integer_lstm_param->effective_recurrent_to_output_scale_b,
-          tflite::micro::GetTensorData<int16_t>(cell_to_input_weights),
-          integer_lstm_param->effective_cell_to_input_scale_a,
-          integer_lstm_param->effective_cell_to_input_scale_b,
-          tflite::micro::GetTensorData<int16_t>(cell_to_forget_weights),
-          integer_lstm_param->effective_cell_to_forget_scale_a,
-          integer_lstm_param->effective_cell_to_forget_scale_b,
-          tflite::micro::GetTensorData<int16_t>(cell_to_output_weights),
-          integer_lstm_param->effective_cell_to_output_scale_a,
-          integer_lstm_param->effective_cell_to_output_scale_b,
-          tflite::micro::GetTensorData<int8_t>(projection_weights),
-          integer_lstm_param->effective_proj_scale_a,
-          integer_lstm_param->effective_proj_scale_b,
-          integer_lstm_param->hidden_zp,
-          integer_lstm_param->effective_hidden_scale_a,
-          integer_lstm_param->effective_hidden_scale_b,
-          tflite::micro::GetTensorData<int16_t>(input_layer_norm_coefficients),
-          integer_lstm_param->layer_norm_input_scale_a,
-          integer_lstm_param->layer_norm_input_scale_b,
-          tflite::micro::GetTensorData<int16_t>(forget_layer_norm_coefficients),
-          integer_lstm_param->layer_norm_forget_scale_a,
-          integer_lstm_param->layer_norm_forget_scale_b,
-          tflite::micro::GetTensorData<int16_t>(cell_layer_norm_coefficients),
-          integer_lstm_param->layer_norm_cell_scale_a,
-          integer_lstm_param->layer_norm_cell_scale_b,
-          tflite::micro::GetTensorData<int16_t>(output_layer_norm_coefficients),
-          integer_lstm_param->layer_norm_output_scale_a,
-          integer_lstm_param->layer_norm_output_scale_b,
-          tflite::micro::GetTensorData<int32_t>(input_gate_bias),
-          tflite::micro::GetTensorData<int32_t>(forget_gate_bias),
-          tflite::micro::GetTensorData<int32_t>(cell_gate_bias),
-          tflite::micro::GetTensorData<int32_t>(output_gate_bias),
-          integer_lstm_param->quantized_cell_clip,
-          integer_lstm_param->quantized_proj_clip,
-          integer_lstm_param->cell_scale,
-          integer_lstm_param->input_variance_guard,
-          integer_lstm_param->forget_variance_guard,
-          integer_lstm_param->cell_variance_guard,
-          integer_lstm_param->output_variance_guard,
-          integer_lstm_param->input_to_forget_effective_bias.get(),
-          integer_lstm_param->recurrent_to_forget_effective_bias.get(),
-          integer_lstm_param->input_to_cell_effective_bias.get(),
-          integer_lstm_param->recurrent_to_cell_effective_bias.get(),
-          integer_lstm_param->input_to_output_effective_bias.get(),
-          integer_lstm_param->recurrent_to_output_effective_bias.get(),
-          integer_lstm_param->input_to_input_effective_bias.get(),
-          integer_lstm_param->recurrent_to_input_effective_bias.get(),
-          integer_lstm_param->projection_effective_bias.get(), n_batch, n_cell,
-          n_input, n_output, tflite::micro::GetTensorData<int8_t>(output_state),
-          output_state_zp, tflite::micro::GetTensorData<int16_t>(cell_state),
-          output_ptr, (int16_t*)(scratch0), (int16_t*)(scratch1),
-          (int16_t*)(scratch2), (int16_t*)(scratch3), (int8_t*)(scratch4),
-          (int32_t*)(scratch5));
-    }
-  } else {
-    for (int b = 0; b < n_batch; b++) {
-      const int input_step = n_input;
-      const int output_step = output_batch_leading_dim;
-      for (int t = 0; t < max_time; t++) {
-        // If this is the forward_sequence, step forward, otherwise step
-        // backwards.
-        const int t_rel = forward_sequence ? t : max_time - t - 1;
-        const int time_offset = b * max_time + t_rel;
-        const int8_t* input_ptr = tflite::micro::GetTensorData<int8_t>(input) +
-                                  time_offset * input_step;
-        int8_t* output_ptr = tflite::micro::GetTensorData<int8_t>(output) +
-                             time_offset * output_step;
+void FullyConnected(const FullyConnectedParams& params,
+                    const RuntimeShape& input_shape, const int8_t* input_data,
+                    const RuntimeShape& filter_shape, const int8_t* filter_data,
+                    const RuntimeShape& bias_shape, const int32_t* bias_data,
+                    const RuntimeShape& output_shape, int16_t* output_data) {
+  return tflite::reference_integer_ops::FullyConnected(
+      params, input_shape, input_data, filter_shape, filter_data, bias_shape,
+      bias_data, output_shape, output_data);
+}
 
-        // Offset the {output,cell}_state pointers to the right batch.
-        int8_t* output_state_ptr =
-            tflite::micro::GetTensorData<int8_t>(output_state) +
-            b * output_batch_leading_dim;
-        int16_t* cell_state_ptr =
-            tflite::micro::GetTensorData<int16_t>(cell_state) + b * n_cell;
+void FullyConnected(const FullyConnectedParams& params,
+                    const RuntimeShape& input_shape, const int16_t* input_data,
+                    const RuntimeShape& filter_shape, const int8_t* filter_data,
+                    const RuntimeShape& bias_shape, const int64_t* bias_data,
+                    const RuntimeShape& output_shape, int16_t* output_data) {
+  return tflite::reference_integer_ops::FullyConnected(
+      params, input_shape, input_data, filter_shape, filter_data, bias_shape,
+      bias_data, output_shape, output_data);
+}
 
-        LstmStepInteger8x8_16(
-            input_ptr,
-            tflite::micro::GetTensorData<int8_t>(input_to_input_weights),
-            integer_lstm_param->effective_input_to_input_scale_a,
-            integer_lstm_param->effective_input_to_input_scale_b,
-            tflite::micro::GetTensorData<int8_t>(input_to_forget_weights),
-            integer_lstm_param->effective_input_to_forget_scale_a,
-            integer_lstm_param->effective_input_to_forget_scale_b,
-            tflite::micro::GetTensorData<int8_t>(input_to_cell_weights),
-            integer_lstm_param->effective_input_to_cell_scale_a,
-            integer_lstm_param->effective_input_to_cell_scale_b,
-            tflite::micro::GetTensorData<int8_t>(input_to_output_weights),
-            integer_lstm_param->effective_input_to_output_scale_a,
-            integer_lstm_param->effective_input_to_output_scale_b,
-            tflite::micro::GetTensorData<int8_t>(recurrent_to_input_weights),
-            integer_lstm_param->effective_recurrent_to_input_scale_a,
-            integer_lstm_param->effective_recurrent_to_input_scale_b,
-            tflite::micro::GetTensorData<int8_t>(recurrent_to_forget_weights),
-            integer_lstm_param->effective_recurrent_to_forget_scale_a,
-            integer_lstm_param->effective_recurrent_to_forget_scale_b,
-            tflite::micro::GetTensorData<int8_t>(recurrent_to_cell_weights),
-            integer_lstm_param->effective_recurrent_to_cell_scale_a,
-            integer_lstm_param->effective_recurrent_to_cell_scale_b,
-            tflite::micro::GetTensorData<int8_t>(recurrent_to_output_weights),
-            integer_lstm_param->effective_recurrent_to_output_scale_a,
-            integer_lstm_param->effective_recurrent_to_output_scale_b,
-            tflite::micro::GetTensorData<int16_t>(cell_to_input_weights),
-            integer_lstm_param->effective_cell_to_input_scale_a,
-            integer_lstm_param->effective_cell_to_input_scale_b,
-            tflite::micro::GetTensorData<int16_t>(cell_to_forget_weights),
-            integer_lstm_param->effective_cell_to_forget_scale_a,
-            integer_lstm_param->effective_cell_to_forget_scale_b,
-            tflite::micro::GetTensorData<int16_t>(cell_to_output_weights),
-            integer_lstm_param->effective_cell_to_output_scale_a,
-            integer_lstm_param->effective_cell_to_output_scale_b,
-            tflite::micro::GetTensorData<int8_t>(projection_weights),
-            integer_lstm_param->effective_proj_scale_a,
-            integer_lstm_param->effective_proj_scale_b,
-            integer_lstm_param->hidden_zp,
-            integer_lstm_param->effective_hidden_scale_a,
-            integer_lstm_param->effective_hidden_scale_b,
-            tflite::micro::GetTensorData<int16_t>(
-                input_layer_norm_coefficients),
-            integer_lstm_param->layer_norm_input_scale_a,
-            integer_lstm_param->layer_norm_input_scale_b,
-            tflite::micro::GetTensorData<int16_t>(
-                forget_layer_norm_coefficients),
-            integer_lstm_param->layer_norm_forget_scale_a,
-            integer_lstm_param->layer_norm_forget_scale_b,
-            tflite::micro::GetTensorData<int16_t>(cell_layer_norm_coefficients),
-            integer_lstm_param->layer_norm_cell_scale_a,
-            integer_lstm_param->layer_norm_cell_scale_b,
-            tflite::micro::GetTensorData<int16_t>(
-                output_layer_norm_coefficients),
-            integer_lstm_param->layer_norm_output_scale_a,
-            integer_lstm_param->layer_norm_output_scale_b,
-            tflite::micro::GetTensorData<int32_t>(input_gate_bias),
-            tflite::micro::GetTensorData<int32_t>(forget_gate_bias),
-            tflite::micro::GetTensorData<int32_t>(cell_gate_bias),
-            tflite::micro::GetTensorData<int32_t>(output_gate_bias),
-            integer_lstm_param->quantized_cell_clip,
-            integer_lstm_param->quantized_proj_clip,
-            integer_lstm_param->cell_scale,
-            integer_lstm_param->input_variance_guard,
-            integer_lstm_param->forget_variance_guard,
-            integer_lstm_param->cell_variance_guard,
-            integer_lstm_param->output_variance_guard,
-            integer_lstm_param->input_to_forget_effective_bias.get(),
-            integer_lstm_param->recurrent_to_forget_effective_bias.get(),
-            integer_lstm_param->input_to_cell_effective_bias.get(),
-            integer_lstm_param->recurrent_to_cell_effective_bias.get(),
-            integer_lstm_param->input_to_output_effective_bias.get(),
-            integer_lstm_param->recurrent_to_output_effective_bias.get(),
-            integer_lstm_param->input_to_input_effective_bias.get(),
-            integer_lstm_param->recurrent_to_input_effective_bias.get(),
-            integer_lstm_param->projection_effective_bias.get(), /*n_batch=*/1,
-            n_cell, n_input, n_output, output_state_ptr, output_state_zp,
-            cell_state_ptr, output_ptr, (int16_t*)(scratch0),
-            (int16_t*)(scratch1), (int16_t*)(scratch2), (int16_t*)(scratch3),
-            (int8_t*)(scratch4), (int32_t*)(scratch5));
-      }
-    }
-  }
+void FullyConnected(const FullyConnectedParams& params,
+                    const RuntimeShape& input_shape, const float* input_data,
+                    const RuntimeShape& filter_shape, const float* filter_data,
+                    const RuntimeShape& bias_shape, const float* bias_data,
+                    const RuntimeShape& output_shape, float* output_data) {
+  return tflite::reference_ops::FullyConnected(
+      params, input_shape, input_data, filter_shape, filter_data, bias_shape,
+      bias_data, output_shape, output_data);
+}
+#else  // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+void Sigmoid(int16_t* data, int32_t data_size) {
+  xa_nn_vec_sigmoid_sym16s_sym16s(data, data, 0, 0, data_size);
+}
 
-  return kTfLiteOk;
+void Sigmoid(float* data, int32_t data_size) {
+  int data_dims[2] = {1, data_size};
+  RuntimeShape data_shape(2, reinterpret_cast<const int32_t*>(data_dims));
+  reference_ops::Logistic(data_shape, data, data_shape, data);
 }
-TfLiteStatus EvalInteger8x8_8(
-    const TfLiteEvalTensor* input,
-    const TfLiteEvalTensor* input_to_input_weights,
-    const TfLiteEvalTensor* input_to_forget_weights,
-    const TfLiteEvalTensor* input_to_cell_weights,
-    const TfLiteEvalTensor* input_to_output_weights,
-    const TfLiteEvalTensor* recurrent_to_input_weights,
-    const TfLiteEvalTensor* recurrent_to_forget_weights,
-    const TfLiteEvalTensor* recurrent_to_cell_weights,
-    const TfLiteEvalTensor* recurrent_to_output_weights,
-    const TfLiteEvalTensor* cell_to_input_weights,
-    const TfLiteEvalTensor* cell_to_forget_weights,
-    const TfLiteEvalTensor* cell_to_output_weights,
-    const TfLiteEvalTensor* input_layer_norm_coefficients,
-    const TfLiteEvalTensor* forget_layer_norm_coefficients,
-    const TfLiteEvalTensor* cell_layer_norm_coefficients,
-    const TfLiteEvalTensor* output_layer_norm_coefficients,
-    const TfLiteEvalTensor* input_gate_bias,
-    const TfLiteEvalTensor* forget_gate_bias,
-    const TfLiteEvalTensor* cell_gate_bias,
-    const TfLiteEvalTensor* output_gate_bias,
-    const TfLiteEvalTensor* projection_weights,
-    const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params,
-    TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state,
-    TfLiteEvalTensor* output,
-    const lstm_eval::IntegerLstmParameter* integer_lstm_param,
-    TfLiteEvalTensor* scratch0, TfLiteEvalTensor* scratch1,
-    TfLiteEvalTensor* scratch2, TfLiteEvalTensor* scratch3,
-    TfLiteEvalTensor* scratch4, TfLiteEvalTensor* scratch5,
-    TfLiteEvalTensor* scratch6, TfLiteEvalTensor* scratch7) {
-  TFLITE_DCHECK(input->dims->size >= 2 && input->dims->size <= 3);
-  const int n_input = input->dims->data[input->dims->size - 1];
-  int max_time, n_batch;
-  if (input->dims->size == 2) {
-    max_time = 1;
-    n_batch = input->dims->data[0];
-  } else {
-    max_time = input->dims->data[0];
-    n_batch = input->dims->data[1];
+void Tanh(int32_t cell_state_scale_power, int16_t* input_data,
+          int16_t* output_data, int32_t data_size) {
+  int32_t tanh_input_left_shift = (15 + cell_state_scale_power) - 3;
+  int32_t input_multiplier = 0;
+  if (tanh_input_left_shift < 0) /* handling negative shift value */
+  {
+    tanh_input_left_shift = -tanh_input_left_shift;
+#if (defined(USE_HIFI_ACT_TIE) && \
+    (defined(AE_TANH16X4X2) || defined(AE_TANH16X4)))
+    input_multiplier = 1;
+#else
+    input_multiplier = 3;
+#endif
   }
+  xa_nn_vec_tanh_sym16s_sym16s(output_data, input_data, input_multiplier,
+                               tanh_input_left_shift, data_size);
+}
 
-  // n_cell and n_output will be the same size when there is no projection.
-  const int n_cell = input_to_output_weights->dims->data[0];
-  const int n_output = recurrent_to_output_weights->dims->data[1];
-  //@TODO input zero point and output zeropoint
-  // const int32_t input_zp = input->params.zero_point;
-  /// const int32_t output_state_zp = output_state->params.zero_point;
-  const int32_t input_zp = 0;
-  const int32_t output_state_zp = 0;
-
-  // Get params for time/batch/sequence.
-  const int output_batch_leading_dim =
-      output->dims->data[output->dims->size - 1];
-  const int input_step = n_batch * n_input;
-  const int output_step = n_batch * output_batch_leading_dim;
-
-  for (int t = 0; t < max_time; t++) {
-    const int t_rel = t;
-    int8_t* output_ptr =
-        tflite::micro::GetTensorData<int8_t>(output) + t_rel * output_step;
-    // Input can be int8 asymmetric or int16 symmetric.
-    const int8_t* input_ptr =
-        tflite::micro::GetTensorData<int8_t>(input) + t_rel * input_step;
-    lstm_eval::LstmStepInteger8x8_8(
-        input_ptr, input_zp,
-
-        tflite::micro::GetTensorData<int8_t>(input_to_input_weights),
-        integer_lstm_param->effective_input_to_input_scale_a,
-        integer_lstm_param->effective_input_to_input_scale_b,
-
-        tflite::micro::GetTensorData<int8_t>(input_to_forget_weights),
-        integer_lstm_param->effective_input_to_forget_scale_a,
-        integer_lstm_param->effective_input_to_forget_scale_b,
-
-        tflite::micro::GetTensorData<int8_t>(input_to_cell_weights),
-        integer_lstm_param->effective_input_to_cell_scale_a,
-        integer_lstm_param->effective_input_to_cell_scale_b,
-
-        tflite::micro::GetTensorData<int8_t>(input_to_output_weights),
-        integer_lstm_param->effective_input_to_output_scale_a,
-        integer_lstm_param->effective_input_to_output_scale_b,
-
-        tflite::micro::GetTensorData<int8_t>(recurrent_to_input_weights),
-        integer_lstm_param->effective_recurrent_to_input_scale_a,
-        integer_lstm_param->effective_recurrent_to_input_scale_b,
-
-        tflite::micro::GetTensorData<int8_t>(recurrent_to_forget_weights),
-        integer_lstm_param->effective_recurrent_to_forget_scale_a,
-        integer_lstm_param->effective_recurrent_to_forget_scale_b,
+void Tanh(int32_t cell_state_scale_power, float* input_data, float* output_data,
+          int32_t data_size) {
+  int data_dims[2] = {1, data_size};
+  RuntimeShape data_shape(2, reinterpret_cast<const int32_t*>(data_dims));
+  reference_ops::Tanh(data_shape, input_data, data_shape, output_data);
+}
 
-        tflite::micro::GetTensorData<int8_t>(recurrent_to_cell_weights),
-        integer_lstm_param->effective_recurrent_to_cell_scale_a,
-        integer_lstm_param->effective_recurrent_to_cell_scale_b,
+// Input and output have the same shape in LSTM
+void Mul(const ArithmeticParams& params, const int16_t* input1_data,
+         const int16_t* input2_data, int8_t* output_data, int32_t data_size) {
+  xa_nn_elm_mul_sym16sxsym16s_asym8s(
+      output_data, params.output_offset, params.output_shift,
+      params.output_multiplier, params.quantized_activation_min,
+      params.quantized_activation_max, input1_data, input2_data, data_size);
+}
 
-        tflite::micro::GetTensorData<int8_t>(recurrent_to_output_weights),
-        integer_lstm_param->effective_recurrent_to_output_scale_a,
-        integer_lstm_param->effective_recurrent_to_output_scale_b,
+// Input and output have the same shape in LSTM
+void Mul(const ArithmeticParams& params, const int16_t* input1_data,
+         const int16_t* input2_data, int16_t* output_data, int32_t data_size) {
+  int dims_4D[4] = {1, 1, 1, data_size};
+  xa_nn_elm_mul_broadcast_4D_sym16sxsym16s_sym16s(
+      output_data, dims_4D, params.output_shift, params.output_multiplier,
+      params.quantized_activation_min, params.quantized_activation_max,
+      input1_data, dims_4D, input2_data, dims_4D);
+  return;
+}
 
-        tflite::micro::GetTensorData<int8_t>(cell_to_input_weights),
-        integer_lstm_param->effective_cell_to_input_scale_a,
-        integer_lstm_param->effective_cell_to_input_scale_b,
+// Input and output have the same shape in LSTM
+void Mul(const ArithmeticParams& params, const float* input1_data,
+         const float* input2_data, float* output_data, int32_t data_size) {
+  int dims_2D[2] = {1, data_size};
+  RuntimeShape data_shape(2, reinterpret_cast<const int32_t*>(dims_2D));
+  return reference_ops::Mul(params, data_shape, input1_data, data_shape,
+                            input2_data, data_shape, output_data);
+}
 
-        tflite::micro::GetTensorData<int8_t>(cell_to_forget_weights),
-        integer_lstm_param->effective_cell_to_forget_scale_a,
-        integer_lstm_param->effective_cell_to_forget_scale_b,
+void FullyConnected(const FullyConnectedParams& params,
+                    const int8_t* input_data, const int8_t* filter_data,
+                    const int32_t* bias_data, int16_t* output_data,
+                    const int num_batches, const int output_depth,
+                    const int accum_depth) {
+#pragma loop_count min = 1
+  for (int b = 0; b < num_batches; b++) {
+    xa_nn_matXvec_out_stride_sym8sxasym8s_16(
+        output_data + b * output_depth, filter_data,
+        input_data + b * accum_depth, bias_data, output_depth, accum_depth,
+        accum_depth, 1, params.input_offset, params.output_multiplier,
+        params.output_shift);
+  }
+  return;
+}
 
-        tflite::micro::GetTensorData<int8_t>(cell_to_output_weights),
-        integer_lstm_param->effective_cell_to_output_scale_a,
-        integer_lstm_param->effective_cell_to_output_scale_b,
+void FullyConnected(const FullyConnectedParams& params,
+                    const int16_t* input_data, const int8_t* filter_data,
+                    const int64_t* bias_data, int16_t* output_data,
+                    const int num_batches, const int output_depth,
+                    const int accum_depth) {
+  xa_nn_matmul_sym8sxsym16s_sym16s(
+      output_data, filter_data, input_data, bias_data, output_depth,
+      accum_depth, accum_depth, num_batches, accum_depth, output_depth, 1,
+      params.input_offset, params.output_multiplier, params.output_shift,
+      params.output_offset);
+  return;
+}
 
-        tflite::micro::GetTensorData<int8_t>(projection_weights),
-        integer_lstm_param->effective_proj_scale_a,
-        integer_lstm_param->effective_proj_scale_b,
+void FullyConnected(const FullyConnectedParams& params, const float* input_data,
+                    const float* filter_data, const float* bias_data,
+                    float* output_data, const int num_batches,
+                    const int output_depth, const int accum_depth) {
+  int input_dims[2] = {num_batches, output_depth};
+  RuntimeShape input_shape(2, reinterpret_cast<const int32_t*>(input_dims));
+  RuntimeShape bias_shape(1, bias_data == NULL ? 0 : output_depth);
+  int filter_dims[2] = {output_depth, accum_depth};
+  RuntimeShape filter_shape(2, reinterpret_cast<const int32_t*>(filter_dims));
+  int output_dims[2] = {num_batches, output_depth};
+  RuntimeShape output_shape(2, reinterpret_cast<const int32_t*>(output_dims));
+  return tflite::reference_ops::FullyConnected(
+      params, input_shape, input_data, filter_shape, filter_data, bias_shape,
+      bias_data, output_shape, output_data);
+}
+#endif  // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+
+void Clipping(const int v_size, const CellStateInfo& cell_state_info,
+              int16_t* vector) {
+  for (int i = 0; i < v_size; i++) {
+    vector[i] =
+        std::max(std::min(cell_state_info.quantized_cell_clip, vector[i]),
+                 static_cast<int16_t>(-cell_state_info.quantized_cell_clip));
+  }
+}
 
-        tflite::micro::GetTensorData<int16_t>(input_layer_norm_coefficients),
-        integer_lstm_param->layer_norm_input_scale_a,
-        integer_lstm_param->layer_norm_input_scale_b,
+void Clipping(const int v_size, const CellStateInfo& cell_state_info,
+              float* vector) {
+  for (int i = 0; i < v_size; i++) {
+    vector[i] = std::max(std::min(cell_state_info.cell_clip, vector[i]),
+                         -cell_state_info.cell_clip);
+  }
+}
 
-        tflite::micro::GetTensorData<int16_t>(forget_layer_norm_coefficients),
-        integer_lstm_param->layer_norm_forget_scale_a,
-        integer_lstm_param->layer_norm_forget_scale_b,
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+void UpdateLstmCell(const LstmStepManager& step_info,
+                    TfLiteEvalTensor* cell_state,
+                    // Gate outputs
+                    int16_t* forget_gate_output,
+                    const int16_t* input_gate_output,
+                    const int16_t* cell_gate_output,
+                    // Mul parameters
+                    const ArithmeticParams& forget_cell_mul_params,
+                    const ArithmeticParams& input_mul_params,
+                    const CellStateInfo& cell_state_info, int16_t* buffer) {
+  auto cell_state_shape = step_info.StateShape();
+  // Check offset validity to avoid memory overflow
+  TFLITE_DCHECK_LE(step_info.CellStateOffset() + cell_state_shape.FlatSize(),
+                   tflite::micro::GetTensorShape(cell_state).FlatSize());
+
+  // Multiplier is equivalent to 0.5 here so adding 1 to shifts
+  xa_nn_lstm_cell_state_update_16(
+      tflite::micro::GetTensorData<int16_t>(cell_state) +
+          step_info.CellStateOffset(),
+      forget_gate_output, cell_gate_output, input_gate_output,
+      forget_cell_mul_params.output_shift - 1,
+      input_mul_params.output_shift - 1, cell_state_info.quantized_cell_clip,
+      cell_state_shape.FlatSize());
+}
 
-        tflite::micro::GetTensorData<int16_t>(cell_layer_norm_coefficients),
-        integer_lstm_param->layer_norm_cell_scale_a,
-        integer_lstm_param->layer_norm_cell_scale_b,
-
+void UpdateLstmCell(const LstmStepManager& step_info,
+                    TfLiteEvalTensor* cell_state,
+                    // Gate outputs
+                    float* forget_gate_output, const float* input_gate_output,
+                    const float* cell_gate_output,
+                    // Mul parameters
+                    const ArithmeticParams& forget_cell_mul_params,
+                    const ArithmeticParams& input_mul_params,
+                    const CellStateInfo& cell_state_info, float* buffer) {
+  // Check offset validity to avoid memory overflow
+  TFLITE_DCHECK_LE(
+      step_info.CellStateOffset() + step_info.StateShape().FlatSize(),
+      tflite::micro::GetTensorShape(cell_state).FlatSize());
+
+  auto cell_state_shape = step_info.StateShape();
+  // Forget Gate x Cell State
+  Mul(forget_cell_mul_params, forget_gate_output,
+      tflite::micro::GetTensorData<float>(cell_state) +
+          step_info.CellStateOffset(),
+      tflite::micro::GetTensorData<float>(cell_state) +
+          step_info.CellStateOffset(),
+      cell_state_shape.FlatSize());
+  // Input Gate x Cell Gate
+  Mul(input_mul_params, input_gate_output, cell_gate_output, buffer,
+      cell_state_shape.FlatSize());
+
+  // Update the cell state
+  AddElementWise(tflite::micro::GetTensorData<float>(cell_state) +
+                     step_info.CellStateOffset(),
+                 buffer,
+                 /*n_batch=*/cell_state_shape.DimsData()[0],
+                 /*n_state=*/cell_state_shape.DimsData()[1],
+                 tflite::micro::GetTensorData<float>(cell_state) +
+                     step_info.CellStateOffset());
+
+  if (cell_state_info.cell_clip > 0) {
+    Clipping(cell_state_shape.FlatSize(), cell_state_info,
+             tflite::micro::GetTensorData<float>(cell_state) +
+                 step_info.CellStateOffset());
+  }
+}
+#endif  // #if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+
+// Increment the data offset so the single time step invocation call can access
+// the corresponding input/output tensor data at the time step
+void LstmStepManager::UpdateTime() {
+  current_time_ += 1;
+  TFLITE_DCHECK_LE(current_time_, size_info_.time_steps);
+  // default as one batch per inference
+  int input_step = size_info_.input_dimension;
+  int output_step = size_info_.state_dimension;
+  // time major: batch inference
+  if (size_info_.time_major) {
+    input_step = input_step * size_info_.batch_size;
+    output_step = output_step * size_info_.batch_size;
+  }
 
-        tflite::micro::GetTensorData<int16_t>(output_layer_norm_coefficients),
-        integer_lstm_param->layer_norm_output_scale_a,
-        integer_lstm_param->layer_norm_output_scale_b,
-
+  input_offset_ += input_step;
+  output_offset_ += output_step;
+}
 
-        tflite::micro::GetTensorData<int32_t>(input_gate_bias),
-        tflite::micro::GetTensorData<int32_t>(forget_gate_bias),
-        tflite::micro::GetTensorData<int32_t>(cell_gate_bias),
-        tflite::micro::GetTensorData<int32_t>(output_gate_bias),
-        tflite::micro::GetTensorData<int32_t>(projection_bias),
-
+// Increment the data offset so the single time step invocation call can access
+// the corresponding hidden/cell state tensor data at the time step (for single
+// batch inference only)
+void LstmStepManager::UpdateBatch() {
+  current_batch_ += 1;
+  TFLITE_DCHECK_LE(current_batch_, size_info_.batch_size);
+  // batch inference for time major: no action needed
+  if (size_info_.time_major) {
+    return;
+  }
+  // otherwise: single batch inference, go to the next batch
+  hidden_state_offset_ += size_info_.state_dimension;
+  cell_state_offset_ += size_info_.state_dimension;
+}
 
-        params, integer_lstm_param->intermediate_scale_a,
-        integer_lstm_param->intermediate_scale_b,
-        integer_lstm_param->intermediate_zp,
-        integer_lstm_param->quantized_cell_clip,
-        integer_lstm_param->quantized_proj_clip, n_batch, n_cell, n_input,
-        n_output, output_batch_leading_dim,
-        tflite::micro::GetTensorData<int8_t>(output_state), output_state_zp,
-        tflite::micro::GetTensorData<int16_t>(cell_state), output_ptr,
-        tflite::micro::GetTensorData<int8_t>(scratch0),
-        tflite::micro::GetTensorData<int8_t>(scratch1),
-        tflite::micro::GetTensorData<int16_t>(scratch2),
-        tflite::micro::GetTensorData<int16_t>(scratch3),
-        tflite::micro::GetTensorData<int16_t>(scratch4),
-        tflite::micro::GetTensorData<int16_t>(scratch5),
-        tflite::micro::GetTensorData<int16_t>(scratch6),
-        tflite::micro::GetTensorData<int16_t>(scratch7));
+// Input shape for each single time LSTM invocation.
+// Multi-batch for time_major input
+RuntimeShape LstmStepManager::InputShape() const {
+  int batch_size = 1;
+  if (size_info_.time_major) {
+    batch_size = size_info_.batch_size;
   }
+  const int dims[2] = {batch_size, size_info_.input_dimension};
+  const int32_t* dims_data = reinterpret_cast<const int32_t*>(dims);
+  return RuntimeShape(2, dims_data);
+}
 
-  return kTfLiteOk;
+// State shape (both hidden and cell) for each single time LSTM invocation.
+// Multi-batch for time_major input
+RuntimeShape LstmStepManager::StateShape() const {
+  int batch_size = 1;
+  if (size_info_.time_major) {
+    batch_size = size_info_.batch_size;
+  }
+  const int dims[2] = {batch_size, size_info_.state_dimension};
+  const int32_t* dims_data = reinterpret_cast<const int32_t*>(dims);
+  return RuntimeShape(2, dims_data);
 }
-}  // namespace lstm_eval
-}  // namespace micro
-}  // namespace ops
+}  // namespace lstm_internal
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_eval.h b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.h
index 5dd746aa47c..0ba5e22a083 100644
--- a/tensorflow/lite/micro/kernels/xtensa/lstm_eval.h
+++ b/tensorflow/lite/micro/kernels/xtensa/lstm_eval.h
@@ -1,4 +1,4 @@
-/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -12,205 +12,813 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/
-#ifndef TENSORFLOW_LITE_KERNELS_LSTM_EVAL_H_
-#define TENSORFLOW_LITE_KERNELS_LSTM_EVAL_H_
+// Functions to perform integer evaluation for standard LSTM (e.g., defined in
+// the keras lstm layer, no peephole etc.). Currently used by the 16-bit
Currently used by the 16 bits +// activation case only + +#ifndef TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_GENERAL_H_ +#define TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_GENERAL_H_ +#include #include -#include #include "tensorflow/lite/c/builtin_op_data.h" #include "tensorflow/lite/c/common.h" -#include "tensorflow/lite/kernels/internal/portable_tensor_utils.h" -#include "tensorflow/lite/kernels/internal/reference/portable_tensor_utils_impl.h" #include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" +#include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" +#include "tensorflow/lite/micro/micro_log.h" namespace tflite { -namespace ops { -namespace micro { -namespace lstm_eval { - -#if defined(HIFI5) -void calc_cell_state_without_cifg(int16_t* cell_state, - const int16_t* forget_gate, - const int16_t* cell_gate, - const int16_t* input_gate, int shift1, - int shift2, int clip, int num_elms); - -void calc_cell_state_with_cifg(int16_t* cell_state, const int16_t* forget_gate, - const int16_t* cell_gate, int shift1, int shift2, - int clip, int num_elms); - -void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, - const int16_t* input_2, int32_t multiplier, - int32_t shift, int32_t zero_point, - int num_elms); -#endif // defined(HIFI5) - -// Pamameters for integer LSTM. -// Consider split this into two Integer Parameters if more fields are added. -struct IntegerLstmParameter { - int32_t effective_input_to_input_scale_a; - int effective_input_to_input_scale_b; - int32_t effective_recurrent_to_input_scale_a; - int effective_recurrent_to_input_scale_b; - int32_t effective_cell_to_input_scale_a; - int effective_cell_to_input_scale_b; - int32_t effective_input_to_forget_scale_a; - int effective_input_to_forget_scale_b; - int32_t effective_recurrent_to_forget_scale_a; - int effective_recurrent_to_forget_scale_b; - int32_t effective_cell_to_forget_scale_a; - int effective_cell_to_forget_scale_b; - int32_t effective_input_to_cell_scale_a; - int effective_input_to_cell_scale_b; - int32_t effective_recurrent_to_cell_scale_a; - int effective_recurrent_to_cell_scale_b; - int32_t effective_input_to_output_scale_a; - int effective_input_to_output_scale_b; - int32_t effective_recurrent_to_output_scale_a; - int effective_recurrent_to_output_scale_b; - int32_t effective_cell_to_output_scale_a; - int effective_cell_to_output_scale_b; - int32_t effective_proj_scale_a; - int effective_proj_scale_b; - int32_t effective_hidden_scale_a; - int effective_hidden_scale_b; - int32_t layer_norm_input_scale_a; - int layer_norm_input_scale_b; - int32_t layer_norm_forget_scale_a; - int layer_norm_forget_scale_b; - int32_t layer_norm_cell_scale_a; - int layer_norm_cell_scale_b; - int32_t layer_norm_output_scale_a; - int layer_norm_output_scale_b; - // Quantized clip value for cell and projection. Zero value means no clipping. - int16_t quantized_cell_clip; - int8_t quantized_proj_clip; - int32_t hidden_zp; - int32_t cell_scale; - - int32_t input_variance_guard; - int32_t forget_variance_guard; - int32_t cell_variance_guard; - int32_t output_variance_guard; - - // Pre-calculate bias + zero_point * weight. - // Unabled to use temporary tensors since those are used in Prepare() and - // scratch buffer is only allocated after Preapre(). 
-  std::unique_ptr<int32_t[]> input_to_forget_effective_bias;
-  std::unique_ptr<int32_t[]> recurrent_to_forget_effective_bias;
-  std::unique_ptr<int32_t[]> input_to_cell_effective_bias;
-  std::unique_ptr<int32_t[]> recurrent_to_cell_effective_bias;
-  std::unique_ptr<int32_t[]> input_to_output_effective_bias;
-  std::unique_ptr<int32_t[]> recurrent_to_output_effective_bias;
-  std::unique_ptr<int32_t[]> input_to_input_effective_bias;
-  std::unique_ptr<int32_t[]> recurrent_to_input_effective_bias;
-  std::unique_ptr<int32_t[]> projection_effective_bias;
-
-  // Scale and zero point for intermediate tensors.
-  // Used only in the 8x8_8 case.
-  int32_t intermediate_scale_a[8];
-  int32_t intermediate_scale_b[8];
-  int32_t intermediate_zp[12];
+
+// Interface to access all the TempTfLiteTensors of the LSTM kernel during the
+// preparation phase. Can only be constructed through the constructor to avoid
+// memory leaks. All TempTfLiteTensors will be deallocated through the
+// destructor.
+class LstmTensors {
+ public:
+  LstmTensors(const LstmTensors& other) = delete;
+  LstmTensors& operator=(const LstmTensors& other) = delete;
+
+  LstmTensors(TfLiteContext* context, TfLiteNode* node);
+  ~LstmTensors();
+
+  // Verify the LSTM internal tensor properties (e.g., type checks).
+  // Input/output/state/fc-weight tensors are required for kernel evaluation.
+  // The state tensors should be variables. Variants of the standard LSTM are
+  // not supported here, therefore their corresponding tensors should be
+  // invalid.
+  TfLiteStatus ValidateTensorStatus(TfLiteContext* context) const;
+
+  // Internal tensors; see lstm_shared.h for tensor names
+  const TfLiteTensor* GetInternalTensor(const int tensor_index) const {
+    return internal_tensors_[tensor_index];
+  }
+
+  const TfLiteTensor* HiddenStateTensor() const {
+    return internal_tensors_[kLstmOutputStateTensor];
+  }
+  const TfLiteTensor* CellStateTensor() const {
+    return internal_tensors_[kLstmCellStateTensor];
+  }
+  const TfLiteTensor* OutputTensor() const { return output_tensor_; }
+
+ private:
+  // see lstm_shared.h for tensor names
+  MicroContext* micro_context_;
+  TfLiteTensor* internal_tensors_[24];
+  TfLiteTensor* output_tensor_;
+};
+
+// Deduce the size information (Batch (B), Time Steps (T), Input dimension (I),
+// State dimension (S)) that defines the LSTM using the input and hidden state
+// tensors
+LstmSizeInfo CreateLstmSizeInfo(
+    const bool time_major, const TfLiteIntArray* input_tensor_shape,
+    const TfLiteIntArray* hidden_state_tensor_shape);
+
+TfLiteStatus ValidateWeightTensorSize(TfLiteContext* context,
+                                      const TfLiteTensor* tensor, int dim1_size,
+                                      int dim2_size);
+
+TfLiteStatus ValidateBiasTensorSize(TfLiteContext* context,
+                                    const TfLiteTensor* tensor, int size);
+
+// Go through every tensor and make sure its shape matches the kernel
+// configuration
+TfLiteStatus ValidateTensorSize(TfLiteContext* context,
+                                const LstmTensors& tensors,
+                                const LstmSizeInfo& size_info);
+
+// Wrapper function to create gate parameters for the four internal LSTM gates
+TfLiteStatus CreateGateParams(
+    TfLiteContext* context,
+    /*Input tensors*/
+    const TfLiteTensor* input, const TfLiteTensor* input_weight,
+    const TfLiteTensor* input_bias,
+    /*Hidden state tensors*/
+    const TfLiteTensor* hidden_state, const TfLiteTensor* hidden_state_weight,
+    const TfLiteTensor* hidden_state_bias,
+    /*Scale of the fc output (input to non-linear activation)*/
+    const float nonlinear_activation_input_scale, const TfLiteType cell_type,
+    const tflite::GateParameters& gate_params);
+
+// Create parameters for element-wise multiplication that happens in a) cell
+// state update; b) hidden state update.
+// Note that all gate outputs are symmetrically quantized, so only scales are
+// required for the inputs. However, during the hidden state update phase, the
+// output is the updated hidden state, which is asymmetrically quantized. Thus
+// the output may require a zero point.
+tflite::ArithmeticParams CreateInterGateMulParams(const float input1_scale,
+                                                  const float input2_scale,
+                                                  const float output_scale,
+                                                  const TfLiteType output_type,
+                                                  const int output_zp = 0);
+
+// Create the additional information about the cell state, which includes:
+// cell_state_scale_power: used in integer nonlinear function (e.g., tanh)
+// quantized_cell_clip: quantized cell clip range
+CellStateInfo CreateLstmCellStateInfo(const float cell_state_scale,
+                                      const float cell_clip);
+
+CellStateInfo CreateLstmCellStateInfoFloat(const float cell_clip);
+tflite::FullyConnectedParams CreateFCParamsFloat();
+
+tflite::GateParameters CreateGateParamsFloat();
+
+tflite::ArithmeticParams CreateInterGateMulParamsFloat();
+
+TfLiteStatus PrepareGateParametersFloat(TfLiteContext* context,
+                                        const LstmTensors& lstm_tensors,
+                                        OpDataLSTM* op_data_lstm);
+
+TfLiteStatus PrepareGateParametersInteger(TfLiteContext* context,
+                                          const LstmTensors& lstm_tensors,
+                                          OpDataLSTM* op_data_lstm);
+
+LSTMKernelContents CreateLSTMKernelContent(TfLiteContext* context,
+                                           TfLiteNode* node);
+
+template <typename CellType>
+LSTMBuffers<CellType> CreateLSTMBuffers(TfLiteContext* context,
+                                        const int* buffer_indices) {
+  LSTMBuffers<CellType> buffers;
+  buffers.buffer0 = reinterpret_cast<CellType*>(
+      context->GetScratchBuffer(context, buffer_indices[0]));
+  buffers.buffer1 = reinterpret_cast<CellType*>(
+      context->GetScratchBuffer(context, buffer_indices[1]));
+  buffers.buffer2 = reinterpret_cast<CellType*>(
+      context->GetScratchBuffer(context, buffer_indices[2]));
+  buffers.buffer3 = reinterpret_cast<CellType*>(
+      context->GetScratchBuffer(context, buffer_indices[3]));
+  return buffers;
+}
+
+// Since LSTM includes multiple intermediate stages, the internal namespace is
+// introduced to expose them for testing
+namespace lstm_internal {
+
+#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+void Sigmoid(const RuntimeShape& data_shape, int16_t* data);
+
+void Sigmoid(const RuntimeShape& data_shape, float* data);
+
+void Tanh(int32_t cell_state_scale_power, const RuntimeShape& input_data_shape,
+          int16_t* input_data, const RuntimeShape& output_data_shape,
+          int16_t* output_data);
+
+void Tanh(int32_t cell_state_scale_power, const RuntimeShape& input_data_shape,
+          float* input_data, const RuntimeShape& output_data_shape,
+          float* output_data);
+
+void Mul(const RuntimeShape& shape, const ArithmeticParams& params,
+         const int16_t* input1_data, const int16_t* input2_data,
+         int8_t* output_data);
+
+void Mul(const RuntimeShape& shape, const ArithmeticParams& params,
+         const int16_t* input1_data, const int16_t* input2_data,
+         int16_t* output_data);
+
+void Mul(const RuntimeShape& shape, const ArithmeticParams& params,
+         const float* input1_data, const float* input2_data,
+         float* output_data);
+
+void FullyConnected(const FullyConnectedParams& params,
+                    const RuntimeShape& input_shape, const int8_t* input_data,
+                    const RuntimeShape& filter_shape, const int8_t* filter_data,
+                    const RuntimeShape& bias_shape, const int32_t* bias_data,
+                    const RuntimeShape& output_shape, int16_t* output_data);
+
+void FullyConnected(const FullyConnectedParams& params,
+                    const RuntimeShape& input_shape, const int16_t* input_data,
+                    const RuntimeShape& filter_shape, const int8_t* filter_data,
+                    const
RuntimeShape& bias_shape, const int64_t* bias_data, + const RuntimeShape& output_shape, int16_t* output_data); + +void FullyConnected(const FullyConnectedParams& params, + const RuntimeShape& input_shape, const float* input_data, + const RuntimeShape& filter_shape, const float* filter_data, + const RuntimeShape& bias_shape, const float* bias_data, + const RuntimeShape& output_shape, float* output_data); +#else // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) +void Sigmoid(int16_t* data, int32_t data_size); + +void Sigmoid(float* data, int32_t data_size); + +void Tanh(int32_t cell_state_scale_power, int16_t* input_data, + int16_t* output_data, int32_t data_size); + +void Tanh(int32_t cell_state_scale_power, float* input_data, float* output_data, + int32_t data_size); + +void Mul(const ArithmeticParams& params, const int16_t* input1_data, + const int16_t* input2_data, int8_t* output_data, int32_t data_size); + +void Mul(const ArithmeticParams& params, const int16_t* input1_data, + const int16_t* input2_data, int16_t* output_data, int32_t data_size); + +void Mul(const ArithmeticParams& params, const float* input1_data, + const float* input2_data, float* output_data, int32_t data_size); + +void FullyConnected(const FullyConnectedParams& params, + const int8_t* input_data, const int8_t* filter_data, + const int32_t* bias_data, int16_t* output_data, + const int num_batches, const int output_depth, + const int accum_depth); + +void FullyConnected(const FullyConnectedParams& params, + const int16_t* input_data, const int8_t* filter_data, + const int64_t* bias_data, int16_t* output_data, + const int num_batches, const int output_depth, + const int accum_depth); + +void FullyConnected(const FullyConnectedParams& params, const float* input_data, + const float* filter_data, const float* bias_data, + float* output_data, const int num_batches, + const int output_depth, const int accum_depth); +#endif // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) + +void AddElementWise(const int16_t* input_1, const int16_t* input_2, int n_batch, + int n_input, int16_t* output); + +void AddElementWise(const float* input_1, const float* input_2, int n_batch, + int n_input, float* output); + +void Clipping(const int v_size, const CellStateInfo& cell_state_info, + int16_t* vector); + +void Clipping(const int v_size, const CellStateInfo& cell_state_info, + float* vector); + +// Manages the slice position (offset), slice length (sliced tensor shape), +// and update rules for input/output/hidden state/cell state tensors at each +// time step. +class LstmStepManager { + public: + LstmStepManager() = delete; + // Does not take any ownership, and all pointers must refer to valid objects + // that outlive the one constructed. 
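+  // For intuition, a hedged usage sketch (hypothetical sizes, not a real TFLM
+  // calling sequence): with time-major data and B=2, T=3, I=4, S=5,
+  //
+  //   LstmStepManager step(&size_info);
+  //   for (int t = 0; t < 3; ++t) {
+  //     // step.InputShape() is {2, 4} and step.StateShape() is {2, 5};
+  //     // batch-major input would use {1, 4} and {1, 5} instead.
+  //     step.UpdateTime();  // input/output offsets advance by B*I and B*S
+  //   }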
+  explicit LstmStepManager(const LstmSizeInfo* size_info)
+      : size_info_(*size_info) {}
+
+  void UpdateTime();
+  void UpdateBatch();
+
+  void ResetTime() { current_time_ = 0; }
+  RuntimeShape InputShape() const;
+  RuntimeShape StateShape() const;
+
+  int InputOffset() const { return input_offset_; }
+  int OutputOffset() const { return output_offset_; }
+  int HiddenStateOffset() const { return hidden_state_offset_; }
+  int CellStateOffset() const { return cell_state_offset_; }
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+  int time_major() const { return size_info_.time_major; }
+
+  int batch_size() const { return size_info_.batch_size; }
+
+  int input_dimension() const { return size_info_.input_dimension; }
+
+  int state_dimension() const { return size_info_.state_dimension; }
+#endif
+
+ private:
+  int current_time_ = 0;
+  int current_batch_ = 0;
+  int input_offset_ = 0;
+  int output_offset_ = 0;
+  int hidden_state_offset_ = 0;
+  int cell_state_offset_ = 0;
+  // size_info_ comes from LstmOpData, which resides in the memory arena
+  // (guaranteed to outlast this LstmStepManager, which lives on the stack)
+  const LstmSizeInfo& size_info_;
 };
-TfLiteStatus EvalFloat(const TfLiteEvalTensor* input,
-                       const TfLiteEvalTensor* input_to_input_weights,
-                       const TfLiteEvalTensor* input_to_forget_weights,
-                       const TfLiteEvalTensor* input_to_cell_weights,
-                       const TfLiteEvalTensor* input_to_output_weights,
-                       const TfLiteEvalTensor* recurrent_to_input_weights,
-                       const TfLiteEvalTensor* recurrent_to_forget_weights,
-                       const TfLiteEvalTensor* recurrent_to_cell_weights,
-                       const TfLiteEvalTensor* recurrent_to_output_weights,
-                       const TfLiteEvalTensor* cell_to_input_weights,
-                       const TfLiteEvalTensor* cell_to_forget_weights,
-                       const TfLiteEvalTensor* cell_to_output_weights,
-                       const TfLiteEvalTensor* input_layer_norm_coefficients,
-                       const TfLiteEvalTensor* forget_layer_norm_coefficients,
-                       const TfLiteEvalTensor* cell_layer_norm_coefficients,
-                       const TfLiteEvalTensor* output_layer_norm_coefficients,
-                       const TfLiteEvalTensor* aux_input,
-                       const TfLiteEvalTensor* aux_input_to_input_weights,
-                       const TfLiteEvalTensor* aux_input_to_forget_weights,
-                       const TfLiteEvalTensor* aux_input_to_cell_weights,
-                       const TfLiteEvalTensor* aux_input_to_output_weights,
-                       const TfLiteEvalTensor* input_gate_bias,
-                       const TfLiteEvalTensor* forget_gate_bias,
-                       const TfLiteEvalTensor* cell_gate_bias,
-                       const TfLiteEvalTensor* output_gate_bias,
-                       const TfLiteEvalTensor* projection_weights,
-                       const TfLiteEvalTensor* projection_bias,
-                       const TfLiteLSTMParams* params, bool forward_sequence,
-                       bool time_major, int output_offset,
-                       TfLiteEvalTensor* scratch_buffer,
-                       TfLiteEvalTensor* output_state,
-                       TfLiteEvalTensor* cell_state, TfLiteEvalTensor* output);
-
-TfLiteStatus EvalInteger8x8_16(
-    TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input,
-    const TfLiteEvalTensor* input_to_input_weights,
-    const TfLiteEvalTensor* input_to_forget_weights,
-    const TfLiteEvalTensor* input_to_cell_weights,
-    const TfLiteEvalTensor* input_to_output_weights,
-    const TfLiteEvalTensor* recurrent_to_input_weights,
-    const TfLiteEvalTensor* recurrent_to_forget_weights,
-    const TfLiteEvalTensor* recurrent_to_cell_weights,
-    const TfLiteEvalTensor* recurrent_to_output_weights,
-    const TfLiteEvalTensor* cell_to_input_weights,
-    const TfLiteEvalTensor* cell_to_forget_weights,
-    const TfLiteEvalTensor* cell_to_output_weights,
-    const TfLiteEvalTensor* input_layer_norm_coefficients,
-    const TfLiteEvalTensor* forget_layer_norm_coefficients,
-    const
TfLiteEvalTensor* cell_layer_norm_coefficients, - const TfLiteEvalTensor* output_layer_norm_coefficients, - const TfLiteEvalTensor* input_gate_bias, - const TfLiteEvalTensor* forget_gate_bias, - const TfLiteEvalTensor* cell_gate_bias, - const TfLiteEvalTensor* output_gate_bias, - const TfLiteEvalTensor* projection_weights, - const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params, - bool forward_sequence, bool time_major, - const lstm_eval::IntegerLstmParameter* integer_lstm_param, - TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state, - TfLiteEvalTensor* output, TfLiteEvalTensor* scratch0, - TfLiteEvalTensor* scratch1, TfLiteEvalTensor* scratch2, - TfLiteEvalTensor* scratch3, TfLiteEvalTensor* scratch4, - TfLiteEvalTensor* scratch5); - -TfLiteStatus EvalInteger8x8_8( - const TfLiteEvalTensor* input, - const TfLiteEvalTensor* input_to_input_weights, - const TfLiteEvalTensor* input_to_forget_weights, - const TfLiteEvalTensor* input_to_cell_weights, - const TfLiteEvalTensor* input_to_output_weights, - const TfLiteEvalTensor* recurrent_to_input_weights, - const TfLiteEvalTensor* recurrent_to_forget_weights, - const TfLiteEvalTensor* recurrent_to_cell_weights, - const TfLiteEvalTensor* recurrent_to_output_weights, - const TfLiteEvalTensor* cell_to_input_weights, - const TfLiteEvalTensor* cell_to_forget_weights, - const TfLiteEvalTensor* cell_to_output_weights, - const TfLiteEvalTensor* input_layer_norm_coefficients, - const TfLiteEvalTensor* forget_layer_norm_coefficients, - const TfLiteEvalTensor* cell_layer_norm_coefficients, - const TfLiteEvalTensor* output_layer_norm_coefficients, - const TfLiteEvalTensor* input_gate_bias, - const TfLiteEvalTensor* forget_gate_bias, - const TfLiteEvalTensor* cell_gate_bias, - const TfLiteEvalTensor* output_gate_bias, - const TfLiteEvalTensor* projection_weights, - const TfLiteEvalTensor* projection_bias, const TfLiteLSTMParams* params, - TfLiteEvalTensor* output_state, TfLiteEvalTensor* cell_state, - TfLiteEvalTensor* output, - const lstm_eval::IntegerLstmParameter* integer_lstm_param, - TfLiteEvalTensor* scratch0, TfLiteEvalTensor* scratch1, - TfLiteEvalTensor* scratch2, TfLiteEvalTensor* scratch3, - TfLiteEvalTensor* scratch4, TfLiteEvalTensor* scratch5, - TfLiteEvalTensor* scratch6, TfLiteEvalTensor* scratch7); - -} // namespace lstm_eval -} // namespace micro -} // namespace ops +// Calculates a single LSTM gate. 
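For reference, here is the same gate computation in plain float form: an illustrative sketch whose names, vector layout, and row-major weights are assumptions for the example, not the kernel's actual API.

#include <cmath>
#include <cstddef>
#include <vector>

// gate = activate(W_x * x + W_h * h + b), evaluated per state unit.
std::vector<float> LstmGateSketch(const std::vector<float>& x,    // [n_input]
                                  const std::vector<float>& h,    // [n_state]
                                  const std::vector<float>& w_x,  // [n_state * n_input]
                                  const std::vector<float>& w_h,  // [n_state * n_state]
                                  const std::vector<float>& b,    // [n_state]
                                  bool use_tanh) {  // cell gate: tanh; others: sigmoid
  const std::size_t n_state = b.size();
  const std::size_t n_input = x.size();
  std::vector<float> gate(n_state);
  for (std::size_t i = 0; i < n_state; ++i) {
    float acc = b[i];  // bias, then input FC, then recurrent FC
    for (std::size_t j = 0; j < n_input; ++j) acc += w_x[i * n_input + j] * x[j];
    for (std::size_t j = 0; j < n_state; ++j) acc += w_h[i * n_state + j] * h[j];
    gate[i] = use_tanh ? std::tanh(acc) : 1.0f / (1.0f + std::exp(-acc));
  }
  return gate;
}

The quantized template that follows performs the same two fully-connected products and activation on int8/int16 data, with LstmStepManager supplying the per-step offsets.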
+// Implements the following formula:
+//   gate = activate(FC(input) + FC(recurrent))
+// Activation is sigmoid except for the "cell" gate (configurable, usually tanh)
+#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+template <typename ActivationType, typename WeightType, typename CellType,
+          typename BiasType>
+void CalculateLstmGate(
+    const LstmStepManager& step_info, const GateParameters& gate_params,
+    // Input FC
+    const TfLiteEvalTensor* input, const TfLiteEvalTensor* input_weight,
+    const TfLiteEvalTensor* input_bias,
+    // Recurrent FC
+    const TfLiteEvalTensor* recurrent, const TfLiteEvalTensor* recurrent_weight,
+    const TfLiteEvalTensor* recurrent_bias,
+    // Output
+    CellType* gate_output,
+    // Scratch arrays
+    CellType* fc_output_buffer, const TfLiteFusedActivation activation) {
+  const auto gate_output_shape = step_info.StateShape();
+  // Check offset validity to avoid memory overflow
+  TFLITE_DCHECK_LE(step_info.InputOffset() + step_info.InputShape().FlatSize(),
+                   tflite::micro::GetTensorShape(input).FlatSize());
+  TFLITE_DCHECK_LE(
+      step_info.HiddenStateOffset() + step_info.StateShape().FlatSize(),
+      tflite::micro::GetTensorShape(recurrent).FlatSize());
+
+  // Input FC
+  FullyConnected(gate_params.input_fc_params, step_info.InputShape(),
+                 tflite::micro::GetTensorData<ActivationType>(input) +
+                     step_info.InputOffset(),
+                 tflite::micro::GetTensorShape(input_weight),
+                 tflite::micro::GetTensorData<WeightType>(input_weight),
+                 tflite::micro::GetTensorShape(input_bias),
+                 tflite::micro::GetOptionalTensorData<BiasType>(input_bias),
+                 gate_output_shape, gate_output);
+
+  // Recurrent FC
+  FullyConnected(gate_params.recurrent_fc_params, step_info.StateShape(),
+                 tflite::micro::GetTensorData<ActivationType>(recurrent) +
+                     step_info.HiddenStateOffset(),
+                 tflite::micro::GetTensorShape(recurrent_weight),
+                 tflite::micro::GetTensorData<WeightType>(recurrent_weight),
+                 tflite::micro::GetTensorShape(recurrent_bias),
+                 tflite::micro::GetOptionalTensorData<BiasType>(recurrent_bias),
+                 gate_output_shape, fc_output_buffer);
+
+  AddElementWise(gate_output, fc_output_buffer,
+                 /*n_batch=*/gate_output_shape.DimsData()[0],
+                 /*n_state=*/gate_output_shape.DimsData()[1], gate_output);
+  // Apply activation
+  switch (activation) {
+    case kTfLiteActSigmoid:
+      Sigmoid(gate_output_shape, gate_output);
+      break;
+    case kTfLiteActTanh: {
+      // Set the scale power to -12 to avoid shift
+      Tanh(/*cell_state_scale_power=*/-12, gate_output_shape, gate_output,
+           gate_output_shape, gate_output);
+    } break;
+    default:
+      // Only Sigmoid or Tanh is used.
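+      // Any other activation reaching this point indicates a bug upstream:
+      // the kernel's Prepare() stage should have rejected the model.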
+      TFLITE_ASSERT_FALSE;
+  }
+}
+
+// Update the cell state using the output from the forget gate, input gate, and
+// cell gate. Formula: updated_cell_state = forget_gate_output * cell_state +
+// input_gate_output * cell_gate_output, where * denotes element-wise
+// multiplication
+template <typename CellType>
+void UpdateLstmCell(const LstmStepManager& step_info,
+                    TfLiteEvalTensor* cell_state,
+                    // Gate outputs
+                    CellType* forget_gate_output,
+                    const CellType* input_gate_output,
+                    const CellType* cell_gate_output,
+                    // Mul parameters
+                    const ArithmeticParams& forget_cell_mul_params,
+                    const ArithmeticParams& input_mul_params,
+                    const CellStateInfo& cell_state_info, CellType* buffer) {
+  // Check offset validity to avoid memory overflow
+  TFLITE_DCHECK_LE(
+      step_info.CellStateOffset() + step_info.StateShape().FlatSize(),
+      tflite::micro::GetTensorShape(cell_state).FlatSize());
+
+  auto cell_state_shape = step_info.StateShape();
+  // Forget Gate x Cell State
+  Mul(cell_state_shape, forget_cell_mul_params, forget_gate_output,
+      tflite::micro::GetTensorData<CellType>(cell_state) +
+          step_info.CellStateOffset(),
+      tflite::micro::GetTensorData<CellType>(cell_state) +
+          step_info.CellStateOffset());
+  // Input Gate x Cell Gate
+  Mul(cell_state_shape, input_mul_params, input_gate_output, cell_gate_output,
+      buffer);
+
+  // Update the cell state
+  AddElementWise(tflite::micro::GetTensorData<CellType>(cell_state) +
+                     step_info.CellStateOffset(),
+                 buffer,
+                 /*n_batch=*/cell_state_shape.DimsData()[0],
+                 /*n_state=*/cell_state_shape.DimsData()[1],
+                 tflite::micro::GetTensorData<CellType>(cell_state) +
+                     step_info.CellStateOffset());
+
+  if (cell_state_info.cell_clip > 0) {
+    Clipping(cell_state_shape.FlatSize(), cell_state_info,
+             tflite::micro::GetTensorData<CellType>(cell_state) +
+                 step_info.CellStateOffset());
+  }
+}
+#else  // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+template <typename ActivationType, typename WeightType, typename CellType,
+          typename BiasType>
+void CalculateLstmGate(
+    const LstmStepManager& step_info, const GateParameters& gate_params,
+    // Input FC
+    const TfLiteEvalTensor* input, const TfLiteEvalTensor* input_weight,
+    const TfLiteEvalTensor* input_bias,
+    // Recurrent FC
+    const TfLiteEvalTensor* recurrent, const TfLiteEvalTensor* recurrent_weight,
+    const TfLiteEvalTensor* recurrent_bias,
+    // Output
+    CellType* gate_output,
+    // Scratch arrays
+    CellType* fc_output_buffer, const TfLiteFusedActivation activation,
+    const int num_batches, const int input_dimension,
+    const int state_dimension) {
+  // RuntimeShape step_input_shape = step_info.InputShape();
+  // RuntimeShape input_shape = tflite::micro::GetTensorShape(input);
+  // RuntimeShape step_state_shape = step_info.StateShape();
+  // RuntimeShape recurrent_shape = tflite::micro::GetTensorShape(recurrent);
+
+  // Moved these to LstmStep function
+  // Check offset validity to avoid memory overflow
+  // TFLITE_DCHECK_LE(step_info.InputOffset() + step_input_shape.FlatSize(),
+  //                  input_shape.FlatSize());
+  // TFLITE_DCHECK_LE(
+  //     step_info.HiddenStateOffset() + step_state_shape.FlatSize(),
+  //     recurrent_shape.FlatSize());
+
+  // Input FC
+  FullyConnected(gate_params.input_fc_params,
+                 tflite::micro::GetTensorData<ActivationType>(input) +
+                     step_info.InputOffset(),
+                 tflite::micro::GetTensorData<WeightType>(input_weight),
+                 tflite::micro::GetOptionalTensorData<BiasType>(input_bias),
+                 gate_output, num_batches, state_dimension, input_dimension);
+
+  // Recurrent FC
+  FullyConnected(gate_params.recurrent_fc_params,
+                 tflite::micro::GetTensorData<ActivationType>(recurrent) +
+                     step_info.HiddenStateOffset(),
+                 tflite::micro::GetTensorData<WeightType>(recurrent_weight),
+                 tflite::micro::GetOptionalTensorData<BiasType>(recurrent_bias), +
fc_output_buffer, num_batches, state_dimension, + state_dimension); + + AddElementWise(gate_output, fc_output_buffer, + /*n_batch=*/num_batches, + /*n_state=*/state_dimension, gate_output); + // Apply activation + switch (activation) { + case kTfLiteActSigmoid: + Sigmoid(gate_output, num_batches * state_dimension); + break; + case kTfLiteActTanh: { + // Set the scale power to -12 to avoid shift + Tanh(/*cell_state_scale_power=*/-12, gate_output, gate_output, + num_batches * state_dimension); + } break; + default: + // Only Sigmoid or Tanh is used. + TFLITE_ASSERT_FALSE; + } +} + +// Update the cell state using the output from the forget gate, input gate, and +// cell gate Formula: updated_cell_state = forget_gate_output*cell_state + +// input_gate_output * cell_gate_output, where * denotes element wise +// multiplication +void UpdateLstmCell(const LstmStepManager& step_info, + TfLiteEvalTensor* cell_state, + // Gate outputs + int16_t* forget_gate_output, + const int16_t* input_gate_output, + const int16_t* cell_gate_output, + // Mul parameters + const ArithmeticParams& forget_cell_mul_params, + const ArithmeticParams& input_mul_params, + const CellStateInfo& cell_state_info, int16_t* buffer); + +void UpdateLstmCell(const LstmStepManager& step_info, + TfLiteEvalTensor* cell_state, + // Gate outputs + float* forget_gate_output, const float* input_gate_output, + const float* cell_gate_output, + // Mul parameters + const ArithmeticParams& forget_cell_mul_params, + const ArithmeticParams& input_mul_params, + const CellStateInfo& cell_state_info, float* buffer); +#endif // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) + +// Update the hidden state of the LSTM kernel using the following formula: +// updated_hidden_state = Tanh(updated_cell_state) * output_gate_output, * means +// element wise multiplication +template +void UpdateLstmHidden(const LstmStepManager& step_info, + TfLiteEvalTensor* cell_state, + TfLiteEvalTensor* hidden_state, + const CellType* output_gate_output, + const ArithmeticParams& mul_params, + int32_t cell_state_scale_power, CellType* buffer) { + // Check offset validity to avoid memory overflow + TFLITE_DCHECK_LE( + step_info.CellStateOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(cell_state).FlatSize()); + TFLITE_DCHECK_LE( + step_info.HiddenStateOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(hidden_state).FlatSize()); + + auto cell_state_shape = step_info.StateShape(); + CellType* cell_state_data = + tflite::micro::GetTensorData(cell_state) + + step_info.CellStateOffset(); + // Tanh(cell_state) +#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) + Tanh(cell_state_scale_power, cell_state_shape, cell_state_data, + cell_state_shape, buffer); + // Update the hidden state + Mul(cell_state_shape, mul_params, buffer, output_gate_output, + tflite::micro::GetTensorData(hidden_state) + + step_info.HiddenStateOffset()); +#else + int32_t cell_state_size = cell_state_shape.FlatSize(); + Tanh(cell_state_scale_power, cell_state_data, buffer, cell_state_size); + // Update the hidden state + Mul(mul_params, buffer, output_gate_output, + tflite::micro::GetTensorData(hidden_state) + + step_info.HiddenStateOffset(), + cell_state_size); +#endif +} + +#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) +template +void LstmStep(const LstmStepManager& step_info, const OpDataLSTM& op_data, + LSTMKernelContents& kernel_content, + const LSTMBuffers& buffers) { + /*Step1: Calculate gate outputs to prepare cell 
state update*/ + CellType* gate_internal_buffer = buffers.buffer3; + CellType* forget_gate_output = buffers.buffer0; + CalculateLstmGate( + step_info, op_data.forget_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToForgetWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmForgetGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToForgetWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + forget_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid); + + // Input Gate calculation; + CellType* input_gate_output = buffers.buffer1; + CalculateLstmGate( + step_info, op_data.input_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToInputWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToInputWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + input_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid); + + // Cell Gate calculation + CellType* cell_gate_output = buffers.buffer2; + CalculateLstmGate( + step_info, op_data.cell_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToCellWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmCellGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToCellWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + cell_gate_output, + // Scratch arrays + gate_internal_buffer, op_data.cell_gate_nonlinear_type); + + /*Step2: update the cell state */ + const InterGateParameters& inter_gate_params = op_data.inter_gate_parameters; + CellType* updated_input_buffer = buffers.buffer1; // reuse buffer + + UpdateLstmCell(step_info, kernel_content.CellStateTensor(), + forget_gate_output, input_gate_output, + cell_gate_output, + inter_gate_params.forget_cell_mul_params, + inter_gate_params.input_mul_params, + op_data.cell_state_info, updated_input_buffer); + + /*Step3: update the hidden state */ + CellType* output_gate_output = buffers.buffer1; // reuse buffer + CalculateLstmGate( + step_info, op_data.output_gate_parameters, + // Input FC + kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToOutputWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmOutputGateBiasTensor), + // Recurrent FC + kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToOutputWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + output_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid); + + CellType* tanh_activated_cell_buffer = buffers.buffer0; // reuse buffer + tflite::lstm_internal::UpdateLstmHidden( + step_info, kernel_content.CellStateTensor(), + kernel_content.HiddenStateTensor(), output_gate_output, + inter_gate_params.output_mul_params, + op_data.cell_state_info.cell_state_scale_power, + tanh_activated_cell_buffer); + + /*Step4: copy the update the hidden state to output*/ + // Check offset validity to avoid memory overflow + TFLITE_DCHECK_LE( + 
step_info.OutputOffset() + step_info.StateShape().FlatSize(), + tflite::micro::GetTensorShape(kernel_content.output_tensor).FlatSize()); + // record the output (from the updated hidden state) + ActivationType* output_ptr = tflite::micro::GetTensorData( + kernel_content.output_tensor); + const auto* hidden_state = kernel_content.HiddenStateTensor(); + std::memcpy(output_ptr + step_info.OutputOffset(), + tflite::micro::GetTensorData(hidden_state) + + step_info.HiddenStateOffset(), + step_info.StateShape().FlatSize() * sizeof(ActivationType)); +} +#else // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) +template +void LstmStep(const LstmStepManager& step_info, const OpDataLSTM& op_data, + LSTMKernelContents& kernel_content, + const LSTMBuffers& buffers) { + const TfLiteEvalTensor* input = + kernel_content.GetInternalTensor(tflite::kLstmInputTensor); + TfLiteEvalTensor* recurrent = kernel_content.HiddenStateTensor(); + + int time_major = step_info.time_major(); + int num_batches = time_major == 0 ? 1 : step_info.batch_size(); + int input_dimension = step_info.input_dimension(); + int state_dimension = step_info.state_dimension(); + + // Check offset validity to avoid memory overflow + TFLITE_DCHECK_LE(step_info.InputOffset() + num_batches * input_dimension, + tflite::micro::GetTensorShape(input).FlatSize()); + TFLITE_DCHECK_LE( + step_info.HiddenStateOffset() + num_batches * state_dimension, + tflite::micro::GetTensorShape(recurrent).FlatSize()); + + /*Step1: Calculate gate outputs to prepare cell state update*/ + CellType* gate_internal_buffer = buffers.buffer3; + CellType* forget_gate_output = buffers.buffer0; + CalculateLstmGate( + step_info, op_data.forget_gate_parameters, + // Input FC + input, // kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToForgetWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmForgetGateBiasTensor), + // Recurrent FC + recurrent, // kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToForgetWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + forget_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid, num_batches, input_dimension, + state_dimension); + + // Input Gate calculation; + CellType* input_gate_output = buffers.buffer1; + CalculateLstmGate( + step_info, op_data.input_gate_parameters, + // Input FC + input, // kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToInputWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputGateBiasTensor), + // Recurrent FC + recurrent, // kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToInputWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + input_gate_output, + // Scratch arrays + gate_internal_buffer, kTfLiteActSigmoid, num_batches, input_dimension, + state_dimension); + + // Cell Gate calculation + CellType* cell_gate_output = buffers.buffer2; + CalculateLstmGate( + step_info, op_data.cell_gate_parameters, + // Input FC + input, // kernel_content.GetInternalTensor(tflite::kLstmInputTensor), + kernel_content.GetInternalTensor(tflite::kLstmInputToCellWeightsTensor), + kernel_content.GetInternalTensor(tflite::kLstmCellGateBiasTensor), + // Recurrent FC + recurrent, // kernel_content.HiddenStateTensor(), + kernel_content.GetInternalTensor( + tflite::kLstmRecurrentToCellWeightsTensor), + /*recurrent_bias*/ nullptr, + // Output + 
cell_gate_output,
+      // Scratch arrays
+      gate_internal_buffer, op_data.cell_gate_nonlinear_type, num_batches,
+      input_dimension, state_dimension);
+
+  /*Step2: update the cell state */
+  const InterGateParameters& inter_gate_params = op_data.inter_gate_parameters;
+  CellType* updated_input_buffer = buffers.buffer1;  // reuse buffer
+
+  UpdateLstmCell(step_info, kernel_content.CellStateTensor(),
+                 forget_gate_output, input_gate_output, cell_gate_output,
+                 inter_gate_params.forget_cell_mul_params,
+                 inter_gate_params.input_mul_params, op_data.cell_state_info,
+                 updated_input_buffer);
+
+  /*Step3: update the hidden state */
+  CellType* output_gate_output = buffers.buffer1;  // reuse buffer
+  CalculateLstmGate(
+      step_info, op_data.output_gate_parameters,
+      // Input FC
+      input,  // kernel_content.GetInternalTensor(tflite::kLstmInputTensor),
+      kernel_content.GetInternalTensor(tflite::kLstmInputToOutputWeightsTensor),
+      kernel_content.GetInternalTensor(tflite::kLstmOutputGateBiasTensor),
+      // Recurrent FC
+      recurrent,  // kernel_content.HiddenStateTensor(),
+      kernel_content.GetInternalTensor(
+          tflite::kLstmRecurrentToOutputWeightsTensor),
+      /*recurrent_bias*/ nullptr,
+      // Output
+      output_gate_output,
+      // Scratch arrays
+      gate_internal_buffer, kTfLiteActSigmoid, num_batches, input_dimension,
+      state_dimension);
+
+  CellType* tanh_activated_cell_buffer = buffers.buffer0;  // reuse buffer
+  tflite::lstm_internal::UpdateLstmHidden(
+      step_info, kernel_content.CellStateTensor(), recurrent,
+      /* kernel_content.HiddenStateTensor(), */ output_gate_output,
+      inter_gate_params.output_mul_params,
+      op_data.cell_state_info.cell_state_scale_power,
+      tanh_activated_cell_buffer);
+
+  /*Step4: copy the updated hidden state to the output*/
+  // Check offset validity to avoid memory overflow
+  TFLITE_DCHECK_LE(
+      step_info.OutputOffset() + step_info.StateShape().FlatSize(),
+      tflite::micro::GetTensorShape(kernel_content.output_tensor).FlatSize());
+  // record the output (from the updated hidden state)
+  ActivationType* output_ptr = tflite::micro::GetTensorData<ActivationType>(
+      kernel_content.output_tensor);
+  // const auto* hidden_state = kernel_content.HiddenStateTensor();
+  std::memcpy(output_ptr + step_info.OutputOffset(),
+              tflite::micro::GetTensorData<ActivationType>(recurrent) +
+                  step_info.HiddenStateOffset(),
+              step_info.StateShape().FlatSize() * sizeof(ActivationType));
+}
+#endif  // #if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5))
+
+}  // namespace lstm_internal
+
+// Evaluate the LSTM kernel with (potentially) multi-step and multi-batch input
+template <typename ActivationType, typename WeightType, typename CellType,
+          typename BiasType>
+TfLiteStatus EvalLstm(const OpDataLSTM& op_data,
+                      LSTMKernelContents& kernel_content,
+                      const LSTMBuffers<CellType>& buffers) {
+  lstm_internal::LstmStepManager step_info(&op_data.size_info);
+  const auto& size_info = op_data.size_info;
+  // time is the first dimension, enable batch computation
+  if (size_info.time_major) {
+    for (int t = 0; t < size_info.time_steps; t++) {
+      lstm_internal::LstmStep<ActivationType, WeightType, CellType, BiasType>(
+          step_info, op_data, kernel_content, buffers);
+      // prepare for the next time step
+      step_info.UpdateTime();
+    }
+  } else {
+    // batch first, unable to batch the computation: run
single batch inference + for (int b = 0; b < size_info.batch_size; b++) { + for (int t = 0; t < size_info.time_steps; t++) { + lstm_internal::LstmStep( + step_info, op_data, kernel_content, buffers); + // prepare for the next time step + step_info.UpdateTime(); + } + // prepare for the next batch + step_info.UpdateBatch(); + step_info.ResetTime(); + } + } + return kTfLiteOk; +} } // namespace tflite -#endif // TENSORFLOW_LITE_KERNELS_LSTM_EVAL_H_ + +#endif // TENSORFLOW_LITE_MICRO_KERNELS_LSTM_EVAL_16ACT_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc b/tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc index 2b49f26e3ab..f8b6fd806f0 100644 --- a/tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc +++ b/tensorflow/lite/micro/kernels/xtensa/lstm_eval_hifi.cc @@ -12,17 +12,65 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + +#include + #include "tensorflow/lite/c/builtin_op_data.h" #include "tensorflow/lite/c/common.h" #include "tensorflow/lite/micro/kernels/xtensa/lstm_eval.h" #include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" namespace tflite { -namespace ops { -namespace micro { -namespace lstm_eval { #if defined(HIFI5) +#if TFLITE_SINGLE_ROUNDING +#define MPY_BY_QUANT_MULT_X2_OUT32(out, inp, multiplier, left_shift, \ + right_shift) \ + { \ + ae_int64 out64_0, out64_1; \ + ae_int64 INT64_ONE = AE_MOVINT64_FROMINT32X2(AE_MOVDA32X2(0, 1)); \ + ae_int64 round_val = AE_SLAA64S(INT64_ONE, 30 - left_shift); \ + AE_MUL32X2S_HH_LL(out64_0, out64_1, inp, AE_MOVDA32(multiplier)); \ + out64_0 = AE_ADD64S(out64_0, round_val); \ + out64_1 = AE_ADD64S(out64_1, round_val); \ + out = AE_TRUNCA32X2F64S(out64_0, out64_1, 1 + left_shift); \ + } + +#define MPY_BY_QUANT_MULT_X2X2_OUT32(out1, out2, inp1, inp2, multiplier, \ + left_shift, right_shift) \ + { \ + ae_int64 out64_0, out64_1, out64_2, out64_3; \ + ae_int64 INT64_ONE = AE_MOVINT64_FROMINT32X2(AE_MOVDA32X2(0, 1)); \ + ae_int64 round_val = AE_SLAA64S(INT64_ONE, 30 - left_shift); \ + AE_MUL32X2S_HH_LL(out64_0, out64_1, inp1, AE_MOVDA32(multiplier)); \ + AE_MUL32X2S_HH_LL(out64_2, out64_3, inp2, AE_MOVDA32(multiplier)); \ + out64_0 = AE_ADD64S(out64_0, round_val); \ + out64_1 = AE_ADD64S(out64_1, round_val); \ + out64_2 = AE_ADD64S(out64_2, round_val); \ + out64_3 = AE_ADD64S(out64_3, round_val); \ + out1 = AE_TRUNCA32X2F64S(out64_0, out64_1, 1 + left_shift); \ + out2 = AE_TRUNCA32X2F64S(out64_2, out64_3, 1 + left_shift); \ + } +#else /* #if TFLITE_SINGLE_ROUNDING */ +#define MPY_BY_QUANT_MULT_X2_OUT32(out, inp, multiplier, left_shift, \ + right_shift) \ + out = AE_SLAA32(inp, left_shift); \ + out = AE_MULFP32X2RAS(out, AE_MOVDA32(multiplier)); \ + out = AE_SRAA32SYMS(out, right_shift); + +#define MPY_BY_QUANT_MULT_X2X2_OUT32(out1, out2, inp1, inp2, multiplier, \ + left_shift, right_shift) \ + { \ + ae_int32x2 d_ls = AE_MOVDA32(1 << left_shift); \ + AE_MUL2P32X4(out1, out2, inp1, inp2, d_ls, d_ls); \ + AE_MULF2P32X4RAS(out1, out2, out1, out2, AE_MOVDA32(multiplier), \ + AE_MOVDA32(multiplier)); \ + out1 = AE_SRAA32SYMS(out1, right_shift); \ + out2 = AE_SRAA32SYMS(out2, right_shift); \ + } +#endif /* #if TFLITE_SINGLE_ROUNDING */ + void calc_cell_state_without_cifg(int16_t* cell_state, const int16_t* forget_gate, const int16_t* cell_gate, @@ -124,7 +172,7 @@ 
void calc_cell_state_without_cifg(int16_t* cell_state, AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_ig_0); d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); - d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); AE_MINMAX16(d_cs_w_0, d_min, d_max); @@ -187,11 +235,11 @@ void calc_cell_state_without_cifg(int16_t* cell_state, AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); - d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_0); AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_ig_0); d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); - d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); AE_MINMAX16(d_cs_w_0, d_min, d_max); @@ -298,7 +346,7 @@ void calc_cell_state_with_cifg(int16_t* cell_state, const int16_t* forget_gate, d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_1mfg_0); d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); - d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); AE_MINMAX16(d_cs_w_0, d_min, d_max); @@ -360,12 +408,12 @@ void calc_cell_state_with_cifg(int16_t* cell_state, const int16_t* forget_gate, AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); - d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_0); d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_1mfg_0); d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); - d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); AE_MINMAX16(d_cs_w_0, d_min, d_max); @@ -387,7 +435,6 @@ void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, ae_int16x4 data_a_0, data_a_1; ae_int16x4 data_b_0, data_b_1; ae_int32x2 data_ab_0, data_ab_1, data_ab_2, data_ab_3; - ae_int32x2 d_multiplier, d_left_shift; ae_int16x4 d_zp; ae_int16x4 data_c_0, data_c_1; ae_int8x8 data_c; @@ -401,13 +448,15 @@ void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, align_src_input_2 = AE_LA128_PP((ae_int16x8*)tmp_input_2); align_dst_output = AE_ZALIGN64(); // zero alignment reg - d_multiplier = AE_MOVDA32(multiplier); d_zp = AE_MOVDA16(zero_point); +#if TFLITE_SINGLE_ROUNDING + left_shift = shift; + (void)right_shift; +#else /* #if TFLITE_SINGLE_ROUNDING */ left_shift = shift < 0 ? 0 : shift; right_shift = shift > 0 ? 
0 : -shift; - - d_left_shift = AE_MOVDA32(1 << left_shift); +#endif /* #if TFLITE_SINGLE_ROUNDING */ #pragma concurrent for (i = 0; i < (num_elms >> 3); i++) { AE_LA16X4X2_IP(data_a_0, data_a_1, align_src_input_1, tmp_input_1); @@ -415,18 +464,10 @@ void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, AE_MUL16X4(data_ab_0, data_ab_1, data_a_0, data_b_0); AE_MUL16X4(data_ab_2, data_ab_3, data_a_1, data_b_1); - AE_MUL2P32X4(data_ab_0, data_ab_1, data_ab_0, data_ab_1, d_left_shift, - d_left_shift); - AE_MUL2P32X4(data_ab_2, data_ab_3, data_ab_2, data_ab_3, d_left_shift, - d_left_shift); - AE_MULF2P32X4RAS(data_ab_0, data_ab_1, data_ab_0, data_ab_1, d_multiplier, - d_multiplier); - AE_MULF2P32X4RAS(data_ab_2, data_ab_3, data_ab_2, data_ab_3, d_multiplier, - d_multiplier); - data_ab_0 = AE_SRAA32SYMS(data_ab_0, right_shift); - data_ab_1 = AE_SRAA32SYMS(data_ab_1, right_shift); - data_ab_2 = AE_SRAA32SYMS(data_ab_2, right_shift); - data_ab_3 = AE_SRAA32SYMS(data_ab_3, right_shift); + MPY_BY_QUANT_MULT_X2X2_OUT32(data_ab_0, data_ab_1, data_ab_0, data_ab_1, + multiplier, left_shift, right_shift); + MPY_BY_QUANT_MULT_X2X2_OUT32(data_ab_2, data_ab_3, data_ab_2, data_ab_3, + multiplier, left_shift, right_shift); data_c_0 = AE_SAT16X4(data_ab_0, data_ab_1); data_c_1 = AE_SAT16X4(data_ab_2, data_ab_3); data_c_0 = AE_SUB16S(data_c_0, d_zp); @@ -445,18 +486,532 @@ void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, AE_L16_IP(data_b_0, (ae_int16*)tmp_input_2, 2); AE_MUL16X4(data_ab_0, data_ab_1, data_a_0, data_b_0); - data_ab_0 = AE_MULP32X2(data_ab_0, d_left_shift); - data_ab_0 = AE_MULFP32X2RAS(data_ab_0, d_multiplier); - data_ab_0 = AE_SRAA32SYMS(data_ab_0, right_shift); - data_c_0 = AE_SAT16X4(data_ab_0, data_ab_1); + MPY_BY_QUANT_MULT_X2_OUT32(data_ab_0, data_ab_0, multiplier, left_shift, + right_shift); + data_c_0 = AE_SAT16X4(data_ab_0, data_ab_0); data_c_0 = AE_SUB16S(data_c_0, d_zp); data_c = AE_SAT8X8X16(data_c_0, data_c_0); AE_S8_0_IP(data_c, (ae_int8*)output, 1); } } +#elif defined(HIFI3) || defined(HIFI4) +#if TFLITE_SINGLE_ROUNDING +#define MPY_BY_QUANT_MULT_X2_OUT32(out, inp, multiplier, l_shift, r_shift) \ + { \ + ae_int64 out64_0, out64_1; \ + out64_0 = AE_MUL32_HH(inp, AE_MOVDA32(multiplier)); \ + out64_1 = AE_MUL32_LL(inp, AE_MOVDA32(multiplier)); \ + out64_0 = AE_SLAA64S(out64_0, 1 + l_shift); \ + out64_1 = AE_SLAA64S(out64_1, 1 + l_shift); \ + out = AE_ROUND32X2F64SASYM(out64_0, out64_1); \ + } + +#define MPY_BY_QUANT_MULT_X2X2_OUT32(out1, out2, inp1, inp2, multiplier, \ + l_shift, r_shift) \ + { \ + ae_int64 out64_0, out64_1, out64_2, out64_3; \ + out64_0 = AE_MUL32_HH(inp1, AE_MOVDA32(multiplier)); \ + out64_1 = AE_MUL32_LL(inp1, AE_MOVDA32(multiplier)); \ + out64_2 = AE_MUL32_HH(inp2, AE_MOVDA32(multiplier)); \ + out64_3 = AE_MUL32_LL(inp2, AE_MOVDA32(multiplier)); \ + out64_0 = AE_SLAA64S(out64_0, 1 + l_shift); \ + out64_1 = AE_SLAA64S(out64_1, 1 + l_shift); \ + out64_2 = AE_SLAA64S(out64_2, 1 + l_shift); \ + out64_3 = AE_SLAA64S(out64_3, 1 + l_shift); \ + out1 = AE_ROUND32X2F64SASYM(out64_0, out64_1); \ + out2 = AE_ROUND32X2F64SASYM(out64_2, out64_3); \ + } +#else /* #if TFLITE_SINGLE_ROUNDING */ +#define MPY_BY_QUANT_MULT_X2_OUT32(out, inp, multiplier, l_shift, r_shift) \ + out = AE_SLAA32(inp, l_shift); \ + out = AE_MULFP32X2RAS(out, AE_MOVDA32(multiplier)); \ + out = AE_ROUND32X2F64SSYM(AE_SRAA64(AE_CVT64F32_H(out), r_shift), \ + AE_SRAA64(AE_CVT64F32_L(out), r_shift)); + +#define MPY_BY_QUANT_MULT_X2X2_OUT32(out1, out2, inp1, inp2, 
multiplier, \ + l_shift, r_shift) \ + { \ + ae_int32x2 d_ls = AE_MOVDA32(1 << l_shift); \ + out1 = AE_MULP32X2(inp1, d_ls); \ + out2 = AE_MULP32X2(inp2, d_ls); \ + out1 = AE_MULFP32X2RAS(out1, AE_MOVDA32(multiplier)); \ + out2 = AE_MULFP32X2RAS(out2, AE_MOVDA32(multiplier)); \ + out1 = AE_ROUND32X2F64SSYM(AE_SRAA64(AE_CVT64F32_H(out1), r_shift), \ + AE_SRAA64(AE_CVT64F32_L(out1), r_shift)); \ + out2 = AE_ROUND32X2F64SSYM(AE_SRAA64(AE_CVT64F32_H(out2), r_shift), \ + AE_SRAA64(AE_CVT64F32_L(out2), r_shift)); \ + } +#endif /* #if TFLITE_SINGLE_ROUNDING */ + +#ifndef AE_MULFP16X4RS +static inline ae_f16x4 AE_MULFP16X4RS(ae_f16x4 d0, ae_f16x4 d1) { + ae_f16x4 output; + ae_f32x2 d0_32_0, d0_32_1, out32_0, out32_1; + ae_f16x4 one_d = AE_MOVDA16(1); + AE_MUL16X4(d0_32_0, d0_32_1, d0, one_d); + out32_0 = AE_MULFP32X16X2RS_H(d0_32_0, d1); + out32_1 = AE_MULFP32X16X2RS_L(d0_32_1, d1); + output = AE_SEL16_6420(AE_MOVF16X4_FROMF32X2(out32_0), + AE_MOVF16X4_FROMF32X2(out32_1)); + return output; +} +#endif + +#ifndef AE_MINMAX16 +#define AE_MINMAX16(dinout, d_min, d_max) \ + { \ + xtbool4 b0 = AE_LT16(dinout, d_min); \ + AE_MOVT16X4(dinout, d_min, b0); \ + b0 = AE_LT16(d_max, dinout); \ + AE_MOVT16X4(dinout, d_max, b0); \ + } +#endif + +#ifndef AE_SRAA32SYMS +#define AE_SRAA32SYMS(inp, right_shift) \ + AE_ROUND32X2F64SSYM(AE_SRAA64(AE_CVT64F32_H(inp), right_shift), \ + AE_SRAA64(AE_CVT64F32_L(inp), right_shift)) +#endif + +void calc_cell_state_without_cifg(int16_t* cell_state, + const int16_t* forget_gate, + const int16_t* cell_gate, + const int16_t* input_gate, int shift1, + int shift2, int clip, int num_elms) { + const ae_int16x4 *p16x4_cs_r, *p16x4_fg_r; + const ae_int16x4 *p16x4_cg_r, *p16x4_ig_r; + + ae_int16x4* p16x4_cs_w; + + ae_valign align_cs_r, align_fg_r; + ae_valign align_cg_r, align_ig_r; + ae_valign align_cs_w; + + ae_int16x4 d_cs_r_0, d_cs_r_1; + ae_int16x4 d_fg_0, d_fg_1; + ae_int16x4 d_cg_0, d_cg_1; + ae_int16x4 d_ig_0, d_ig_1; + ae_int16x4 d_cs_w_0, d_cs_w_1; + ae_int32x2 d_mul_0, d_mul_1, d_mul_2, d_mul_3; + ae_int32x2 d_mul_4, d_mul_5, d_mul_6, d_mul_7; + + ae_int16x4 d_min, d_max; + + int i = 0; + p16x4_cs_r = (const ae_int16x4*)cell_state; + p16x4_fg_r = (const ae_int16x4*)forget_gate; + p16x4_cg_r = (const ae_int16x4*)cell_gate; + p16x4_ig_r = (const ae_int16x4*)input_gate; + + p16x4_cs_w = (ae_int16x4*)cell_state; + + align_cs_r = AE_LA64_PP(p16x4_cs_r); + align_fg_r = AE_LA64_PP(p16x4_fg_r); + align_cg_r = AE_LA64_PP(p16x4_cg_r); + align_ig_r = AE_LA64_PP(p16x4_ig_r); + + align_cs_w = AE_ZALIGN64(); + + if (clip > 0) { + d_min = AE_MOVDA16(-clip); + d_max = AE_MOVDA16(clip); + } else { + d_min = AE_MOVDA16(-32768); + d_max = AE_MOVDA16(32767); + } + +#pragma concurrent + if (shift1 == 15) { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4_IP(d_cs_r_0, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_cs_r_1, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_fg_0, align_fg_r, p16x4_fg_r); + AE_LA16X4_IP(d_fg_1, align_fg_r, p16x4_fg_r); + AE_LA16X4_IP(d_cg_0, align_cg_r, p16x4_cg_r); + AE_LA16X4_IP(d_cg_1, align_cg_r, p16x4_cg_r); + AE_LA16X4_IP(d_ig_0, align_ig_r, p16x4_ig_r); + AE_LA16X4_IP(d_ig_1, align_ig_r, p16x4_ig_r); + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + d_cs_w_1 = AE_MULFP16X4RS(d_cs_r_1, d_fg_1); + + AE_MUL16X4(d_mul_4, d_mul_5, d_cg_0, d_ig_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_ig_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + + 
d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4_IP(d_cs_w_0, align_cs_w, p16x4_cs_w); + AE_SA16X4_IP(d_cs_w_1, align_cs_w, p16x4_cs_w); + } + AE_SA64POS_FP(align_cs_w, p16x4_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16 *p16_cg_r, *p16_ig_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x4_cs_r; + p16_fg_r = (const ae_int16*)p16x4_fg_r; + p16_cg_r = (const ae_int16*)p16x4_cg_r; + p16_ig_r = (const ae_int16*)p16x4_ig_r; + + p16_cs_w = (ae_int16*)p16x4_cs_w; + // residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + d_ig_0 = p16_ig_r[i]; + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_ig_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } else { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4_IP(d_cs_r_0, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_cs_r_1, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_fg_0, align_fg_r, p16x4_fg_r); + AE_LA16X4_IP(d_fg_1, align_fg_r, p16x4_fg_r); + AE_LA16X4_IP(d_cg_0, align_cg_r, p16x4_cg_r); + AE_LA16X4_IP(d_cg_1, align_cg_r, p16x4_cg_r); + AE_LA16X4_IP(d_ig_0, align_ig_r, p16x4_ig_r); + AE_LA16X4_IP(d_ig_1, align_ig_r, p16x4_ig_r); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + AE_MUL16X4(d_mul_2, d_mul_3, d_cs_r_1, d_fg_1); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_mul_1 = AE_SRAA32SYMS(d_mul_1, shift1); + d_mul_2 = AE_SRAA32SYMS(d_mul_2, shift1); + d_mul_3 = AE_SRAA32SYMS(d_mul_3, shift1); + + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cs_w_1 = AE_SAT16X4(d_mul_2, d_mul_3); + + AE_MUL16X4(d_mul_4, d_mul_5, d_cg_0, d_ig_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_ig_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + + d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4_IP(d_cs_w_0, align_cs_w, p16x4_cs_w); + AE_SA16X4_IP(d_cs_w_1, align_cs_w, p16x4_cs_w); + } + AE_SA64POS_FP(align_cs_w, p16x4_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16 *p16_cg_r, *p16_ig_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x4_cs_r; + p16_fg_r = (const ae_int16*)p16x4_fg_r; + p16_cg_r = (const ae_int16*)p16x4_cg_r; + p16_ig_r = (const ae_int16*)p16x4_ig_r; + + p16_cs_w = (ae_int16*)p16x4_cs_w; + // residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + d_ig_0 = p16_ig_r[i]; + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_0); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_ig_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); + + d_cs_w_0 = 
AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } +} + +void calc_cell_state_with_cifg(int16_t* cell_state, const int16_t* forget_gate, + const int16_t* cell_gate, int shift1, int shift2, + int clip, int num_elms) { + const ae_int16x4 *p16x4_cs_r, *p16x4_fg_r; + const ae_int16x4* p16x4_cg_r; + + ae_int16x4* p16x4_cs_w; + + ae_valign align_cs_r, align_fg_r; + ae_valign align_cg_r; + ae_valign align_cs_w; + + ae_int16x4 d_cs_r_0, d_cs_r_1; + ae_int16x4 d_fg_0, d_fg_1; + ae_int16x4 d_cg_0, d_cg_1; + ae_int16x4 d_1mfg_0, d_1mfg_1; + ae_int16x4 d_cs_w_0, d_cs_w_1; + ae_int32x2 d_mul_0, d_mul_1, d_mul_2, d_mul_3; + ae_int32x2 d_mul_4, d_mul_5, d_mul_6, d_mul_7; + + ae_int16x4 d_min, d_max, d_one; + + int i = 0; + p16x4_cs_r = (const ae_int16x4*)cell_state; + p16x4_fg_r = (const ae_int16x4*)forget_gate; + p16x4_cg_r = (const ae_int16x4*)cell_gate; + + p16x4_cs_w = (ae_int16x4*)cell_state; + + align_cs_r = AE_LA64_PP(p16x4_cs_r); + align_fg_r = AE_LA64_PP(p16x4_fg_r); + align_cg_r = AE_LA64_PP(p16x4_cg_r); + + align_cs_w = AE_ZALIGN64(); + + if (clip > 0) { + d_min = AE_MOVDA16(-clip); + d_max = AE_MOVDA16(clip); + } else { + d_min = AE_MOVDA16(-32768); + d_max = AE_MOVDA16(32767); + } + d_one = AE_MOVDA16(32767); + +#pragma concurrent + if (shift1 == 15) { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4_IP(d_cs_r_0, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_cs_r_1, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_fg_0, align_fg_r, p16x4_fg_r); + AE_LA16X4_IP(d_fg_1, align_fg_r, p16x4_fg_r); + AE_LA16X4_IP(d_cg_0, align_cg_r, p16x4_cg_r); + AE_LA16X4_IP(d_cg_1, align_cg_r, p16x4_cg_r); + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + d_cs_w_1 = AE_MULFP16X4RS(d_cs_r_1, d_fg_1); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + d_1mfg_1 = AE_SUB16S(d_one, d_fg_1); + AE_MUL16X4(d_mul_4, d_mul_5, d_cg_0, d_1mfg_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_1mfg_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4_IP(d_cs_w_0, align_cs_w, p16x4_cs_w); + AE_SA16X4_IP(d_cs_w_1, align_cs_w, p16x4_cs_w); + } + AE_SA64POS_FP(align_cs_w, p16x4_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16* p16_cg_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x4_cs_r; + p16_fg_r = (const ae_int16*)p16x4_fg_r; + p16_cg_r = (const ae_int16*)p16x4_cg_r; + + p16_cs_w = (ae_int16*)p16x4_cs_w; + // residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + + d_cs_w_0 = AE_MULFP16X4RS(d_cs_r_0, d_fg_0); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_1mfg_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } else { + for (i = 0; i < (num_elms >> 3); i++) { + AE_LA16X4_IP(d_cs_r_0, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_cs_r_1, align_cs_r, p16x4_cs_r); + AE_LA16X4_IP(d_fg_0, align_fg_r, p16x4_fg_r); + AE_LA16X4_IP(d_fg_1, align_fg_r, 
p16x4_fg_r); + AE_LA16X4_IP(d_cg_0, align_cg_r, p16x4_cg_r); + AE_LA16X4_IP(d_cg_1, align_cg_r, p16x4_cg_r); + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + AE_MUL16X4(d_mul_2, d_mul_3, d_cs_r_1, d_fg_1); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_mul_1 = AE_SRAA32SYMS(d_mul_1, shift1); + d_mul_2 = AE_SRAA32SYMS(d_mul_2, shift1); + d_mul_3 = AE_SRAA32SYMS(d_mul_3, shift1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_1); + d_cs_w_1 = AE_SAT16X4(d_mul_2, d_mul_3); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + d_1mfg_1 = AE_SUB16S(d_one, d_fg_1); + AE_MUL16X4(d_mul_4, d_mul_5, d_cg_0, d_1mfg_0); + AE_MUL16X4(d_mul_6, d_mul_7, d_cg_1, d_1mfg_1); + d_mul_4 = AE_SRAA32SYMS(d_mul_4, shift2); + d_mul_5 = AE_SRAA32SYMS(d_mul_5, shift2); + d_mul_6 = AE_SRAA32SYMS(d_mul_6, shift2); + d_mul_7 = AE_SRAA32SYMS(d_mul_7, shift2); + d_cg_0 = AE_SAT16X4(d_mul_4, d_mul_5); + d_cg_1 = AE_SAT16X4(d_mul_6, d_mul_7); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + d_cs_w_1 = AE_ADD16S(d_cs_w_1, d_cg_1); + + AE_MINMAX16(d_cs_w_0, d_min, d_max); + AE_MINMAX16(d_cs_w_1, d_min, d_max); + + AE_SA16X4_IP(d_cs_w_0, align_cs_w, p16x4_cs_w); + AE_SA16X4_IP(d_cs_w_1, align_cs_w, p16x4_cs_w); + } + AE_SA64POS_FP(align_cs_w, p16x4_cs_w); // finalize the stream + + const ae_int16 *p16_cs_r, *p16_fg_r; + const ae_int16* p16_cg_r; + + ae_int16* p16_cs_w; + + p16_cs_r = (const ae_int16*)p16x4_cs_r; + p16_fg_r = (const ae_int16*)p16x4_fg_r; + p16_cg_r = (const ae_int16*)p16x4_cg_r; + + p16_cs_w = (ae_int16*)p16x4_cs_w; + // residue iterations +#pragma concurrent +#pragma loop_count max = 7 + for (i = 0; i < ((num_elms)&7); i++) { + d_cs_r_0 = p16_cs_r[i]; + d_fg_0 = p16_fg_r[i]; + d_cg_0 = p16_cg_r[i]; + + AE_MUL16X4(d_mul_0, d_mul_1, d_cs_r_0, d_fg_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift1); + d_cs_w_0 = AE_SAT16X4(d_mul_0, d_mul_0); + + d_1mfg_0 = AE_SUB16S(d_one, d_fg_0); + AE_MUL16X4(d_mul_0, d_mul_1, d_cg_0, d_1mfg_0); + d_mul_0 = AE_SRAA32SYMS(d_mul_0, shift2); + d_cg_0 = AE_SAT16X4(d_mul_0, d_mul_0); + + d_cs_w_0 = AE_ADD16S(d_cs_w_0, d_cg_0); + AE_MINMAX16(d_cs_w_0, d_min, d_max); + p16_cs_w[i] = d_cs_w_0; + } + } +} + +void xa_nn_elm_mul_16x16_asym8s(int8_t* output, const int16_t* input_1, + const int16_t* input_2, int32_t multiplier, + int32_t shift, int32_t zero_point, + int num_elms) { + ae_int16x4* tmp_input_1; + ae_int16x4* tmp_input_2; + + ae_valign align_src_input_1, align_src_input_2; + + ae_int16x4 data_a_0, data_b_0; + ae_int32x2 data_ab_0, data_ab_1; + ae_int16x4 d_zp; + ae_int16x4 data_c_0; + ae_int16x4 d_min8 = AE_MOVDA16(-128); + ae_int16x4 d_max8 = AE_MOVDA16(127); + + int i = 0; + int left_shift, right_shift; + tmp_input_1 = (ae_int16x4*)(input_1); + tmp_input_2 = (ae_int16x4*)(input_2); + + align_src_input_1 = AE_LA64_PP((ae_int16x4*)tmp_input_1); + align_src_input_2 = AE_LA64_PP((ae_int16x4*)tmp_input_2); + + d_zp = AE_MOVDA16(zero_point); + +#if TFLITE_SINGLE_ROUNDING + left_shift = shift; + (void)right_shift; +#else /* #if TFLITE_SINGLE_ROUNDING */ + left_shift = shift < 0 ? 0 : shift; + right_shift = shift > 0 ? 
0 : -shift; +#endif /* #if TFLITE_SINGLE_ROUNDING */ + +#pragma concurrent + for (i = 0; i < (num_elms >> 2); i++) { + AE_LA16X4_IP(data_a_0, align_src_input_1, tmp_input_1); + AE_LA16X4_IP(data_b_0, align_src_input_2, tmp_input_2); + + AE_MUL16X4(data_ab_0, data_ab_1, data_a_0, data_b_0); + MPY_BY_QUANT_MULT_X2X2_OUT32(data_ab_0, data_ab_1, data_ab_0, data_ab_1, + multiplier, left_shift, right_shift); + data_c_0 = AE_SAT16X4(data_ab_0, data_ab_1); + data_c_0 = AE_SUB16S(data_c_0, d_zp); + AE_MINMAX16(data_c_0, d_min8, d_max8); + + *output++ = AE_MOVAD16_3(data_c_0); + *output++ = AE_MOVAD16_2(data_c_0); + *output++ = AE_MOVAD16_1(data_c_0); + *output++ = AE_MOVAD16_0(data_c_0); + } + + // residue iterations +#pragma concurrent +#pragma loop_count max = 3 + for (int j = 0; j < ((num_elms)&3); j++) { + AE_L16_IP(data_a_0, (ae_int16*)tmp_input_1, 2); + AE_L16_IP(data_b_0, (ae_int16*)tmp_input_2, 2); + + AE_MUL16X4(data_ab_0, data_ab_1, data_a_0, data_b_0); + MPY_BY_QUANT_MULT_X2_OUT32(data_ab_0, data_ab_0, multiplier, left_shift, + right_shift); + data_c_0 = AE_SAT16X4(data_ab_0, data_ab_0); + data_c_0 = AE_SUB16S(data_c_0, d_zp); + AE_MINMAX16(data_c_0, d_min8, d_max8); + + *output++ = AE_MOVAD16_0(data_c_0); + } +} #endif // defined(HIFI5) -} // namespace lstm_eval -} // namespace micro -} // namespace ops } // namespace tflite + +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) diff --git a/tensorflow/lite/micro/kernels/xtensa/lstm_shared.h b/tensorflow/lite/micro/kernels/xtensa/lstm_shared.h deleted file mode 100644 index 4bcff1aa386..00000000000 --- a/tensorflow/lite/micro/kernels/xtensa/lstm_shared.h +++ /dev/null @@ -1,78 +0,0 @@ -/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ -#ifndef TENSORFLOW_LITE_KERNELS_LSTM_SHARED_H_ -#define TENSORFLOW_LITE_KERNELS_LSTM_SHARED_H_ - -namespace tflite { -namespace ops { -namespace micro { -namespace lstm { -// For full inputs kernel (24-inputs). -// Please note the 20-input full kernel is deprecated and only kept -// here for backward compatibility. -namespace full { - -// Input Tensors of size {n_batch, n_input} -constexpr int kInputTensor = 0; - -// Input weight tensors of size: {n_cell, n_input} -constexpr int kInputToInputWeightsTensor = 1; // Optional -constexpr int kInputToForgetWeightsTensor = 2; -constexpr int kInputToCellWeightsTensor = 3; -constexpr int kInputToOutputWeightsTensor = 4; - -// Recurrent weight tensors of size {n_cell, n_output} -constexpr int kRecurrentToInputWeightsTensor = 5; // Optional -constexpr int kRecurrentToForgetWeightsTensor = 6; -constexpr int kRecurrentToCellWeightsTensor = 7; -constexpr int kRecurrentToOutputWeightsTensor = 8; - -// Peephole weights tensors of size {n_cell}, representing a diagonal matrix. 
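For orientation in the HiFi LSTM kernels above: calc_cell_state_with_cifg updates the cell state as cell_state = clip(sat(cell_state * forget_gate >> shift1) + sat(cell_gate * (1 - forget_gate) >> shift2)), the CIFG coupling standing in for a separate input gate, and xa_nn_elm_mul_16x16_asym8s is an element-wise 16x16 multiply requantized to int8. A minimal scalar sketch of the latter follows; the helper name is hypothetical, the rounding is simplified, and the intrinsics above additionally saturate at each 16-bit step.

#include <algorithm>
#include <cstdint>

// Sketch (hypothetical helper): per-element view of the 16x16 -> asym8s
// multiply; left/right shifts follow the double-rounding split shown above.
inline int8_t MulRequantAsym8s(int16_t a, int16_t b, int32_t multiplier,
                               int left_shift, int right_shift,
                               int32_t zero_point) {
  int64_t acc = (static_cast<int64_t>(a) * b) << left_shift;
  acc = std::clamp<int64_t>(acc, INT32_MIN, INT32_MAX);  // stay in 32 bits
  acc = (acc * multiplier + (int64_t{1} << 30)) >> 31;   // Q31 scale, rounded
  if (right_shift > 0) {
    acc = (acc + (int64_t{1} << (right_shift - 1))) >> right_shift;
  }
  const int32_t v = static_cast<int32_t>(acc) - zero_point;  // AE_SUB16S step
  return static_cast<int8_t>(std::clamp(v, -128, 127));      // AE_MINMAX16
}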
-constexpr int kCellToInputWeightsTensor = 9; // Optional -constexpr int kCellToForgetWeightsTensor = 10; // Optional -constexpr int kCellToOutputWeightsTensor = 11; // Optional - -// Gates bias tensors of size {n_cell} -constexpr int kInputGateBiasTensor = 12; // Optional -constexpr int kForgetGateBiasTensor = 13; -constexpr int kCellGateBiasTensor = 14; -constexpr int kOutputGateBiasTensor = 15; - -// Projection weight tensor of size {n_output, n_cell} -constexpr int kProjectionWeightsTensor = 16; // Optional -// Projection bias tensor of size {n_output} -constexpr int kProjectionBiasTensor = 17; // Optional - -// These state tensors are defined as variable tensors, and will be modified by -// this op. -constexpr int kOutputStateTensor = 18; -constexpr int kCellStateTensor = 19; - -// Layer norm coefficient tensors of size {n_cell}, representing a diagonal -// matrix. -constexpr int kInputLayerNormCoefficientsTensor = 20; // Optional -constexpr int kForgetLayerNormCoefficientsTensor = 21; // Optional -constexpr int kCellLayerNormCoefficientsTensor = 22; // Optional -constexpr int kOutputLayerNormCoefficientsTensor = 23; // Optional - -// Output tensors. -constexpr int kOutputTensor = 0; -} // namespace full - -} // namespace lstm -} // namespace micro -} // namespace ops -} // namespace tflite -#endif // TENSORFLOW_LITE_KERNELS_LSTM_SHARED_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/pad.cc b/tensorflow/lite/micro/kernels/xtensa/pad.cc index bb00edb6712..d822c289db5 100644 --- a/tensorflow/lite/micro/kernels/xtensa/pad.cc +++ b/tensorflow/lite/micro/kernels/xtensa/pad.cc @@ -215,7 +215,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { constant_values == nullptr ? 0 : *tflite::micro::GetTensorData(constant_values); -#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) /* NNLib currently only supports up to 4D input tensors */ if (tflite::micro::GetTensorShape(input).DimensionsCount() == 4) { const TfLiteEvalTensor* paddings = @@ -233,14 +233,14 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { pad_value); if (err != 0) return kTfLiteError; } else { -#endif // defined(HIFI4) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) reference_ops::Pad(data->params, tflite::micro::GetTensorShape(input), tflite::micro::GetTensorData(input), &pad_value, tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); -#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } -#endif // defined(HIFI4) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } break; case kTfLiteInt32: { int32_t pad_value = diff --git a/tensorflow/lite/micro/kernels/xtensa/quantize.cc b/tensorflow/lite/micro/kernels/xtensa/quantize.cc index e8491080297..06d4fbbff19 100644 --- a/tensorflow/lite/micro/kernels/xtensa/quantize.cc +++ b/tensorflow/lite/micro/kernels/xtensa/quantize.cc @@ -29,7 +29,7 @@ limitations under the License. 
namespace tflite { namespace { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); auto* op_data = static_cast(node->user_data); @@ -75,12 +75,19 @@ TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { case kTfLiteInt8: { int size = ElementCount(*input->dims); - reference_ops::Requantize( - tflite::micro::GetTensorData(input), size, - op_data->requantize_output_multiplier, - op_data->requantize_output_shift, op_data->input_zero_point, - op_data->quantization_params.zero_point, - tflite::micro::GetTensorData(output)); + int32_t zero_point = op_data->quantization_params.zero_point; + const int8_t* input_data_ptr; + int8_t* output_data_ptr; + input_data_ptr = tflite::micro::GetTensorData(input); + output_data_ptr = tflite::micro::GetTensorData(output); + + TF_LITE_ENSURE_EQ( + context, + xa_nn_elm_requantize_asym8s_asym8s( + output_data_ptr, input_data_ptr, op_data->input_zero_point, + zero_point, op_data->requantize_output_shift, + op_data->requantize_output_multiplier, size), + 0); break; } @@ -98,7 +105,6 @@ TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { case kTfLiteInt32: { int size = ElementCount(*input->dims); int32_t zero_point = op_data->quantization_params.zero_point; -#if defined(HIFI5) const int8_t* input_data_ptr; int32_t* output_data_ptr; input_data_ptr = tflite::micro::GetTensorData(input); @@ -111,13 +117,6 @@ TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { zero_point, op_data->requantize_output_shift, op_data->requantize_output_multiplier, size), 0); -#else - reference_ops::Requantize( - tflite::micro::GetTensorData(input), size, - op_data->requantize_output_multiplier, - op_data->requantize_output_shift, op_data->input_zero_point, - zero_point, tflite::micro::GetTensorData(output)); -#endif // defined(HIFI5) break; } @@ -149,18 +148,20 @@ TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { case kTfLiteInt16: { int size = ElementCount(*input->dims); - reference_ops::Requantize( - tflite::micro::GetTensorData(input), size, - op_data->requantize_output_multiplier, - op_data->requantize_output_shift, op_data->input_zero_point, - op_data->quantization_params.zero_point, - tflite::micro::GetTensorData(output)); + TF_LITE_ENSURE_EQ(context, + xa_nn_elm_requantize_asym16s_asym16s( + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorData(input), + op_data->input_zero_point, + op_data->quantization_params.zero_point, + op_data->requantize_output_shift, + op_data->requantize_output_multiplier, size), + 0); break; } case kTfLiteInt32: { int size = ElementCount(*input->dims); -#if defined(HIFI5) TF_LITE_ENSURE_EQ(context, xa_nn_elm_requantize_asym16s_asym32s( tflite::micro::GetTensorData(output), @@ -170,14 +171,6 @@ TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { op_data->requantize_output_shift, op_data->requantize_output_multiplier, size), 0); -#else - int32_t zero_point = op_data->quantization_params.zero_point; - reference_ops::Requantize( - tflite::micro::GetTensorData(input), size, - op_data->requantize_output_multiplier, - op_data->requantize_output_shift, op_data->input_zero_point, - zero_point, tflite::micro::GetTensorData(output)); -#endif // defined(HIFI5) break; } @@ -228,22 +221,56 @@ TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { case kTfLiteFloat32: { switch (output->type) { case kTfLiteInt8: { 
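The requantize hunks above route all HiFi targets to NNLib's xa_nn_elm_requantize_* kernels in place of reference_ops::Requantize. Per element, requantization re-centers on the input zero point, rescales by a Q31 multiplier plus shift, then re-centers on the output zero point; a scalar sketch with a hypothetical helper name and simplified rounding:

#include <algorithm>
#include <cstdint>

// Sketch (hypothetical helper): per-element requantize semantics; the NNLib
// routines above are the optimized, saturating form of this computation.
inline int8_t RequantizeAsym8s(int8_t in, int32_t in_zp, int32_t out_zp,
                               int32_t multiplier, int shift) {
  int64_t acc = static_cast<int64_t>(in) - in_zp;       // re-center on zero
  if (shift > 0) acc <<= shift;                         // positive = left
  acc = (acc * multiplier + (int64_t{1} << 30)) >> 31;  // Q31 rescale
  if (shift < 0) {                                      // negative = right
    acc = (acc + (int64_t{1} << (-shift - 1))) >> -shift;
  }
  acc += out_zp;                                        // output zero point
  return static_cast<int8_t>(std::clamp<int64_t>(acc, -128, 127));
}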
+#if HIFI_VFPU + int size = ElementCount(*input->dims); + int32_t zero_point = op_data->quantization_params.zero_point; + const float* input_data_ptr; + int8_t* output_data_ptr; + input_data_ptr = tflite::micro::GetTensorData(input); + output_data_ptr = tflite::micro::GetTensorData(output); + + TF_LITE_ENSURE_EQ( + context, + xa_nn_elm_quantize_f32_asym8s( + output_data_ptr, input_data_ptr, + static_cast(op_data->quantization_params.scale), + zero_point, size), + 0); +#else // #if HIFI_VFPU reference_ops::AffineQuantize( op_data->quantization_params, tflite::micro::GetTensorShape(input), tflite::micro::GetTensorData(input), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); +#endif // #if HIFI_VFPU break; } case kTfLiteInt16: { +#if HIFI_VFPU + int size = ElementCount(*input->dims); + int32_t zero_point = op_data->quantization_params.zero_point; + const float* input_data_ptr; + int16_t* output_data_ptr; + input_data_ptr = tflite::micro::GetTensorData(input); + output_data_ptr = tflite::micro::GetTensorData(output); + + TF_LITE_ENSURE_EQ( + context, + xa_nn_elm_quantize_f32_asym16s( + output_data_ptr, input_data_ptr, + static_cast(op_data->quantization_params.scale), + zero_point, size), + 0); +#else // #if HIFI_VFPU reference_ops::AffineQuantize( op_data->quantization_params, tflite::micro::GetTensorShape(input), tflite::micro::GetTensorData(input), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); +#endif // #if HIFI_VFPU break; } @@ -267,7 +294,7 @@ TfLiteStatus EvalXtensa(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) void* Init(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); @@ -301,11 +328,11 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { } TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) return EvalXtensa(context, node); #else return EvalQuantizeReference(context, node); -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } } // namespace diff --git a/tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc b/tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc index c76525e2c64..e539c29f8ae 100644 --- a/tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc +++ b/tensorflow/lite/micro/kernels/xtensa/reduce_vision.cc @@ -72,6 +72,7 @@ inline bool ResolveAxis(const int num_dims, const int* axis, } return true; } + TfLiteStatus ReducePrepareVision(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); TFLITE_DCHECK(node->builtin_data != nullptr); @@ -83,14 +84,14 @@ TfLiteStatus ReducePrepareVision(TfLiteContext* context, TfLiteNode* node) { TfLiteTensor* output = micro_context->AllocateTempOutputTensor(node, 0); TfLiteTensor* axis = micro_context->AllocateTempInputTensor(node, 1); - uint32_t inputDims[4] = {1, 1, 1, 1}; - uint32_t outputDims[4] = {1, 1, 1, 1}; - uint32_t shouldReduceR[4] = {0, 0, 0, 0}; + uint32_t input_dims[4] = {1, 1, 1, 1}; + uint32_t output_dims[4] = {1, 1, 1, 1}; + uint32_t should_reduce_r[4] = {0, 0, 0, 0}; int32_t resolved_axis[4] = {0, 0, 0, 0}; - OperandDims4D(inputDims, input); - OperandDims4D(outputDims, output); + OperandDims4D(input_dims, input); + OperandDims4D(output_dims, output); - uint32_t 
inputRank = NumDimensions(input); + const int input_rank = NumDimensions(input); // Interpret an axis tensor with null dimensions as a scalar int num_axis = static_cast<int>(ElementCount(*axis->dims)); // Resolve axis. @@ -99,16 +100,22 @@ TfLiteStatus ReducePrepareVision(TfLiteContext* context, TfLiteNode* node) { &num_resolved_axis)) { return kTfLiteError; } - std::vector<bool> shouldReduce(inputRank); - for (int32_t i = 0; i < num_axis; ++i) { - int32_t axisD = resolved_axis[i]; - shouldReduce[axisD] = true; + // ResolveAxis should eliminate duplicates and negative axes, so the number + // of axes should be no greater than the input rank. + TFLITE_DCHECK(num_resolved_axis <= input_rank); + + bool should_reduce[4] = {false, false, false, false}; + + for (int32_t i = 0; i < num_resolved_axis; ++i) { + int32_t axis_d = resolved_axis[i]; + should_reduce[axis_d] = true; } // reverse axes and align it to dimension 0 as OperandDims4D - for (uint32_t axisI = 0; axisI < inputRank; ++axisI) { - shouldReduceR[inputRank - 1 - axisI] = (uint32_t)shouldReduce[axisI]; + for (int axis_i = 0; axis_i < input_rank; ++axis_i) { + should_reduce_r[input_rank - 1 - axis_i] = + static_cast<uint32_t>(should_reduce[axis_i]); } uint32_t context_size = 0; @@ -123,8 +130,8 @@ TfLiteStatus ReducePrepareVision(TfLiteContext* context, TfLiteNode* node) { data->context_size = context_size; } - status = xiReduceSetContext(data->p_context, data->context_size, inputDims, - outputDims, shouldReduceR); + status = xiReduceSetContext(data->p_context, data->context_size, input_dims, + output_dims, should_reduce_r); if (status) { return kTfLiteError; diff --git a/tensorflow/lite/micro/kernels/xtensa/reshape.cc b/tensorflow/lite/micro/kernels/xtensa/reshape.cc index 21968d654b2..6ce6f5c3836 100644 --- a/tensorflow/lite/micro/kernels/xtensa/reshape.cc +++ b/tensorflow/lite/micro/kernels/xtensa/reshape.cc @@ -29,9 +29,7 @@ limitations under the License. #include "tensorflow/lite/micro/micro_utils.h" namespace tflite { -namespace ops { -namespace micro { -namespace reshape { +namespace { #if defined(VISION_P6) void* Init(TfLiteContext* context, const char* buffer, size_t length) { @@ -91,17 +89,14 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; } -} // namespace reshape +} // namespace TFLMRegistration Register_RESHAPE() { #if defined(VISION_P6) - return tflite::micro::RegisterOp(reshape::Init, reshape::Prepare, - reshape::Eval); + return tflite::micro::RegisterOp(Init, Prepare, Eval); #else - return tflite::micro::RegisterOp(nullptr, reshape::Prepare, reshape::Eval); + return tflite::micro::RegisterOp(nullptr, Prepare, Eval); #endif } -} // namespace micro -} // namespace ops } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc b/tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc index a43ca17adcf..3e855a4a82f 100644 --- a/tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc +++ b/tensorflow/lite/micro/kernels/xtensa/reshape_vision.cc @@ -1,4 +1,4 @@ -/* Copyright 2022 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -24,6 +24,7 @@ limitations under the License.
#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" #include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/reshape.h" #include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" #include "tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h" diff --git a/tensorflow/lite/micro/kernels/xtensa/softmax.cc b/tensorflow/lite/micro/kernels/xtensa/softmax.cc index 76c380faa70..c248fc5a68b 100644 --- a/tensorflow/lite/micro/kernels/xtensa/softmax.cc +++ b/tensorflow/lite/micro/kernels/xtensa/softmax.cc @@ -31,7 +31,7 @@ limitations under the License. namespace tflite { namespace { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus EvalHifiInt8(const XtensaSoftmaxOpData* op_data, const TfLiteEvalTensor* input, TfLiteEvalTensor* output, TfLiteContext* context) { @@ -56,7 +56,7 @@ TfLiteStatus EvalHifiInt8(const XtensaSoftmaxOpData* op_data, } return kTfLiteOk; } -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, 0); @@ -68,7 +68,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) XtensaSoftmaxOpData op_data = *static_cast(node->user_data); SoftmaxParams params = op_data.params; @@ -77,7 +77,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { #endif if (input->type == kTfLiteInt8 && output->type == kTfLiteInt8) { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) return EvalHifiInt8(static_cast(node->user_data), input, output, context); #elif defined(VISION_P6) @@ -91,7 +91,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); return kTfLiteOk; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } if (input->type == kTfLiteInt16 && output->type == kTfLiteInt16) { diff --git a/tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc b/tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc index b23a9f7dda9..d37a2f54f90 100644 --- a/tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc +++ b/tensorflow/lite/micro/kernels/xtensa/softmax_int8_int16.cc @@ -30,7 +30,7 @@ limitations under the License. 
namespace tflite { namespace { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus PrepareHifi(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_OK(context, SoftmaxPrepare(context, node)); @@ -86,13 +86,13 @@ TfLiteStatus EvalHifi(const XtensaSoftmaxOpData* op_data, } return kTfLiteOk; } -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } // namespace void* XtensaInitSoftmax(TfLiteContext* context, const char* buffer, size_t length) { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); return context->AllocatePersistentBuffer(context, sizeof(XtensaSoftmaxOpData)); @@ -105,11 +105,11 @@ void* XtensaInitSoftmax(TfLiteContext* context, const char* buffer, sizeof(XtensaSoftmaxOpData)); #else return SoftmaxInit(context, buffer, length); -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } TfLiteStatus XtensaPrepareSoftmax(TfLiteContext* context, TfLiteNode* node) { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) return PrepareHifi(context, node); #else TF_LITE_ENSURE_OK(context, SoftmaxPrepare(context, node)); @@ -117,7 +117,7 @@ TfLiteStatus XtensaPrepareSoftmax(TfLiteContext* context, TfLiteNode* node) { TF_LITE_ENSURE_OK(context, SoftmaxPrepareVision(context, node)); #endif return kTfLiteOk; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } TfLiteStatus XtensaEvalSoftmaxInt8Int16(TfLiteContext* context, @@ -127,7 +127,7 @@ TfLiteStatus XtensaEvalSoftmaxInt8Int16(TfLiteContext* context, TFLITE_DCHECK(node->user_data != nullptr); if (input->type == kTfLiteInt8 && output->type == kTfLiteInt16) { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) return EvalHifi(static_cast(node->user_data), input, output, context); #else @@ -138,7 +138,7 @@ TfLiteStatus XtensaEvalSoftmaxInt8Int16(TfLiteContext* context, tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); return kTfLiteOk; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } else { MicroPrintf("Type %s (%d) not supported.", TfLiteTypeGetName(input->type), input->type); diff --git a/tensorflow/lite/micro/kernels/xtensa/strided_slice.cc b/tensorflow/lite/micro/kernels/xtensa/strided_slice.cc index 0440cfcb765..8ebf7247a23 100644 --- a/tensorflow/lite/micro/kernels/xtensa/strided_slice.cc +++ b/tensorflow/lite/micro/kernels/xtensa/strided_slice.cc @@ -23,129 +23,14 @@ limitations under the License. 
#include "tensorflow/lite/kernels/kernel_util.h" #include "tensorflow/lite/kernels/op_macros.h" #include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/strided_slice.h" #include "tensorflow/lite/micro/kernels/xtensa/xtensa.h" #include "tensorflow/lite/micro/micro_log.h" namespace tflite { namespace { -constexpr int kInputTensor = 0; -constexpr int kBeginTensor = 1; -constexpr int kEndTensor = 2; -constexpr int kStridesTensor = 3; -constexpr int kOutputTensor = 0; - -struct StridedSliceContext { - StridedSliceContext(TfLiteContext* context, TfLiteNode* node) { - params = reinterpret_cast(node->builtin_data); - micro_context = GetMicroContext(context); - input = micro_context->AllocateTempInputTensor(node, kInputTensor); - begin = micro_context->AllocateTempInputTensor(node, kBeginTensor); - end = micro_context->AllocateTempInputTensor(node, kEndTensor); - strides = micro_context->AllocateTempInputTensor(node, kStridesTensor); - output = micro_context->AllocateTempOutputTensor(node, kOutputTensor); - dims = NumDimensions(input); - } - ~StridedSliceContext() { - micro_context->DeallocateTempTfLiteTensor(input); - micro_context->DeallocateTempTfLiteTensor(begin); - micro_context->DeallocateTempTfLiteTensor(end); - micro_context->DeallocateTempTfLiteTensor(strides); - micro_context->DeallocateTempTfLiteTensor(output); - } - const TfLiteStridedSliceParams* params; - MicroContext* micro_context; - TfLiteTensor* input; - TfLiteTensor* begin; - TfLiteTensor* end; - TfLiteTensor* strides; - TfLiteTensor* output; - int dims; -}; - -// This Op only supports 1-4D cases and since we use the reference 4D -// implementation, the 1-3D tensors are mapped to 4D. -const int kMaxDim = 4; - -tflite::StridedSliceParams BuildStridedSliceParams( - StridedSliceContext* op_context) { - tflite::StridedSliceParams op_params; - op_params.start_indices_count = op_context->dims; - op_params.stop_indices_count = op_context->dims; - op_params.strides_count = op_context->dims; - - for (int i = 0; i < op_context->dims; ++i) { - op_params.start_indices[i] = GetTensorData(op_context->begin)[i]; - op_params.stop_indices[i] = GetTensorData(op_context->end)[i]; - op_params.strides[i] = GetTensorData(op_context->strides)[i]; - } - - op_params.begin_mask = op_context->params->begin_mask; - op_params.ellipsis_mask = 0; - op_params.end_mask = op_context->params->end_mask; - op_params.new_axis_mask = 0; - op_params.shrink_axis_mask = op_context->params->shrink_axis_mask; - return op_params; -} - -// Processes the indexing tensors (begin, end and strides) to resize the -// output tensor. This function is callable from both Prepare() and Eval() as -// long as the caller ensures the indexing tensors are present. 
-TfLiteStatus CheckOutputSize(TfLiteContext* context, - StridedSliceContext* op_context) { - using ::tflite::strided_slice::StartForAxis; - using ::tflite::strided_slice::StopForAxis; - TfLiteIntArray* output_shape = op_context->output->dims; - int shape_size = 0; - auto op_params = BuildStridedSliceParams(op_context); - auto input_shape = GetTensorShape(op_context->input); - for (int idx = 0; idx < op_context->dims; ++idx) { - int32_t stride = GetTensorData(op_context->strides)[idx]; - TF_LITE_ENSURE_MSG(context, stride != 0, "stride value has to be non-zero"); - int32_t begin = StartForAxis(op_params, input_shape, idx); - int32_t end = StopForAxis(op_params, input_shape, idx, begin); - - // When shrinking an axis, the end position does not matter (and can be - // incorrect when negative indexing is used, see Issue #19260). Always use - // begin + 1 to generate a length 1 slice, since begin has - // already been adjusted for negative indices by StartForAxis. - const bool shrink_axis = op_context->params->shrink_axis_mask & (1 << idx); - if (shrink_axis) { - end = begin + 1; - } - - // This is valid for both positive and negative strides - int32_t dim_shape = std::ceil((end - begin) / static_cast(stride)); - dim_shape = dim_shape < 0 ? 0 : dim_shape; - if (!shrink_axis) { - TF_LITE_ENSURE_EQ(context, output_shape->data[shape_size], dim_shape); - shape_size++; - } - } - TF_LITE_ENSURE_EQ(context, output_shape->size, shape_size); - return kTfLiteOk; -} - -void* Init(TfLiteContext* context, const char* buffer, size_t length) { - TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); - return context->AllocatePersistentBuffer(context, sizeof(StridedSliceParams)); -} - -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { - TFLITE_DCHECK(node->user_data != nullptr); - StridedSliceParams* op_params = - static_cast(node->user_data); - TF_LITE_ENSURE_EQ(context, NumInputs(node), 4); - TF_LITE_ENSURE_EQ(context, NumOutputs(node), 1); - StridedSliceContext op_context(context, node); - TF_LITE_ENSURE_MSG(context, op_context.dims <= kMaxDim, - "input dim should not exceed 4"); - auto params = BuildStridedSliceParams(&op_context); - memcpy(op_params, ¶ms, sizeof(StridedSliceParams)); - return CheckOutputSize(context, &op_context); -} - -#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) void StridedSlice_int16_hifi4opt(const tflite::StridedSliceParams& op_params, const RuntimeShape& unextended_input_shape, const int16_t* input_data, @@ -192,7 +77,7 @@ void StridedSlice_int16_hifi4opt(const tflite::StridedSliceParams& op_params, input_shape.Dims(1), input_shape.Dims(2), input_shape.Dims(3), input_shape.Dims(4)); } -#endif // defined(HIFI4) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { TFLITE_DCHECK(node->user_data != nullptr); @@ -200,9 +85,9 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { *(static_cast(node->user_data)); const TfLiteEvalTensor* input = - tflite::micro::GetEvalInput(context, node, kInputTensor); + tflite::micro::GetEvalInput(context, node, kStridedSliceInputTensor); TfLiteEvalTensor* output = - tflite::micro::GetEvalOutput(context, node, kOutputTensor); + tflite::micro::GetEvalOutput(context, node, kStridedSliceOutputTensor); switch (output->type) { case kTfLiteFloat32: reference_ops::StridedSlice(op_params, @@ -219,7 +104,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetTensorData(output)); break; case kTfLiteInt16: 
-#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) StridedSlice_int16_hifi4opt( op_params, tflite::micro::GetTensorShape(input), tflite::micro::GetTensorData<int16_t>(input), @@ -231,7 +116,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetTensorData<int16_t>(input), tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData<int16_t>(output)); -#endif // defined(HIFI4) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) break; case kTfLiteInt32: reference_ops::StridedSlice( @@ -257,7 +142,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_STRIDED_SLICE() { - return tflite::micro::RegisterOp(Init, Prepare, Eval); + return tflite::micro::RegisterOp(StridedSliceInit, StridedSlicePrepare, Eval); } } // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/sub.cc b/tensorflow/lite/micro/kernels/xtensa/sub.cc index c4f0984c96a..b8308c93eaa 100644 --- a/tensorflow/lite/micro/kernels/xtensa/sub.cc +++ b/tensorflow/lite/micro/kernels/xtensa/sub.cc @@ -83,15 +83,15 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, &op_params); // TODO(b/259724572): vision_p6 and hifi code path is getting very confusing. // Let's separate them into two different files. -#if !(defined(HIFI4)) +#if !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) bool need_broadcast = reference_ops::ProcessBroadcastShapes( tflite::micro::GetTensorShape(input1), tflite::micro::GetTensorShape(input2), &op_params); -#endif // !(defined(HIFI4)) +#endif // !(defined(HIFI3) || defined(HIFI4) || defined(HIFI5)) switch (output->type) { case kTfLiteInt8: { -#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) int err; const RuntimeShape extended_input1_shape = RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(input1)); @@ -105,7 +105,6 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, // TODO(b/259724572): Refactor the following block of code.
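On the TODO above: the block it flags broadcasts over the batch dimension by stepping flattened 5D slabs per output batch, and an input whose batch dimension is 1 keeps a step of 0 so the same slab is re-read; that is also why this hunk drops the apparently unused inp2_off computation. A sketch of the step rule (hypothetical helper):

#include <cstdint>

// Sketch (hypothetical helper): offset step for a possibly broadcast batch
// dimension, mirroring the inp1_off logic in the block below. A step of 0
// makes every output batch read the same input slab.
inline int32_t BatchStep(int32_t batches, int32_t inner_size) {
  return batches > 1 ? inner_size : 0;
}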
int b; int inp1_off = 0; - int inp2_off = 0; int out_off; out_off = output_dims[1] * output_dims[2] * output_dims[3] * output_dims[4]; @@ -113,10 +112,6 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, inp1_off = input1_dims[1] * input1_dims[2] * input1_dims[3] * input1_dims[4]; } - if (input2_dims[0] > 1) { - inp2_off = - input2_dims[1] * input2_dims[2] * input2_dims[3] * input2_dims[4]; - } for (b = 0; b < output_dims[0]; b++) { err = xa_nn_elm_sub_broadcast_4D_asym8sxasym8s_asym8s( @@ -133,7 +128,7 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, TF_LITE_ENSURE(context, err == 0); } -#else // defined(HIFI4) +#else // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) if (need_broadcast) { tflite::reference_ops::BroadcastQuantSubSlow( op_params, tflite::micro::GetTensorShape(input1), @@ -151,11 +146,11 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); } -#endif // defined(HIFI4) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) break; } case kTfLiteInt16: { -#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) int err; const RuntimeShape extended_input1_shape = RuntimeShape::ExtendedShape(5, tflite::micro::GetTensorShape(input1)); @@ -168,7 +163,6 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, const int* output_dims = extended_output_shape.DimsData(); int b; int inp1_off = 0; - int inp2_off = 0; int out_off; out_off = output_dims[1] * output_dims[2] * output_dims[3] * output_dims[4]; @@ -176,10 +170,6 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, inp1_off = input1_dims[1] * input1_dims[2] * input1_dims[3] * input1_dims[4]; } - if (input2_dims[0] > 1) { - inp2_off = - input2_dims[1] * input2_dims[2] * input2_dims[3] * input2_dims[4]; - } for (b = 0; b < output_dims[0]; b++) { err = xa_nn_elm_sub_broadcast_4D_asym16sxasym16s_asym16s( @@ -196,7 +186,7 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, TF_LITE_ENSURE(context, err == 0); } -#else // defined(HIFI4) +#else // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) if (need_broadcast) { tflite::reference_ops::BroadcastQuantSubSlow( op_params, tflite::micro::GetTensorShape(input1), @@ -214,7 +204,7 @@ TfLiteStatus EvalSubQuantized(TfLiteContext* context, TfLiteNode* node, tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output)); } -#endif // defined(HIFI4) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) break; } default: @@ -256,4 +246,4 @@ TFLMRegistration Register_SUB() { return tflite::micro::RegisterOp(SubInit, SubPrepare, SubEval); } -} // namespace tflite \ No newline at end of file +} // namespace tflite diff --git a/tensorflow/lite/micro/kernels/xtensa/svdf.cc b/tensorflow/lite/micro/kernels/xtensa/svdf.cc index c1dac3bf5da..da34e094706 100644 --- a/tensorflow/lite/micro/kernels/xtensa/svdf.cc +++ b/tensorflow/lite/micro/kernels/xtensa/svdf.cc @@ -33,7 +33,7 @@ limitations under the License. 
namespace tflite { namespace { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus EvalIntegerSvdfHifi(TfLiteContext* context, TfLiteNode* node, const TfLiteEvalTensor* input_tensor, @@ -63,7 +63,7 @@ TfLiteStatus EvalIntegerSvdfHifi(TfLiteContext* context, TfLiteNode* node, #if defined(HIFI5) memcpy(state_ptr, state_ptr + 1, num_bytes); #else - xa_nn_memmove_16(state_ptr, state_ptr + 1, num_bytes); + xa_nn_memmove_16(state_ptr, state_ptr + 1, (num_bytes >> 1)); #endif // defined(HIFI5) // Note: no need to clear the latest activation, matmul is not accumulative. @@ -108,7 +108,7 @@ TfLiteStatus EvalIntegerSvdfHifi(TfLiteContext* context, TfLiteNode* node, } return kTfLiteOk; } -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) void* Init(TfLiteContext* context, const char* buffer, size_t length) { TFLITE_DCHECK(context != nullptr); @@ -116,7 +116,7 @@ void* Init(TfLiteContext* context, const char* buffer, size_t length) { } TfLiteStatus PrepareInt8(TfLiteContext* context, TfLiteNode* node) { -#if defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) +#if defined(HIFIMINI) || defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TFLITE_DCHECK(node->builtin_data != nullptr); const auto* params = static_cast(node->builtin_data); @@ -252,11 +252,12 @@ TfLiteStatus PrepareInt8(TfLiteContext* context, TfLiteNode* node) { return kTfLiteOk; #else return PrepareSvdf(context, node); -#endif // defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFIMINI) || defined(HIFI3) || defined(HIFI4) || + // defined(HIFI5) } TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { -#if defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) +#if defined(HIFIMINI) || defined(HIFI3) || defined(HIFI4) || defined(HIFI5) MicroContext* micro_context = GetMicroContext(context); TfLiteTensor* input = @@ -277,7 +278,8 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { return status; #else return PrepareSvdf(context, node); -#endif // defined(HIFIMINI) || defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFIMINI) || defined(HIFI3) || defined(HIFI4) || + // defined(HIFI5) } TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { @@ -306,7 +308,7 @@ TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { return EvalIntegerSvdfHifimini(context, node, input, weights_feature, weights_time, bias, params, activation_state, output, data); -#elif defined(HIFI4) || defined(HIFI5) +#elif defined(HIFI3) || defined(HIFI4) || defined(HIFI5) return EvalIntegerSvdfHifi(context, node, input, weights_feature, weights_time, bias, params, activation_state, output, data); @@ -314,7 +316,7 @@ TfLiteStatus EvalInt8(TfLiteContext* context, TfLiteNode* node) { EvalInt16SvdfReference(context, node, input, weights_feature, weights_time, bias, params, activation_state, output, data); return kTfLiteOk; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { diff --git a/tensorflow/lite/micro/kernels/xtensa/transpose_conv.cc b/tensorflow/lite/micro/kernels/xtensa/transpose_conv.cc index 826e1685404..44a9f86049c 100644 --- a/tensorflow/lite/micro/kernels/xtensa/transpose_conv.cc +++ b/tensorflow/lite/micro/kernels/xtensa/transpose_conv.cc @@ -183,19 +183,57 @@ TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { // Quantized kernels use an int32 scratch 
buffer. if (input->type == kTfLiteInt8) { TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + const int stride_width = params->stride_width; + const int stride_height = params->stride_height; + + const int input_height = SizeOfDimension(input, 1); + const int input_width = SizeOfDimension(input, 2); + const int input_depth = SizeOfDimension(input, 3); + const int output_height = height; + const int output_width = width; + int32_t scratch_buffer_size = 0; + scratch_buffer_size = xa_nn_transpose_conv_getsize( + input_height, input_width, input_depth, filter_height, filter_width, + stride_width, stride_height, output_height, output_width, num_channels, + PREC_SYM8S, PREC_ASYM8S); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, scratch_buffer_size, + &(data->scratch_buffer_index)) == kTfLiteOk); +#else // #if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TFLITE_DCHECK(context->RequestScratchBufferInArena( context, GetTensorShape(output).FlatSize() * sizeof(int32_t), &(data->scratch_buffer_index)) == kTfLiteOk); +#endif } // Quantized 16x8 kernels use an int64 scratch buffer. if (input->type == kTfLiteInt16) { TFLITE_DCHECK(context->RequestScratchBufferInArena != nullptr); +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + const int stride_width = params->stride_width; + const int stride_height = params->stride_height; + + const int input_height = SizeOfDimension(input, 1); + const int input_width = SizeOfDimension(input, 2); + const int input_depth = SizeOfDimension(input, 3); + const int output_height = height; + const int output_width = width; + int32_t scratch_buffer_size = 0; + scratch_buffer_size = xa_nn_transpose_conv_getsize( + input_height, input_width, input_depth, filter_height, filter_width, + stride_width, stride_height, output_height, output_width, num_channels, + PREC_SYM8S, PREC_SYM16S); + TFLITE_DCHECK(context->RequestScratchBufferInArena( + context, scratch_buffer_size, + &(data->scratch_buffer_index)) == kTfLiteOk); +#else // #if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TFLITE_DCHECK(context->RequestScratchBufferInArena( context, GetTensorShape(output).FlatSize() * sizeof(std::int64_t), &(data->scratch_buffer_index)) == kTfLiteOk); +#endif // #if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } // All per-channel quantized tensors need valid zero point and scale arrays. 
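The Prepare changes above select between two scratch sizing strategies: the HiFi path asks NNLib via xa_nn_transpose_conv_getsize (PREC_SYM8S/PREC_ASYM8S for int8, PREC_SYM16S for 16x8), while the reference path reserves one accumulator per output element. A sketch of the reference-side arithmetic (hypothetical helper name):

#include <cstdint>

// Sketch (hypothetical helper): scratch bytes requested on the reference
// path above; int8 kernels accumulate in int32, 16x8 kernels in int64.
inline int32_t ReferenceTransposeConvScratchBytes(int32_t output_flat_size,
                                                  bool is_16x8_kernel) {
  const int32_t acc_bytes = is_16x8_kernel ? sizeof(int64_t) : sizeof(int32_t);
  return output_flat_size * acc_bytes;  // one accumulator per output element
}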
@@ -282,6 +320,63 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { case kTfLiteInt8: { int32_t* scratch_buffer = static_cast( context->GetScratchBuffer(context, data.scratch_buffer_index)); +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) + if (bias->type == kTfLiteInt32) { + const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); + const RuntimeShape& filter_shape = + tflite::micro::GetTensorShape(filter); + const RuntimeShape& output_shape = + tflite::micro::GetTensorShape(output); + const int stride_width = data.params.stride_width; + const int stride_height = data.params.stride_height; + const int pad_width = data.params.padding_values.width; + const int pad_height = data.params.padding_values.height; + + const int batches = MatchingDim(input_shape, 0, output_shape, 0); + const int input_depth = MatchingDim(input_shape, 3, filter_shape, 3); + const int output_depth = MatchingDim(filter_shape, 0, output_shape, 3); + + const int input_height = input_shape.Dims(1); + const int input_width = input_shape.Dims(2); + const int filter_height = filter_shape.Dims(1); + const int filter_width = filter_shape.Dims(2); + const int output_height = output_shape.Dims(1); + const int output_width = output_shape.Dims(2); + const int8_t* input_data = tflite::micro::GetTensorData(input); + const int8_t* filter_data = + tflite::micro::GetTensorData(filter); + const int32_t* bias_data = tflite::micro::GetTensorData(bias); + int8_t* output_data = tflite::micro::GetTensorData(output); + + const int num_elements = output_shape.FlatSize(); + + for (int b = 0; b < batches; b++) { + xa_nn_transpose_conv_sym8sxasym8s( + &output_data[b * output_height * output_width * output_depth], + const_cast( + &input_data[b * input_height * input_width * input_depth]), + const_cast(filter_data), const_cast(bias_data), + stride_width, stride_height, pad_width, pad_height, input_depth, + output_depth, input_height, input_width, filter_height, + filter_width, output_height, output_width, num_elements / batches, + data.params.input_offset, data.params.output_offset, + data.per_channel_output_shift, data.per_channel_output_multiplier, + scratch_buffer); + } + } else { + reference_integer_ops::TransposeConv( + data.params, data.per_channel_output_multiplier, + data.per_channel_output_shift, tflite::micro::GetTensorShape(input), + tflite::micro::GetTensorData(input), + tflite::micro::GetTensorShape(filter), + tflite::micro::GetTensorData(filter), + tflite::micro::GetTensorShape(bias), + tflite::micro::GetTensorData(bias), + tflite::micro::GetTensorShape(output), + tflite::micro::GetTensorData(output), + tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); + } +#else reference_integer_ops::TransposeConv( data.params, data.per_channel_output_multiplier, data.per_channel_output_shift, tflite::micro::GetTensorShape(input), @@ -293,6 +388,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output), tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); +#endif break; } case kTfLiteInt16: { @@ -319,7 +415,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetTensorData(output), tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); } else { -#if defined(HIFI4) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) const RuntimeShape& input_shape = tflite::micro::GetTensorShape(input); const RuntimeShape& filter_shape = 
tflite::micro::GetTensorShape(filter); @@ -359,9 +455,9 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { output_depth, input_height, input_width, filter_height, filter_width, output_height, output_width, num_elements / batches, data.per_channel_output_shift, data.per_channel_output_multiplier, - &scratch_buffer[b * output_height * output_width * output_depth]); + scratch_buffer); } -#else +#else // #if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) reference_integer_ops::TransposeConv( data.params, data.per_channel_output_multiplier, data.per_channel_output_shift, tflite::micro::GetTensorShape(input), @@ -373,7 +469,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { tflite::micro::GetTensorShape(output), tflite::micro::GetTensorData(output), tflite::micro::GetTensorShape(nullptr), nullptr, scratch_buffer); -#endif // defined(HIFI4) +#endif // #if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) } break; } diff --git a/tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc b/tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc index cbce1e1c75c..0f6a02eaf09 100644 --- a/tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc +++ b/tensorflow/lite/micro/kernels/xtensa/unidirectional_sequence_lstm.cc @@ -1,4 +1,4 @@ -/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -13,1109 +13,156 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -#include -#include +// Integer version of unidirectional sequence lstm. Only the standard LSTM +// (defined in the keras LSTM layer, e.g., no peephole etc.) is supported here. +// Currently used by the 16 bits activation case only -#include +#include +#include -#include "tensorflow/lite/c/builtin_op_data.h" -#include "tensorflow/lite/c/common.h" -#include "tensorflow/lite/kernels/internal/compatibility.h" #include "tensorflow/lite/kernels/internal/quantization_util.h" -#include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/fully_connected.h" +#include "tensorflow/lite/micro/kernels/kernel_util.h" +#include "tensorflow/lite/micro/kernels/lstm_shared.h" #include "tensorflow/lite/micro/kernels/xtensa/lstm_eval.h" -#include "tensorflow/lite/micro/kernels/xtensa/lstm_shared.h" -#include "tensorflow/lite/micro/micro_log.h" -// TODO(b/230666079): Flatten the namespace to match the builtin kernel -// implementation namespace tflite { -namespace ops { -namespace micro { -// namespace unidirectional_sequence_lstm { -namespace { - -struct OpData { - // If the lstm is layer norm. - bool use_layer_norm; - // The scratch tensor index. - int scratch_tensor_index; - bool compute_row_sums = false; - - lstm_eval::IntegerLstmParameter integer_lstm_param; -}; - -TfLiteStatus PopulateQuantizedLstmParams8x8_16( - TfLiteContext* context, TfLiteNode* node, - lstm_eval::IntegerLstmParameter* integer_lstm_param) { - // Calculate quantized clip for projection and cell. 
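The removed PopulateQuantizedLstmParams8x8_16 below begins by converting the float cell/projection clip values into clamp bounds in the quantized state domain, where 0 means no clipping. A standalone sketch of the cell-clip case (hypothetical helper name):

#include <algorithm>
#include <cstdint>

// Sketch (hypothetical helper): quantize the float cell clip into an int16
// clamp bound, as in the removed code below; 0 disables clipping.
inline int16_t QuantizedCellClip(float cell_clip, float cell_state_scale) {
  if (cell_clip <= 0.0f) return 0;
  const float q = cell_clip / cell_state_scale;
  return static_cast<int16_t>(std::min(std::max(q, -32768.0f), 32767.0f));
}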
- const auto* params = - static_cast(node->builtin_data); - const float cell_clip = static_cast(params->cell_clip); - const float proj_clip = static_cast(params->proj_clip); - - const TfLiteTensor* cell_state = - GetVariableInput(context, node, micro::lstm::full::kCellStateTensor); - TF_LITE_ENSURE(context, cell_state != nullptr); - TfLiteTensor* output_tensor; - TF_LITE_ENSURE_OK( - context, GetOutputSafe(context, node, micro::lstm::full::kOutputTensor, - &output_tensor)); - - auto* cell_state_params = - static_cast(cell_state->quantization.params); - auto* proj_params = static_cast( - output_tensor->quantization.params); - if (cell_clip > static_cast(0.0)) { - integer_lstm_param->quantized_cell_clip = static_cast(std::min( - std::max(cell_clip / cell_state_params->scale->data[0], -32768.0f), - 32767.0f)); - } else { - integer_lstm_param->quantized_cell_clip = 0; - } - if (proj_clip > static_cast(0.0)) { - integer_lstm_param->quantized_proj_clip = static_cast(std::min( - std::max(proj_clip / proj_params->scale->data[0], -128.0f), 127.0f)); - } else { - integer_lstm_param->quantized_proj_clip = 0; - } - - // Calculate effective scales. - OpData* op_data = static_cast(node->user_data); - const bool use_layer_norm = op_data->use_layer_norm; - - const TfLiteTensor* input; - TF_LITE_ENSURE_OK( - context, - GetInputSafe(context, node, micro::lstm::full::kInputTensor, &input)); - - const TfLiteTensor* input_to_input_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kInputToInputWeightsTensor); - const TfLiteTensor* input_to_forget_weights; - TF_LITE_ENSURE_OK(context, - GetInputSafe(context, node, - micro::lstm::full::kInputToForgetWeightsTensor, - &input_to_forget_weights)); - const TfLiteTensor* input_to_cell_weights; - TF_LITE_ENSURE_OK( - context, - GetInputSafe(context, node, micro::lstm::full::kInputToCellWeightsTensor, - &input_to_cell_weights)); - const TfLiteTensor* input_to_output_weights; - TF_LITE_ENSURE_OK(context, - GetInputSafe(context, node, - micro::lstm::full::kInputToOutputWeightsTensor, - &input_to_output_weights)); - - const TfLiteTensor* recurrent_to_input_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); - const TfLiteTensor* recurrent_to_forget_weights; - TF_LITE_ENSURE_OK( - context, GetInputSafe(context, node, - micro::lstm::full::kRecurrentToForgetWeightsTensor, - &recurrent_to_forget_weights)); - const TfLiteTensor* recurrent_to_cell_weights; - TF_LITE_ENSURE_OK( - context, GetInputSafe(context, node, - micro::lstm::full::kRecurrentToCellWeightsTensor, - &recurrent_to_cell_weights)); - const TfLiteTensor* recurrent_to_output_weights; - TF_LITE_ENSURE_OK( - context, GetInputSafe(context, node, - micro::lstm::full::kRecurrentToOutputWeightsTensor, - &recurrent_to_output_weights)); - - const TfLiteTensor* cell_to_input_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kCellToInputWeightsTensor); - const TfLiteTensor* cell_to_forget_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kCellToForgetWeightsTensor); - const TfLiteTensor* cell_to_output_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kCellToOutputWeightsTensor); - - const TfLiteTensor* input_layer_norm_coefficients = GetOptionalInputTensor( - context, node, micro::lstm::full::kInputLayerNormCoefficientsTensor); - const TfLiteTensor* forget_layer_norm_coefficients = GetOptionalInputTensor( - context, node, micro::lstm::full::kForgetLayerNormCoefficientsTensor); - const 
TfLiteTensor* cell_layer_norm_coefficients = GetOptionalInputTensor( - context, node, micro::lstm::full::kCellLayerNormCoefficientsTensor); - const TfLiteTensor* output_layer_norm_coefficients = GetOptionalInputTensor( - context, node, micro::lstm::full::kOutputLayerNormCoefficientsTensor); - - const TfLiteTensor* projection_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kProjectionWeightsTensor); - - TfLiteTensor* output_state = - GetVariableInput(context, node, micro::lstm::full::kOutputStateTensor); - TF_LITE_ENSURE(context, output_state != nullptr); - - // Since we have already checked that weights are all there or none, we can - // check the existence of only one to get the condition. - const bool use_cifg = (input_to_input_weights == nullptr); - const bool use_peephole = (cell_to_output_weights != nullptr); - const bool use_projection = (projection_weights != nullptr); - - // Get intermediate scales and zero points. - constexpr size_t kIntermediateCount = 5; - float intermediate_scale[kIntermediateCount]; - int32_t intermediate_zp[kIntermediateCount]; - for (int i = 0; i < 4; ++i) { - if (use_layer_norm) { - TfLiteTensor* intermediate = - context->GetTensor(context, node->intermediates->data[i]); - auto* tmp_params = static_cast( - intermediate->quantization.params); - intermediate_scale[i] = tmp_params->scale->data[0]; - intermediate_zp[i] = tmp_params->zero_point->data[0]; - } else { - // Q3.12 for activation functions. - intermediate_scale[i] = std::pow(2, -12); - intermediate_zp[i] = 0; - } - } - // In the absence of projection, hidden becomes otuput and this intermediate - // is ignored. - TfLiteTensor* hidden = - context->GetTensor(context, node->intermediates->data[4]); - auto* hidden_params = - static_cast(hidden->quantization.params); - intermediate_scale[4] = hidden_params->scale->data[0]; - intermediate_zp[4] = hidden_params->zero_point->data[0]; - - // Scales. - const float default_scale = 1.0; - float input_scale = default_scale; - float input_to_input_weight_scale = default_scale; - float recurrent_to_input_weight_scale = default_scale; - float cell_to_input_weight_scale = default_scale; - float input_to_forget_weight_scale = default_scale; - float recurrent_to_forget_weight_scale = default_scale; - float cell_to_forget_weight_scale = default_scale; - float input_to_cell_weight_scale = default_scale; - float recurrent_to_cell_weight_scale = default_scale; - float input_to_output_weight_scale = default_scale; - float recurrent_to_output_weight_scale = default_scale; - float cell_to_output_weight_scale = default_scale; - float projection_weight_scale = default_scale; - float layer_norm_input_scale = default_scale; - float layer_norm_forget_scale = default_scale; - float layer_norm_cell_scale = default_scale; - float layer_norm_output_scale = default_scale; - float output_state_scale = default_scale; - int cell_scale = 1; - - // Effective scales. 
- float effective_input_to_input_scale = default_scale; - float effective_recurrent_to_input_scale = default_scale; - float effective_cell_to_input_scale = default_scale; - float effective_input_to_forget_scale = default_scale; - float effective_recurrent_to_forget_scale = default_scale; - float effective_cell_to_forget_scale = default_scale; - float effective_input_to_cell_scale = default_scale; - float effective_recurrent_to_cell_scale = default_scale; - float effective_input_to_output_scale = default_scale; - float effective_recurrent_to_output_scale = default_scale; - float effective_cell_to_output_scale = default_scale; - float effective_proj_scale = default_scale; - float effective_hidden_scale = default_scale; - - // Populate scales. - if (!use_cifg) { - input_to_input_weight_scale = input_to_input_weights->params.scale; - recurrent_to_input_weight_scale = recurrent_to_input_weights->params.scale; - } - - if (use_peephole) { - if (!use_cifg) { - cell_to_input_weight_scale = cell_to_input_weights->params.scale; - } - cell_to_forget_weight_scale = cell_to_forget_weights->params.scale; - cell_to_output_weight_scale = cell_to_output_weights->params.scale; - } - - if (use_layer_norm) { - if (!use_cifg) { - layer_norm_input_scale = input_layer_norm_coefficients->params.scale; - } - layer_norm_forget_scale = forget_layer_norm_coefficients->params.scale; - layer_norm_cell_scale = cell_layer_norm_coefficients->params.scale; - layer_norm_output_scale = output_layer_norm_coefficients->params.scale; - } - - if (use_projection) { - projection_weight_scale = projection_weights->params.scale; - } - output_state_scale = output_state->params.scale; - - input_to_forget_weight_scale = input_to_forget_weights->params.scale; - input_to_cell_weight_scale = input_to_cell_weights->params.scale; - input_to_output_weight_scale = input_to_output_weights->params.scale; - recurrent_to_forget_weight_scale = recurrent_to_forget_weights->params.scale; - recurrent_to_cell_weight_scale = recurrent_to_cell_weights->params.scale; - recurrent_to_output_weight_scale = recurrent_to_output_weights->params.scale; - - // Check cell state (already used above) - TF_LITE_ENSURE(context, CheckedLog2(cell_state->params.scale, &cell_scale)); - // TF_LITE_ENSURE(context, cell_scale <= -9); - integer_lstm_param->cell_scale = cell_scale; - input_scale = input->params.scale; - - // Calculate effective scales. 
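The scale plumbing removed below follows a single pattern, now provided by the shared LSTM implementation: fold two tensor scales into one effective scale, then decompose it with QuantizeMultiplier into the Q31 multiplier/shift pairs (*_scale_a / *_scale_b). A condensed sketch; the helper name is illustrative, while tflite::QuantizeMultiplier is the real TFLM utility from quantization_util.h:

#include <cstdint>
#include "tensorflow/lite/kernels/internal/quantization_util.h"

// Sketch (illustrative helper): the effective-scale pattern used throughout
// the removed PopulateQuantizedLstmParams8x8_16.
inline void DecomposeEffectiveScale(float weight_scale, float act_scale,
                                    float intermediate_scale,
                                    int32_t* scale_a, int* scale_b) {
  const double effective = static_cast<double>(weight_scale) * act_scale /
                           static_cast<double>(intermediate_scale);
  tflite::QuantizeMultiplier(effective, scale_a, scale_b);  // Q31 mult + shift
}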
- if (!use_cifg) { - effective_input_to_input_scale = - input_to_input_weight_scale * input_scale / intermediate_scale[0]; - effective_recurrent_to_input_scale = recurrent_to_input_weight_scale * - output_state_scale / - intermediate_scale[0]; - } - effective_input_to_forget_scale = - input_to_forget_weight_scale * input_scale / intermediate_scale[1]; - effective_recurrent_to_forget_scale = recurrent_to_forget_weight_scale * - output_state_scale / - intermediate_scale[1]; - - effective_input_to_cell_scale = - input_to_cell_weight_scale * input_scale / intermediate_scale[2]; - effective_recurrent_to_cell_scale = recurrent_to_cell_weight_scale * - output_state_scale / - intermediate_scale[2]; - - effective_input_to_output_scale = - input_to_output_weight_scale * input_scale / intermediate_scale[3]; - effective_recurrent_to_output_scale = recurrent_to_output_weight_scale * - output_state_scale / - intermediate_scale[3]; - - effective_hidden_scale = std::pow((float)2, (float)-15) / - intermediate_scale[4] * - std::pow((float)2, (float)-15); - - effective_proj_scale = - projection_weight_scale * intermediate_scale[4] / output_state_scale; - - if (use_peephole) { - if (!use_cifg) { - effective_cell_to_input_scale = - std::pow((float)(2), (float)cell_scale) * // NOLINT - (float)(cell_to_input_weight_scale) / intermediate_scale[0]; - } - effective_cell_to_forget_scale = - std::pow((float)2, (float)cell_scale) * // NOLINT - (float)cell_to_forget_weight_scale / intermediate_scale[1]; - effective_cell_to_output_scale = - std::pow((float)2, (float)cell_scale) * // NOLINT - (float)cell_to_output_weight_scale / intermediate_scale[3]; - } - - // Decompose scales. - QuantizeMultiplier(static_cast(effective_input_to_input_scale), - &integer_lstm_param->effective_input_to_input_scale_a, - &integer_lstm_param->effective_input_to_input_scale_b); - QuantizeMultiplier(static_cast(effective_recurrent_to_input_scale), - &integer_lstm_param->effective_recurrent_to_input_scale_a, - &integer_lstm_param->effective_recurrent_to_input_scale_b); - QuantizeMultiplier(static_cast(effective_cell_to_input_scale), - &integer_lstm_param->effective_cell_to_input_scale_a, - &integer_lstm_param->effective_cell_to_input_scale_b); - QuantizeMultiplier(static_cast(effective_input_to_forget_scale), - &integer_lstm_param->effective_input_to_forget_scale_a, - &integer_lstm_param->effective_input_to_forget_scale_b); - QuantizeMultiplier( - static_cast(effective_recurrent_to_forget_scale), - &integer_lstm_param->effective_recurrent_to_forget_scale_a, - &integer_lstm_param->effective_recurrent_to_forget_scale_b); - QuantizeMultiplier(static_cast(effective_cell_to_forget_scale), - &integer_lstm_param->effective_cell_to_forget_scale_a, - &integer_lstm_param->effective_cell_to_forget_scale_b); - QuantizeMultiplier(static_cast(effective_input_to_cell_scale), - &integer_lstm_param->effective_input_to_cell_scale_a, - &integer_lstm_param->effective_input_to_cell_scale_b); - QuantizeMultiplier(static_cast(effective_recurrent_to_cell_scale), - &integer_lstm_param->effective_recurrent_to_cell_scale_a, - &integer_lstm_param->effective_recurrent_to_cell_scale_b); - QuantizeMultiplier(static_cast(effective_input_to_output_scale), - &integer_lstm_param->effective_input_to_output_scale_a, - &integer_lstm_param->effective_input_to_output_scale_b); - QuantizeMultiplier( - static_cast(effective_recurrent_to_output_scale), - &integer_lstm_param->effective_recurrent_to_output_scale_a, - &integer_lstm_param->effective_recurrent_to_output_scale_b); - 
QuantizeMultiplier(static_cast(effective_cell_to_output_scale), - &integer_lstm_param->effective_cell_to_output_scale_a, - &integer_lstm_param->effective_cell_to_output_scale_b); - QuantizeMultiplier(static_cast(effective_proj_scale), - &integer_lstm_param->effective_proj_scale_a, - &integer_lstm_param->effective_proj_scale_b); - QuantizeMultiplier(static_cast(effective_hidden_scale), - &integer_lstm_param->effective_hidden_scale_a, - &integer_lstm_param->effective_hidden_scale_b); - QuantizeMultiplier(static_cast(layer_norm_input_scale), - &integer_lstm_param->layer_norm_input_scale_a, - &integer_lstm_param->layer_norm_input_scale_b); - QuantizeMultiplier(static_cast(layer_norm_forget_scale), - &integer_lstm_param->layer_norm_forget_scale_a, - &integer_lstm_param->layer_norm_forget_scale_b); - QuantizeMultiplier(static_cast(layer_norm_cell_scale), - &integer_lstm_param->layer_norm_cell_scale_a, - &integer_lstm_param->layer_norm_cell_scale_b); - QuantizeMultiplier(static_cast(layer_norm_output_scale), - &integer_lstm_param->layer_norm_output_scale_a, - &integer_lstm_param->layer_norm_output_scale_b); - - integer_lstm_param->hidden_zp = intermediate_zp[4]; - - // 10000 is used to make sure the kernel logic does not overflow. - if (!use_cifg) { - integer_lstm_param->input_variance_guard = - std::max(static_cast(1), - static_cast(10000 * layer_norm_input_scale)); - } - integer_lstm_param->forget_variance_guard = - std::max(static_cast(1), - static_cast(10000 * layer_norm_forget_scale)); - integer_lstm_param->cell_variance_guard = - std::max(static_cast(1), - static_cast(10000 * layer_norm_cell_scale)); - integer_lstm_param->output_variance_guard = - std::max(static_cast(1), - static_cast(10000 * layer_norm_output_scale)); - - return kTfLiteOk; -} - -} // namespace - -// Temporary tensors -enum TemporaryTensor { - kScratchBuffer = 0, - kInputQuantized = 1, - kOutputStateQuantized = 2, - kCellStateQuantized = 3, - kInputScalingFactors = 4, - kOutputStateScalingFactors = 5, - kProductScalingFactors = 6, - kRecoveredCellWeights = 7, - kAccumScratch = 8, - kInputZeroPoints = 9, - kOutputStateZeroPoints = 10, - kRowSums = 11, - kNumTemporaryTensors = 12, -}; - -void* Init(TfLiteContext* context, const char* buffer, size_t length) { - OpData* op_data = reinterpret_cast( - context->AllocatePersistentBuffer(context, sizeof(OpData))); - - return op_data; -} - -// Check that input tensor dimensions matches with each other. -TfLiteStatus CheckInputTensorDimensions(TfLiteContext* context, - TfLiteNode* node, int n_input, - int n_output, int n_cell, - bool use_layer_norm, bool is_integer) { - const auto* params = reinterpret_cast(node->builtin_data); - - // Making sure clipping parameters have valid values. 
- // == 0 means no clipping - // > 0 means clipping - TF_LITE_ENSURE(context, params->cell_clip >= 0); - TF_LITE_ENSURE(context, params->proj_clip >= 0); - const TfLiteEvalTensor* input_to_input_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToInputWeightsTensor); - if (input_to_input_weights != nullptr) { - TF_LITE_ENSURE_EQ(context, input_to_input_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, input_to_input_weights->dims->data[0], n_cell); - TF_LITE_ENSURE_EQ(context, input_to_input_weights->dims->data[1], n_input); - } - const TfLiteEvalTensor* input_to_forget_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToForgetWeightsTensor); - - TF_LITE_ENSURE_EQ(context, input_to_forget_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, input_to_forget_weights->dims->data[0], n_cell); - TF_LITE_ENSURE_EQ(context, input_to_forget_weights->dims->data[1], n_input); - const TfLiteEvalTensor* input_to_cell_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToCellWeightsTensor); - - TF_LITE_ENSURE_EQ(context, input_to_cell_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, input_to_cell_weights->dims->data[0], n_cell); - TF_LITE_ENSURE_EQ(context, input_to_cell_weights->dims->data[1], n_input); - const TfLiteEvalTensor* recurrent_to_input_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); - if (recurrent_to_input_weights != nullptr) { - TF_LITE_ENSURE_EQ(context, recurrent_to_input_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, recurrent_to_input_weights->dims->data[0], - n_cell); - TF_LITE_ENSURE_EQ(context, recurrent_to_input_weights->dims->data[1], - n_output); - } - const TfLiteEvalTensor* recurrent_to_forget_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToForgetWeightsTensor); - - TF_LITE_ENSURE_EQ(context, recurrent_to_forget_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, recurrent_to_forget_weights->dims->data[0], - n_cell); - TF_LITE_ENSURE_EQ(context, recurrent_to_forget_weights->dims->data[1], - n_output); - const TfLiteEvalTensor* recurrent_to_cell_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToCellWeightsTensor); - - TF_LITE_ENSURE_EQ(context, recurrent_to_cell_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, recurrent_to_cell_weights->dims->data[0], n_cell); - TF_LITE_ENSURE_EQ(context, recurrent_to_cell_weights->dims->data[1], - n_output); - - // We make sure the input-gate's parameters are either both present (regular - // LSTM) or not at all (CIFG-LSTM). - const bool cifg_weights_all_or_none = - ((input_to_input_weights != nullptr) && - (recurrent_to_input_weights != nullptr)) || - ((input_to_input_weights == nullptr) && - (recurrent_to_input_weights == nullptr)); - TF_LITE_ENSURE(context, cifg_weights_all_or_none == true); - - const TfLiteTensor* cell_to_input_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kCellToInputWeightsTensor); - if (cell_to_input_weights != nullptr) { - TF_LITE_ENSURE_EQ(context, cell_to_input_weights->dims->size, 1); - TF_LITE_ENSURE_EQ(context, cell_to_input_weights->dims->data[0], n_cell); - TF_LITE_ENSURE_TYPES_EQ( - context, cell_to_input_weights->type, - is_integer ? 
kTfLiteInt16 : input_to_forget_weights->type); - } - - const TfLiteTensor* cell_to_forget_weights = GetOptionalInputTensor( - context, node, lstm::full::kCellToForgetWeightsTensor); - if (cell_to_forget_weights != nullptr) { - TF_LITE_ENSURE_EQ(context, cell_to_forget_weights->dims->size, 1); - TF_LITE_ENSURE_EQ(context, cell_to_forget_weights->dims->data[0], n_cell); - TF_LITE_ENSURE_TYPES_EQ( - context, cell_to_forget_weights->type, - is_integer ? kTfLiteInt16 : input_to_forget_weights->type); - } - - const TfLiteTensor* cell_to_output_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kCellToOutputWeightsTensor); - if (cell_to_output_weights != nullptr) { - TF_LITE_ENSURE_EQ(context, cell_to_output_weights->dims->size, 1); - TF_LITE_ENSURE_EQ(context, cell_to_output_weights->dims->data[0], n_cell); - TF_LITE_ENSURE_TYPES_EQ( - context, cell_to_output_weights->type, - is_integer ? kTfLiteInt16 : input_to_forget_weights->type); - } - - // Making sure the peephole weights are there all or none. - const bool use_cifg = (input_to_input_weights == nullptr); - const bool peephole_weights_all_or_none = - ((cell_to_input_weights != nullptr || use_cifg) && - (cell_to_forget_weights != nullptr) && - (cell_to_output_weights != nullptr)) || - ((cell_to_input_weights == nullptr) && - (cell_to_forget_weights == nullptr) && - (cell_to_output_weights == nullptr)); - TF_LITE_ENSURE(context, peephole_weights_all_or_none == true); - const TfLiteEvalTensor* input_gate_bias = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputGateBiasTensor); - - if (use_cifg) { - TF_LITE_ENSURE_EQ(context, input_gate_bias, nullptr); - } else { - TF_LITE_ENSURE_EQ(context, input_gate_bias->dims->size, 1); - TF_LITE_ENSURE_EQ(context, input_gate_bias->dims->data[0], n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, input_gate_bias->type, kTfLiteInt32); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, input_gate_bias->type, kTfLiteFloat32); - } - } - const TfLiteEvalTensor* forget_gate_bias = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kForgetGateBiasTensor); - - TF_LITE_ENSURE_EQ(context, forget_gate_bias->dims->size, 1); - TF_LITE_ENSURE_EQ(context, forget_gate_bias->dims->data[0], n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, forget_gate_bias->type, kTfLiteInt32); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, forget_gate_bias->type, kTfLiteFloat32); - } - const TfLiteEvalTensor* cell_gate_bias = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kCellGateBiasTensor); - - TF_LITE_ENSURE_EQ(context, cell_gate_bias->dims->size, 1); - TF_LITE_ENSURE_EQ(context, cell_gate_bias->dims->data[0], n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, cell_gate_bias->type, kTfLiteInt32); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, cell_gate_bias->type, kTfLiteFloat32); - } - const TfLiteEvalTensor* output_gate_bias = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kOutputGateBiasTensor); - TF_LITE_ENSURE_EQ(context, output_gate_bias->dims->size, 1); - TF_LITE_ENSURE_EQ(context, output_gate_bias->dims->data[0], n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, output_gate_bias->type, kTfLiteInt32); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, output_gate_bias->type, kTfLiteFloat32); - } - const TfLiteTensor* projection_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kProjectionWeightsTensor); - if (projection_weights != nullptr) { - TF_LITE_ENSURE_EQ(context, 
projection_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, projection_weights->dims->data[0], n_output); - TF_LITE_ENSURE_EQ(context, projection_weights->dims->data[1], n_cell); - } - - const TfLiteTensor* projection_bias = GetOptionalInputTensor( - context, node, micro::lstm::full::kProjectionBiasTensor); - if (projection_bias != nullptr) { - TF_LITE_ENSURE_EQ(context, projection_bias->dims->size, 1); - TF_LITE_ENSURE_EQ(context, projection_bias->dims->data[0], n_output); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, projection_bias->type, kTfLiteInt32); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, projection_bias->type, kTfLiteFloat32); - } - } - - // Making sure the projection tensors are consistent: - // 1) If projection weight is not present, then projection bias should not be - // present. - // 2) If projection weight is present, then projection bias is optional. - const bool projecton_tensors_consistent = - ((projection_weights != nullptr) || (projection_bias == nullptr)); - TF_LITE_ENSURE(context, projecton_tensors_consistent == true); - - if (use_layer_norm) { - const TfLiteEvalTensor* input_layer_norm_coefficients = - tflite::micro::GetEvalInput( - context, node, - micro::lstm::full::kInputLayerNormCoefficientsTensor); - if (use_cifg) { - TF_LITE_ENSURE_EQ(context, input_layer_norm_coefficients, nullptr); - } else { - TF_LITE_ENSURE(context, input_layer_norm_coefficients != nullptr); - TF_LITE_ENSURE_EQ(context, input_layer_norm_coefficients->dims->size, 1); - TF_LITE_ENSURE_EQ(context, input_layer_norm_coefficients->dims->data[0], - n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, input_layer_norm_coefficients->type, - kTfLiteInt16); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, input_layer_norm_coefficients->type, - kTfLiteFloat32); - } - } - const TfLiteEvalTensor* forget_layer_norm_coefficients = - tflite::micro::GetEvalInput( - context, node, - micro::lstm::full::kForgetLayerNormCoefficientsTensor); - TF_LITE_ENSURE_EQ(context, forget_layer_norm_coefficients->dims->size, 1); - TF_LITE_ENSURE_EQ(context, forget_layer_norm_coefficients->dims->data[0], - n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, forget_layer_norm_coefficients->type, - kTfLiteInt16); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, forget_layer_norm_coefficients->type, - kTfLiteFloat32); - } - const TfLiteEvalTensor* cell_layer_norm_coefficients = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kCellLayerNormCoefficientsTensor); - TF_LITE_ENSURE_EQ(context, cell_layer_norm_coefficients->dims->size, 1); - TF_LITE_ENSURE_EQ(context, cell_layer_norm_coefficients->dims->data[0], - n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, cell_layer_norm_coefficients->type, - kTfLiteInt16); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, cell_layer_norm_coefficients->type, - kTfLiteFloat32); - } - const TfLiteEvalTensor* output_layer_norm_coefficients = - tflite::micro::GetEvalInput( - context, node, - micro::lstm::full::kOutputLayerNormCoefficientsTensor); - - TF_LITE_ENSURE_EQ(context, output_layer_norm_coefficients->dims->size, 1); - TF_LITE_ENSURE_EQ(context, output_layer_norm_coefficients->dims->data[0], - n_cell); - if (is_integer) { - TF_LITE_ENSURE_TYPES_EQ(context, output_layer_norm_coefficients->type, - kTfLiteInt16); - } else { - TF_LITE_ENSURE_TYPES_EQ(context, output_layer_norm_coefficients->type, - kTfLiteFloat32); - } - } - - return kTfLiteOk; -} +namespace { +/*Helper Functions*/ -TfLiteStatus 
PrecomputeZeroPointTimesWeightWithBias( - TfLiteContext* context, int32_t zero_point, - const TfLiteTensor* weight_tensor, const TfLiteTensor* bias_tensor, - std::unique_ptr* output) { - if (weight_tensor == nullptr) { - return kTfLiteOk; - } +/*Kernel functions*/ - const RuntimeShape& weight_shape = GetTensorShape(weight_tensor); - TF_LITE_ENSURE_EQ(context, weight_shape.DimensionsCount(), 2); - const int row = weight_shape.Dims(0); - const int col = weight_shape.Dims(1); - output->reset(new int32_t[row]); - if (bias_tensor == nullptr) { - memset(output->get(), 0, row * sizeof(int32_t)); - } else { - const int32_t* bias = GetTensorData(bias_tensor); - memcpy(output->get(), bias, row * sizeof(int32_t)); - } - if (zero_point != 0) { - const int8_t* weight = GetTensorData(weight_tensor); - tensor_utils::PortableMatrixScalarMultiplyAccumulate( - weight, zero_point, row, col, output->get()); - } - return kTfLiteOk; +void* UnidirectionalSequenceLstmInit(TfLiteContext* context, const char* buffer, + size_t length) { + TFLITE_DCHECK(context->AllocatePersistentBuffer != nullptr); + return context->AllocatePersistentBuffer(context, sizeof(OpDataLSTM)); } -TfLiteStatus PopulatePrecomputedZPTimesWeightsWithBias(TfLiteContext* context, - OpData* op_data, - TfLiteNode* node) { - const TfLiteTensor* input; - TF_LITE_ENSURE_OK( - context, - GetInputSafe(context, node, micro::lstm::full::kInputTensor, &input)); - const TfLiteTensor* output_state = - GetVariableInput(context, node, micro::lstm::full::kOutputStateTensor); - TF_LITE_ENSURE(context, output_state != nullptr); - - const int32_t input_zero_point = -input->params.zero_point; - const int32_t output_state_zero_point = -output_state->params.zero_point; - - const TfLiteTensor* input_to_input_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kInputToInputWeightsTensor); - const TfLiteTensor* input_to_forget_weights; - TF_LITE_ENSURE_OK(context, - GetInputSafe(context, node, - micro::lstm::full::kInputToForgetWeightsTensor, - &input_to_forget_weights)); - const TfLiteTensor* input_to_cell_weights; - TF_LITE_ENSURE_OK( - context, - GetInputSafe(context, node, micro::lstm::full::kInputToCellWeightsTensor, - &input_to_cell_weights)); - const TfLiteTensor* input_to_output_weights; - TF_LITE_ENSURE_OK(context, - GetInputSafe(context, node, - micro::lstm::full::kInputToOutputWeightsTensor, - &input_to_output_weights)); - - const TfLiteTensor* recurrent_to_input_weights = GetOptionalInputTensor( - context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); - const TfLiteTensor* recurrent_to_forget_weights; - TF_LITE_ENSURE_OK( - context, GetInputSafe(context, node, - micro::lstm::full::kRecurrentToForgetWeightsTensor, - &recurrent_to_forget_weights)); - const TfLiteTensor* recurrent_to_cell_weights; - TF_LITE_ENSURE_OK( - context, GetInputSafe(context, node, - micro::lstm::full::kRecurrentToCellWeightsTensor, - &recurrent_to_cell_weights)); - const TfLiteTensor* recurrent_to_output_weights; - TF_LITE_ENSURE_OK( - context, GetInputSafe(context, node, - micro::lstm::full::kRecurrentToOutputWeightsTensor, - &recurrent_to_output_weights)); - - const TfLiteTensor* projection_weights = GetOptionalInputTensor( - context, node, lstm::full::kProjectionWeightsTensor); - const TfLiteTensor* projection_bias = GetOptionalInputTensor( - context, node, micro::lstm::full::kProjectionBiasTensor); - - lstm_eval::IntegerLstmParameter* integer_lstm_params = - &op_data->integer_lstm_param; - - TfLiteTensor* intermediate = - context->GetTensor(context, 
node->intermediates->data[4]); - const auto* params = - static_cast(intermediate->quantization.params); - const int32_t hidden_zp = params->zero_point->data[0]; - - // Get bias and perform zero point calculation. - // When there is layer normalization, the gate bias does not apply to matmul - // directly: - // y = ln(w * x + w * r + w * c) + b. - const bool is_layer_norm = op_data->use_layer_norm; - - // Forget gate. - const TfLiteTensor* forget_gate_bias = - is_layer_norm - ? nullptr - : GetInput(context, node, micro::lstm::full::kForgetGateBiasTensor); - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_forget_weights, forget_gate_bias, - &(integer_lstm_params->input_to_forget_effective_bias))); - - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, recurrent_to_forget_weights, - nullptr, &(integer_lstm_params->recurrent_to_forget_effective_bias))); - - // Modulation gate. - const TfLiteTensor* cell_gate_bias = - is_layer_norm - ? nullptr - : GetInput(context, node, micro::lstm::full::kCellGateBiasTensor); - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_cell_weights, cell_gate_bias, - &(integer_lstm_params->input_to_cell_effective_bias))); - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, recurrent_to_cell_weights, nullptr, - &(integer_lstm_params->recurrent_to_cell_effective_bias))); - - // Output gate. - const TfLiteTensor* output_gate_bias = - is_layer_norm - ? nullptr - : GetInput(context, node, micro::lstm::full::kOutputGateBiasTensor); - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_output_weights, output_gate_bias, - &(integer_lstm_params->input_to_output_effective_bias))); - - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, recurrent_to_output_weights, - nullptr, &(integer_lstm_params->recurrent_to_output_effective_bias))); - - // Input gate. The calculation is only meaningful for non-cifg case. - const TfLiteTensor* input_gate_bias = - is_layer_norm - ? nullptr - : GetInput(context, node, micro::lstm::full::kInputGateBiasTensor); - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, input_zero_point, input_to_input_weights, input_gate_bias, - &(integer_lstm_params->input_to_input_effective_bias))); - TF_LITE_ENSURE_OK( - context, - PrecomputeZeroPointTimesWeightWithBias( - context, output_state_zero_point, recurrent_to_input_weights, nullptr, - &(integer_lstm_params->recurrent_to_input_effective_bias))); - - // Projection bias. The calculation is only meaningful for with projection. - TF_LITE_ENSURE_OK(context, - PrecomputeZeroPointTimesWeightWithBias( - context, hidden_zp, projection_weights, projection_bias, - &(integer_lstm_params->projection_effective_bias))); - return kTfLiteOk; -} +TfLiteStatus UnidirectionalSequenceLstmPrepare(TfLiteContext* context, + TfLiteNode* node) { + TF_LITE_ENSURE_EQ(context, node->outputs->size, 1); + TF_LITE_ENSURE_EQ(context, node->inputs->size, 24); -// Resize the output and state tensors based on the sizes of the input tensors. -// Allocate a temporary scratch tensor. Also check that the sizes of the input -// tensors match each other. 
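For reference, the removed PrecomputeZeroPointTimesWeightWithBias above folds the constant zero-point term of a quantized matmul into the bias once, at prepare time: since (x - zp) . w = x . w - zp * sum(w), passing the negated zero point turns bias[r] into bias[r] - zp * sum_c(w[r][c]). A standalone sketch of the same folding, with hypothetical names:

#include <cstdint>
#include <vector>

// effective[r] = bias[r] + zero_point * sum_c(weight[r * cols + c]).
// Call with the negated input zero point to absorb the -zp * sum(w) term,
// so the kernel can multiply raw quantized inputs directly.
std::vector<int32_t> FoldZeroPointIntoBias(const int8_t* weight, int rows,
                                           int cols, const int32_t* bias,
                                           int32_t zero_point) {
  std::vector<int32_t> effective(rows);
  for (int r = 0; r < rows; ++r) {
    int32_t acc = (bias != nullptr) ? bias[r] : 0;
    for (int c = 0; c < cols; ++c) {
      acc += zero_point * static_cast<int32_t>(weight[r * cols + c]);
    }
    effective[r] = acc;
  }
  return effective;
}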
-TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) {
-  OpData* op_data = reinterpret_cast<OpData*>(node->user_data);
-  // const int scratch_tensor_index = op_data->scratch_tensor_index;
+  TFLITE_DCHECK(node->builtin_data != nullptr);
+  TFLITE_DCHECK(node->user_data != nullptr);
 
-  // Check we have all the inputs and outputs we need.
-  bool use_layer_norm = false;
-  if (node->inputs->size == 24) {
-    const TfLiteTensor* forget_layer_norm_coefficients = GetOptionalInputTensor(
-        context, node, micro::lstm::full::kForgetLayerNormCoefficientsTensor);
-    if (forget_layer_norm_coefficients == nullptr) {
-      use_layer_norm = false;
-    } else {
-      use_layer_norm = true;
-    }
-  } else if (node->inputs->size == 20) {
-    // This is deprecated and is only kept here for backward compatibility.
-    use_layer_norm = false;
+  OpDataLSTM* op_data = reinterpret_cast<OpDataLSTM*>(node->user_data);
+  const auto* builtin_data =
+      static_cast<TfLiteUnidirectionalSequenceLSTMParams*>(node->builtin_data);
+  // All TempTfLiteTensors will be deallocated through the destructor.
+  LstmTensors lstm_tensors(context, node);
+  TF_LITE_ENSURE_OK(context, lstm_tensors.ValidateTensorStatus(context));
+
+  op_data->cell_gate_nonlinear_type = builtin_data->activation;
+  op_data->size_info =
+      CreateLstmSizeInfo(builtin_data->time_major,
+                         lstm_tensors.GetInternalTensor(kLstmInputTensor)->dims,
+                         lstm_tensors.HiddenStateTensor()->dims);
+  TF_LITE_ENSURE_OK(
+      context, ValidateTensorSize(context, lstm_tensors, op_data->size_info));
+
+  // Create cell state information and gate parameters (Fully Connected and Mul)
+  auto cell_state_type =
+      lstm_tensors.GetInternalTensor(kLstmCellStateTensor)->type;
+  if (cell_state_type == kTfLiteFloat32) {
+    op_data->cell_state_info =
+        CreateLstmCellStateInfoFloat(builtin_data->cell_clip);
+    TF_LITE_ENSURE_OK(
+        context, PrepareGateParametersFloat(context, lstm_tensors, op_data));
+  } else if (cell_state_type == kTfLiteInt16) {
+    op_data->cell_state_info = CreateLstmCellStateInfo(
+        lstm_tensors.CellStateTensor()->params.scale, builtin_data->cell_clip);
+    TF_LITE_ENSURE_OK(
+        context, PrepareGateParametersInteger(context, lstm_tensors, op_data));
   } else {
-    MicroPrintf("The LSTM Full kernel expects 20 or 24 inputs. Got %d inputs",
-                node->inputs->size);
+    MicroPrintf(
+        "Cell state type %s (%d) not supported. The quantized Unidirectional "
+        "Sequence LSTM Op only supports int16 cell state",
+        TfLiteTypeGetName(cell_state_type), cell_state_type);
     return kTfLiteError;
   }
-  TF_LITE_ENSURE_EQ(context, node->outputs->size, 1);
-  op_data->use_layer_norm = use_layer_norm;
-
-  // Inferring batch size, number of outputs and sequence length and
-  // number of cells from the input tensors.
-  const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(
-      context, node, micro::lstm::full::kInputTensor);
-  const bool is_integer = input->type == kTfLiteInt8;
-  TF_LITE_ENSURE(context, input->dims->size > 1);
-  const auto* params =
-      reinterpret_cast<TfLiteUnidirectionalSequenceLSTMParams*>(
-          node->builtin_data);
-  const bool time_major = params->time_major;
-  const int n_batch = time_major ?
input->dims->data[1] : input->dims->data[0]; - const int n_input = input->dims->data[2]; - const TfLiteEvalTensor* input_to_output_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToOutputWeightsTensor); - const int n_cell = input_to_output_weights->dims->data[0]; - TF_LITE_ENSURE_EQ(context, input_to_output_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, input_to_output_weights->dims->data[1], n_input); - const TfLiteEvalTensor* recurrent_to_output_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToOutputWeightsTensor); - - TF_LITE_ENSURE_EQ(context, recurrent_to_output_weights->dims->size, 2); - TF_LITE_ENSURE_EQ(context, recurrent_to_output_weights->dims->data[0], - n_cell); - const int n_output = recurrent_to_output_weights->dims->data[1]; - - // Check that input tensor dimensions matches with each other. - TF_LITE_ENSURE_OK( - context, CheckInputTensorDimensions(context, node, n_input, n_output, - n_cell, use_layer_norm, is_integer)); - // Get the pointer to output, output_state and cell_state buffer tensors. - // TfLiteEvalTensor* output = - // tflite::micro::GetEvalOutput(context, node, - // micro::lstm::full::kOutputTensor); - TfLiteEvalTensor* output_state = tflite::micro::GetMutableEvalInput( - context, node, micro::lstm::full::kOutputStateTensor); - TFLITE_DCHECK(output_state != nullptr); - TfLiteEvalTensor* cell_state = tflite::micro::GetMutableEvalInput( - context, node, micro::lstm::full::kCellStateTensor); - TFLITE_DCHECK(cell_state != nullptr); - // Check the shape of input state tensors. - // These tensor may be 1D or 2D. It's fine as long as the total size is - // correct. - TF_LITE_ENSURE_EQ(context, NumElements(output_state->dims), - n_batch * n_output); - TF_LITE_ENSURE_EQ(context, NumElements(cell_state->dims), n_batch * n_cell); - - if (is_integer) { - const int num_intermediate_tensors = node->intermediates->size; - TF_LITE_ENSURE(context, num_intermediate_tensors == 5); + // request buffers (four buffers) + for (size_t i = 0; i < 4; i++) { + TF_LITE_ENSURE_OK(context, context->RequestScratchBufferInArena( + context, + op_data->size_info.batch_size * + op_data->size_info.state_dimension * + TfLiteTypeGetSize(cell_state_type), + &(op_data->buffer_indices[i]))); } - - if (is_integer) { - // Integer UnidirectionalSequenceLSTM prepare function for 8x8->16. - // This code path needs 5 intermediate tensors per Op. - // Populate quantization parameters. - PopulateQuantizedLstmParams8x8_16(context, node, - &op_data->integer_lstm_param); - // Allocate scratch buffer. Need 6 16bit buffer with size n_batch * n_cell - // and 1 8bit buffer with size n_batch * n_cell. We also need 1 32 bit - // buffer with size n_batch * n_cell. - // - // Handle cifg case as well, which might save one buffer. - - int scratch_idx = 0; - - context->RequestScratchBufferInArena( - context, n_batch * n_cell * sizeof(int32_t), &(scratch_idx)); - op_data->scratch_tensor_index = scratch_idx; - - for (int scratch_index = 1; scratch_index < 6; ++scratch_index) { - // node->temporaries->data[scratch_index] = op_data->scratch_tensor_index - // + scratch_index; - context->RequestScratchBufferInArena( - context, n_batch * n_cell * sizeof(int32_t), &(scratch_idx)); - TFLITE_DCHECK(scratch_idx == - (op_data->scratch_tensor_index + scratch_index)); - } - - // Populate precomputed zp * weight. 
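For a sense of scale, each of the four scratch buffers requested by the new Prepare above holds batch_size * state_dimension elements of the cell-state type. With illustrative numbers, not taken from the patch:

#include <cstddef>
#include <cstdint>

constexpr size_t kBatchSize = 2;        // illustrative
constexpr size_t kStateDimension = 64;  // illustrative
constexpr size_t kBufferBytes =
    kBatchSize * kStateDimension * sizeof(int16_t);  // int16 cell state
static_assert(kBufferBytes == 256, "per-buffer scratch size");
static_assert(4 * kBufferBytes == 1024, "total scratch requested");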
- TF_LITE_ENSURE_OK(context, PopulatePrecomputedZPTimesWeightsWithBias( - context, op_data, node)); - } - return kTfLiteOk; } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { - const auto* params = - reinterpret_cast( - node->builtin_data); - const OpData* op_data = reinterpret_cast(node->user_data); - // const bool use_layer_norm = op_data->use_layer_norm; - // const bool time_major = params->time_major; - - const TfLiteEvalTensor* input = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputTensor); - const TfLiteEvalTensor* input_to_input_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToInputWeightsTensor); - const TfLiteEvalTensor* input_to_forget_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToForgetWeightsTensor); - const TfLiteEvalTensor* input_to_cell_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToCellWeightsTensor); - const TfLiteEvalTensor* input_to_output_weights = tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kInputToOutputWeightsTensor); - const TfLiteEvalTensor* recurrent_to_input_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToInputWeightsTensor); - const TfLiteEvalTensor* recurrent_to_forget_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToForgetWeightsTensor); - const TfLiteEvalTensor* recurrent_to_cell_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToCellWeightsTensor); - const TfLiteEvalTensor* recurrent_to_output_weights = - tflite::micro::GetEvalInput( - context, node, micro::lstm::full::kRecurrentToOutputWeightsTensor); - const TfLiteEvalTensor* cell_to_input_weights = context->GetEvalTensor( - context, - node->inputs->data[micro::lstm::full::kCellToInputWeightsTensor]); - const TfLiteEvalTensor* cell_to_forget_weights = context->GetEvalTensor( - context, - node->inputs->data[micro::lstm::full::kCellToForgetWeightsTensor]); - const TfLiteEvalTensor* cell_to_output_weights = context->GetEvalTensor( - context, - node->inputs->data[micro::lstm::full::kCellToOutputWeightsTensor]); - const TfLiteEvalTensor* input_gate_bias = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kInputGateBiasTensor]); - - const TfLiteEvalTensor* forget_gate_bias = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kForgetGateBiasTensor]); - const TfLiteEvalTensor* cell_gate_bias = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kCellGateBiasTensor]); - const TfLiteEvalTensor* output_gate_bias = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kOutputGateBiasTensor]); - - const TfLiteEvalTensor* projection_weights = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kProjectionWeightsTensor]); - const TfLiteEvalTensor* projection_bias = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kProjectionBiasTensor]); - - TfLiteEvalTensor* output_state = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kOutputStateTensor]); - TFLITE_DCHECK(output_state != nullptr); - TfLiteEvalTensor* cell_state = context->GetEvalTensor( - context, node->inputs->data[micro::lstm::full::kCellStateTensor]); - TFLITE_DCHECK(cell_state != nullptr); - const TfLiteEvalTensor* input_layer_norm_coefficients = - context->GetEvalTensor( - context, - node->inputs - 
->data[micro::lstm::full::kInputLayerNormCoefficientsTensor]);
-
-  const TfLiteEvalTensor* forget_layer_norm_coefficients =
-      context->GetEvalTensor(
-          context,
-          node->inputs
-              ->data[micro::lstm::full::kForgetLayerNormCoefficientsTensor]);
-  const TfLiteEvalTensor* cell_layer_norm_coefficients = context->GetEvalTensor(
-      context,
-      node->inputs->data[micro::lstm::full::kCellLayerNormCoefficientsTensor]);
-
-  const TfLiteEvalTensor* output_layer_norm_coefficients =
-      context->GetEvalTensor(
-          context,
-          node->inputs
-              ->data[micro::lstm::full::kOutputLayerNormCoefficientsTensor]);
-
-  TfLiteEvalTensor* output = tflite::micro::GetEvalOutput(
-      context, node, micro::lstm::full::kOutputTensor);
-
-  // Copy out the LSTM specific params so they can be passed in the function.
-  TfLiteLSTMParams lstm_params;
-  lstm_params.activation = params->activation;
-  lstm_params.cell_clip = params->cell_clip;
-  lstm_params.proj_clip = params->proj_clip;
-  lstm_params.asymmetric_quantize_inputs = params->asymmetric_quantize_inputs;
-  switch (input_to_output_weights->type) {
+TfLiteStatus UnidirectionalSequenceLstmEval(TfLiteContext* context,
+                                            TfLiteNode* node) {
+  TFLITE_DCHECK(node->user_data != nullptr);
+  const OpDataLSTM& op_data = *reinterpret_cast<OpDataLSTM*>(node->user_data);
+  auto kernel_content = CreateLSTMKernelContent(context, node);
+
+  const auto activation_type =
+      kernel_content.internal_tensors[kLstmInputTensor]->type;
+  const auto weight_type =
+      kernel_content.internal_tensors[kLstmInputToInputWeightsTensor]->type;
+
+  switch (activation_type) {
+    case kTfLiteFloat32: {
+      LSTMBuffers<float> buffers =
+          CreateLSTMBuffers<float>(context, op_data.buffer_indices);
+      EvalLstm<float, float, float, float>(op_data, kernel_content, buffers);
+      break;
+    }
     case kTfLiteInt8: {
-      const bool is_hybrid = input->type == kTfLiteFloat32;
-      if (is_hybrid) {
-        MicroPrintf(" hybrid type is not supported.");
-        return kTfLiteError;
-
-      } else {
-        TfLiteEvalTensor* scratch[6];
-        // Allocate scratch buffer. Need 6 16bit buffer with size n_batch *
-        // n_cell
-        // and 1 8bit buffer with size n_batch * n_cell. We also need 1 32 bit
-        // buffer with size n_batch * n_cell.
-        //
-        // Handle cifg case as well, which might save one buffer.
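Taking the EvalLstm instantiations in this kernel together, the four template parameters read as <ActivationType, WeightType, CellType, BiasType>. A compact sketch of the quantized half of the dispatch (the float path simply uses float for all four); the types and helper names here are illustrative only:

#include <cstdint>

template <typename ActivationT, typename WeightT, typename CellT,
          typename BiasT>
void EvalLstmSketch() { /* kernel body elided */ }

// Outer switch: activation type; inner switch: weight type. Only int8
// weights are supported on the quantized paths.
bool DispatchQuantized(bool int16_activations, bool int8_weights) {
  if (!int8_weights) return false;
  if (int16_activations) {
    EvalLstmSketch<int16_t, int8_t, int16_t, int64_t>();  // 16x8 -> 16
  } else {
    EvalLstmSketch<int8_t, int8_t, int16_t, int32_t>();   // 8x8 -> 16
  }
  return true;
}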
-
-      const auto* tmp_params =
-          reinterpret_cast<TfLiteUnidirectionalSequenceLSTMParams*>(
-              node->builtin_data);
-      const bool time_major = tmp_params->time_major;
-      for (int scratch_index = 0; scratch_index < 6; ++scratch_index) {
-        TFLITE_DCHECK(context != nullptr);
-        TFLITE_DCHECK(context->GetScratchBuffer != nullptr);
-        int32_t* scratch_tensor =
-            static_cast<int32_t*>(context->GetScratchBuffer(
-                context, op_data->scratch_tensor_index + scratch_index));
-        scratch[scratch_index] = (TfLiteEvalTensor*)scratch_tensor;
+      switch (weight_type) {
+        case kTfLiteInt8: {
+          // 8(activation)x8(weight)->16(cell) LSTM with 32-bit bias
+          LSTMBuffers<int16_t> buffers =
+              CreateLSTMBuffers<int16_t>(context, op_data.buffer_indices);
+          EvalLstm<int8_t, int8_t, int16_t, int32_t>(op_data, kernel_content,
+                                                     buffers);
+          break;
+        }
+        default: {
+          MicroPrintf("Filter type %s (%d) not supported.",
+                      TfLiteTypeGetName(weight_type), weight_type);
+          return kTfLiteError;
         }
-      /*
-      TF_LITE_ENSURE_OK(context,
-                        GetScratchSafe(context, node, 0, &scratch0));
-
-      TF_LITE_ENSURE_OK(context,
-                        GetScratchSafe(context, node, 1, &scratch1));
-
-      TF_LITE_ENSURE_OK(context,
-                        GetScratchSafe(context, node, 2, &scratch2));
-
-      TF_LITE_ENSURE_OK(context,
-                        GetScratchSafe(context, node, 3, &scratch3));
-
-      TF_LITE_ENSURE_OK(context,
-                        GetScratchSafe(context, node, 4, &scratch4));
-
-      TF_LITE_ENSURE_OK(context,
-                        GetScratchSafe(context, node, 5, &scratch5));
-      */
-        return lstm_eval::EvalInteger8x8_16(
-            context, node, input, input_to_input_weights,
-            input_to_forget_weights, input_to_cell_weights,
-            input_to_output_weights, recurrent_to_input_weights,
-            recurrent_to_forget_weights, recurrent_to_cell_weights,
-            recurrent_to_output_weights, cell_to_input_weights,
-            cell_to_forget_weights, cell_to_output_weights,
-            input_layer_norm_coefficients, forget_layer_norm_coefficients,
-            cell_layer_norm_coefficients, output_layer_norm_coefficients,
-            input_gate_bias, forget_gate_bias, cell_gate_bias, output_gate_bias,
-            projection_weights, projection_bias, &lstm_params,
-            /*forward_sequence=*/true, time_major, &op_data->integer_lstm_param,
-            output_state, cell_state, output, scratch[0], scratch[1],
-            scratch[2], scratch[3], scratch[4], scratch[5]);
-      }
+      break;
     }
-
-    default:
-      MicroPrintf("Type %s is not currently supported.",
-                  TfLiteTypeGetName(input_to_output_weights->type));
+    case kTfLiteInt16: {
+      switch (weight_type) {
+        case kTfLiteInt8: {
+          // 16(activation)x8(weight)->16(cell) LSTM with 64-bit bias
+          LSTMBuffers<int16_t> buffers =
+              CreateLSTMBuffers<int16_t>(context, op_data.buffer_indices);
+          EvalLstm<int16_t, int8_t, int16_t, int64_t>(op_data, kernel_content,
+                                                      buffers);
+          break;
+        }
+        default: {
+          MicroPrintf("Filter type %s (%d) not supported.",
+                      TfLiteTypeGetName(weight_type), weight_type);
+          return kTfLiteError;
+        }
+      }
+      break;
+    }
+    default: {
+      MicroPrintf("Input type %s (%d) not supported.",
+                  TfLiteTypeGetName(activation_type), activation_type);
       return kTfLiteError;
+    }
   }
   return kTfLiteOk;
 }
-//}  // namespace unidirectional_sequence_lstm
-}  // namespace micro
-}  // namespace ops
+}  // namespace
 
 TFLMRegistration Register_UNIDIRECTIONAL_SEQUENCE_LSTM() {
-  return tflite::micro::RegisterOp(ops::micro::Init, ops::micro::Prepare,
-                                   ops::micro::Eval);
+  return tflite::micro::RegisterOp(UnidirectionalSequenceLstmInit,
+                                   UnidirectionalSequenceLstmPrepare,
+                                   UnidirectionalSequenceLstmEval);
 }
 
 }  // namespace tflite
diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa.h b/tensorflow/lite/micro/kernels/xtensa/xtensa.h
index 47820d326b9..604736ddbd4 100644
--- a/tensorflow/lite/micro/kernels/xtensa/xtensa.h
+++ b/tensorflow/lite/micro/kernels/xtensa/xtensa.h
@@ -22,13
+22,13 @@ limitations under the License. #include "tensorflow/lite/micro/kernels/xtensa/hifimini/fixedpoint_utils.h" #endif // defined(HIFMINI) -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #include "include/nnlib/xa_nnlib_api.h" #include "include/nnlib/xa_nnlib_standards.h" #define ALIGNED_SIZE(x, bytes) (((x) + (bytes - 1)) & (~(bytes - 1))) #define ALIGN_PTR(x, bytes) ((((unsigned)(x)) + (bytes - 1)) & (~(bytes - 1))) -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #if defined(VISION_P6) #include "utils.h" diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h index 355f022349f..f804a6d430c 100644 --- a/tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_conv.h @@ -25,9 +25,9 @@ namespace tflite { struct XtensaConvOpData { OpDataConv reference_op_data; -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) int scratch_tensor_index; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #if defined(VISION_P6) int8_t* reorder_coefficient_bias; // buffers used to keep reordered coeff and @@ -36,30 +36,30 @@ struct XtensaConvOpData { int8_t* per_channel_output_shift_int8; uint8_t* p_context; // persistent lib context for this instance saved here uint32_t context_size; + bool is_per_channel_quantized; #endif // VISION_P6 }; -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus ConvPrepareHifi(TfLiteContext* context, TfLiteNode* node); -TfLiteStatus ConvEvalHifi(TfLiteContext* context, TfLiteNode* node, - const TfLiteConvParams& params, - const XtensaConvOpData& data, - const TfLiteEvalTensor* input, - const TfLiteEvalTensor* filter, - const TfLiteEvalTensor* bias, - TfLiteEvalTensor* output); -#endif // defined(HIFI4) || defined(HIFI5) - -#if defined(HIFI4) -TfLiteStatus ConvEvalHifi16(TfLiteContext* context, TfLiteNode* node, - const TfLiteConvParams& params, - const XtensaConvOpData& data, - const TfLiteEvalTensor* input, - const TfLiteEvalTensor* filter, - const TfLiteEvalTensor* bias, - TfLiteEvalTensor* output); -#endif // defined(HIFI4) +TfLiteStatus ConvEvalHifiInt8(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); + +TfLiteStatus ConvEvalHifiInt16(TfLiteContext* context, TfLiteNode* node, + const TfLiteConvParams& params, + const XtensaConvOpData& data, + const TfLiteEvalTensor* input, + const TfLiteEvalTensor* filter, + const TfLiteEvalTensor* bias, + TfLiteEvalTensor* output); + +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #if defined(VISION_P6) @@ -79,6 +79,9 @@ TfLiteStatus ConvReferenceEvalInt8(TfLiteContext* context, TfLiteNode* node); TfLiteStatus ConvReferenceEvalInt16(TfLiteContext* context, TfLiteNode* node); +void* ConvInitXtensa(TfLiteContext* context, const char* buffer, size_t length); +TfLiteStatus ConvPrepareXtensa(TfLiteContext* context, TfLiteNode* node); + } // namespace tflite #endif // TENSORFLOW_LITE_MICRO_KERNELS_XTENSA_XTENSA_CONV_H_ diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h index ca15719fae7..7d0d765c33a 100644 
--- a/tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_depthwise_conv.h @@ -25,9 +25,9 @@ namespace tflite { struct XtensaDepthwiseConvOpData { OpDataConv reference_op_data; -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) int scratch_tensor_index; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #if defined(VISION_P6) int8_t* reorder_coefficient_bias; // buffers used to keep reordered coeff and @@ -39,7 +39,7 @@ struct XtensaDepthwiseConvOpData { #endif // VISION_P6 }; -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) TfLiteStatus DepthwiseConvPrepareHifi(TfLiteContext* context, TfLiteNode* node); TfLiteStatus DepthwiseConvEvalHifi(TfLiteContext* context, TfLiteNode* node, @@ -52,7 +52,7 @@ TfLiteStatus DepthwiseConvEvalHifi(TfLiteContext* context, TfLiteNode* node, TfLiteStatus DepthwiseConvReferenceEvalInt8(TfLiteContext* context, TfLiteNode* node); -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #if defined(VISION_P6) diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h index 1988db7cbb3..e472ef11648 100644 --- a/tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_reshape.h @@ -22,9 +22,6 @@ limitations under the License. namespace tflite { -constexpr int kReshapeInputTensor = 0; -constexpr int kReshapeOutputTensor = 0; - #if defined(VISION_P6) struct XtensaReshapeData { diff --git a/tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h b/tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h index 7d0d461d659..d7e6a14934f 100644 --- a/tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h +++ b/tensorflow/lite/micro/kernels/xtensa/xtensa_softmax.h @@ -22,12 +22,12 @@ limitations under the License. 
namespace tflite { -#if defined(HIFI4) || defined(HIFI5) +#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5) struct XtensaSoftmaxOpData { SoftmaxParams params; int scratch_tensor_index; }; -#endif // defined(HIFI4) || defined(HIFI5) +#endif // defined(HIFI3) || defined(HIFI4) || defined(HIFI5) #if defined(VISION_P6) struct XtensaSoftmaxOpData { diff --git a/tensorflow/lite/micro/kernels/zeros_like.cc b/tensorflow/lite/micro/kernels/zeros_like.cc index 597e50e3913..eb1f9c6221f 100644 --- a/tensorflow/lite/micro/kernels/zeros_like.cc +++ b/tensorflow/lite/micro/kernels/zeros_like.cc @@ -25,7 +25,7 @@ namespace { constexpr int kInputTensor = 0; constexpr int kOutputTensor = 0; -TfLiteStatus Prepare(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ZerosLikePrepare(TfLiteContext* context, TfLiteNode* node) { MicroContext* micro_context = GetMicroContext(context); TF_LITE_ENSURE_EQ(context, NumInputs(node), 1); @@ -50,7 +50,7 @@ void resetZeros(T* out, const int num_elements) { } } -TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { +TfLiteStatus ZerosLikeEval(TfLiteContext* context, TfLiteNode* node) { const TfLiteEvalTensor* input = tflite::micro::GetEvalInput(context, node, kInputTensor); TfLiteEvalTensor* output = @@ -82,7 +82,7 @@ TfLiteStatus Eval(TfLiteContext* context, TfLiteNode* node) { } // namespace TFLMRegistration Register_ZEROS_LIKE() { - return tflite::micro::RegisterOp(nullptr, Prepare, Eval); + return tflite::micro::RegisterOp(nullptr, ZerosLikePrepare, ZerosLikeEval); } } // namespace tflite diff --git a/tensorflow/lite/micro/memory_arena_threshold_test.cc b/tensorflow/lite/micro/memory_arena_threshold_test.cc index f6bb24fa258..3017f56b2bb 100644 --- a/tensorflow/lite/micro/memory_arena_threshold_test.cc +++ b/tensorflow/lite/micro/memory_arena_threshold_test.cc @@ -97,7 +97,7 @@ constexpr int kTestConvModelOnlyTotalSize = 9488; // Tail size contributed by the conv model excluding the // RecordingMicroAllocator's overhead // TODO(b/207157610): replace magic number that depends on OPs -constexpr int kTestConvModelOnlyTailSize = 1744; +constexpr int kTestConvModelOnlyTailSize = 1816; constexpr int kTestConvModelPersistentTfLiteTensorDataSize = 128; constexpr int kTestConvModelPersistentBufferDataSize = 728; #else @@ -108,7 +108,7 @@ constexpr int kTestConvModelOnlyTotalSize = 9760; // Tail size contributed by the conv model excluding the // RecordingMicroAllocator's overhead // TODO(b/207157610): replace magic number that depends on OPs -constexpr int kTestConvModelOnlyTailSize = 2016; +constexpr int kTestConvModelOnlyTailSize = 2088; constexpr int kTestConvModelPersistentTfLiteTensorDataSize = 224; constexpr int kTestConvModelPersistentBufferDataSize = 720; #endif diff --git a/tensorflow/lite/micro/memory_helpers.cc b/tensorflow/lite/micro/memory_helpers.cc index 685f04b22fa..94a6fe33081 100644 --- a/tensorflow/lite/micro/memory_helpers.cc +++ b/tensorflow/lite/micro/memory_helpers.cc @@ -50,6 +50,9 @@ TfLiteStatus TfLiteTypeSizeOf(TfLiteType type, size_t* size) { case kTfLiteFloat16: *size = sizeof(int16_t); break; + case kTfLiteBFloat16: + *size = sizeof(int16_t); + break; case kTfLiteFloat32: *size = sizeof(float); break; diff --git a/tensorflow/lite/micro/memory_helpers_test.cc b/tensorflow/lite/micro/memory_helpers_test.cc index e44c5866e40..9da2940c30d 100644 --- a/tensorflow/lite/micro/memory_helpers_test.cc +++ b/tensorflow/lite/micro/memory_helpers_test.cc @@ -180,8 +180,8 @@ TF_LITE_MICRO_TEST(TestTypeSizeOf) { 
                          tflite::TfLiteTypeSizeOf(kTfLiteComplex128, &size));
   TF_LITE_MICRO_EXPECT_EQ(sizeof(double) * 2, size);
 
-  TF_LITE_MICRO_EXPECT_NE(
-      kTfLiteOk, tflite::TfLiteTypeSizeOf(static_cast<TfLiteType>(-1), &size));
+  TF_LITE_MICRO_EXPECT_NE(kTfLiteOk,
+                          tflite::TfLiteTypeSizeOf(kTfLiteNoType, &size));
 }
 
 TF_LITE_MICRO_TEST(TestBytesRequiredForTensor) {
diff --git a/tensorflow/lite/micro/memory_planner/BUILD b/tensorflow/lite/micro/memory_planner/BUILD
index 0329e73feef..7111da4e1c2 100644
--- a/tensorflow/lite/micro/memory_planner/BUILD
+++ b/tensorflow/lite/micro/memory_planner/BUILD
@@ -52,7 +52,6 @@ cc_library(
         ":micro_memory_planner",
         "//tensorflow/lite/micro:micro_compatibility",
         "//tensorflow/lite/micro:micro_log",
-        "//tensorflow/lite/micro:micro_string",
     ],
 )
 
diff --git a/tensorflow/lite/micro/memory_planner/greedy_memory_planner.cc b/tensorflow/lite/micro/memory_planner/greedy_memory_planner.cc
index 471a5b22935..a087b236cc9 100644
--- a/tensorflow/lite/micro/memory_planner/greedy_memory_planner.cc
+++ b/tensorflow/lite/micro/memory_planner/greedy_memory_planner.cc
@@ -1,4 +1,4 @@
-/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -16,7 +16,6 @@ limitations under the License.
 #include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h"
 
 #include "tensorflow/lite/micro/micro_log.h"
-#include "tensorflow/lite/micro/micro_string.h"
 
 namespace tflite {
 
diff --git a/tensorflow/lite/micro/memory_planner/greedy_memory_planner.h b/tensorflow/lite/micro/memory_planner/greedy_memory_planner.h
index ae3705d3310..b2cdb6173df 100644
--- a/tensorflow/lite/micro/memory_planner/greedy_memory_planner.h
+++ b/tensorflow/lite/micro/memory_planner/greedy_memory_planner.h
@@ -107,6 +107,11 @@ class GreedyMemoryPlanner : public MicroMemoryPlanner {
     return per_buffer_size;
   }
 
+  // Returns false because the GreedyMemoryPlanner doesn't preserve all tensors
+  // after invocation: the data of tensors that aren't in use during a given
+  // phase of invocation is overwritten.
+  bool preserves_all_tensors() const override { return false; }
+
  private:
   // Whether a buffer is active in a given time range.
   bool DoesEntryOverlapInTime(const ListEntry* entry, const int first_time_used,
diff --git a/tensorflow/lite/micro/memory_planner/linear_memory_planner.cc b/tensorflow/lite/micro/memory_planner/linear_memory_planner.cc
index 5c6afb54582..00d707a7646 100644
--- a/tensorflow/lite/micro/memory_planner/linear_memory_planner.cc
+++ b/tensorflow/lite/micro/memory_planner/linear_memory_planner.cc
@@ -19,6 +19,9 @@ limitations under the License.
namespace tflite {
 
+// C++11 requires defining a constexpr static class member in a .cc file
+constexpr int tflite::LinearMemoryPlanner::kMaxBufferCount;
+
 LinearMemoryPlanner::LinearMemoryPlanner()
     : current_buffer_count_(0), next_free_offset_(0) {}
 
 LinearMemoryPlanner::~LinearMemoryPlanner() {}
diff --git a/tensorflow/lite/micro/memory_planner/linear_memory_planner.h b/tensorflow/lite/micro/memory_planner/linear_memory_planner.h
index d4938ddc7c7..9850569fded 100644
--- a/tensorflow/lite/micro/memory_planner/linear_memory_planner.h
+++ b/tensorflow/lite/micro/memory_planner/linear_memory_planner.h
@@ -35,6 +35,10 @@ class LinearMemoryPlanner : public MicroMemoryPlanner {
   int GetBufferCount() override;
   TfLiteStatus GetOffsetForBuffer(int buffer_index, int* offset) override;
 
+  // Returns true because the LinearMemoryPlanner preserves all tensors after
+  // invocation.
+  bool preserves_all_tensors() const override { return true; }
+
  private:
   static constexpr int kMaxBufferCount = 1024;
   size_t buffer_offsets_[kMaxBufferCount];
diff --git a/tensorflow/lite/micro/memory_planner/micro_memory_planner.h b/tensorflow/lite/micro/memory_planner/micro_memory_planner.h
index 0bfe693a3d2..035f467374f 100644
--- a/tensorflow/lite/micro/memory_planner/micro_memory_planner.h
+++ b/tensorflow/lite/micro/memory_planner/micro_memory_planner.h
@@ -81,6 +81,10 @@ class MicroMemoryPlanner {
     return kTfLiteOk;
   }
 
+  // Returns true if the MicroMemoryPlanner preserves all tensors after
+  // invocation, and false if it doesn't.
+  virtual bool preserves_all_tensors() const = 0;
+
   virtual void PrintMemoryPlan() {
     // Default does nothing.
   }
diff --git a/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h
index 8f9bb26a330..13a3fad8a79 100644
--- a/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h
+++ b/tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h
@@ -115,6 +115,10 @@ class NonPersistentMemoryPlannerShim : public MicroMemoryPlanner {
   size_t GetMaximumMemorySize() override;
   int GetBufferCount() override;
 
+  // Returns false because the NonPersistentMemoryPlannerShim doesn't preserve
+  // all tensors after invocation.
+  bool preserves_all_tensors() const override { return false; }
+
  private:
   const BufferPlan* buffer_plan_;  // not owned, can't be null
 
diff --git a/tensorflow/lite/micro/micro_allocator.cc b/tensorflow/lite/micro/micro_allocator.cc
index ba7cb662197..930da754bb5 100644
--- a/tensorflow/lite/micro/micro_allocator.cc
+++ b/tensorflow/lite/micro/micro_allocator.cc
@@ -28,13 +28,13 @@ limitations under the License.
#include "tensorflow/lite/micro/flatbuffer_utils.h" #include "tensorflow/lite/micro/memory_helpers.h" #include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h" +#include "tensorflow/lite/micro/memory_planner/linear_memory_planner.h" #include "tensorflow/lite/micro/memory_planner/micro_memory_planner.h" #include "tensorflow/lite/micro/micro_allocation_info.h" #include "tensorflow/lite/micro/micro_arena_constants.h" #include "tensorflow/lite/micro/micro_log.h" #include "tensorflow/lite/micro/tflite_bridge/flatbuffer_conversions_bridge.h" #include "tensorflow/lite/schema/schema_generated.h" -#include "tensorflow/lite/schema/schema_utils.h" namespace tflite { @@ -71,6 +71,29 @@ class MicroBuiltinDataAllocator : public TfLiteBridgeBuiltinDataAllocator { IPersistentBufferAllocator* persistent_allocator_; }; +MicroMemoryPlanner* CreateMemoryPlanner( + MemoryPlannerType memory_planner_type, + IPersistentBufferAllocator* memory_allocator) { + MicroMemoryPlanner* memory_planner = nullptr; + uint8_t* memory_planner_buffer = nullptr; + + switch (memory_planner_type) { + case MemoryPlannerType::kLinear: { + memory_planner_buffer = memory_allocator->AllocatePersistentBuffer( + sizeof(LinearMemoryPlanner), alignof(LinearMemoryPlanner)); + memory_planner = new (memory_planner_buffer) LinearMemoryPlanner(); + break; + } + case MemoryPlannerType::kGreedy: { + memory_planner_buffer = memory_allocator->AllocatePersistentBuffer( + sizeof(GreedyMemoryPlanner), alignof(GreedyMemoryPlanner)); + memory_planner = new (memory_planner_buffer) GreedyMemoryPlanner(); + break; + } + } + return memory_planner; +} + TfLiteStatus CreatePlan(MicroMemoryPlanner* planner, const AllocationInfo* allocation_info, size_t allocation_info_size) { @@ -374,8 +397,8 @@ MicroAllocator* MicroAllocator::Create(uint8_t* tensor_arena, size_t arena_size, return Create(memory_allocator, memory_planner); } -MicroAllocator* MicroAllocator::Create(uint8_t* tensor_arena, - size_t arena_size) { +MicroAllocator* MicroAllocator::Create(uint8_t* tensor_arena, size_t arena_size, + MemoryPlannerType memory_planner_type) { uint8_t* aligned_arena = AlignPointerUp(tensor_arena, MicroArenaBufferAlignment()); size_t aligned_arena_size = tensor_arena + arena_size - aligned_arena; @@ -384,10 +407,8 @@ MicroAllocator* MicroAllocator::Create(uint8_t* tensor_arena, // By default create GreedyMemoryPlanner. // If a different MemoryPlanner is needed, use the other api. 
-  uint8_t* memory_planner_buffer = memory_allocator->AllocatePersistentBuffer(
-      sizeof(GreedyMemoryPlanner), alignof(GreedyMemoryPlanner));
-  GreedyMemoryPlanner* memory_planner =
-      new (memory_planner_buffer) GreedyMemoryPlanner();
+  MicroMemoryPlanner* memory_planner =
+      CreateMemoryPlanner(memory_planner_type, memory_allocator);
 
   return Create(memory_allocator, memory_planner);
 }
@@ -408,7 +429,8 @@ MicroAllocator* MicroAllocator::Create(
 MicroAllocator* MicroAllocator::Create(uint8_t* persistent_tensor_arena,
                                        size_t persistent_arena_size,
                                        uint8_t* non_persistent_tensor_arena,
-                                       size_t non_persistent_arena_size) {
+                                       size_t non_persistent_arena_size,
+                                       MemoryPlannerType memory_planner_type) {
   TFLITE_DCHECK(persistent_tensor_arena != nullptr);
   TFLITE_DCHECK(non_persistent_tensor_arena != nullptr);
   TFLITE_DCHECK(persistent_tensor_arena != non_persistent_tensor_arena);
@@ -421,11 +443,22 @@ MicroAllocator* MicroAllocator::Create(uint8_t* persistent_tensor_arena,
                                  non_persistent_arena_size,
                                  persistent_buffer_allocator);
 
-  uint8_t* memory_planner_buffer =
-      persistent_buffer_allocator->AllocatePersistentBuffer(
-          sizeof(GreedyMemoryPlanner), alignof(GreedyMemoryPlanner));
-  GreedyMemoryPlanner* memory_planner =
-      new (memory_planner_buffer) GreedyMemoryPlanner();
+  // TODO(b/297821738): this should be changed to CreateMemoryPlanner if
+  // possible once it's figured out why it breaks the HifiMini build
+  uint8_t* memory_planner_buffer = nullptr;
+  MicroMemoryPlanner* memory_planner = nullptr;
+
+  if (memory_planner_type == MemoryPlannerType::kGreedy) {
+    memory_planner_buffer =
+        persistent_buffer_allocator->AllocatePersistentBuffer(
+            sizeof(GreedyMemoryPlanner), alignof(GreedyMemoryPlanner));
+    memory_planner = new (memory_planner_buffer) GreedyMemoryPlanner();
+  } else if (memory_planner_type == MemoryPlannerType::kLinear) {
+    memory_planner_buffer =
+        persistent_buffer_allocator->AllocatePersistentBuffer(
+            sizeof(LinearMemoryPlanner), alignof(LinearMemoryPlanner));
+    memory_planner = new (memory_planner_buffer) LinearMemoryPlanner();
+  }
 
   uint8_t* micro_allocator_buffer =
       persistent_buffer_allocator->AllocatePersistentBuffer(
diff --git a/tensorflow/lite/micro/micro_allocator.h b/tensorflow/lite/micro/micro_allocator.h
index 3532577b957..4eff167d67f 100644
--- a/tensorflow/lite/micro/micro_allocator.h
+++ b/tensorflow/lite/micro/micro_allocator.h
@@ -66,6 +66,13 @@ struct ScratchBufferRequest {
 
 }  // namespace internal
 
+// Enum used to keep track of which MemoryPlanner is being used for
+// MicroAllocator::Create().
+enum class MemoryPlannerType {
+  kGreedy,
+  kLinear,
+};
+
 struct NodeAndRegistration {
   TfLiteNode node;
   const TFLMRegistration* registration;
@@ -117,7 +124,9 @@ class MicroAllocator {
   // Note: Please use alignas(16) to make sure tensor_arena is 16
   // bytes aligned, otherwise some head room will be wasted.
   // TODO(b/157615197): Cleanup constructor + factory usage.
-  static MicroAllocator* Create(uint8_t* tensor_arena, size_t arena_size);
+  static MicroAllocator* Create(
+      uint8_t* tensor_arena, size_t arena_size,
+      MemoryPlannerType memory_planner_type = MemoryPlannerType::kGreedy);
 
   // Creates a MicroAllocator instance from a given tensor arena and a given
   // MemoryPlanner. This arena will be managed by the created instance. Note:
@@ -137,14 +146,20 @@ class MicroAllocator {
   // SingleArenaBufferAllocator instance and the MemoryPlanner. This allocator
   // instance will use the SingleArenaBufferAllocator instance to manage
   // allocations internally.
-  static MicroAllocator* Create(uint8_t* persistent_tensor_arena,
-                                size_t persistent_arena_size,
-                                uint8_t* non_persistent_tensor_arena,
-                                size_t non_persistent_arena_size);
+  static MicroAllocator* Create(
+      uint8_t* persistent_tensor_arena, size_t persistent_arena_size,
+      uint8_t* non_persistent_tensor_arena, size_t non_persistent_arena_size,
+      MemoryPlannerType memory_planner_type = MemoryPlannerType::kGreedy);
 
   // Returns the fixed amount of memory overhead of MicroAllocator.
   static size_t GetDefaultTailUsage(bool is_memory_planner_given);
 
+  // Returns true if the MicroAllocator uses a LinearMemoryPlanner (i.e., is
+  // compatible with the PreserveAllTensors flag/feature), and false otherwise.
+  bool preserves_all_tensor() const {
+    return memory_planner_->preserves_all_tensors();
+  }
+
   // Allocates internal resources required for model inference for each subgraph
   // from the arena.
   //
diff --git a/tensorflow/lite/micro/micro_allocator_test.cc b/tensorflow/lite/micro/micro_allocator_test.cc
index 0c4878d055a..9e19e271daf 100644
--- a/tensorflow/lite/micro/micro_allocator_test.cc
+++ b/tensorflow/lite/micro/micro_allocator_test.cc
@@ -1,4 +1,4 @@
-/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -23,6 +23,7 @@ limitations under the License.
 #include "tensorflow/lite/micro/memory_helpers.h"
 #include "tensorflow/lite/micro/memory_planner/greedy_memory_planner.h"
 #include "tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim.h"
+#include "tensorflow/lite/micro/micro_allocator.h"
 #include "tensorflow/lite/micro/micro_arena_constants.h"
 #include "tensorflow/lite/micro/test_helpers.h"
 #include "tensorflow/lite/micro/testing/micro_test.h"
@@ -913,6 +914,30 @@ TF_LITE_MICRO_TEST(TestAllocatePersistentTfLiteTensor) {
   TF_LITE_MICRO_EXPECT_GT(tensor1, tensor2);
 }
 
+TF_LITE_MICRO_TEST(TestFailAllocatePersistentTfLiteTensor) {
+  const tflite::Model* model = tflite::testing::GetSimpleMockModel();
+  // MicroAllocator::Create always allocates GreedyMemoryPlanner,
+  // SingleArenaBufferAllocator and MicroAllocator objects.
+  // Memory available should be <= the sum of the alignments, which
+  // is < sizeof(TfLiteTensor).
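To spell out the sizing argument this new test relies on: Create() itself persistently allocates the planner, the allocator, and the arena-buffer allocator, so once that fixed overhead is subtracted from the arena, fewer than sizeof(TfLiteTensor) bytes remain and the persistent-tensor allocation must fail. In toy numbers, not the real struct sizes:

#include <cstddef>

constexpr size_t kArena = 512;          // toy arena size
constexpr size_t kFixedOverhead = 480;  // toy planner + allocators + alignment
constexpr size_t kTensorSize = 64;      // stand-in for sizeof(TfLiteTensor)
static_assert(kArena - kFixedOverhead < kTensorSize,
              "no room left for a persistent TfLiteTensor");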
+ constexpr size_t kArenaSize = sizeof(tflite::GreedyMemoryPlanner) + + alignof(tflite::GreedyMemoryPlanner) + + sizeof(tflite::MicroAllocator) + + alignof(tflite::MicroAllocator) + + sizeof(tflite::SingleArenaBufferAllocator) + + alignof(tflite::SingleArenaBufferAllocator) + + tflite::MicroArenaBufferAlignment(); + uint8_t arena[kArenaSize]; + tflite::MicroAllocator* allocator = + tflite::MicroAllocator::Create(arena, sizeof(arena)); + TF_LITE_MICRO_EXPECT(allocator != nullptr); + + TfLiteTensor* tensor1 = allocator->AllocatePersistentTfLiteTensor( + model, /*subgraph_allocations=*/nullptr, /*tensor_index=*/1, + /*subgraph_index=*/0); + TF_LITE_MICRO_EXPECT(tensor1 == nullptr); +} + TF_LITE_MICRO_TEST(TestAllocateSingleTempTfLiteTensor) { const tflite::Model* model = tflite::testing::GetSimpleMockModel(); constexpr size_t arena_size = 1024; diff --git a/tensorflow/lite/micro/micro_common.h b/tensorflow/lite/micro/micro_common.h index dc0bc0843a6..9ab427f5add 100644 --- a/tensorflow/lite/micro/micro_common.h +++ b/tensorflow/lite/micro/micro_common.h @@ -30,4 +30,9 @@ struct TFLMRegistration { const char* custom_name; }; +struct TFLMInferenceRegistration { + TfLiteStatus (*invoke)(TfLiteContext* context, TfLiteNode* node); + void (*reset)(TfLiteContext* context, void* buffer); +}; + #endif // THIRD_PARTY_TFLITE_MICRO_TENSORFLOW_LITE_MICRO_MICRO_COMMON_H_ diff --git a/tensorflow/lite/micro/micro_context.cc b/tensorflow/lite/micro/micro_context.cc index b06252acb5f..295b3c34463 100644 --- a/tensorflow/lite/micro/micro_context.cc +++ b/tensorflow/lite/micro/micro_context.cc @@ -1,4 +1,4 @@ -/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -17,48 +17,14 @@ limitations under the License. 
#include #include -#include -#include "tensorflow/lite/kernels/internal/compatibility.h" +#include "tensorflow/lite/micro/micro_common.h" #include "tensorflow/lite/micro/micro_log.h" namespace tflite { -MicroContext::MicroContext(MicroAllocator* allocator, const Model* model, - MicroGraph* graph) - : allocator_(*allocator), - graph_(*graph), - model_(model), - state_(InterpreterState::kInit) {} +namespace { -MicroContext::~MicroContext() {} - -void* MicroContext::AllocatePersistentBuffer(size_t bytes) { - TFLITE_DCHECK(state_ == InterpreterState::kPrepare || - state_ == InterpreterState::kInit); - return allocator_.AllocatePersistentBuffer(bytes); -} - -TfLiteStatus MicroContext::RequestScratchBufferInArena(size_t bytes, - int* buffer_idx) { - TFLITE_DCHECK(state_ == InterpreterState::kPrepare); - return allocator_.RequestScratchBufferInArena( - bytes, graph_.GetCurrentSubgraphIndex(), buffer_idx); -} - -void* MicroContext::GetScratchBuffer(int buffer_idx) { - TFLITE_DCHECK(state_ == InterpreterState::kInvoke); - ScratchBufferHandle* handle = scratch_buffer_handles_ + buffer_idx; - return handle->data; -} - -TfLiteTensor* MicroContext::AllocateTempTfLiteTensor(int tensor_idx) { - return allocator_.AllocateTempTfLiteTensor(model_, graph_.GetAllocations(), - tensor_idx, - graph_.GetCurrentSubgraphIndex()); -} - -int MicroContext::GetTensorIndex(int index, int max_size, - const int* tensor_indices) { +int GetTensorIndex(int index, int max_size, const int* tensor_indices) { if (index >= 0 && index < max_size) { const int tensor_index = tensor_indices[index]; if (tensor_index != kTfLiteOptionalTensor) { @@ -68,6 +34,8 @@ int MicroContext::GetTensorIndex(int index, int max_size, return -1; } +} // namespace + TfLiteTensor* MicroContext::AllocateTempInputTensor(const TfLiteNode* node, int index) { const int tensor_index = @@ -98,60 +66,12 @@ TfLiteTensor* MicroContext::AllocateTempIntermediateTensor( return AllocateTempTfLiteTensor(tensor_index); } -void MicroContext::DeallocateTempTfLiteTensor(TfLiteTensor* tensor) { - return allocator_.DeallocateTempTfLiteTensor(tensor); -} - -uint8_t* MicroContext::AllocateTempBuffer(size_t size, size_t alignment) { - TFLITE_DCHECK(state_ == InterpreterState::kPrepare); - return allocator_.AllocateTempBuffer(size, alignment); -} - -void MicroContext::DeallocateTempBuffer(uint8_t* buffer) { - TFLITE_DCHECK(state_ == InterpreterState::kPrepare); - allocator_.DeallocateTempBuffer(buffer); -} - -TfLiteEvalTensor* MicroContext::GetEvalTensor(int tensor_idx) { - return &graph_.GetAllocations()[graph_.GetCurrentSubgraphIndex()] - .tensors[tensor_idx]; -} - -void MicroContext::SetScratchBufferHandles( - ScratchBufferHandle* scratch_buffer_handles) { - scratch_buffer_handles_ = scratch_buffer_handles; -} - -TfLiteStatus MicroContext::set_external_context( - void* external_context_payload) { - TFLITE_DCHECK(state_ == InterpreterState::kPrepare || - state_ == InterpreterState::kInvoke); - if (external_context_payload == nullptr || - external_context_payload_ != nullptr) { - MicroPrintf( - "Attempting to set external context to %x but it was %x already", - external_context_payload, external_context_payload_); - return kTfLiteError; - } - - external_context_payload_ = external_context_payload; - return kTfLiteOk; -} - void MicroContextReportOpError(struct TfLiteContext* context, const char* format, ...) 
{
 va_list args;
 va_start(args, format);
- Log(format, args);
+ VMicroPrintf(format, args);
 va_end(args);
}
-void MicroContext::SetInterpreterState(MicroContext::InterpreterState state) {
- state_ = state;
-}
-
-MicroContext::InterpreterState MicroContext::GetInterpreterState() const {
- return state_;
-}
-
} // namespace tflite
diff --git a/tensorflow/lite/micro/micro_context.h b/tensorflow/lite/micro/micro_context.h
index 63b4b7d5938..2dd3233a159 100644
--- a/tensorflow/lite/micro/micro_context.h
+++ b/tensorflow/lite/micro/micro_context.h
@@ -17,10 +17,12 @@ limitations under the License.
 #define TENSORFLOW_LITE_MICRO_MICRO_CONTEXT_H_
 #include "tensorflow/lite/c/common.h"
-#include "tensorflow/lite/micro/micro_allocator.h"
 #include "tensorflow/lite/micro/micro_graph.h"
 namespace tflite {
+// TODO(b/149795762): kTfLiteAbort cannot be part of the tflite TfLiteStatus.
+const TfLiteStatus kTfLiteAbort = static_cast<TfLiteStatus>(15);
+
 // MicroContext is eventually going to become the API between TFLM and the
 // kernels, replacing all the functions in TfLiteContext. The end state is for
 // kernels to have code like:
 //
 // micro_context->
 class MicroContext {
 public:
- // Enum that allows MicroContext to keep track of the stages different memory
- // planning APIs are available to kernels.
- enum class InterpreterState {
- kInit,
- kPrepare,
- kMemoryPlanning,
- kInvoke,
- };
-
- // Does not take any ownership, and all pointers must refer to valid objects
- // that outlive the one constructed.
- explicit MicroContext(MicroAllocator* allocator, const Model* model,
- MicroGraph* graph);
- virtual ~MicroContext();
+ virtual ~MicroContext() = default;
 // Allocate persistent buffer which has the same lifetime as the interpreter.
 // Returns nullptr on failure.
 // The memory is allocated from the tail.
 // This method is only available in Init or Prepare stage.
- // Virtual so that it can be faked for kernel tests.
- virtual void* AllocatePersistentBuffer(size_t bytes);
+ virtual void* AllocatePersistentBuffer(size_t bytes) = 0;
 // Request a scratch buffer in the arena through static memory planning.
 // This method is only available in Prepare stage and the buffer is allocated
 // by the interpreter between Prepare and Eval stage. In Eval stage,
 // GetScratchBuffer API can be used to fetch the address.
- // Virtual so that it can be faked for kernel tests.
 virtual TfLiteStatus RequestScratchBufferInArena(size_t bytes,
- int* buffer_idx);
+ int* buffer_idx) = 0;
 // Get the scratch buffer pointer.
 // This method is only available in Eval stage.
- // Virtual so that it can be faked for kernel tests.
- virtual void* GetScratchBuffer(int buffer_idx);
+ virtual void* GetScratchBuffer(int buffer_idx) = 0;
 // Returns a temporary TfLiteTensor struct for a given index.
- // Virtual so that it can be faked for kernel tests.
- virtual TfLiteTensor* AllocateTempTfLiteTensor(int tensor_idx);
+ virtual TfLiteTensor* AllocateTempTfLiteTensor(int tensor_idx) = 0;
 // Returns a temporary TfLiteTensor struct for the specified input tensor of a
 // given node. This is the recommended API over the deprecated
 // GetInput/GetInputSafe to get a temp input tensor. The returned tensor shall
 // be freed via calling DeallocateTempTfLiteTensor.
- virtual TfLiteTensor* AllocateTempInputTensor(const TfLiteNode* node,
- int index);
+ TfLiteTensor* AllocateTempInputTensor(const TfLiteNode* node, int index);
 // Returns a temporary TfLiteTensor struct for the specified output tensor of
 // a given node.
This is the recommended API over the deprecated
 // GetOutput/GetOutputSafe to get a temp output tensor. The returned tensor
 // shall be freed via calling DeallocateTempTfLiteTensor.
- virtual TfLiteTensor* AllocateTempOutputTensor(const TfLiteNode* node,
- int index);
+ TfLiteTensor* AllocateTempOutputTensor(const TfLiteNode* node, int index);
 // Returns a temporary TfLiteTensor struct for the specified intermediate
 // tensor of a given node. This is the recommended API over the deprecated
 // GetIntermediates/GetIntermediatesSafe to get a temp intermediate tensor.
 // The returned tensor shall be freed via calling DeallocateTempTfLiteTensor.
- virtual TfLiteTensor* AllocateTempIntermediateTensor(const TfLiteNode* node,
- int index);
+ TfLiteTensor* AllocateTempIntermediateTensor(const TfLiteNode* node,
+ int index);
 // Deallocates a temp TfLiteTensor.
- // Virtual so that it can be faked for kernel tests.
- virtual void DeallocateTempTfLiteTensor(TfLiteTensor* tensor);
+ virtual void DeallocateTempTfLiteTensor(TfLiteTensor* tensor) = 0;
 // Returns a pointer to a temporary buffer (from the arena).
 // This API is only valid from the kernel's Prepare function and
 // the buffer's lifetime is also that of the Prepare function.
- // Virtual so that it can be faked for kernel tests.
- virtual uint8_t* AllocateTempBuffer(size_t size, size_t alignment);
+ virtual uint8_t* AllocateTempBuffer(size_t size, size_t alignment) = 0;
 // Signals that the temporary buffer is no longer needed.
- // Virtual so that it can be faked for kernel tests.
- virtual void DeallocateTempBuffer(uint8_t* buffer);
+ virtual void DeallocateTempBuffer(uint8_t* buffer) = 0;
 // Returns a TfLiteEvalTensor struct for a given index.
- // Virtual so that it can be faked for kernel tests.
- virtual TfLiteEvalTensor* GetEvalTensor(int tensor_idx);
-
- // Sets the State of MemoryPlanning MicroContext
- void SetInterpreterState(MicroContext::InterpreterState state);
-
- // Sets the State of MemoryPlanning MicroContext
- MicroContext::InterpreterState GetInterpreterState() const;
+ virtual TfLiteEvalTensor* GetEvalTensor(int tensor_idx) = 0;
 // Does not take ownership of the pointer, and the pointer must refer to a
 // valid object that outlives this class instance.
 // This can only be called once to set one external context.
- TfLiteStatus set_external_context(void* external_context_payload);
-
- void* external_context() { return external_context_payload_; }
+ virtual TfLiteStatus set_external_context(void* external_context_payload) = 0;
- MicroGraph& graph() { return graph_; }
+ virtual void* external_context() = 0;
- // Sets the pointer to a list of ScratchBufferHandle instances.
- // Not API between TFLM and kernels. Primarily used by the framework for
- // housekeeping in MicroContext.
- void SetScratchBufferHandles(ScratchBufferHandle* scratch_buffer_handles);
+ virtual MicroGraph& graph() = 0;
 private:
- // Return the tensor index as tensor_indices[index]. tensor_indices is of
- // max_size. Return -1 if index is not in the valid range of tensor_indices.
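With MicroContext reduced to the pure-virtual interface above, a kernel test can substitute its own implementation rather than faking the interpreter-backed one. A minimal sketch under that assumption; the class name and stub bodies are invented for illustration:

#include <cstddef>
#include <cstdint>

#include "tensorflow/lite/micro/micro_context.h"

// Hypothetical stand-in for kernel tests: every override is a stub.
class StubMicroContext : public tflite::MicroContext {
 public:
  explicit StubMicroContext(tflite::MicroGraph& graph) : graph_(graph) {}
  void* AllocatePersistentBuffer(size_t bytes) override { return nullptr; }
  TfLiteStatus RequestScratchBufferInArena(size_t bytes,
                                           int* buffer_idx) override {
    return kTfLiteError;
  }
  void* GetScratchBuffer(int buffer_idx) override { return nullptr; }
  TfLiteTensor* AllocateTempTfLiteTensor(int tensor_idx) override {
    return nullptr;
  }
  void DeallocateTempTfLiteTensor(TfLiteTensor* tensor) override {}
  uint8_t* AllocateTempBuffer(size_t size, size_t alignment) override {
    return nullptr;
  }
  void DeallocateTempBuffer(uint8_t* buffer) override {}
  TfLiteEvalTensor* GetEvalTensor(int tensor_idx) override { return nullptr; }
  TfLiteStatus set_external_context(void* external_context_payload) override {
    return kTfLiteError;
  }
  void* external_context() override { return nullptr; }
  tflite::MicroGraph& graph() override { return graph_; }

 private:
  tflite::MicroGraph& graph_;
};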
- int GetTensorIndex(int index, int max_size, const int* tensor_indices); - - MicroAllocator& allocator_; - MicroGraph& graph_; - const Model* model_; - InterpreterState state_; - - ScratchBufferHandle* scratch_buffer_handles_ = nullptr; - void* external_context_payload_ = nullptr; - TF_LITE_REMOVE_VIRTUAL_DELETE }; diff --git a/tensorflow/lite/micro/micro_graph.h b/tensorflow/lite/micro/micro_graph.h index ca8c40e264a..79b36496f44 100644 --- a/tensorflow/lite/micro/micro_graph.h +++ b/tensorflow/lite/micro/micro_graph.h @@ -16,90 +16,44 @@ limitations under the License. #ifndef TENSORFLOW_LITE_MICRO_MICRO_GRAPH_H_ #define TENSORFLOW_LITE_MICRO_MICRO_GRAPH_H_ -#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/compatibility.h" #include "tensorflow/lite/micro/micro_common.h" #include "tensorflow/lite/micro/micro_resource_variable.h" -#include "tensorflow/lite/schema/schema_generated.h" namespace tflite { -// Abstracts the details of interacting with the tflite::Model. +// Abstracts the details of interacting with the graph from the kernels // -// Provides methods to access, initialize, prepare, invoke and free any -// subgraph in the tflite::Graph. +// Provides methods to invoke any subgraph in the tflite::Graph. class MicroGraph { public: - // The lifetime of the context, model, allocator and resource_variables must - // be at least as long as that of the graph object, since the this class may - // need to access them at any time. If resource_variables is a nullptr, - // GetResourceVariables will return a nullptr. - MicroGraph(TfLiteContext* context, const Model* model, - MicroAllocator* allocator, - MicroResourceVariables* resource_variables); - virtual ~MicroGraph(); - - // Sets up builtin data and calls TFLMRegistration->Init for every - // operator in every subgraph in the model. - virtual TfLiteStatus InitSubgraphs(); - - // Calls TFLMRegistration->Prepare for every operator in every subgraph - // in the model. - virtual TfLiteStatus PrepareSubgraphs(); - - // Calls TFLMRegistration->Reset for every operator in every subgraph in - // the model. - virtual TfLiteStatus ResetSubgraphs(); - - // Calls TFLMRegistration->Free for every operator in every subgraph in - // the model. - virtual TfLiteStatus FreeSubgraphs(); + virtual ~MicroGraph() = default; // Calls TFLMRegistration->Invoke for every operator in a single subgraph // in the model. - virtual TfLiteStatus InvokeSubgraph(int subgraph_idx); - - // Zeros out all variable tensors in all subgraphs in the model. - virtual TfLiteStatus ResetVariableTensors(); + virtual TfLiteStatus InvokeSubgraph(int subgraph_idx) = 0; // Number of tensor inputs to a specified subgraph in the model. - virtual size_t NumSubgraphInputs(int subgraph_idx); + virtual size_t NumSubgraphInputs(int subgraph_idx) = 0; // Get the specified input tensor of a specified subgraph in the model. - virtual TfLiteEvalTensor* GetSubgraphInput(int subgraph_idx, int input_idx); + virtual TfLiteEvalTensor* GetSubgraphInput(int subgraph_idx, + int input_idx) = 0; // Number of tensor outputs from a specified subgraph in the model. - virtual size_t NumSubgraphOutputs(int subgraph_idx); + virtual size_t NumSubgraphOutputs(int subgraph_idx) = 0; // Get the specified output tensor of a specified subgraph in the model. - virtual TfLiteEvalTensor* GetSubgraphOutput(int subgraph_idx, int output_idx); + virtual TfLiteEvalTensor* GetSubgraphOutput(int subgraph_idx, + int output_idx) = 0; // Number of subgraphs in the model. 
- virtual int NumSubgraphs(); - - // Hook to pass in subgraph allocations tracked within the interpreter, - // allowing MicroGraph to init / prepare / invoke subgraphs in the model. - void SetSubgraphAllocations(SubgraphAllocations* subgraph_allocations); - - // Get the current subgraph index. Within an on operator, this is guaranteed - // to be the subgraph of that operator. - int GetCurrentSubgraphIndex() { return current_subgraph_index_; } - - // Gets the list of alloctions for each subgraph. This is the source of truth - // for all per-subgraph allocation data. - SubgraphAllocations* GetAllocations() { return subgraph_allocations_; } + virtual int NumSubgraphs() = 0; // Get the resource variables for this TFLM graph. - MicroResourceVariables* GetResourceVariables() { return resource_variables_; } + virtual MicroResourceVariables* GetResourceVariables() = 0; private: - TfLiteContext* context_; - const Model* model_; - MicroAllocator* allocator_; - SubgraphAllocations* subgraph_allocations_ = nullptr; - int current_subgraph_index_; - MicroResourceVariables* resource_variables_; - const flatbuffers::Vector>* subgraphs_; - TF_LITE_REMOVE_VIRTUAL_DELETE }; diff --git a/tensorflow/lite/micro/micro_interpreter.cc b/tensorflow/lite/micro/micro_interpreter.cc index c6917b4b922..7f4565e638a 100644 --- a/tensorflow/lite/micro/micro_interpreter.cc +++ b/tensorflow/lite/micro/micro_interpreter.cc @@ -24,7 +24,7 @@ limitations under the License. #include "tensorflow/lite/micro/flatbuffer_utils.h" #include "tensorflow/lite/micro/memory_helpers.h" #include "tensorflow/lite/micro/micro_allocator.h" -#include "tensorflow/lite/micro/micro_context.h" +#include "tensorflow/lite/micro/micro_interpreter_context.h" #include "tensorflow/lite/micro/micro_log.h" #include "tensorflow/lite/micro/micro_op_resolver.h" #include "tensorflow/lite/micro/micro_profiler_interface.h" @@ -33,17 +33,28 @@ limitations under the License. 
#include "tensorflow/lite/schema/schema_utils.h" namespace tflite { +namespace { +MemoryPlannerType FlagToMemoryPlannerType(bool preserve_all_tensors) { + if (preserve_all_tensors) { + return MemoryPlannerType::kLinear; + } else { + return MemoryPlannerType::kGreedy; + } +} +} // namespace MicroInterpreter::MicroInterpreter(const Model* model, const MicroOpResolver& op_resolver, uint8_t* tensor_arena, size_t tensor_arena_size, MicroResourceVariables* resource_variables, - MicroProfilerInterface* profiler) + MicroProfilerInterface* profiler, + bool preserve_all_tensors) : model_(model), op_resolver_(op_resolver), - allocator_(*MicroAllocator::Create(tensor_arena, tensor_arena_size)), - + allocator_(*MicroAllocator::Create( + tensor_arena, tensor_arena_size, + FlagToMemoryPlannerType(preserve_all_tensors))), graph_(&context_, model, &allocator_, resource_variables), tensors_allocated_(false), initialization_status_(kTfLiteError), @@ -77,7 +88,8 @@ MicroInterpreter::~MicroInterpreter() { } void MicroInterpreter::Init(MicroProfilerInterface* profiler) { - micro_context_.SetInterpreterState(MicroContext::InterpreterState::kInit); + micro_context_.SetInterpreterState( + MicroInterpreterContext::InterpreterState::kInit); context_.impl_ = static_cast(µ_context_); context_.ReportError = MicroContextReportOpError; context_.GetTensor = MicroContextGetTensor; @@ -198,15 +210,17 @@ TfLiteStatus MicroInterpreter::AllocateTensors() { TF_LITE_ENSURE_STATUS(PrepareNodeAndRegistrationDataFromFlatbuffer()); - micro_context_.SetInterpreterState(MicroContext::InterpreterState::kInit); + micro_context_.SetInterpreterState( + MicroInterpreterContext::InterpreterState::kInit); TF_LITE_ENSURE_STATUS(graph_.InitSubgraphs()); - micro_context_.SetInterpreterState(MicroContext::InterpreterState::kPrepare); + micro_context_.SetInterpreterState( + MicroInterpreterContext::InterpreterState::kPrepare); TF_LITE_ENSURE_STATUS(graph_.PrepareSubgraphs()); micro_context_.SetInterpreterState( - MicroContext::InterpreterState::kMemoryPlanning); + MicroInterpreterContext::InterpreterState::kMemoryPlanning); TF_LITE_ENSURE_OK(&context_, allocator_.FinishModelAllocation( model_, graph_.GetAllocations(), @@ -261,7 +275,8 @@ TfLiteStatus MicroInterpreter::AllocateTensors() { TF_LITE_ENSURE_STATUS(Reset()); tensors_allocated_ = true; - micro_context_.SetInterpreterState(MicroContext::InterpreterState::kInvoke); + micro_context_.SetInterpreterState( + MicroInterpreterContext::InterpreterState::kInvoke); return kTfLiteOk; } @@ -305,6 +320,15 @@ TfLiteStatus MicroInterpreter::Reset() { return graph_.ResetVariableTensors(); } +TfLiteEvalTensor* MicroInterpreter::GetTensor(int tensor_index, + int subgraph_index) { + if (!allocator_.preserves_all_tensor()) { + MicroPrintf("GetTensor requires all tensors to be preserved"); + return nullptr; + } + return &graph_.GetAllocations()[subgraph_index].tensors[tensor_index]; +} + TfLiteStatus MicroInterpreter::SetMicroExternalContext( void* external_context_payload) { return micro_context_.set_external_context(external_context_payload); diff --git a/tensorflow/lite/micro/micro_interpreter.h b/tensorflow/lite/micro/micro_interpreter.h index a77b0e0b4e8..1c419962239 100644 --- a/tensorflow/lite/micro/micro_interpreter.h +++ b/tensorflow/lite/micro/micro_interpreter.h @@ -24,8 +24,8 @@ limitations under the License. 
#include "tensorflow/lite/core/api/error_reporter.h" #include "tensorflow/lite/kernels/internal/tensor_ctypes.h" #include "tensorflow/lite/micro/micro_allocator.h" -#include "tensorflow/lite/micro/micro_context.h" -#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_interpreter_context.h" +#include "tensorflow/lite/micro/micro_interpreter_graph.h" #include "tensorflow/lite/micro/micro_op_resolver.h" #include "tensorflow/lite/micro/micro_profiler_interface.h" #include "tensorflow/lite/portable_type_to_tflitetype.h" @@ -50,7 +50,8 @@ class MicroInterpreter { MicroInterpreter(const Model* model, const MicroOpResolver& op_resolver, uint8_t* tensor_arena, size_t tensor_arena_size, MicroResourceVariables* resource_variables = nullptr, - MicroProfilerInterface* profiler = nullptr); + MicroProfilerInterface* profiler = nullptr, + bool preserve_all_tensors = false); // Create an interpreter instance using an existing MicroAllocator instance. // This constructor should be used when creating an allocator that needs to @@ -115,6 +116,9 @@ class MicroInterpreter { return nullptr; } + // Returns a pointer to the tensor for the corresponding tensor_index + TfLiteEvalTensor* GetTensor(int tensor_index, int subgraph_index = 0); + // Reset the state to be what you would expect when the interpreter is first // created. i.e. after Init and Prepare is called for the very first time. TfLiteStatus Reset(); @@ -135,6 +139,13 @@ class MicroInterpreter { // arena_used_bytes() + 16. size_t arena_used_bytes() const { return allocator_.used_bytes(); } + // Returns True if all Tensors are being preserves + // TODO(b/297106074) : revisit making C++ example or test for + // preserve_all_tesnors + bool preserve_all_tensors() const { + return allocator_.preserves_all_tensor(); + } + protected: const MicroAllocator& allocator() const { return allocator_; } const TfLiteContext& context() const { return context_; } @@ -151,7 +162,7 @@ class MicroInterpreter { const MicroOpResolver& op_resolver_; TfLiteContext context_ = {}; MicroAllocator& allocator_; - MicroGraph graph_; + MicroInterpreterGraph graph_; bool tensors_allocated_; TfLiteStatus initialization_status_; @@ -163,7 +174,7 @@ class MicroInterpreter { TfLiteTensor** input_tensors_; TfLiteTensor** output_tensors_; - MicroContext micro_context_; + MicroInterpreterContext micro_context_; }; } // namespace tflite diff --git a/tensorflow/lite/micro/micro_interpreter_context.cc b/tensorflow/lite/micro/micro_interpreter_context.cc new file mode 100644 index 00000000000..098df15d522 --- /dev/null +++ b/tensorflow/lite/micro/micro_interpreter_context.cc @@ -0,0 +1,109 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+==============================================================================*/ + +#include "tensorflow/lite/micro/micro_interpreter_context.h" + +#include + +#include "tensorflow/lite/kernels/internal/compatibility.h" + +namespace tflite { +MicroInterpreterContext::MicroInterpreterContext(MicroAllocator* allocator, + const Model* model, + MicroInterpreterGraph* graph) + : allocator_(*allocator), + graph_(*graph), + model_(model), + state_(InterpreterState::kInit) {} + +MicroInterpreterContext::~MicroInterpreterContext() {} + +void* MicroInterpreterContext::AllocatePersistentBuffer(size_t bytes) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare || + state_ == InterpreterState::kInit); + return allocator_.AllocatePersistentBuffer(bytes); +} + +TfLiteStatus MicroInterpreterContext::RequestScratchBufferInArena( + size_t bytes, int* buffer_idx) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare); + return allocator_.RequestScratchBufferInArena( + bytes, graph_.GetCurrentSubgraphIndex(), buffer_idx); +} + +void* MicroInterpreterContext::GetScratchBuffer(int buffer_idx) { + TFLITE_DCHECK(state_ == InterpreterState::kInvoke); + ScratchBufferHandle* handle = scratch_buffer_handles_ + buffer_idx; + return handle->data; +} + +TfLiteTensor* MicroInterpreterContext::AllocateTempTfLiteTensor( + int tensor_idx) { + return allocator_.AllocateTempTfLiteTensor(model_, graph_.GetAllocations(), + tensor_idx, + graph_.GetCurrentSubgraphIndex()); +} + +void MicroInterpreterContext::DeallocateTempTfLiteTensor(TfLiteTensor* tensor) { + return allocator_.DeallocateTempTfLiteTensor(tensor); +} + +uint8_t* MicroInterpreterContext::AllocateTempBuffer(size_t size, + size_t alignment) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare); + return allocator_.AllocateTempBuffer(size, alignment); +} + +void MicroInterpreterContext::DeallocateTempBuffer(uint8_t* buffer) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare); + allocator_.DeallocateTempBuffer(buffer); +} + +TfLiteEvalTensor* MicroInterpreterContext::GetEvalTensor(int tensor_idx) { + return &graph_.GetAllocations()[graph_.GetCurrentSubgraphIndex()] + .tensors[tensor_idx]; +} + +void MicroInterpreterContext::SetScratchBufferHandles( + ScratchBufferHandle* scratch_buffer_handles) { + scratch_buffer_handles_ = scratch_buffer_handles; +} + +TfLiteStatus MicroInterpreterContext::set_external_context( + void* external_context_payload) { + TFLITE_DCHECK(state_ == InterpreterState::kPrepare || + state_ == InterpreterState::kInvoke); + if (external_context_payload == nullptr || + external_context_payload_ != nullptr) { + MicroPrintf( + "Attempting to set external context to %x but it was %x already", + external_context_payload, external_context_payload_); + return kTfLiteError; + } + + external_context_payload_ = external_context_payload; + return kTfLiteOk; +} + +void MicroInterpreterContext::SetInterpreterState(InterpreterState state) { + state_ = state; +} + +MicroInterpreterContext::InterpreterState +MicroInterpreterContext::GetInterpreterState() const { + return state_; +} + +} // namespace tflite diff --git a/tensorflow/lite/micro/micro_interpreter_context.h b/tensorflow/lite/micro/micro_interpreter_context.h new file mode 100644 index 00000000000..5986dc37fd2 --- /dev/null +++ b/tensorflow/lite/micro/micro_interpreter_context.h @@ -0,0 +1,123 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_CONTEXT_H_
+#define TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_CONTEXT_H_
+
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/micro_allocator.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_interpreter_graph.h"
+#include "tensorflow/lite/micro/micro_log.h"
+
+namespace tflite {
+
+// A full implementation of the MicroContext, to be used by the
+// MicroInterpreter. Kernels should not depend on this directly. Instead they
+// should only depend on the MicroContext.
+class MicroInterpreterContext : public MicroContext {
+ public:
+ // Enum that allows MicroContext to keep track of the stages in which
+ // different memory planning APIs are available to kernels.
+ enum class InterpreterState {
+ kInit,
+ kPrepare,
+ kMemoryPlanning,
+ kInvoke,
+ };
+
+ // Does not take any ownership, and all pointers must refer to valid objects
+ // that outlive the one constructed.
+ MicroInterpreterContext(MicroAllocator* allocator, const Model* model,
+ MicroInterpreterGraph* graph);
+ virtual ~MicroInterpreterContext();
+
+ // Allocate persistent buffer which has the same lifetime as the interpreter.
+ // Returns nullptr on failure.
+ // The memory is allocated from the tail.
+ // This method is only available in Init or Prepare stage.
+ // Virtual so that it can be faked for kernel tests.
+ virtual void* AllocatePersistentBuffer(size_t bytes) override;
+
+ // Request a scratch buffer in the arena through static memory planning.
+ // This method is only available in Prepare stage and the buffer is allocated
+ // by the interpreter between Prepare and Eval stage. In Eval stage,
+ // GetScratchBuffer API can be used to fetch the address.
+ // Virtual so that it can be faked for kernel tests.
+ virtual TfLiteStatus RequestScratchBufferInArena(size_t bytes,
+ int* buffer_idx) override;
+
+ // Get the scratch buffer pointer.
+ // This method is only available in Eval stage.
+ // Virtual so that it can be faked for kernel tests.
+ virtual void* GetScratchBuffer(int buffer_idx) override;
+
+ // Returns a temporary TfLiteTensor struct for a given index.
+ // Virtual so that it can be faked for kernel tests.
+ virtual TfLiteTensor* AllocateTempTfLiteTensor(int tensor_idx) override;
+
+ // Deallocates a temp TfLiteTensor.
+ // Virtual so that it can be faked for kernel tests.
+ virtual void DeallocateTempTfLiteTensor(TfLiteTensor* tensor) override;
+
+ // Returns a pointer to a temporary buffer (from the arena).
+ // This API is only valid from the kernel's Prepare function and
+ // the buffer's lifetime is also that of the Prepare function.
+ // Virtual so that it can be faked for kernel tests.
+ virtual uint8_t* AllocateTempBuffer(size_t size, size_t alignment) override;
+
+ // Signals that the temporary buffer is no longer needed.
+ // Virtual so that it can be faked for kernel tests.
+ virtual void DeallocateTempBuffer(uint8_t* buffer) override;
+
+ // Returns a TfLiteEvalTensor struct for a given index.
+ // Virtual so that it can be faked for kernel tests.
+ virtual TfLiteEvalTensor* GetEvalTensor(int tensor_idx) override;
+
+ // Sets the memory-planning state of the MicroInterpreterContext.
+ void SetInterpreterState(InterpreterState state);
+
+ // Gets the memory-planning state of the MicroInterpreterContext.
+ InterpreterState GetInterpreterState() const;
+
+ // Does not take ownership of the pointer, and the pointer must refer to a
+ // valid object that outlives this class instance.
+ // This can only be called once to set one external context.
+ TfLiteStatus set_external_context(void* external_context_payload) override;
+
+ void* external_context() override { return external_context_payload_; }
+
+ MicroGraph& graph() override { return graph_; }
+
+ // Sets the pointer to a list of ScratchBufferHandle instances.
+ // Not API between TFLM and kernels. Primarily used by the framework for
+ // housekeeping in MicroInterpreterContext.
+ void SetScratchBufferHandles(ScratchBufferHandle* scratch_buffer_handles);
+
+ private:
+ MicroAllocator& allocator_;
+ MicroInterpreterGraph& graph_;
+ const Model* model_;
+ InterpreterState state_;
+
+ ScratchBufferHandle* scratch_buffer_handles_ = nullptr;
+ void* external_context_payload_ = nullptr;
+
+ TF_LITE_REMOVE_VIRTUAL_DELETE
+};
+
+} // namespace tflite
+
+#endif // TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_CONTEXT_H_
diff --git a/tensorflow/lite/micro/micro_context_test.cc b/tensorflow/lite/micro/micro_interpreter_context_test.cc
similarity index 78%
rename from tensorflow/lite/micro/micro_context_test.cc
rename to tensorflow/lite/micro/micro_interpreter_context_test.cc
index e01d3879fe6..3af123f5511 100644
--- a/tensorflow/lite/micro/micro_context_test.cc
+++ b/tensorflow/lite/micro/micro_interpreter_context_test.cc
@@ -12,12 +12,13 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 See the License for the specific language governing permissions and
 limitations under the License.
==============================================================================*/
-#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_interpreter_context.h"
 #include
 #include "tensorflow/lite/micro/micro_allocator.h"
 #include "tensorflow/lite/micro/micro_arena_constants.h"
+#include "tensorflow/lite/micro/micro_interpreter_graph.h"
 #include "tensorflow/lite/micro/test_helpers.h"
 #include "tensorflow/lite/micro/testing/micro_test.h"
@@ -26,23 +27,20 @@ using ::tflite::testing::IntArrayFromInts;
 namespace tflite {
 namespace {
-tflite::MicroContext CreateMicroContext() {
+tflite::MicroInterpreterContext CreateMicroInterpreterContext() {
 // Some targets do not support dynamic memory (i.e., no malloc or new), thus,
 // the tests need to place non-transient memories in static variables. This is
 // safe because tests are guaranteed to run serially.
- constexpr size_t kMicroGraphPlacementBufferSize = 1024;
- alignas(4) static uint8_t
- micro_graph_placement_buffer[kMicroGraphPlacementBufferSize];
 constexpr size_t kArenaSize = 1024;
 static uint8_t tensor_arena[kArenaSize];
 const tflite::Model* model = tflite::testing::GetSimpleMockModel();
 MicroAllocator* micro_allocator = MicroAllocator::Create(tensor_arena, kArenaSize);
- MicroGraph* micro_graph = new (micro_graph_placement_buffer)
- MicroGraph(nullptr, nullptr, nullptr, nullptr);
+ static MicroInterpreterGraph micro_graph(nullptr, nullptr, nullptr, nullptr);
- tflite::MicroContext micro_context(micro_allocator, model, micro_graph);
+ tflite::MicroInterpreterContext micro_context(micro_allocator, model,
+ &micro_graph);
 return micro_context;
}
@@ -58,9 +56,10 @@
TF_LITE_MICRO_TESTS_BEGIN
// Ensures that a regular set and get pair works ok.
TF_LITE_MICRO_TEST(TestSetGetExternalContextSuccess) {
- tflite::MicroContext micro_context = tflite::CreateMicroContext();
+ tflite::MicroInterpreterContext micro_context =
+ tflite::CreateMicroInterpreterContext();
 micro_context.SetInterpreterState(
- tflite::MicroContext::InterpreterState::kInvoke);
+ tflite::MicroInterpreterContext::InterpreterState::kInvoke);
 tflite::TestExternalContextPayloadData payload;
 TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk,
@@ -75,7 +74,8 @@ TF_LITE_MICRO_TEST(TestSetGetExternalContextSuccess) {
}
TF_LITE_MICRO_TEST(TestGetExternalContextWithoutSetShouldReturnNull) {
- tflite::MicroContext micro_context = tflite::CreateMicroContext();
+ tflite::MicroInterpreterContext micro_context =
+ tflite::CreateMicroInterpreterContext();
 tflite::TestExternalContextPayloadData* returned_external_context =
 reinterpret_cast<tflite::TestExternalContextPayloadData*>(
@@ -86,9 +86,10 @@ TF_LITE_MICRO_TEST(TestGetExternalContextWithoutSetShouldReturnNull) {
}
TF_LITE_MICRO_TEST(TestSetExternalContextCanOnlyBeCalledOnce) {
- tflite::MicroContext micro_context = tflite::CreateMicroContext();
+ tflite::MicroInterpreterContext micro_context =
+ tflite::CreateMicroInterpreterContext();
 micro_context.SetInterpreterState(
- tflite::MicroContext::InterpreterState::kPrepare);
+ tflite::MicroInterpreterContext::InterpreterState::kPrepare);
 tflite::TestExternalContextPayloadData payload;
 TF_LITE_MICRO_EXPECT_EQ(kTfLiteOk,
@@ -100,15 +101,17 @@ TF_LITE_MICRO_TEST(TestSetExternalContextCanOnlyBeCalledOnce) {
}
TF_LITE_MICRO_TEST(TestSetExternalContextToNullShouldFail) {
- tflite::MicroContext micro_context = tflite::CreateMicroContext();
+ tflite::MicroInterpreterContext micro_context =
+ tflite::CreateMicroInterpreterContext();
 micro_context.SetInterpreterState(
- tflite::MicroContext::InterpreterState::kPrepare);
+ tflite::MicroInterpreterContext::InterpreterState::kPrepare);
 TF_LITE_MICRO_EXPECT_EQ(kTfLiteError, micro_context.set_external_context(nullptr));
}
TF_LITE_MICRO_TEST(TestGetTempInputTensor) {
- tflite::MicroContext micro_context = tflite::CreateMicroContext();
+ tflite::MicroInterpreterContext micro_context =
+ tflite::CreateMicroInterpreterContext();
 TfLiteNode node;
 int input_data[] = {2, 0, 1};
@@ -127,7 +130,8 @@ TF_LITE_MICRO_TEST(TestGetTempInputTensor) {
}
TF_LITE_MICRO_TEST(TestGetTempOutputTensor) {
- tflite::MicroContext micro_context = tflite::CreateMicroContext();
+ tflite::MicroInterpreterContext micro_context =
+ tflite::CreateMicroInterpreterContext();
 TfLiteNode node;
 int output_data[] = {1, 0};
@@ -143,16 +147,18 @@ TF_LITE_MICRO_TEST(TestGetTempOutputTensor) {
}
TF_LITE_MICRO_TEST(TestAllocateTempBuffer) {
- tflite::MicroContext micro_context =
tflite::CreateMicroContext(); + tflite::MicroInterpreterContext micro_context = + tflite::CreateMicroInterpreterContext(); micro_context.SetInterpreterState( - tflite::MicroContext::InterpreterState::kPrepare); + tflite::MicroInterpreterContext::InterpreterState::kPrepare); uint8_t* buffer1 = micro_context.AllocateTempBuffer(10, tflite::MicroArenaBufferAlignment()); TF_LITE_MICRO_EXPECT(buffer1 != nullptr); } TF_LITE_MICRO_TEST(TestGetTempIntermediateTensor) { - tflite::MicroContext micro_context = tflite::CreateMicroContext(); + tflite::MicroInterpreterContext micro_context = + tflite::CreateMicroInterpreterContext(); TfLiteNode node; int intermediate_data[] = {1, 0}; diff --git a/tensorflow/lite/micro/micro_graph.cc b/tensorflow/lite/micro/micro_interpreter_graph.cc similarity index 87% rename from tensorflow/lite/micro/micro_graph.cc rename to tensorflow/lite/micro/micro_interpreter_graph.cc index 35c6c1fceac..0d18fe7399f 100644 --- a/tensorflow/lite/micro/micro_graph.cc +++ b/tensorflow/lite/micro/micro_interpreter_graph.cc @@ -13,7 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ -#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_interpreter_graph.h" #include "flatbuffers/flatbuffers.h" // from @flatbuffers #include "tensorflow/lite/c/common.h" @@ -37,9 +37,9 @@ const char* OpNameFromRegistration(const TFLMRegistration* registration) { } // namespace -MicroGraph::MicroGraph(TfLiteContext* context, const Model* model, - MicroAllocator* allocator, - MicroResourceVariables* resource_variables) +MicroInterpreterGraph::MicroInterpreterGraph( + TfLiteContext* context, const Model* model, MicroAllocator* allocator, + MicroResourceVariables* resource_variables) : context_(context), model_(model), allocator_(allocator), @@ -50,9 +50,9 @@ MicroGraph::MicroGraph(TfLiteContext* context, const Model* model, } } -MicroGraph::~MicroGraph() {} +MicroInterpreterGraph::~MicroInterpreterGraph() {} -TfLiteStatus MicroGraph::InitSubgraphs() { +TfLiteStatus MicroInterpreterGraph::InitSubgraphs() { int previous_subgraph_idx = current_subgraph_index_; for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); @@ -85,7 +85,7 @@ TfLiteStatus MicroGraph::InitSubgraphs() { return kTfLiteOk; } -TfLiteStatus MicroGraph::PrepareSubgraphs() { +TfLiteStatus MicroInterpreterGraph::PrepareSubgraphs() { int previous_subgraph_idx = current_subgraph_index_; for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); @@ -114,7 +114,7 @@ TfLiteStatus MicroGraph::PrepareSubgraphs() { return kTfLiteOk; } -TfLiteStatus MicroGraph::ResetSubgraphs() { +TfLiteStatus MicroInterpreterGraph::ResetSubgraphs() { int previous_subgraph_idx = current_subgraph_index_; for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); @@ -139,7 +139,7 @@ TfLiteStatus MicroGraph::ResetSubgraphs() { return kTfLiteOk; } -TfLiteStatus MicroGraph::FreeSubgraphs() { +TfLiteStatus MicroInterpreterGraph::FreeSubgraphs() { int previous_subgraph_idx = current_subgraph_index_; for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); @@ -164,7 +164,7 @@ TfLiteStatus MicroGraph::FreeSubgraphs() { return kTfLiteOk; } -TfLiteStatus MicroGraph::InvokeSubgraph(int subgraph_idx) { +TfLiteStatus MicroInterpreterGraph::InvokeSubgraph(int subgraph_idx) { int previous_subgraph_idx = current_subgraph_index_; current_subgraph_index_ = subgraph_idx; @@ -211,7 +211,7 @@ 
TfLiteStatus MicroGraph::InvokeSubgraph(int subgraph_idx) { return kTfLiteOk; } -TfLiteStatus MicroGraph::ResetVariableTensors() { +TfLiteStatus MicroInterpreterGraph::ResetVariableTensors() { for (size_t subgraph_idx = 0; subgraph_idx < subgraphs_->size(); subgraph_idx++) { const SubGraph* subgraph = (*subgraphs_)[subgraph_idx]; @@ -238,30 +238,34 @@ TfLiteStatus MicroGraph::ResetVariableTensors() { return kTfLiteOk; } -int MicroGraph::NumSubgraphs() { return model_->subgraphs()->size(); } +int MicroInterpreterGraph::NumSubgraphs() { + return model_->subgraphs()->size(); +} -void MicroGraph::SetSubgraphAllocations( +void MicroInterpreterGraph::SetSubgraphAllocations( SubgraphAllocations* subgraph_allocations) { subgraph_allocations_ = subgraph_allocations; } -size_t MicroGraph::NumSubgraphInputs(int subgraph_idx) { +size_t MicroInterpreterGraph::NumSubgraphInputs(int subgraph_idx) { return model_->subgraphs()->Get(subgraph_idx)->inputs()->size(); } -TfLiteEvalTensor* MicroGraph::GetSubgraphInput(int subgraph_idx, - int input_idx) { +TfLiteEvalTensor* MicroInterpreterGraph::GetSubgraphInput(int subgraph_idx, + int input_idx) { int tensor_idx = model_->subgraphs()->Get(subgraph_idx)->inputs()->Get(input_idx); return &subgraph_allocations_[subgraph_idx].tensors[tensor_idx]; } -size_t MicroGraph::NumSubgraphOutputs(int subgraph_idx) { - return model_->subgraphs()->Get(subgraph_idx)->outputs()->size(); +size_t MicroInterpreterGraph::NumSubgraphOutputs(int subgraph_idx) { + return model_->subgraphs()->Get(subgraph_idx)->outputs() == nullptr + ? 0 + : model_->subgraphs()->Get(subgraph_idx)->outputs()->size(); } -TfLiteEvalTensor* MicroGraph::GetSubgraphOutput(int subgraph_idx, - int output_idx) { +TfLiteEvalTensor* MicroInterpreterGraph::GetSubgraphOutput(int subgraph_idx, + int output_idx) { int tensor_idx = model_->subgraphs()->Get(subgraph_idx)->outputs()->Get(output_idx); return &subgraph_allocations_[subgraph_idx].tensors[tensor_idx]; diff --git a/tensorflow/lite/micro/micro_interpreter_graph.h b/tensorflow/lite/micro/micro_interpreter_graph.h new file mode 100644 index 00000000000..5c2121aa924 --- /dev/null +++ b/tensorflow/lite/micro/micro_interpreter_graph.h @@ -0,0 +1,110 @@ +/* Copyright 2021 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_GRAPH_H_ +#define TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_GRAPH_H_ + +#include "tensorflow/lite/micro/micro_allocator.h" +#include "tensorflow/lite/micro/micro_common.h" +#include "tensorflow/lite/micro/micro_graph.h" +#include "tensorflow/lite/micro/micro_resource_variable.h" +#include "tensorflow/lite/schema/schema_generated.h" + +namespace tflite { + +// Abstracts the details of interacting with the tflite::Model. +// +// Provides methods to access, initialize, prepare, invoke and free any +// subgraph in the tflite::Graph. 
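The class that follows is the concrete implementation the interpreter instantiates; kernels keep programming against the slimmed-down MicroGraph interface from micro_graph.h. As a rough sketch of kernel-side usage through that interface (the helper name is invented):

#include "tensorflow/lite/micro/micro_context.h"
#include "tensorflow/lite/micro/micro_graph.h"

// Hypothetical helper: invoke a subgraph from a kernel using only the
// MicroGraph interface.
TfLiteStatus InvokeSubgraphChecked(tflite::MicroContext* micro_context,
                                   int subgraph_idx) {
  tflite::MicroGraph& graph = micro_context->graph();
  if (subgraph_idx < 0 || subgraph_idx >= graph.NumSubgraphs()) {
    return kTfLiteError;
  }
  return graph.InvokeSubgraph(subgraph_idx);
}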
+class MicroInterpreterGraph : public MicroGraph {
+ public:
+ // The lifetime of the context, model, allocator and resource_variables must
+ // be at least as long as that of the graph object, since this class may
+ // need to access them at any time. If resource_variables is a nullptr,
+ // GetResourceVariables will return a nullptr.
+ MicroInterpreterGraph(TfLiteContext* context, const Model* model,
+ MicroAllocator* allocator,
+ MicroResourceVariables* resource_variables);
+ virtual ~MicroInterpreterGraph();
+
+ // Sets up builtin data and calls TFLMRegistration->Init for every
+ // operator in every subgraph in the model.
+ virtual TfLiteStatus InitSubgraphs();
+
+ // Calls TFLMRegistration->Prepare for every operator in every subgraph
+ // in the model.
+ virtual TfLiteStatus PrepareSubgraphs();
+
+ // Calls TFLMRegistration->Reset for every operator in every subgraph in
+ // the model.
+ virtual TfLiteStatus ResetSubgraphs();
+
+ // Calls TFLMRegistration->Free for every operator in every subgraph in
+ // the model.
+ virtual TfLiteStatus FreeSubgraphs();
+
+ // Calls TFLMRegistration->Invoke for every operator in a single subgraph
+ // in the model.
+ virtual TfLiteStatus InvokeSubgraph(int subgraph_idx);
+
+ // Zeros out all variable tensors in all subgraphs in the model.
+ virtual TfLiteStatus ResetVariableTensors();
+
+ // Number of tensor inputs to a specified subgraph in the model.
+ virtual size_t NumSubgraphInputs(int subgraph_idx);
+
+ // Get the specified input tensor of a specified subgraph in the model.
+ virtual TfLiteEvalTensor* GetSubgraphInput(int subgraph_idx, int input_idx);
+
+ // Number of tensor outputs from a specified subgraph in the model.
+ virtual size_t NumSubgraphOutputs(int subgraph_idx);
+
+ // Get the specified output tensor of a specified subgraph in the model.
+ virtual TfLiteEvalTensor* GetSubgraphOutput(int subgraph_idx, int output_idx);
+
+ // Number of subgraphs in the model.
+ virtual int NumSubgraphs();
+
+ // Hook to pass in subgraph allocations tracked within the interpreter,
+ // allowing MicroInterpreterGraph to init / prepare / invoke subgraphs in the
+ // model.
+ void SetSubgraphAllocations(SubgraphAllocations* subgraph_allocations);
+
+ // Get the current subgraph index. Within an operator, this is guaranteed
+ // to be the subgraph of that operator.
+ int GetCurrentSubgraphIndex() { return current_subgraph_index_; }
+
+ // Gets the list of allocations for each subgraph. This is the source of truth
+ // for all per-subgraph allocation data.
+ SubgraphAllocations* GetAllocations() { return subgraph_allocations_; }
+
+ // Get the resource variables for this TFLM graph.
+ MicroResourceVariables* GetResourceVariables() { return resource_variables_; }
+
+ private:
+ TfLiteContext* context_;
+ const Model* model_;
+ MicroAllocator* allocator_;
+ SubgraphAllocations* subgraph_allocations_ = nullptr;
+ int current_subgraph_index_;
+ MicroResourceVariables* resource_variables_;
+ const flatbuffers::Vector<flatbuffers::Offset<SubGraph>>* subgraphs_;
+
+ TF_LITE_REMOVE_VIRTUAL_DELETE
+};
+
+} // namespace tflite
+
+#endif // TENSORFLOW_LITE_MICRO_MICRO_INTERPRETER_GRAPH_H_
diff --git a/tensorflow/lite/micro/micro_interpreter_test.cc b/tensorflow/lite/micro/micro_interpreter_test.cc
index 0ba31c4a796..e44de6b09aa 100644
--- a/tensorflow/lite/micro/micro_interpreter_test.cc
+++ b/tensorflow/lite/micro/micro_interpreter_test.cc
@@ -548,4 +548,23 @@ TF_LITE_MICRO_TEST(TestArenaUsedBytes) {
 TF_LITE_MICRO_EXPECT_EQ(interpreter2.Invoke(), kTfLiteOk);
}
+TF_LITE_MICRO_TEST(TestGetTensorFailsNoLinearMemoryPlanner) {
+ const tflite::Model* model = tflite::testing::GetModelWith256x256Tensor();
+ TF_LITE_MICRO_EXPECT(model != nullptr);
+
+ tflite::testing::TestingOpResolver op_resolver;
+ TF_LITE_MICRO_EXPECT_EQ(tflite::testing::GetTestingOpResolver(op_resolver),
+ kTfLiteOk);
+ tflite::MicroInterpreter interpreter(model, op_resolver, tflite::arena_buffer,
+ tflite::buffer_arena_size);
+ TF_LITE_MICRO_EXPECT_EQ(interpreter.AllocateTensors(), kTfLiteOk);
+
+ TF_LITE_MICRO_EXPECT_EQ(interpreter.Invoke(), kTfLiteOk);
+
+ // GetTensor should return nullptr when a linear memory planner isn't used
+ // to initialize the interpreter. The preserve_all_tensors() getter should
+ // also return false.
+ TF_LITE_MICRO_EXPECT_EQ(interpreter.preserve_all_tensors(), false);
+ TF_LITE_MICRO_EXPECT(interpreter.GetTensor(0) == nullptr);
+}
+
TF_LITE_MICRO_TESTS_END
diff --git a/tensorflow/lite/micro/micro_log.cc b/tensorflow/lite/micro/micro_log.cc
index 9c8ccaa3c25..a08a07d6ed3 100644
--- a/tensorflow/lite/micro/micro_log.cc
+++ b/tensorflow/lite/micro/micro_log.cc
@@ -1,4 +1,4 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -17,31 +17,46 @@ limitations under the License.
 #include
 #include
-#include
 #if !defined(TF_LITE_STRIP_ERROR_STRINGS)
 #include "tensorflow/lite/micro/debug_log.h"
-#include "tensorflow/lite/micro/micro_string.h"
 #endif
-void Log(const char* format, va_list args) {
 #if !defined(TF_LITE_STRIP_ERROR_STRINGS)
- // Only pulling in the implementation of this function for builds where we
- // expect to make use of it to be extra cautious about not increasing the code
- // size.
- static constexpr int kMaxLogLen = 256;
- char log_buffer[kMaxLogLen];
- MicroVsnprintf(log_buffer, kMaxLogLen, format, args);
- DebugLog(log_buffer);
- DebugLog("\r\n");
-#endif
+namespace {
+
+void VDebugLog(const char* format, ...) {
+ va_list args;
+ va_start(args, format);
+ DebugLog(format, args);
+ va_end(args);
+}
+
+} // namespace
+
+void VMicroPrintf(const char* format, va_list args) {
+ DebugLog(format, args);
+ // TODO(b/290051015): remove "\r\n"
+ VDebugLog("\r\n");
}
-#if !defined(TF_LITE_STRIP_ERROR_STRINGS)
void MicroPrintf(const char* format, ...) {
 va_list args;
 va_start(args, format);
- Log(format, args);
+ VMicroPrintf(format, args);
 va_end(args);
}
-#endif
+
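The remaining entry points below complete the snprintf-style surface. As a usage sketch (the buffer size is illustrative, and the return-value convention follows whatever the platform's DebugVsnprintf provides):

#include <cstddef>

#include "tensorflow/lite/micro/micro_log.h"

void LogTensorCount(int count) {
  char buf[64];
  // snprintf-style formatting into a caller-owned buffer.
  int len = MicroSnprintf(buf, sizeof(buf), "model has %d tensors", count);
  if (len >= 0 && static_cast<size_t>(len) < sizeof(buf)) {
    MicroPrintf("%s", buf);  // VMicroPrintf appends the trailing "\r\n"
  }
}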
+int MicroSnprintf(char* buffer, size_t buf_size, const char* format, ...) {
+ va_list args;
+ va_start(args, format);
+ int result = MicroVsnprintf(buffer, buf_size, format, args);
+ va_end(args);
+ return result;
+}
+
+int MicroVsnprintf(char* buffer, size_t buf_size, const char* format,
+ va_list vlist) {
+ return DebugVsnprintf(buffer, buf_size, format, vlist);
+}
+#endif // !defined(TF_LITE_STRIP_ERROR_STRINGS)
diff --git a/tensorflow/lite/micro/micro_log.h b/tensorflow/lite/micro/micro_log.h
index d9cfbe8c08d..af3c24a800d 100644
--- a/tensorflow/lite/micro/micro_log.h
+++ b/tensorflow/lite/micro/micro_log.h
@@ -1,4 +1,4 @@
-/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -15,20 +15,23 @@ limitations under the License.
 #ifndef TENSORFLOW_LITE_MICRO_MICRO_LOG_H_
 #define TENSORFLOW_LITE_MICRO_MICRO_LOG_H_
-#include
-
-// This is a free function used to perform the actual logging.
-// This function will be used by MicroPrintf and MicroErrorReporter::Report()
-void Log(const char* format, va_list args);
-
 #if !defined(TF_LITE_STRIP_ERROR_STRINGS)
-// This function can be used independent of the MicroErrorReporter to get
+#include
+#include
+// These functions can be used independently of the MicroErrorReporter to get
 // printf-like functionality and are common to all target platforms.
 void MicroPrintf(const char* format, ...);
+void VMicroPrintf(const char* format, va_list args);
+int MicroSnprintf(char* buffer, size_t buf_size, const char* format, ...);
+int MicroVsnprintf(char* buffer, size_t buf_size, const char* format,
+ va_list vlist);
 #else
 // We use a #define to ensure that the strings are completely stripped, to
 // prevent an unnecessary increase in the binary size.
 #define MicroPrintf(...) tflite::Unused(__VA_ARGS__)
+#define VMicroPrintf(...) tflite::Unused(__VA_ARGS__)
+#define MicroSnprintf(...) tflite::Unused(__VA_ARGS__)
+#define MicroVsnprintf(...) tflite::Unused(__VA_ARGS__)
 #endif
 namespace tflite {
@@ -39,6 +42,13 @@ template <typename... Args>
 void Unused(Args&&... args) {
 (void)(sizeof...(args));
}
+
+template <typename T, typename... Args>
+T Unused(Args&&... args) {
+ (void)(sizeof...(args));
+ return static_cast<T>(0);
+}
+
} // namespace tflite
#endif // TENSORFLOW_LITE_MICRO_MICRO_LOG_H_
diff --git a/tensorflow/lite/micro/micro_log_test.cc b/tensorflow/lite/micro/micro_log_test.cc
index 97ac8be1e19..7027e013630 100644
--- a/tensorflow/lite/micro/micro_log_test.cc
+++ b/tensorflow/lite/micro/micro_log_test.cc
@@ -1,4 +1,4 @@
-/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -15,18 +15,42 @@
#include "tensorflow/lite/micro/micro_log.h" -#include "tensorflow/lite/micro/system_setup.h" +#include +#include -namespace tflite { -inline void InitializeTest() { InitializeTarget(); } -} // namespace tflite +#include "tensorflow/lite/micro/testing/micro_test.h" -int main(int argc, char** argv) { - tflite::InitializeTest(); -#ifndef TF_LITE_STRIP_ERROR_STRINGS - MicroPrintf("Number: %d", 42); - MicroPrintf("Badly-formed format string %"); - MicroPrintf("Another % badly-formed %% format string"); - MicroPrintf("~~~%s~~~", "ALL TESTS PASSED"); +namespace { + +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) +constexpr int kMaxBufferSize = 128; +const char* kFormat = "%2d%6.2f%#5x%5s"; +const char* kExpect = "42 42.42 0x42 \"42\""; #endif // !defined(TF_LITE_STRIP_ERROR_STRINGS) + +} // namespace + +TF_LITE_MICRO_TESTS_BEGIN + +#if !defined(TF_LITE_STRIP_ERROR_STRINGS) + +TF_LITE_MICRO_TEST(MicroPrintfTest) { + MicroPrintf("Integer 42: %d", 42); + MicroPrintf("Float 42.42: %2.2f", 42.42); + MicroPrintf("String \"Hello World!\": %s", "\"Hello World!\""); + MicroPrintf("Badly-formed format string %"); + MicroPrintf("Another %# badly-formed %% format string"); +} + +TF_LITE_MICRO_TEST(MicroSnprintf) { + char buffer[kMaxBufferSize]; + buffer[0] = '\0'; + size_t result = + MicroSnprintf(buffer, kMaxBufferSize, kFormat, 42, 42.42, 0x42, "\"42\""); + TF_LITE_MICRO_EXPECT_EQ(result, strlen(buffer)); + TF_LITE_MICRO_EXPECT_STRING_EQ(kExpect, buffer); } + +#endif // !defined(TF_LITE_STRIP_ERROR_STRINGS) + +TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/micro_mutable_op_resolver.h b/tensorflow/lite/micro/micro_mutable_op_resolver.h index a07f2686d45..f5f6e38e003 100644 --- a/tensorflow/lite/micro/micro_mutable_op_resolver.h +++ b/tensorflow/lite/micro/micro_mutable_op_resolver.h @@ -29,9 +29,11 @@ limitations under the License. 
#include "tensorflow/lite/micro/kernels/ethosu.h" #include "tensorflow/lite/micro/kernels/fully_connected.h" #include "tensorflow/lite/micro/kernels/micro_ops.h" +#include "tensorflow/lite/micro/kernels/mul.h" #include "tensorflow/lite/micro/kernels/pooling.h" #include "tensorflow/lite/micro/kernels/reduce.h" #include "tensorflow/lite/micro/kernels/softmax.h" +#include "tensorflow/lite/micro/kernels/transpose_conv.h" #include "tensorflow/lite/micro/micro_log.h" #include "tensorflow/lite/micro/micro_op_resolver.h" #include "tensorflow/lite/schema/schema_generated.h" @@ -143,6 +145,11 @@ class MicroMutableOpResolver : public MicroOpResolver { return AddBuiltin(BuiltinOperator_AVERAGE_POOL_2D, registration, ParsePool); } + TfLiteStatus AddBatchMatMul() { + return AddBuiltin(BuiltinOperator_BATCH_MATMUL, + tflite::Register_BATCH_MATMUL(), ParseBatchMatMul); + } + TfLiteStatus AddBatchToSpaceNd() { return AddBuiltin(BuiltinOperator_BATCH_TO_SPACE_ND, Register_BATCH_TO_SPACE_ND(), ParseBatchToSpaceNd); @@ -194,6 +201,11 @@ class MicroMutableOpResolver : public MicroOpResolver { ParseCumsum); } + TfLiteStatus AddDelay() { + // TODO(b/286250473): change back name to "Delay" and remove namespace + return AddCustom("SignalDelay", tflite::tflm_signal::Register_DELAY()); + } + TfLiteStatus AddDepthToSpace() { return AddBuiltin(BuiltinOperator_DEPTH_TO_SPACE, tflite::Register_DEPTH_TO_SPACE(), ParseDepthToSpace); @@ -219,6 +231,16 @@ class MicroMutableOpResolver : public MicroOpResolver { return AddBuiltin(BuiltinOperator_DIV, tflite::Register_DIV(), ParseDiv); } + TfLiteStatus AddEmbeddingLookup() { + return AddBuiltin(BuiltinOperator_EMBEDDING_LOOKUP, + Register_EMBEDDING_LOOKUP(), ParseEmbeddingLookup); + } + + TfLiteStatus AddEnergy() { + // TODO(b/286250473): change back name to "Energy" and remove namespace + return AddCustom("SignalEnergy", tflite::tflm_signal::Register_ENERGY()); + } + TfLiteStatus AddElu() { return AddBuiltin(BuiltinOperator_ELU, tflite::Register_ELU(), ParseElu); } @@ -244,10 +266,41 @@ class MicroMutableOpResolver : public MicroOpResolver { ParseExpandDims); } + TfLiteStatus AddFftAutoScale() { + // TODO(b/286250473): change back name and remove namespace + return AddCustom("SignalFftAutoScale", + tflite::tflm_signal::Register_FFT_AUTO_SCALE()); + } + TfLiteStatus AddFill() { return AddBuiltin(BuiltinOperator_FILL, tflite::Register_FILL(), ParseFill); } + TfLiteStatus AddFilterBank() { + // TODO(b/286250473): change back name to "FilterBank" and remove namespace + return AddCustom("SignalFilterBank", + tflite::tflm_signal::Register_FILTER_BANK()); + } + TfLiteStatus AddFilterBankLog() { + // TODO(b/286250473): change back name to "FilterBankLog" and remove + // namespace + return AddCustom("SignalFilterBankLog", + tflite::tflm_signal::Register_FILTER_BANK_LOG()); + } + TfLiteStatus AddFilterBankSquareRoot() { + // TODO(b/286250473): change back name to "FilterBankSquareRoot" and remove + // namespace + return AddCustom("SignalFilterBankSquareRoot", + tflite::tflm_signal::Register_FILTER_BANK_SQUARE_ROOT()); + } + TfLiteStatus AddFilterBankSpectralSubtraction() { + // TODO(b/286250473): change back name to "FilterBankSpectralSubtraction" + // and remove namespace + return AddCustom( + "SignalFilterBankSpectralSubtraction", + tflite::tflm_signal::Register_FILTER_BANK_SPECTRAL_SUBTRACTION()); + } + TfLiteStatus AddFloor() { return AddBuiltin(BuiltinOperator_FLOOR, Register_FLOOR(), ParseFloor); } @@ -262,6 +315,11 @@ class MicroMutableOpResolver : public MicroOpResolver { 
ParseFloorMod); } + TfLiteStatus AddFramer() { + // TODO(b/286250473): change back name to "Framer" and remove namespace + return AddCustom("SignalFramer", tflite::tflm_signal::Register_FRAMER()); + } + TfLiteStatus AddFullyConnected( const TFLMRegistration& registration = Register_FULLY_CONNECTED()) { return AddBuiltin(BuiltinOperator_FULLY_CONNECTED, registration, @@ -297,6 +355,12 @@ class MicroMutableOpResolver : public MicroOpResolver { return AddBuiltin(BuiltinOperator_IF, tflite::Register_IF(), ParseIf); } + TfLiteStatus AddIrfft(const TFLMRegistration* registration = + tflite::tflm_signal::Register_IRFFT()) { + // TODO(b/286250473): change back name and remove namespace + return AddCustom("SignalIrfft", registration); + } + TfLiteStatus AddL2Normalization() { return AddBuiltin(BuiltinOperator_L2_NORMALIZATION, Register_L2_NORMALIZATION(), ParseL2Normalization); @@ -387,6 +451,12 @@ class MicroMutableOpResolver : public MicroOpResolver { ParseNotEqual); } + TfLiteStatus AddOverlapAdd() { + // TODO(b/286250473): change back name to "OverlapAdd" and remove namespace + return AddCustom("SignalOverlapAdd", + tflite::tflm_signal::Register_OVERLAP_ADD()); + } + TfLiteStatus AddPack() { return AddBuiltin(BuiltinOperator_PACK, Register_PACK(), ParsePack); } @@ -399,6 +469,11 @@ class MicroMutableOpResolver : public MicroOpResolver { return AddBuiltin(BuiltinOperator_PADV2, Register_PADV2(), ParsePadV2); } + TfLiteStatus AddPCAN() { + // TODO(b/286250473): change back name to "PCAN" and remove namespace + return AddCustom("SignalPCAN", tflite::tflm_signal::Register_PCAN()); + } + TfLiteStatus AddPrelu() { return AddBuiltin(BuiltinOperator_PRELU, tflite::Register_PRELU(), ParsePrelu); @@ -429,8 +504,8 @@ class MicroMutableOpResolver : public MicroOpResolver { } TfLiteStatus AddReshape() { - return AddBuiltin(BuiltinOperator_RESHAPE, - tflite::ops::micro::Register_RESHAPE(), ParseReshape); + return AddBuiltin(BuiltinOperator_RESHAPE, Register_RESHAPE(), + ParseReshape); } TfLiteStatus AddResizeBilinear() { @@ -451,8 +526,7 @@ class MicroMutableOpResolver : public MicroOpResolver { } TfLiteStatus AddRound() { - return AddBuiltin(BuiltinOperator_ROUND, - tflite::ops::micro::Register_ROUND(), ParseRound); + return AddBuiltin(BuiltinOperator_ROUND, Register_ROUND(), ParseRound); } TfLiteStatus AddRsqrt() { @@ -523,6 +597,11 @@ class MicroMutableOpResolver : public MicroOpResolver { ParseStridedSlice); } + TfLiteStatus AddStacker() { + // TODO(b/286250473): change back name to "Stacker" and remove namespace + return AddCustom("SignalStacker", tflite::tflm_signal::Register_STACKER()); + } + TfLiteStatus AddSub() { return AddBuiltin(BuiltinOperator_SUB, tflite::Register_SUB(), ParseSub); } @@ -539,9 +618,10 @@ class MicroMutableOpResolver : public MicroOpResolver { return AddBuiltin(BuiltinOperator_TANH, Register_TANH(), ParseTanh); } - TfLiteStatus AddTransposeConv() { - return AddBuiltin(BuiltinOperator_TRANSPOSE_CONV, - tflite::Register_TRANSPOSE_CONV(), ParseTransposeConv); + TfLiteStatus AddTransposeConv( + const TFLMRegistration& registration = Register_TRANSPOSE_CONV()) { + return AddBuiltin(BuiltinOperator_TRANSPOSE_CONV, registration, + ParseTransposeConv); } TfLiteStatus AddTranspose() { diff --git a/tensorflow/lite/micro/micro_profiler.cc b/tensorflow/lite/micro/micro_profiler.cc index c3f0f4f1fdc..ebead51a90d 100644 --- a/tensorflow/lite/micro/micro_profiler.cc +++ b/tensorflow/lite/micro/micro_profiler.cc @@ -65,8 +65,13 @@ void MicroProfiler::LogCsv() const { #if 
!defined(TF_LITE_STRIP_ERROR_STRINGS) MicroPrintf("\"Event\",\"Tag\",\"Ticks\""); for (int i = 0; i < num_events_; ++i) { +#if defined(HEXAGON) || defined(CMSIS_NN) + int ticks = end_ticks_[i] - start_ticks_[i]; + MicroPrintf("%d,%s,%d", i, tags_[i], ticks); +#else uint32_t ticks = end_ticks_[i] - start_ticks_[i]; MicroPrintf("%d,%s,%" PRIu32, i, tags_[i], ticks); +#endif } #endif } @@ -94,7 +99,7 @@ void MicroProfiler::LogTicksPerTagCsv() { } MicroPrintf("%s, %d", each_tag_entry.tag, each_tag_entry.ticks); } - MicroPrintf("total number of ticks, %d", total_ticks); + MicroPrintf("\"total number of ticks\", %d", total_ticks); #endif } diff --git a/tensorflow/lite/micro/micro_profiler.h b/tensorflow/lite/micro/micro_profiler.h index 1c39ea1cbfb..b52ebcb4ea9 100644 --- a/tensorflow/lite/micro/micro_profiler.h +++ b/tensorflow/lite/micro/micro_profiler.h @@ -40,7 +40,7 @@ class MicroProfiler : public MicroProfilerInterface { // only once per event_handle. // // If EndEvent is called more than once for the same event_handle, the last - // call will be used as the end of event marker.If EndEvent is called 0 times + // call will be used as the end of event marker. If EndEvent is called 0 times // for a particular event_handle, the duration of that event will be 0 ticks. virtual void EndEvent(uint32_t event_handle) override; @@ -66,9 +66,9 @@ class MicroProfiler : public MicroProfilerInterface { void LogTicksPerTagCsv(); private: - // Maximum number of events that this class can keep track of. If we call - // AddEvent more than kMaxEvents number of times, then the oldest event's - // profiling information will be overwritten. + // Maximum number of events that this class can keep track of. The + // MicroProfiler will abort if AddEvent is called more than kMaxEvents number + // of times. Increase this number if you need more events. static constexpr int kMaxEvents = 4096; const char* tags_[kMaxEvents]; @@ -87,7 +87,7 @@ class MicroProfiler : public MicroProfilerInterface { int FindExistingOrNextPosition(const char* tag_name); - TF_LITE_REMOVE_VIRTUAL_DELETE; + TF_LITE_REMOVE_VIRTUAL_DELETE }; #if defined(TF_LITE_STRIP_ERROR_STRINGS) diff --git a/tensorflow/lite/micro/micro_string.cc b/tensorflow/lite/micro/micro_string.cc deleted file mode 100644 index bb41a9e394c..00000000000 --- a/tensorflow/lite/micro/micro_string.cc +++ /dev/null @@ -1,317 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ - -// Implements debug logging for numbers by converting them into strings and then -// calling the main DebugLog(char*) function. These are separated into a -// different file so that platforms can just implement the string output version -// of DebugLog() and then get the numerical variations without requiring any -// more code. 
- -#include "tensorflow/lite/micro/micro_string.h" - -#include <cstdarg> -#include <cstdint> -#include <cstring> - -namespace { - -// Int formats can need up to 10 bytes for the value plus a single byte for the -// sign. -constexpr int kMaxIntCharsNeeded = 10 + 1; -// Hex formats can need up to 8 bytes for the value plus two bytes for the "0x". -constexpr int kMaxHexCharsNeeded = 8 + 2; - -// Float formats can need up to 7 bytes for the fraction plus 3 bytes for "x2^" -// plus 3 bytes for the exponent and a single sign bit. -constexpr float kMaxFloatCharsNeeded = 7 + 3 + 3 + 1; - -// All input buffers to the number conversion functions must be this long. -const int kFastToBufferSize = 48; - -// Reverses a zero-terminated string in-place. -char* ReverseStringInPlace(char* start, char* end) { - char* p1 = start; - char* p2 = end - 1; - while (p1 < p2) { - char tmp = *p1; - *p1++ = *p2; - *p2-- = tmp; - } - return start; -} - -// Appends a string to a string, in-place. You need to pass in the maximum -// string length as the second argument. -char* StrCatStr(char* main, int main_max_length, const char* to_append) { - char* current = main; - while (*current != 0) { - ++current; - } - char* current_end = main + (main_max_length - 1); - while ((*to_append != 0) && (current < current_end)) { - *current = *to_append; - ++current; - ++to_append; - } - *current = 0; - return current; -} - -// Populates the provided buffer with an ASCII representation of the number. -char* FastUInt32ToBufferLeft(uint32_t i, char* buffer, int base) { - char* start = buffer; - do { - int32_t digit = i % base; - char character; - if (digit < 10) { - character = '0' + digit; - } else { - character = 'a' + (digit - 10); - } - *buffer++ = character; - i /= base; - } while (i > 0); - *buffer = 0; - ReverseStringInPlace(start, buffer); - return buffer; -} - -// Populates the provided buffer with an ASCII representation of the number. -char* FastInt32ToBufferLeft(int32_t i, char* buffer) { - uint32_t u = i; - if (i < 0) { - *buffer++ = '-'; - u = -u; - } - return FastUInt32ToBufferLeft(u, buffer, 10); -} - -// Converts a number to a string and appends it to another. -char* StrCatInt32(char* main, int main_max_length, int32_t number) { - char number_string[kFastToBufferSize]; - FastInt32ToBufferLeft(number, number_string); - return StrCatStr(main, main_max_length, number_string); -} - -// Converts a number to a string and appends it to another. -char* StrCatUInt32(char* main, int main_max_length, uint32_t number, int base) { - char number_string[kFastToBufferSize]; - FastUInt32ToBufferLeft(number, number_string, base); - return StrCatStr(main, main_max_length, number_string); -} - -// Populates the provided buffer with ASCII representation of the float number. -// Avoids the use of any floating point instructions (since these aren't -// supported on many microcontrollers) and as a consequence prints values with -// power-of-two exponents. -char* FastFloatToBufferLeft(float f, char* buffer) { - char* current = buffer; - char* current_end = buffer + (kFastToBufferSize - 1); - // Access the bit fields of the floating point value to avoid requiring any - // float instructions. These constants are derived from IEEE 754.
- const uint32_t sign_mask = 0x80000000; - const uint32_t exponent_mask = 0x7f800000; - const int32_t exponent_shift = 23; - const int32_t exponent_bias = 127; - const uint32_t fraction_mask = 0x007fffff; - uint32_t u; - memcpy(&u, &f, sizeof(int32_t)); - const int32_t exponent = - ((u & exponent_mask) >> exponent_shift) - exponent_bias; - const uint32_t fraction = (u & fraction_mask); - // Expect ~0x2B1B9D3 for fraction. - if (u & sign_mask) { - *current = '-'; - current += 1; - } - *current = 0; - // These are special cases for infinities and not-a-numbers. - if (exponent == 128) { - if (fraction == 0) { - current = StrCatStr(current, (current_end - current), "Inf"); - return current; - } else { - current = StrCatStr(current, (current_end - current), "NaN"); - return current; - } - } - // 0x007fffff (8388607) represents 0.99... for the fraction, so to print the - // correct decimal digits we need to scale our value before passing it to the - // conversion function. This scale should be 10000000/8388608 = 1.1920928955. - // We can approximate this using multiply-adds and right-shifts using the - // values in this array. The 1. portion of the number string is printed out - // in a fixed way before the fraction, below. - const int32_t scale_shifts_size = 13; - const int8_t scale_shifts[13] = {3, 4, 8, 11, 13, 14, 17, - 18, 19, 20, 21, 22, 23}; - uint32_t scaled_fraction = fraction; - for (int i = 0; i < scale_shifts_size; ++i) { - scaled_fraction += (fraction >> scale_shifts[i]); - } - *current = '1'; - current += 1; - *current = '.'; - current += 1; - *current = 0; - - // Prepend leading zeros to fill in all 7 bytes of the fraction. Truncate - // zeros off the end of the fraction. Every fractional value takes 7 bytes. - // For example, 2500 would be written into the buffer as 0002500 since it - // represents .00025. - constexpr int kMaxFractionalDigits = 7; - - // Abort early if there is not enough space in the buffer. - if (current_end - current <= kMaxFractionalDigits) { - return current; - } - - // Pre-fill buffer with zeros to ensure zero-truncation works properly. - for (int i = 1; i < kMaxFractionalDigits; i++) { - *(current + i) = '0'; - } - - // Track how large the fraction is to add leading zeros. - char* previous = current; - current = StrCatUInt32(current, (current_end - current), scaled_fraction, 10); - int fraction_digits = current - previous; - int leading_zeros = kMaxFractionalDigits - fraction_digits; - - // Overwrite the null terminator from StrCatUInt32 to ensure zero-trunctaion - // works properly. - *current = '0'; - - // Shift fraction values and prepend zeros if necessary. - if (leading_zeros != 0) { - for (int i = 0; i < fraction_digits; i++) { - current--; - *(current + leading_zeros) = *current; - *current = '0'; - } - current += kMaxFractionalDigits; - } - - // Truncate trailing zeros for cleaner logs. Ensure we leave at least one - // fractional character for the case when scaled_fraction is 0. 
- while (*(current - 1) == '0' && (current - 1) > previous) { - current--; - } - *current = 0; - current = StrCatStr(current, (current_end - current), "*2^"); - current = StrCatInt32(current, (current_end - current), exponent); - return current; -} - -int FormatInt32(char* output, int32_t i) { - return static_cast<int>(FastInt32ToBufferLeft(i, output) - output); -} - -int FormatUInt32(char* output, uint32_t i) { - return static_cast<int>(FastUInt32ToBufferLeft(i, output, 10) - output); -} - -int FormatHex(char* output, uint32_t i) { - return static_cast<int>(FastUInt32ToBufferLeft(i, output, 16) - output); -} - -int FormatFloat(char* output, float i) { - return static_cast<int>(FastFloatToBufferLeft(i, output) - output); -} - -} // namespace - -extern "C" int MicroVsnprintf(char* output, int len, const char* format, - va_list args) { - int output_index = 0; - const char* current = format; - // One extra character must be left for the null terminator. - const int usable_length = len - 1; - while (*current != '\0' && output_index < usable_length) { - if (*current == '%') { - current++; - switch (*current) { - case 'd': - // Cut off log message if format could exceed log buffer length. - if (usable_length - output_index < kMaxIntCharsNeeded) { - output[output_index++] = '\0'; - return output_index; - } - output_index += - FormatInt32(&output[output_index], va_arg(args, int32_t)); - current++; - break; - case 'u': - if (usable_length - output_index < kMaxIntCharsNeeded) { - output[output_index++] = '\0'; - return output_index; - } - output_index += - FormatUInt32(&output[output_index], va_arg(args, uint32_t)); - current++; - break; - case 'x': - if (usable_length - output_index < kMaxHexCharsNeeded) { - output[output_index++] = '\0'; - return output_index; - } - output[output_index++] = '0'; - output[output_index++] = 'x'; - output_index += - FormatHex(&output[output_index], va_arg(args, uint32_t)); - current++; - break; - case 'f': - if (usable_length - output_index < kMaxFloatCharsNeeded) { - output[output_index++] = '\0'; - return output_index; - } - output_index += - FormatFloat(&output[output_index], va_arg(args, double)); - current++; - break; - case '%': - output[output_index++] = *current++; - break; - case 'c': - if (usable_length - output_index < 1) { - output[output_index++] = '\0'; - return output_index; - } - output[output_index++] = va_arg(args, int32_t); - current++; - break; - case 's': - char* string = va_arg(args, char*); - int string_idx = 0; - while (string_idx + output_index < usable_length && - string[string_idx] != '\0') { - output[output_index++] = string[string_idx++]; - } - current++; - } - } else { - output[output_index++] = *current++; - } - } - output[output_index++] = '\0'; - return output_index; -} - -extern "C" int MicroSnprintf(char* output, int len, const char* format, ...) { - va_list args; - va_start(args, format); - int bytes_written = MicroVsnprintf(output, len, format, args); - va_end(args); - return bytes_written; -} diff --git a/tensorflow/lite/micro/micro_string.h b/tensorflow/lite/micro/micro_string.h deleted file mode 100644 index 59303e82b09..00000000000 --- a/tensorflow/lite/micro/micro_string.h +++ /dev/null @@ -1,33 +0,0 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License.
-You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -==============================================================================*/ -#ifndef TENSORFLOW_LITE_MICRO_MICRO_STRING_H_ -#define TENSORFLOW_LITE_MICRO_MICRO_STRING_H_ - -#include <cstdarg> - -// Implements simple string formatting for numeric types. Returns the number of -// bytes written to output. -extern "C" { -// Functionally equivalent to vsnprintf, trimmed down for TFLite Micro. -// MicroSnprintf() is implemented using MicroVsnprintf(). -int MicroVsnprintf(char* output, int len, const char* format, va_list args); -// Functionally equavalent to snprintf, trimmed down for TFLite Micro. -// For example, MicroSnprintf(buffer, 10, "int %d", 10) will put the string -// "int 10" in the buffer. -// Floating point values are logged in exponent notation (1.XXX*2^N). -int MicroSnprintf(char* output, int len, const char* format, ...); -} - -#endif // TENSORFLOW_LITE_MICRO_MICRO_STRING_H_ diff --git a/tensorflow/lite/micro/micro_string_test.cc b/tensorflow/lite/micro/micro_string_test.cc deleted file mode 100644 index 3c1d8e971ee..00000000000 --- a/tensorflow/lite/micro/micro_string_test.cc +++ /dev/null @@ -1,161 +0,0 @@ -/* Copyright 2020 The TensorFlow Authors. All Rights Reserved. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License.
-==============================================================================*/ - -#include "tensorflow/lite/micro/micro_string.h" - -#include "tensorflow/lite/micro/testing/micro_test.h" - -TF_LITE_MICRO_TESTS_BEGIN - -TF_LITE_MICRO_TEST(FormatPositiveIntShouldMatchExpected) { - const int kBufferLen = 32; - char buffer[kBufferLen]; - const char golden[] = "Int: 55"; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Int: %d", 55); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FormatNegativeIntShouldMatchExpected) { - const int kBufferLen = 32; - char buffer[kBufferLen]; - const char golden[] = "Int: -55"; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Int: %d", -55); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FormatUnsignedIntShouldMatchExpected) { - const int kBufferLen = 32; - char buffer[kBufferLen]; - const char golden[] = "UInt: 12345"; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "UInt: %u", 12345); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FormatHexShouldMatchExpected) { - const int kBufferLen = 32; - char buffer[kBufferLen]; - const char golden[] = "Hex: 0x12345"; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Hex: %x", 0x12345); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FormatFloatShouldMatchExpected) { - const int kBufferLen = 32; - char buffer[kBufferLen]; - const char golden[] = "Float: 1.0*2^4"; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %f", 16.); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FormatCharShouldMatchExpected) { - const int kBufferLen = 32; - char buffer[kBufferLen]; - const char golden[] = "Chars: @,Z"; - int bytes_written = - MicroSnprintf(buffer, kBufferLen, "Chars: %c,%c", 64, 'Z'); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(BadlyFormattedStringShouldProduceReasonableString) { - const int kBufferLen = 32; - char buffer[kBufferLen]; - const char golden[] = "Test Badly % formated % string"; - int bytes_written = - MicroSnprintf(buffer, kBufferLen, "Test Badly %% formated %% string%"); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(IntFormatOverrunShouldTruncate) { - const int kBufferLen = 8; - char buffer[kBufferLen]; - const char golden[] = "Int: "; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Int: %d", 12345); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(UnsignedIntFormatOverrunShouldTruncate) { - const int kBufferLen = 8; - char buffer[kBufferLen]; - const char golden[] = "UInt: "; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "UInt: %u", 12345); - TF_LITE_MICRO_EXPECT_EQ(static_cast<int>(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(HexFormatOverrunShouldTruncate) { - const int kBufferLen = 8; - char buffer[kBufferLen]; -
const char golden[] = "Hex: "; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Hex: %x", 0x12345); - TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FloatFormatOverrunShouldTruncate) { - const int kBufferLen = 12; - char buffer[kBufferLen]; - const char golden[] = "Float: "; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %x", 12345.); - TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FloatFormatShouldPrintFractionCorrectly) { - const int kBufferLen = 24; - char buffer[kBufferLen]; - const char golden[] = "Float: 1.0625*2^0"; - // Add small offset to float value to account for float rounding error. - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %f", 1.0625001); - TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(FloatFormatShouldPrintFractionCorrectlyNoLeadingZeros) { - const int kBufferLen = 24; - char buffer[kBufferLen]; - const char golden[] = "Float: 1.6332993*2^-1"; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "Float: %f", 0.816650); - TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(StringFormatOverrunShouldTruncate) { - const int kBufferLen = 10; - char buffer[kBufferLen]; - const char golden[] = "String: h"; - int bytes_written = - MicroSnprintf(buffer, kBufferLen, "String: %s", "hello world"); - TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TEST(StringFormatWithExactOutputSizeOverrunShouldTruncate) { - const int kBufferLen = 10; - char buffer[kBufferLen]; - const char golden[] = "format st"; - int bytes_written = MicroSnprintf(buffer, kBufferLen, "format str"); - TF_LITE_MICRO_EXPECT_EQ(static_cast(sizeof(golden)), bytes_written); - TF_LITE_MICRO_EXPECT_STRING_EQ(golden, buffer); -} - -TF_LITE_MICRO_TESTS_END diff --git a/tensorflow/lite/micro/mock_micro_graph.cc b/tensorflow/lite/micro/mock_micro_graph.cc index 438a40653f0..9c652fb2798 100644 --- a/tensorflow/lite/micro/mock_micro_graph.cc +++ b/tensorflow/lite/micro/mock_micro_graph.cc @@ -20,11 +20,7 @@ limitations under the License. 
namespace tflite { MockMicroGraph::MockMicroGraph(SingleArenaBufferAllocator* allocator) - : MicroGraph(nullptr, nullptr, nullptr, nullptr), - allocator_(allocator), - init_count_(0), - prepare_count_(0), - free_count_(0) { + : allocator_(allocator), init_count_(0), prepare_count_(0), free_count_(0) { memset(invoke_counts_, 0, sizeof(invoke_counts_)); mock_tensor_ = reinterpret_cast<TfLiteEvalTensor*>(allocator_->AllocatePersistentBuffer( @@ -45,8 +41,6 @@ TfLiteStatus MockMicroGraph::InvokeSubgraph(int subgraph_idx) { return kTfLiteOk; } -TfLiteStatus MockMicroGraph::ResetVariableTensors() { return kTfLiteOk; } - size_t MockMicroGraph::NumSubgraphInputs(int subgraph_idx) { return 1; } TfLiteEvalTensor* MockMicroGraph::GetSubgraphInput(int subgraph_idx, @@ -63,4 +57,8 @@ TfLiteEvalTensor* MockMicroGraph::GetSubgraphOutput(int subgraph_idx, int MockMicroGraph::NumSubgraphs() { return kMaxSubgraphs; } +MicroResourceVariables* MockMicroGraph::GetResourceVariables() { + return nullptr; +} + } // namespace tflite diff --git a/tensorflow/lite/micro/mock_micro_graph.h b/tensorflow/lite/micro/mock_micro_graph.h index 3ae7d7cf070..745e8f0cbc9 100644 --- a/tensorflow/lite/micro/mock_micro_graph.h +++ b/tensorflow/lite/micro/mock_micro_graph.h @@ -30,13 +30,13 @@ class MockMicroGraph : public MicroGraph { public: explicit MockMicroGraph(SingleArenaBufferAllocator* allocator); TfLiteStatus InvokeSubgraph(int subgraph_idx) override; - TfLiteStatus ResetVariableTensors() override; size_t NumSubgraphInputs(int subgraph_idx) override; TfLiteEvalTensor* GetSubgraphInput(int subgraph_idx, int tensor_idx) override; size_t NumSubgraphOutputs(int subgraph_idx) override; TfLiteEvalTensor* GetSubgraphOutput(int subgraph_idx, int tensor_idx) override; int NumSubgraphs() override; + MicroResourceVariables* GetResourceVariables() override; int get_init_count() const { return init_count_; } int get_prepare_count() const { return prepare_count_; } int get_free_count() const { return free_count_; } diff --git a/tensorflow/lite/micro/models/person_detect_vela.tflite b/tensorflow/lite/micro/models/person_detect_vela.tflite new file mode 100644 index 00000000000..95cf47687dc Binary files /dev/null and b/tensorflow/lite/micro/models/person_detect_vela.tflite differ diff --git a/tensorflow/lite/micro/python/interpreter/src/BUILD b/tensorflow/lite/micro/python/interpreter/src/BUILD index 601e3db4077..f8be0ed253a 100644 --- a/tensorflow/lite/micro/python/interpreter/src/BUILD +++ b/tensorflow/lite/micro/python/interpreter/src/BUILD @@ -17,24 +17,6 @@ package_group( packages = tflm_python_op_resolver_friends(), ) -# tflm_runtime is deprecated, please use //python/tflite_micro:runtime instead. -# TODO(b/286456378): remove once all usage is changed to the runtime target. -py_library( - name = "tflm_runtime", - srcs = ["tflm_runtime.py"], - visibility = ["//visibility:public"], - deps = ["//python/tflite_micro:runtime"], -) - -# runtime is deprecated, please use //python/tflite_micro:runtime instead. -# TODO(b/286456378): remove once all usage is changed to the runtime target. -py_library( - name = "runtime", - srcs = ["runtime.py"], - visibility = ["//visibility:public"], - deps = ["//python/tflite_micro:runtime"], -) - # TODO(b/286456378): remove once all internal usage is fixed.
cc_library( name = "python_ops_resolver", srcs = [ "python_ops_resolver.cc", ], hdrs = [ "python_ops_resolver.h", ], copts = micro_copts(), - visibility = [ - ":op_resolver_friends", - "//tensorflow/lite/micro/integration_tests:__subpackages__", - ], + visibility = [":op_resolver_friends"], deps = [ "//python/tflite_micro:python_ops_resolver", ], diff --git a/tensorflow/lite/micro/python/tflite_size/src/BUILD b/tensorflow/lite/micro/python/tflite_size/src/BUILD index 66d9b50ee6b..b8f53a24f42 100644 --- a/tensorflow/lite/micro/python/tflite_size/src/BUILD +++ b/tensorflow/lite/micro/python/tflite_size/src/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") load("@pybind11_bazel//:build_defs.bzl", "pybind_extension", "pybind_library") package( diff --git a/tensorflow/lite/micro/python/tflite_size/tests/BUILD b/tensorflow/lite/micro/python/tflite_size/tests/BUILD index 1b4c5b26c30..076a6ab2ca7 100644 --- a/tensorflow/lite/micro/python/tflite_size/tests/BUILD +++ b/tensorflow/lite/micro/python/tflite_size/tests/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_test") load("@tflm_pip_deps//:requirements.bzl", "requirement") licenses(["notice"]) @@ -25,7 +26,7 @@ py_test( "noubsan", ], deps = [ - requirement("tensorflow-cpu"), + requirement("tensorflow"), "//tensorflow/lite/micro/python/tflite_size/src:flatbuffer_size", ], ) diff --git a/tensorflow/lite/micro/riscv32_generic/debug_log.cc b/tensorflow/lite/micro/riscv32_generic/debug_log.cc new file mode 100644 index 00000000000..1d38fff929b --- /dev/null +++ b/tensorflow/lite/micro/riscv32_generic/debug_log.cc @@ -0,0 +1,40 @@ +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "tensorflow/lite/micro/debug_log.h" + +#include <cstdio> + +#ifndef TF_LITE_STRIP_ERROR_STRINGS +#include "eyalroz_printf/src/printf/printf.h" +#endif + +extern "C" void DebugLog(const char* format, va_list args) { +#ifndef TF_LITE_STRIP_ERROR_STRINGS + constexpr int kMaxLogLen = 256; + char log_buffer[kMaxLogLen]; + + vsnprintf_(log_buffer, kMaxLogLen, format, args); + std::fputs(log_buffer, stdout); +#endif +} + +#ifndef TF_LITE_STRIP_ERROR_STRINGS +// Only called from MicroVsnprintf (micro_log.h) +extern "C" int DebugVsnprintf(char* buffer, size_t buf_size, const char* format, + va_list vlist) { + return vsnprintf_(buffer, buf_size, format, vlist); +} +#endif diff --git a/tensorflow/lite/micro/test_helpers.cc b/tensorflow/lite/micro/test_helpers.cc index 15d238203e6..3f0f5ec0826 100644 --- a/tensorflow/lite/micro/test_helpers.cc +++ b/tensorflow/lite/micro/test_helpers.cc @@ -1876,8 +1876,8 @@ int TestStrcmp(const char* a, const char* b) { // Create a TfLiteIntArray from an array of ints. The first element in the // supplied array must be the size of the array expressed as an int.
-TfLiteIntArray* IntArrayFromInts(int* int_array) { - return reinterpret_cast<TfLiteIntArray*>(int_array); +TfLiteIntArray* IntArrayFromInts(const int* int_array) { + return reinterpret_cast<TfLiteIntArray*>(const_cast<int*>(int_array)); } // Create a TfLiteFloatArray from an array of floats. The first element in the diff --git a/tensorflow/lite/micro/test_helpers.h b/tensorflow/lite/micro/test_helpers.h index 578282e9b28..6315b9fecdc 100644 --- a/tensorflow/lite/micro/test_helpers.h +++ b/tensorflow/lite/micro/test_helpers.h @@ -1,4 +1,4 @@ -/* Copyright 2019 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -17,6 +17,7 @@ limitations under the License. #define TENSORFLOW_LITE_MICRO_TEST_HELPERS_H_ #include <algorithm> +#include <cmath> #include <cstdint> #include <limits> #include <type_traits> @@ -195,7 +196,7 @@ void PopulateContext(TfLiteTensor* tensors, int tensors_size, // Create a TfLiteIntArray from an array of ints. The first element in the // supplied array must be the size of the array expressed as an int. -TfLiteIntArray* IntArrayFromInts(int* int_array); +TfLiteIntArray* IntArrayFromInts(const int* int_array); // Create a TfLiteFloatArray from an array of floats. The first element in the // supplied array must be the size of the array expressed as a float. @@ -325,7 +326,7 @@ inline float SymmetricScaleFromMinMax(const float min, const float max) { template <typename T> inline int ZeroPointFromMinMax(const float min, const float max) { return static_cast<int>(std::numeric_limits<T>::min()) + - static_cast<int>(-min / ScaleFromMinMax<T>(min, max) + 0.5f); + static_cast<int>(roundf(-min / ScaleFromMinMax<T>(min, max))); } } // namespace testing diff --git a/tensorflow/lite/micro/testing/BUILD b/tensorflow/lite/micro/testing/BUILD index 58914bcf9fa..7a246990a23 100644 --- a/tensorflow/lite/micro/testing/BUILD +++ b/tensorflow/lite/micro/testing/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library") load("@tflm_pip_deps//:requirements.bzl", "requirement") load( "//tensorflow:extra_rules.bzl", @@ -32,7 +33,6 @@ cc_library( ], visibility = [ ":kernel_test_friends", - ":microfrontend", ":tflite_micro", ], deps = [ @@ -77,7 +77,7 @@ py_library( ], deps = [ requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) @@ -93,6 +93,6 @@ py_binary( deps = [ "@absl_py//absl:app", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) diff --git a/tensorflow/lite/micro/testing/generate_test_models.py b/tensorflow/lite/micro/testing/generate_test_models.py index 25902d00f41..2593653fdf2 100644 --- a/tensorflow/lite/micro/testing/generate_test_models.py +++ b/tensorflow/lite/micro/testing/generate_test_models.py @@ -71,6 +71,10 @@ def representative_dataset_gen(): converter.inference_input_type = tf.int8 converter.inference_output_type = tf.int8 converter.representative_dataset = representative_dataset_gen + # TODO(b/324385802): Disable per channel quantization in FC layers (currently + # default behaviour) since it's not yet supported in TFLM.
+ converter._experimental_disable_per_channel_quantization_for_dense_layers = ( # pylint: disable=protected-access + True) tflite_model = converter.convert() if write_to_file: diff --git a/tensorflow/lite/micro/testing/micro_test.h b/tensorflow/lite/micro/testing/micro_test.h index 2e119e17352..a28f4b6d8e4 100644 --- a/tensorflow/lite/micro/testing/micro_test.h +++ b/tensorflow/lite/micro/testing/micro_test.h @@ -259,6 +259,7 @@ inline void InitializeTest() { InitializeTarget(); } MicroPrintf("FAIL: %s did not match %s", string1, string2, __FILE__, \ __LINE__); \ micro_test::did_test_fail = true; \ + break; \ } \ } \ } while (false) diff --git a/tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh b/tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh index 9b39ee4adf5..27635baa8e4 100755 --- a/tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh +++ b/tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh @@ -1,5 +1,5 @@ #!/bin/bash -e -# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,7 +36,8 @@ FVP+="-C mps3_board.visualisation.disable-visualisation=1 " FVP+="-C mps3_board.telnetterminal0.start_telnet=0 " FVP+='-C mps3_board.uart0.out_file="-" ' FVP+='-C mps3_board.uart0.unbuffered_output=1 ' -FVP+='-C mps3_board.uart0.shutdown_on_eot=1' +FVP+='-C mps3_board.uart0.shutdown_on_eot=1 ' +FVP+='--stat' ${FVP} ${BINARY_TO_TEST} | tee ${MICRO_LOG_FILENAME} if [[ ${2} != "non_test_binary" ]] diff --git a/tensorflow/lite/micro/tflite_bridge/BUILD b/tensorflow/lite/micro/tflite_bridge/BUILD index 518015ac15e..ea5efdf6a8f 100644 --- a/tensorflow/lite/micro/tflite_bridge/BUILD +++ b/tensorflow/lite/micro/tflite_bridge/BUILD @@ -38,9 +38,6 @@ cc_library( "micro_error_reporter.h", ], copts = micro_copts(), - visibility = [ - "//tensorflow/lite/micro/tflite_bridge:__pkg__", - ], deps = [ "//tensorflow/lite/core/api:error_reporter", "//tensorflow/lite/micro:micro_compatibility", diff --git a/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.cc b/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.cc index 63cc42ed535..d5d77c35cb0 100644 --- a/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.cc +++ b/tensorflow/lite/micro/tflite_bridge/micro_error_reporter.cc @@ -1,4 +1,4 @@ -/* Copyright 2018 The TensorFlow Authors. All Rights Reserved. +/* Copyright 2023 The TensorFlow Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
@@ -36,7 +36,7 @@ ErrorReporter* GetMicroErrorReporter() { } int MicroErrorReporter::Report(const char* format, va_list args) { - Log(format, args); + VMicroPrintf(format, args); return 0; } diff --git a/tensorflow/lite/micro/tools/BUILD b/tensorflow/lite/micro/tools/BUILD index 1051241d8f4..a85a7bab905 100644 --- a/tensorflow/lite/micro/tools/BUILD +++ b/tensorflow/lite/micro/tools/BUILD @@ -1,3 +1,5 @@ +load("@rules_python//python:defs.bzl", "py_binary", "py_library", "py_test") +load("@flatbuffers//:build_defs.bzl", "flatbuffer_cc_library", "flatbuffer_py_library") load("@tflm_pip_deps//:requirements.bzl", "requirement") load("@pybind11_bazel//:build_defs.bzl", "pybind_extension") load("//tensorflow:extra_rules.bzl", "tflm_application_friends") @@ -21,8 +23,8 @@ py_library( name = "generate_cc_arrays_lib", srcs = ["generate_cc_arrays.py"], deps = [ - requirement("numpy"), requirement("pillow"), + requirement("numpy"), ], ) @@ -39,8 +41,8 @@ py_binary( name = "generate_cc_arrays", srcs = ["generate_cc_arrays.py"], deps = [ - requirement("numpy"), requirement("pillow"), + requirement("numpy"), ], ) @@ -70,9 +72,9 @@ py_test( ], deps = [ ":requantize_flatbuffer", - "//python/tflite_micro:runtime", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), + "//python/tflite_micro:runtime", ], ) @@ -131,11 +133,33 @@ py_library( ], deps = [ ":model_transforms_utils", - "//tensorflow/lite/micro/python/interpreter/src:runtime", - "//tensorflow/lite/tools:flatbuffer_utils", "@absl_py//absl/logging", requirement("numpy"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), + "//python/tflite_micro:runtime", + "//tensorflow/lite/tools:flatbuffer_utils", + ], +) + +cc_binary( + name = "layer_by_layer_output_tool", + srcs = ["layer_by_layer.cc"], + deps = [ + ":layer_by_layer_schema", + "//tensorflow/lite/c:c_api_types", + "//tensorflow/lite/c:common", + "//tensorflow/lite/kernels:op_macros", + "//tensorflow/lite/micro:micro_allocator", + "//tensorflow/lite/micro:micro_context", + "//tensorflow/lite/micro:micro_framework", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_resource_variable", + "//tensorflow/lite/micro:micro_utils", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro/kernels:kernel_util", + "//tensorflow/lite/micro/tools/benchmarking:op_resolver", + "//tensorflow/lite/schema:schema_fbs", + "@flatbuffers", ], ) @@ -167,8 +191,34 @@ py_test( ], deps = [ ":tflm_model_transforms_lib", - "//tensorflow/lite/micro/examples/recipes:resource_variables_lib", "@absl_py//absl/testing:parameterized", - requirement("tensorflow-cpu"), + requirement("tensorflow"), + "//tensorflow/lite/micro/examples/recipes:resource_variables_lib", ], ) + +py_binary( + name = "layer_by_layer_debugger", + srcs = ["layer_by_layer_debugger.py"], + python_version = "PY3", + srcs_version = "PY3", + deps = [ + ":layer_by_layer_schema_py", + ":model_transforms_utils", + "@absl_py//absl:app", + "@absl_py//absl/flags", + requirement("tensorflow"), + "//python/tflite_micro:runtime", + "//tensorflow/lite/tools:flatbuffer_utils", + ], +) + +flatbuffer_cc_library( + name = "layer_by_layer_schema", + srcs = ["layer_by_layer_schema.fbs"], +) + +flatbuffer_py_library( + name = "layer_by_layer_schema_py", + srcs = ["layer_by_layer_schema.fbs"], +) diff --git a/tensorflow/lite/micro/tools/Makefile.inc b/tensorflow/lite/micro/tools/Makefile.inc new file mode 100644 index 00000000000..adbe73af770 --- /dev/null +++ b/tensorflow/lite/micro/tools/Makefile.inc @@ 
-0,0 +1,12 @@ +MICROLITE_TOOL_ROOT_DIR := $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/ + +LAYER_BY_LAYER_OUPUT_SRCS := \ +$(MICROLITE_TOOL_ROOT_DIR)/layer_by_layer.cc \ + +LAYER_BY_LAYER_OUPUT_HDRS := \ +$(MICROLITE_TOOL_ROOT_DIR)benchmarking/op_resolver.h \ + +ifneq ($(TARGET), bluepill cortex_m_corstone_300 riscv32_generic hexagon) + $(eval $(call microlite_test,layer_by_layer_output_tool,\ + $(LAYER_BY_LAYER_OUPUT_SRCS),$(LAYER_BY_LAYER_OUPUT_HDRS),)) +endif diff --git a/tensorflow/lite/micro/tools/benchmarking/BUILD b/tensorflow/lite/micro/tools/benchmarking/BUILD new file mode 100644 index 00000000000..6691ac31814 --- /dev/null +++ b/tensorflow/lite/micro/tools/benchmarking/BUILD @@ -0,0 +1,43 @@ +cc_library( + name = "op_resolver", + hdrs = ["op_resolver.h"], + visibility = ["//tensorflow/lite/micro/tools:__subpackages__"], + deps = ["//tensorflow/lite/micro:op_resolvers"], +) + +cc_library( + name = "metrics", + srcs = ["metrics.cc"], + hdrs = ["metrics.h"], + deps = [ + "//tensorflow/lite/kernels/internal:compatibility", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_profiler", + "//tensorflow/lite/micro:recording_allocators", + ], +) + +cc_library( + name = "generic_benchmark_lib", + srcs = ["generic_model_benchmark.cc"], + hdrs = ["show_meta_data.h"], + defines = ["GENERIC_BENCHMARK_NO_META_DATA"], + deps = [ + ":metrics", + ":op_resolver", + "//tensorflow/lite/c:c_api_types", + "//tensorflow/lite/c:common", + "//tensorflow/lite/micro:micro_context", + "//tensorflow/lite/micro:micro_log", + "//tensorflow/lite/micro:micro_profiler", + "//tensorflow/lite/micro:op_resolvers", + "//tensorflow/lite/micro:recording_allocators", + "//tensorflow/lite/micro:system_setup", + "//tensorflow/lite/schema:schema_fbs", + ], +) + +cc_binary( + name = "tflm_benchmark", + deps = [":generic_benchmark_lib"], +) diff --git a/tensorflow/lite/micro/tools/benchmarking/Makefile.inc b/tensorflow/lite/micro/tools/benchmarking/Makefile.inc new file mode 100644 index 00000000000..396e7016384 --- /dev/null +++ b/tensorflow/lite/micro/tools/benchmarking/Makefile.inc @@ -0,0 +1,64 @@ +MICROLITE_BENCHMARK_ROOT_DIR := $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/benchmarking + +ifneq ($(GENERIC_BENCHMARK_MODEL_PATH),) + GENERIC_BENCHMARK_MODEL_DIR := $(dir $(GENERIC_BENCHMARK_MODEL_PATH)) + GENERIC_BENCHMARK_MODEL_NAME := $(notdir $(basename $(GENERIC_BENCHMARK_MODEL_PATH))) + CXXFLAGS += -DGENERIC_BENCHMARK_USING_BUILTIN_MODEL + CXXFLAGS += -DGENERIC_BENCHMARK_MODEL_HEADER_PATH=\"$(GENERIC_BENCHMARK_MODEL_DIR)$(GENERIC_BENCHMARK_MODEL_NAME)_model_data.h\" + CXXFLAGS += -DGENERIC_BENCHMARK_MODEL_NAME=$(GENERIC_BENCHMARK_MODEL_NAME) +ifneq ($(GENERIC_BENCHMARK_ARENA_SIZE),) + CXXFLAGS += -DGENERIC_BENCHMARK_TENSOR_ARENA_SIZE=$(GENERIC_BENCHMARK_ARENA_SIZE) +endif + + # model path includes $(TENSORFLOW_ROOT) as part of the make invocation + GENERIC_BENCHMARK_GENERATOR_INPUTS := $(GENERIC_BENCHMARK_MODEL_PATH) + + GENERIC_BENCHMARK_GENERATED_SRCS := \ + $(GENERATED_SRCS_DIR)$(GENERIC_BENCHMARK_MODEL_DIR)$(GENERIC_BENCHMARK_MODEL_NAME)_model_data.cc + + GENERIC_BENCHMARK_GENERATED_HDRS := \ + $(GENERATED_SRCS_DIR)$(GENERIC_BENCHMARK_MODEL_DIR)$(GENERIC_BENCHMARK_MODEL_NAME)_model_data.h +endif + +GENERIC_BENCHMARK_SRCS := \ +$(MICROLITE_BENCHMARK_ROOT_DIR)/generic_model_benchmark.cc \ +$(MICROLITE_BENCHMARK_ROOT_DIR)/metrics.cc \ +$(GENERATED_SRCS_DIR)$(MICROLITE_BENCHMARK_ROOT_DIR)/show_meta_data.cc + +GENERIC_BENCHMARK_HDRS := \ +$(MICROLITE_BENCHMARK_ROOT_DIR)/op_resolver.h \ 
+$(MICROLITE_BENCHMARK_ROOT_DIR)/metrics.h \ +$(MICROLITE_BENCHMARK_ROOT_DIR)/show_meta_data.h + +# always rebuild these to catch MODEL_PATH and ARENA_SIZE changes on command line +.PHONY: $(GENERATED_SRCS_DIR)$(MICROLITE_BENCHMARK_ROOT_DIR)/show_meta_data.cc +.PHONY: $(MICROLITE_BENCHMARK_ROOT_DIR)/generic_model_benchmark.cc + +$(GENERATED_SRCS_DIR)$(MICROLITE_BENCHMARK_ROOT_DIR)/show_meta_data.cc: + CC="$(CC)" \ + CXX="$(CXX)" \ + CC_FLAGS="$(CCFLAGS)" \ + CXX_FLAGS="$(CXXFLAGS)" \ + KERNEL_OPTIMIZATION="$(KERNEL_OPTIMIZATION_LEVEL)" \ + CORE_OPTIMIZATION="$(CORE_OPTIMIZATION_LEVEL)" \ + THIRD_PARTY_KERNEL_OPTIMIZATION="$(THIRD_PARTY_KERNEL_OPTIMIZATION_LEVEL)" \ + TARGET=$(TARGET) \ + TARGET_ARCH=$(TARGET_ARCH) \ + TENSORFLOW_ROOT="$(TENSORFLOW_ROOT)" \ + OPTIMIZED_KERNEL=$(OPTIMIZED_KERNEL_DIR) \ + BUILD_TYPE=$(BUILD_TYPE) \ + XTENSA_CORE=$(XTENSA_CORE) \ + XTENSA_BASE=$(XTENSA_BASE) \ + XTENSA_TOOLS_VERSION=$(XTENSA_TOOLS_VERSION) \ + TEMPLATE_FILE="$(MICROLITE_BENCHMARK_ROOT_DIR)/show_meta_data.cc.template" \ + GENERATED_FILE="$(GENERATED_SRCS_DIR)$(MICROLITE_BENCHMARK_ROOT_DIR)/show_meta_data.cc" \ + MODEL_FILE="$(GENERIC_BENCHMARK_MODEL_PATH)" \ + $(MICROLITE_BENCHMARK_ROOT_DIR)/collect_meta_data.sh + +ifneq ($(TARGET),bluepill) +ifneq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifimini)) + $(eval $(call microlite_test,tflm_benchmark,\ + $(GENERIC_BENCHMARK_SRCS),$(GENERIC_BENCHMARK_HDRS),\ + $(GENERIC_BENCHMARK_GENERATOR_INPUTS))) +endif +endif diff --git a/tensorflow/lite/micro/tools/benchmarking/README.md b/tensorflow/lite/micro/tools/benchmarking/README.md new file mode 100644 index 00000000000..c203820471a --- /dev/null +++ b/tensorflow/lite/micro/tools/benchmarking/README.md @@ -0,0 +1,414 @@ +# Generic Benchmarking Tool build/run instructions +This tool can be used to benchmark any TfLite format model. The tool can be +compiled in one of two ways: +1. Such that it takes command line arguments, allowing the path to the model +file to be specified as a program argument +2. With a model compiled into the tool, allowing use in any simulator or on +any hardware platform + +Building the tool with the model compiled in uses two additional Makefile +variables: +* `GENERIC_BENCHMARK_MODEL_PATH`: the path to the TfLite format model file. This +can be a relative or absolute path. This variable is required. +* `GENERIC_BENCHMARK_ARENA_SIZE`: the size of the TFLM interpreter arena, in bytes. +This variable is optional. 
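+
+For a rough sense of how these variables combine in practice (a sketch, not part of the original instructions: the person_detect model below ships in this tree, and the 200 KiB figure is only an illustrative starting point), note that the arena size is a plain byte count and can be computed inline in the make invocation:
+```
+make -f tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=default run_tflm_benchmark -j$(nproc) GENERIC_BENCHMARK_MODEL_PATH=tensorflow/lite/micro/models/person_detect.tflite GENERIC_BENCHMARK_ARENA_SIZE=`expr 200 \* 1024`
+```
+If the configured arena is too small for the model, interpreter allocation fails; since the tool links the recording allocators, a practical workflow is to start with a generous size and trim it based on the arena usage the tool reports.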
+ +## Tested, working targets +* x86 +* cortex_m_qemu (no timing data) +* Xtensa (p6, hifi3) +* cortex_m_corstone_300 + +## Tested, non-working targets +* none currently + +## Build and run for x86 +Build for command line arguments: +``` +make -f tensorflow/lite/micro/tools/make/Makefile tflm_benchmark -j$(nproc) +``` +Run with command line arguments: +``` +gen/linux_x86_64_default/bin/tflm_benchmark tensorflow/lite/micro/models/person_detect.tflite +``` + +Build and run with model compiled into tool: +``` +make -f tensorflow/lite/micro/tools/make/Makefile BUILD_TYPE=default run_tflm_benchmark -j$(nproc) GENERIC_BENCHMARK_MODEL_PATH=tensorflow/lite/micro/models/person_detect.tflite GENERIC_BENCHMARK_ARENA_SIZE=`expr 150 \* 1024` +``` + +## Build and run for Xtensa +Build and run with model compiled into tool: +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=xtensa TARGET_ARCH=vision_p6 OPTIMIZED_KERNEL_DIR=xtensa XTENSA_CORE=P6_200528 BUILD_TYPE=default run_tflm_benchmark -j$(nproc) GENERIC_BENCHMARK_MODEL_PATH=/tmp/keyword_scrambled.tflite GENERIC_BENCHMARK_ARENA_SIZE=`expr 50 \* 1024` +``` + +## Build and run for Cortex-M using Corstone 300 simulator +Build and run with model compiled into tool: +``` +make -f tensorflow/lite/micro/tools/make/Makefile TARGET=cortex_m_corstone_300 TARGET_ARCH=cortex-m4 OPTIMIZED_KERNEL_DIR=cmsis_nn BUILD_TYPE=default run_tflm_benchmark -j$(nproc) GENERIC_BENCHMARK_MODEL_PATH=tensorflow/lite/micro/models/person_detect.tflite GENERIC_BENCHMARK_ARENA_SIZE=`expr 150 \* 1024` +``` + +## Build and run using Bazel + +This is only for the x86 command line argument build, and does not contain meta-data: +``` +bazel build tensorflow/lite/micro/tools/benchmarking:tflm_benchmark +bazel-bin/tensorflow/lite/micro/tools/benchmarking/tflm_benchmark tensorflow/lite/micro/models/person_detect.tflite +``` + +## Example output with meta-data and built-in model layer information + +This sample output is for Cortex-M using Corstone 300: +``` +Configured arena size = 153600 + +-------------------- +Compiled on: + +Fri May 17 03:36:59 PM PDT 2024 +-------------------- +Git SHA: a4390a1d73edf5a8d3affa1da60e1eba88e0cb13 + +Git status: + +On branch main +Your branch is up to date with 'origin/main'. +-------------------- +C compiler: tensorflow/lite/micro/tools/make/downloads/gcc_embedded/bin/arm-none-eabi-gcc +Version: + +arm-none-eabi-gcc (Arm GNU Toolchain 13.2.rel1 (Build arm-13.7)) 13.2.1 20231009 +Copyright (C) 2023 Free Software Foundation, Inc. +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. + +Flags: + +-Wimplicit-function-declaration -std=c11 -Werror -fno-unwind-tables -ffunction-sections +-fdata-sections -fmessage-length=0 -DTF_LITE_STATIC_MEMORY -DTF_LITE_DISABLE_X86_NEON +-DCMSIS_NN -DKERNELS_OPTIMIZED_FOR_SPEED -mcpu=cortex-m4+nofp -mfpu=auto +-DTF_LITE_MCU_DEBUG_LOG -mthumb -mfloat-abi=soft -funsigned-char -mlittle-endian +-fomit-frame-pointer -MD -DARMCM4 + +C++ compiler: tensorflow/lite/micro/tools/make/downloads/gcc_embedded/bin/arm-none-eabi-g++ +Version: + +arm-none-eabi-g++ (Arm GNU Toolchain 13.2.rel1 (Build arm-13.7)) 13.2.1 20231009 +Copyright (C) 2023 Free Software Foundation, Inc. +This is free software; see the source for copying conditions. There is NO +warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+ +Flags: + +-std=c++11 -fno-rtti -fno-exceptions -fno-threadsafe-statics -Wnon-virtual-dtor -Werror +-fno-unwind-tables -ffunction-sections -fdata-sections -fmessage-length=0 +-DTF_LITE_STATIC_MEMORY -DTF_LITE_DISABLE_X86_NEON -Wsign-compare -Wdouble-promotion +-Wunused-variable -Wunused-function -Wswitch -Wvla -Wall -Wextra +-Wmissing-field-initializers -Wstrict-aliasing -Wno-unused-parameter -DCMSIS_NN +-DKERNELS_OPTIMIZED_FOR_SPEED -mcpu=cortex-m4+nofp -mfpu=auto -DTF_LITE_MCU_DEBUG_LOG +-mthumb -mfloat-abi=soft -funsigned-char -mlittle-endian -fomit-frame-pointer -MD +-DARMCM4 -DCMSIS_DEVICE_ARM_CORTEX_M_XX_HEADER_FILE="ARMCM4.h" +-DGENERIC_BENCHMARK_USING_BUILTIN_MODEL +-DGENERIC_BENCHMARK_MODEL_HEADER_PATH="tensorflow/lite/micro/models/person_detect_model_da +ta.h" -DGENERIC_BENCHMARK_MODEL_NAME=person_detect +-DGENERIC_BENCHMARK_TENSOR_ARENA_SIZE=153600 + +Optimization: kernel= -O2 core= -Os third-party-kernel= -O2 +-------------------- +Target information: + +TARGET=cortex_m_corstone_300 +TARGET_ARCH=cortex-m4 +OPTIMIZATION=cmsis_nn +BUILD_TYPE=default +-------------------- +NN library download URLs: + +http://github.com/ARM-software/CMSIS-NN/archive/01dee38e6d6bfbbf202f0cd425bbea1731747d51.z +ip + +NN library MD5 checksums: + +f20be93ededf42bb704c19f699a24313 +-------------------- +Model SHA1: + +bcafcaa99d2eaf089f0ca25d66f56a2177e93f76 + +Model analysis: + +=== tensorflow/lite/micro/models/person_detect.tflite === +Your TFLite model has '1' subgraph(s). In the subgraph description below, +T# represents the Tensor numbers. For example, in Subgraph#0, the DEPTHWISE_CONV_2D op +takes +tensor #88 and tensor #0 and tensor #33 as input and produces tensor #34 as output. +Subgraph#0(T#88) -> [T#87] + Op#0 DEPTHWISE_CONV_2D(T#88, T#0, T#33[3774, -107, -84394, -13908, 20697, ...]) -> +[T#34] + Op#1 DEPTHWISE_CONV_2D(T#34, T#9, T#52[31132, 28, 273, -2692, 7409, ...]) -> [T#51] + Op#2 CONV_2D(T#51, T#10, T#53[10064, 1130, -13056, -30284, -23349, ...]) -> [T#54] + Op#3 DEPTHWISE_CONV_2D(T#54, T#11, T#56[306, -158, 19181, -364, 6237, ...]) -> [T#55] + Op#4 CONV_2D(T#55, T#12, T#57[-7649, 12287, -4433, 5851, -188, ...]) -> [T#58] + Op#5 DEPTHWISE_CONV_2D(T#58, T#13, T#60[7297, -498, 263, -1975, 2260, ...]) -> [T#59] + Op#6 CONV_2D(T#59, T#14, T#61[-4742, -4160, 6985, 8647, 29773, ...]) -> [T#62] + Op#7 DEPTHWISE_CONV_2D(T#62, T#15, T#64[28588, 363, 27592, 22294, -4344, ...]) -> [T#63] + Op#8 CONV_2D(T#63, T#16, T#65[12683, 36581, 6206, 1236, 15834, ...]) -> [T#66] + Op#9 DEPTHWISE_CONV_2D(T#66, T#17, T#68[-6353, 9090, -30, -1019, -496, ...]) -> [T#67] + Op#10 CONV_2D(T#67, T#18, T#69[3895, -6563, -8843, -2066, -1372, ...]) -> [T#70] + Op#11 DEPTHWISE_CONV_2D(T#70, T#19, T#72[20437, -365, -2518, 20827, -904, ...]) -> +[T#71] + Op#12 CONV_2D(T#71, T#20, T#73[-10120, 9768, 3524, 3796, 6896, ...]) -> [T#74] + Op#13 DEPTHWISE_CONV_2D(T#74, T#21, T#76[-3969, -1910, -2425, -114, 4456, ...]) -> +[T#75] + Op#14 CONV_2D(T#75, T#22, T#77[-13202, 13929, -4357, 19492, 1971, ...]) -> [T#78] + Op#15 DEPTHWISE_CONV_2D(T#78, T#23, T#80[-6169, -10, -2788, 14420, -7457, ...]) -> +[T#79] + Op#16 CONV_2D(T#79, T#24, T#81[155, -3073, 291, -902, -9942, ...]) -> [T#82] + Op#17 DEPTHWISE_CONV_2D(T#82, T#25, T#84[-2063, 10755, -12037, -6417, 2147, ...]) -> +[T#83] + Op#18 CONV_2D(T#83, T#26, T#85[-1872, -7549, 13994, 3191, -614, ...]) -> [T#86] + Op#19 DEPTHWISE_CONV_2D(T#86, T#1, T#36[-6485, 294, 686, -6011, -5196, ...]) -> [T#35] + Op#20 CONV_2D(T#35, T#2, T#37[7116, 8066, 11755, 11674, 9983, ...]) -> [T#38] + Op#21 
DEPTHWISE_CONV_2D(T#38, T#3, T#40[7735, 5235, 4334, -6485, 9397, ...]) -> [T#39] + Op#22 CONV_2D(T#39, T#4, T#41[2947, 10152, -7865, -554, -13760, ...]) -> [T#42] + Op#23 DEPTHWISE_CONV_2D(T#42, T#5, T#44[-4755, 7899, -488, -2954, 2990, ...]) -> [T#43] + Op#24 CONV_2D(T#43, T#6, T#45[-6269, -22458, 13332, -16368, 4435, ...]) -> [T#46] + Op#25 DEPTHWISE_CONV_2D(T#46, T#7, T#48[333, -4743, -310, -2471, 4804, ...]) -> [T#47] + Op#26 CONV_2D(T#47, T#8, T#49[6677, -3593, 3754, 26316, -4761, ...]) -> [T#50] + Op#27 AVERAGE_POOL_2D(T#50) -> [T#27] + Op#28 CONV_2D(T#27, T#30, T#29[16267, -17079]) -> [T#28] + Op#29 RESHAPE(T#28, T#32[1, 2]) -> [T#31] + Op#30 SOFTMAX(T#31) -> [T#87] +Tensors of Subgraph#0 + T#0(MobilenetV1/Conv2d_0/weights/read) shape:[1, 3, 3, 8], type:INT8 RO 72 bytes, +buffer: 68, data:[., y, ., g, ., ...] + T#1(MobilenetV1/Conv2d_10_depthwise/depthwise_weights/read) shape:[1, 3, 3, 128], +type:INT8 RO 1152 bytes, buffer: 72, data:[W, ., d, ., ., ...] + T#2(MobilenetV1/Conv2d_10_pointwise/weights/read) shape:[128, 1, 1, 128], type:INT8 RO +16384 bytes, buffer: 14, data:[., ., +, ., ., ...] + T#3(MobilenetV1/Conv2d_11_depthwise/depthwise_weights/read) shape:[1, 3, 3, 128], +type:INT8 RO 1152 bytes, buffer: 13, data:[., `, ., :, ., ...] + T#4(MobilenetV1/Conv2d_11_pointwise/weights/read) shape:[128, 1, 1, 128], type:INT8 RO +16384 bytes, buffer: 12, data:[., ., ., ., ., ...] + T#5(MobilenetV1/Conv2d_12_depthwise/depthwise_weights/read) shape:[1, 3, 3, 128], +type:INT8 RO 1152 bytes, buffer: 10, data:[z, ., ., ?, ., ...] + T#6(MobilenetV1/Conv2d_12_pointwise/weights/read) shape:[256, 1, 1, 128], type:INT8 RO +32768 bytes, buffer: 69, data:[/, ., ., ., #, ...] + T#7(MobilenetV1/Conv2d_13_depthwise/depthwise_weights/read) shape:[1, 3, 3, 256], +type:INT8 RO 2304 bytes, buffer: 7, data:[., ., w, ., ., ...] + T#8(MobilenetV1/Conv2d_13_pointwise/weights/read) shape:[256, 1, 1, 256], type:INT8 RO +65536 bytes, buffer: 5, data:[&, ., ., ., ., ...] + T#9(MobilenetV1/Conv2d_1_depthwise/depthwise_weights/read) shape:[1, 3, 3, 8], +type:INT8 RO 72 bytes, buffer: 60, data:[., ., ., ., ., ...] + T#10(MobilenetV1/Conv2d_1_pointwise/weights/read) shape:[16, 1, 1, 8], type:INT8 RO 128 +bytes, buffer: 63, data:[., ., ., ., ., ...] + T#11(MobilenetV1/Conv2d_2_depthwise/depthwise_weights/read) shape:[1, 3, 3, 16], +type:INT8 RO 144 bytes, buffer: 58, data:[O, *, ., !, ., ...] + T#12(MobilenetV1/Conv2d_2_pointwise/weights/read) shape:[32, 1, 1, 16], type:INT8 RO +512 bytes, buffer: 61, data:[., 4, ., ., 8, ...] + T#13(MobilenetV1/Conv2d_3_depthwise/depthwise_weights/read) shape:[1, 3, 3, 32], +type:INT8 RO 288 bytes, buffer: 35, data:[., 1, ;, M, ., ...] + T#14(MobilenetV1/Conv2d_3_pointwise/weights/read) shape:[32, 1, 1, 32], type:INT8 RO +1024 bytes, buffer: 33, data:[., ., ., ., ., ...] + T#15(MobilenetV1/Conv2d_4_depthwise/depthwise_weights/read) shape:[1, 3, 3, 32], +type:INT8 RO 288 bytes, buffer: 32, data:[., ;, ., ., ., ...] + T#16(MobilenetV1/Conv2d_4_pointwise/weights/read) shape:[64, 1, 1, 32], type:INT8 RO +2048 bytes, buffer: 30, data:[., ., ., 5, ., ...] + T#17(MobilenetV1/Conv2d_5_depthwise/depthwise_weights/read) shape:[1, 3, 3, 64], +type:INT8 RO 576 bytes, buffer: 77, data:[G, ., ., ., ., ...] + T#18(MobilenetV1/Conv2d_5_pointwise/weights/read) shape:[64, 1, 1, 64], type:INT8 RO +4096 bytes, buffer: 28, data:[., 2, ., $, ., ...] + T#19(MobilenetV1/Conv2d_6_depthwise/depthwise_weights/read) shape:[1, 3, 3, 64], +type:INT8 RO 576 bytes, buffer: 27, data:[., 1, z, ., U, ...] 
+ T#20(MobilenetV1/Conv2d_6_pointwise/weights/read) shape:[128, 1, 1, 64], type:INT8 RO +8192 bytes, buffer: 25, data:[5, ., ., ., V, ...] + T#21(MobilenetV1/Conv2d_7_depthwise/depthwise_weights/read) shape:[1, 3, 3, 128], +type:INT8 RO 1152 bytes, buffer: 23, data:[., ., ., ., ., ...] + T#22(MobilenetV1/Conv2d_7_pointwise/weights/read) shape:[128, 1, 1, 128], type:INT8 RO +16384 bytes, buffer: 21, data:[., ., ., ., ., ...] + T#23(MobilenetV1/Conv2d_8_depthwise/depthwise_weights/read) shape:[1, 3, 3, 128], +type:INT8 RO 1152 bytes, buffer: 71, data:[., ., ., ., Q, ...] + T#24(MobilenetV1/Conv2d_8_pointwise/weights/read) shape:[128, 1, 1, 128], type:INT8 RO +16384 bytes, buffer: 20, data:[@, ., 2, ., 8, ...] + T#25(MobilenetV1/Conv2d_9_depthwise/depthwise_weights/read) shape:[1, 3, 3, 128], +type:INT8 RO 1152 bytes, buffer: 80, data:[^, ., ~, ., ., ...] + T#26(MobilenetV1/Conv2d_9_pointwise/weights/read) shape:[128, 1, 1, 128], type:INT8 RO +16384 bytes, buffer: 16, data:[., ., , ., %, ...] + T#27(MobilenetV1/Logits/AvgPool_1a/AvgPool) shape:[1, 1, 1, 256], type:INT8 + T#28(MobilenetV1/Logits/Conv2d_1c_1x1/BiasAdd) shape:[1, 1, 1, 2], type:INT8 + T#29(MobilenetV1/Logits/Conv2d_1c_1x1/Conv2D_bias) shape:[2], type:INT32 RO 8 bytes, +buffer: 2, data:[16267, -17079] + T#30(MobilenetV1/Logits/Conv2d_1c_1x1/weights/read) shape:[2, 1, 1, 256], type:INT8 RO +512 bytes, buffer: 3, data:[., %, ., ., ., ...] + T#31(MobilenetV1/Logits/SpatialSqueeze) shape:[1, 2], type:INT8 + T#32(MobilenetV1/Logits/SpatialSqueeze_shape) shape:[2], type:INT32 RO 8 bytes, buffer: +1, data:[1, 2] + T#33(MobilenetV1/MobilenetV1/Conv2d_0/Conv2D_bias) shape:[8], type:INT32 RO 32 bytes, +buffer: 82, data:[3774, -107, -84394, -13908, 20697, ...] + T#34(MobilenetV1/MobilenetV1/Conv2d_0/Relu6) shape:[1, 48, 48, 8], type:INT8 + T#35(MobilenetV1/MobilenetV1/Conv2d_10_depthwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#36(MobilenetV1/MobilenetV1/Conv2d_10_depthwise/depthwise_bias) shape:[128], +type:INT32 RO 512 bytes, buffer: 22, data:[-6485, 294, 686, -6011, -5196, ...] + T#37(MobilenetV1/MobilenetV1/Conv2d_10_pointwise/Conv2D_bias) shape:[128], type:INT32 +RO 512 bytes, buffer: 70, data:[7116, 8066, 11755, 11674, 9983, ...] + T#38(MobilenetV1/MobilenetV1/Conv2d_10_pointwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#39(MobilenetV1/MobilenetV1/Conv2d_11_depthwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#40(MobilenetV1/MobilenetV1/Conv2d_11_depthwise/depthwise_bias) shape:[128], +type:INT32 RO 512 bytes, buffer: 19, data:[7735, 5235, 4334, -6485, 9397, ...] + T#41(MobilenetV1/MobilenetV1/Conv2d_11_pointwise/Conv2D_bias) shape:[128], type:INT32 +RO 512 bytes, buffer: 11, data:[2947, 10152, -7865, -554, -13760, ...] + T#42(MobilenetV1/MobilenetV1/Conv2d_11_pointwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#43(MobilenetV1/MobilenetV1/Conv2d_12_depthwise/Relu6) shape:[1, 3, 3, 128], type:INT8 + T#44(MobilenetV1/MobilenetV1/Conv2d_12_depthwise/depthwise_bias) shape:[128], +type:INT32 RO 512 bytes, buffer: 9, data:[-4755, 7899, -488, -2954, 2990, ...] + T#45(MobilenetV1/MobilenetV1/Conv2d_12_pointwise/Conv2D_bias) shape:[256], type:INT32 +RO 1024 bytes, buffer: 8, data:[-6269, -22458, 13332, -16368, 4435, ...] 
+ T#46(MobilenetV1/MobilenetV1/Conv2d_12_pointwise/Relu6) shape:[1, 3, 3, 256], type:INT8 + T#47(MobilenetV1/MobilenetV1/Conv2d_13_depthwise/Relu6) shape:[1, 3, 3, 256], type:INT8 + T#48(MobilenetV1/MobilenetV1/Conv2d_13_depthwise/depthwise_bias) shape:[256], +type:INT32 RO 1024 bytes, buffer: 6, data:[333, -4743, -310, -2471, 4804, ...] + T#49(MobilenetV1/MobilenetV1/Conv2d_13_pointwise/Conv2D_bias) shape:[256], type:INT32 +RO 1024 bytes, buffer: 4, data:[6677, -3593, 3754, 26316, -4761, ...] + T#50(MobilenetV1/MobilenetV1/Conv2d_13_pointwise/Relu6) shape:[1, 3, 3, 256], type:INT8 + T#51(MobilenetV1/MobilenetV1/Conv2d_1_depthwise/Relu6) shape:[1, 48, 48, 8], type:INT8 + T#52(MobilenetV1/MobilenetV1/Conv2d_1_depthwise/depthwise_bias) shape:[8], type:INT32 +RO 32 bytes, buffer: 56, data:[31132, 28, 273, -2692, 7409, ...] + T#53(MobilenetV1/MobilenetV1/Conv2d_1_pointwise/Conv2D_bias) shape:[16], type:INT32 RO +64 bytes, buffer: 36, data:[10064, 1130, -13056, -30284, -23349, ...] + T#54(MobilenetV1/MobilenetV1/Conv2d_1_pointwise/Relu6) shape:[1, 48, 48, 16], type:INT8 + T#55(MobilenetV1/MobilenetV1/Conv2d_2_depthwise/Relu6) shape:[1, 24, 24, 16], type:INT8 + T#56(MobilenetV1/MobilenetV1/Conv2d_2_depthwise/depthwise_bias) shape:[16], type:INT32 +RO 64 bytes, buffer: 48, data:[306, -158, 19181, -364, 6237, ...] + T#57(MobilenetV1/MobilenetV1/Conv2d_2_pointwise/Conv2D_bias) shape:[32], type:INT32 RO +128 bytes, buffer: 62, data:[-7649, 12287, -4433, 5851, -188, ...] + T#58(MobilenetV1/MobilenetV1/Conv2d_2_pointwise/Relu6) shape:[1, 24, 24, 32], type:INT8 + T#59(MobilenetV1/MobilenetV1/Conv2d_3_depthwise/Relu6) shape:[1, 24, 24, 32], type:INT8 + T#60(MobilenetV1/MobilenetV1/Conv2d_3_depthwise/depthwise_bias) shape:[32], type:INT32 +RO 128 bytes, buffer: 34, data:[7297, -498, 263, -1975, 2260, ...] + T#61(MobilenetV1/MobilenetV1/Conv2d_3_pointwise/Conv2D_bias) shape:[32], type:INT32 RO +128 bytes, buffer: 59, data:[-4742, -4160, 6985, 8647, 29773, ...] + T#62(MobilenetV1/MobilenetV1/Conv2d_3_pointwise/Relu6) shape:[1, 24, 24, 32], type:INT8 + T#63(MobilenetV1/MobilenetV1/Conv2d_4_depthwise/Relu6) shape:[1, 12, 12, 32], type:INT8 + T#64(MobilenetV1/MobilenetV1/Conv2d_4_depthwise/depthwise_bias) shape:[32], type:INT32 +RO 128 bytes, buffer: 31, data:[28588, 363, 27592, 22294, -4344, ...] + T#65(MobilenetV1/MobilenetV1/Conv2d_4_pointwise/Conv2D_bias) shape:[64], type:INT32 RO +256 bytes, buffer: 76, data:[12683, 36581, 6206, 1236, 15834, ...] + T#66(MobilenetV1/MobilenetV1/Conv2d_4_pointwise/Relu6) shape:[1, 12, 12, 64], type:INT8 + T#67(MobilenetV1/MobilenetV1/Conv2d_5_depthwise/Relu6) shape:[1, 12, 12, 64], type:INT8 + T#68(MobilenetV1/MobilenetV1/Conv2d_5_depthwise/depthwise_bias) shape:[64], type:INT32 +RO 256 bytes, buffer: 29, data:[-6353, 9090, -30, -1019, -496, ...] + T#69(MobilenetV1/MobilenetV1/Conv2d_5_pointwise/Conv2D_bias) shape:[64], type:INT32 RO +256 bytes, buffer: 84, data:[3895, -6563, -8843, -2066, -1372, ...] + T#70(MobilenetV1/MobilenetV1/Conv2d_5_pointwise/Relu6) shape:[1, 12, 12, 64], type:INT8 + T#71(MobilenetV1/MobilenetV1/Conv2d_6_depthwise/Relu6) shape:[1, 6, 6, 64], type:INT8 + T#72(MobilenetV1/MobilenetV1/Conv2d_6_depthwise/depthwise_bias) shape:[64], type:INT32 +RO 256 bytes, buffer: 26, data:[20437, -365, -2518, 20827, -904, ...] + T#73(MobilenetV1/MobilenetV1/Conv2d_6_pointwise/Conv2D_bias) shape:[128], type:INT32 RO +512 bytes, buffer: 24, data:[-10120, 9768, 3524, 3796, 6896, ...] 
+ T#74(MobilenetV1/MobilenetV1/Conv2d_6_pointwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#75(MobilenetV1/MobilenetV1/Conv2d_7_depthwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#76(MobilenetV1/MobilenetV1/Conv2d_7_depthwise/depthwise_bias) shape:[128], type:INT32 +RO 512 bytes, buffer: 78, data:[-3969, -1910, -2425, -114, 4456, ...] + T#77(MobilenetV1/MobilenetV1/Conv2d_7_pointwise/Conv2D_bias) shape:[128], type:INT32 RO +512 bytes, buffer: 83, data:[-13202, 13929, -4357, 19492, 1971, ...] + T#78(MobilenetV1/MobilenetV1/Conv2d_7_pointwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#79(MobilenetV1/MobilenetV1/Conv2d_8_depthwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#80(MobilenetV1/MobilenetV1/Conv2d_8_depthwise/depthwise_bias) shape:[128], type:INT32 +RO 512 bytes, buffer: 55, data:[-6169, -10, -2788, 14420, -7457, ...] + T#81(MobilenetV1/MobilenetV1/Conv2d_8_pointwise/Conv2D_bias) shape:[128], type:INT32 RO +512 bytes, buffer: 18, data:[155, -3073, 291, -902, -9942, ...] + T#82(MobilenetV1/MobilenetV1/Conv2d_8_pointwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#83(MobilenetV1/MobilenetV1/Conv2d_9_depthwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#84(MobilenetV1/MobilenetV1/Conv2d_9_depthwise/depthwise_bias) shape:[128], type:INT32 +RO 512 bytes, buffer: 17, data:[-2063, 10755, -12037, -6417, 2147, ...] + T#85(MobilenetV1/MobilenetV1/Conv2d_9_pointwise/Conv2D_bias) shape:[128], type:INT32 RO +512 bytes, buffer: 15, data:[-1872, -7549, 13994, 3191, -614, ...] + T#86(MobilenetV1/MobilenetV1/Conv2d_9_pointwise/Relu6) shape:[1, 6, 6, 128], type:INT8 + T#87(MobilenetV1/Predictions/Reshape_1) shape:[1, 2], type:INT8 + T#88(input) shape:[1, 96, 96, 1], type:INT8 +--------------------------------------------------------------- + Model size: 300568 bytes + Non-data buffer size: 81640 bytes (27.16 %) + Total data buffer size: 218928 bytes (72.84 %) + (Zero value buffers): 0 bytes (00.00 %) +* Buffers of TFLite model are mostly used for constant tensors. + And zero value buffers are buffers filled with zeros. + Non-data buffers area are used to store operators, subgraphs and etc. + You can find more details from +https://github.com/tensorflow/tensorflow/blob/master/tensorflow/lite/schema/schema.fbs +-------------------- +TfliteGetModel took 4 ticks (0 ms). + +DEPTHWISE_CONV_2D took 224622 ticks (8 ms). +DEPTHWISE_CONV_2D took 175917 ticks (7 ms). +CONV_2D took 249560 ticks (9 ms). +DEPTHWISE_CONV_2D took 84958 ticks (3 ms). +CONV_2D took 145817 ticks (5 ms). +DEPTHWISE_CONV_2D took 164915 ticks (6 ms). +CONV_2D took 197283 ticks (7 ms). +DEPTHWISE_CONV_2D took 41304 ticks (1 ms). +CONV_2D took 99472 ticks (3 ms). +DEPTHWISE_CONV_2D took 79969 ticks (3 ms). +CONV_2D took 151505 ticks (6 ms). +DEPTHWISE_CONV_2D took 20053 ticks (0 ms). +CONV_2D took 78521 ticks (3 ms). +DEPTHWISE_CONV_2D took 38127 ticks (1 ms). +CONV_2D took 132862 ticks (5 ms). +DEPTHWISE_CONV_2D took 38127 ticks (1 ms). +CONV_2D took 132865 ticks (5 ms). +DEPTHWISE_CONV_2D took 38127 ticks (1 ms). +CONV_2D took 132859 ticks (5 ms). +DEPTHWISE_CONV_2D took 38127 ticks (1 ms). +CONV_2D took 132851 ticks (5 ms). +DEPTHWISE_CONV_2D took 38127 ticks (1 ms). +CONV_2D took 132853 ticks (5 ms). +DEPTHWISE_CONV_2D took 9585 ticks (0 ms). +CONV_2D took 78470 ticks (3 ms). +DEPTHWISE_CONV_2D took 17473 ticks (0 ms). +CONV_2D took 143615 ticks (5 ms). +AVERAGE_POOL_2D took 2229 ticks (0 ms). +CONV_2D took 386 ticks (0 ms). +RESHAPE took 28 ticks (0 ms). +SOFTMAX took 163 ticks (0 ms). 
+ +"Unique Tag","Total ticks across all events with that tag." +DEPTHWISE_CONV_2D, 1009431 +CONV_2D, 1808919 +AVERAGE_POOL_2D, 2229 +RESHAPE, 28 +SOFTMAX, 163 +"total number of ticks", 2820770 + +[[ Table ]]: Arena + Arena Bytes % Arena + Total | 84436 | 100.00 +NonPersistent | 55296 | 65.49 + Persistent | 29140 | 34.51 + +[[ Table ]]: Allocations + Allocation Id Used Requested Count % Memory + Eval tensor data | 0 | 1068 | 1068 | 89 | 1.26 + Persistent tensor data | 1 | 64 | 64 | 2 | 0.08 +Persistent quantization data | 2 | 40 | 40 | 4 | 0.05 + Persistent buffer data | 3 | 25872 | 25704 | 90 | 30.64 + Tensor variable buffer data | 4 | 0 | 0 | 0 | 0.00 + Node and registration array | 5 | 992 | 992 | 31 | 1.17 + Operation data | 6 | 0 | 0 | 0 | 0.00 + +Application exit code: 0. + +Info: /OSCI/SystemC: Simulation stopped by user. +[warning ][main@0][01 ns] Simulation stopped by user + +--- FVP_MPS3_Corstone_SSE_300 statistics: ------------------------------------- +Simulated time : 2.879993s +User time : 2.027100s +System time : 0.135914s +Wall time : 2.663214s +Performance index : 1.08 +cpu0 : 27.03 MIPS ( 71999848 Inst) +Memory highwater mark : 0x11919000 bytes ( 0.275 GB ) +------------------------------------------------------------------------------- +``` diff --git a/tensorflow/lite/micro/examples/micro_speech/micro_speech_binary_mock_test.sh b/tensorflow/lite/micro/tools/benchmarking/analyze_model.py old mode 100755 new mode 100644 similarity index 56% rename from tensorflow/lite/micro/examples/micro_speech/micro_speech_binary_mock_test.sh rename to tensorflow/lite/micro/tools/benchmarking/analyze_model.py index 0515d7c486e..f2ff0139ede --- a/tensorflow/lite/micro/examples/micro_speech/micro_speech_binary_mock_test.sh +++ b/tensorflow/lite/micro/tools/benchmarking/analyze_model.py @@ -1,5 +1,5 @@ -#!/bin/bash -# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +#!/usr/bin/env python3 +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,18 +13,23 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== -# -# Bash unit tests for the example binary. -set -e +from absl import app +from absl import flags + +import tensorflow as tf + +_MODEL_PATH = flags.DEFINE_string( + name='model_file', + default='', + help='path for the .tflite model file.', +) + -OUTPUT_LOG_FILE=${TEST_TMPDIR}/output_log.txt -${TEST_SRCDIR}/${TEST_WORKSPACE}/tensorflow/lite/micro/examples/micro_speech/micro_speech_mock 2>&1 | head > ${OUTPUT_LOG_FILE} +def _main(_): + """outputs model analysis to stdout/stderr""" + tf.lite.experimental.Analyzer.analyze(model_path=_MODEL_PATH.value) -if ! grep -q 'Heard ' ${OUTPUT_LOG_FILE}; then - echo "ERROR: Expected logs not found in output '${OUTPUT_LOG_FILE}'" - exit 1 -fi -echo -echo "SUCCESS: micro_speech_binary_mock_test PASSED" +if __name__ == '__main__': + app.run(_main) diff --git a/tensorflow/lite/micro/tools/benchmarking/collect_meta_data.sh b/tensorflow/lite/micro/tools/benchmarking/collect_meta_data.sh new file mode 100755 index 00000000000..c60bdf3ed72 --- /dev/null +++ b/tensorflow/lite/micro/tools/benchmarking/collect_meta_data.sh @@ -0,0 +1,177 @@ +#!/usr/bin/env bash +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. 
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+#
+# Collect generic benchmark meta data and insert resulting strings into
+# the file designated by TEMPLATE_FILE.
+#
+# Takes no arguments.
+#
+# Uses the following environment variables:
+#   TEMPLATE_FILE - path to the template source file
+#   GENERATED_FILE - path to the generated source file with substituted strings
+#   TENSORFLOW_ROOT - path to the root of the source tree
+#   MODEL_FILE - path to the .tflite model file
+#   CC - path to C compiler
+#   CXX - path to C++ compiler
+#   CC_FLAGS - C compiler flags
+#   CXX_FLAGS - C++ compiler flags
+#   KERNEL_OPTIMIZATION - kernel optimization flags
+#   CORE_OPTIMIZATION - core optimization flags
+#   THIRD_PARTY_KERNEL_OPTIMIZATION - third party kernel optimization flags
+#   TARGET - target platform (xtensa, cortex_m_corstone_300, etc.)
+#   TARGET_ARCH - target architecture (hifi5, cortex-m0, etc.)
+#   OPTIMIZED_KERNEL - optimized kernel (xtensa, cmsis_nn, etc.)
+#   BUILD_TYPE - type of build (default, release, etc.)
+#   XTENSA_CORE - Xtensa core specification
+#   XTENSA_BASE - Xtensa base install directory
+#   XTENSA_TOOLS_VERSION - Xtensa tooling version
+
+
+set -e
+
+source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/ci_build/helper_functions.sh
+
+function substitute_strings() {
+  search="// %%%_$1_%%%"
+  lines=$(fold -w 90 -s <<< "$2")
+  SAVED_IFS=${IFS}
+  IFS=$'\n' lines_array=( ${lines} )
+  IFS=${SAVED_IFS}
+  replacement=()
+  for line in "${lines_array[@]}"; do
+    line=$(sed -e 's/"/\\"/g' <<< "${line}")
+    line=$(printf '"%s",\n  ' "${line}")
+    replacement+=( "${line}" )
+  done
+
+  tempfile=$(mktemp)
+
+  SEARCH_PATTERN="$search" REPLACEMENT_PATTERN="${replacement[@]}" awk '
+    BEGIN {
+      search = ENVIRON["SEARCH_PATTERN"]
+      replacement = ENVIRON["REPLACEMENT_PATTERN"]
+    }
+    s = index($0,search) {
+      $0 = substr($0,1,s-1) replacement substr($0,s+length(search))
+    }
+    { print }
+  ' "${GENERATED_FILE}" > ${tempfile}
+  mv ${tempfile} "${GENERATED_FILE}"
+}
+
+mkdir -p $(dirname ${GENERATED_FILE})
+cp -p ${TEMPLATE_FILE} ${GENERATED_FILE}
+
+# model analysis and SHA1
+if [[ ${MODEL_FILE} ]]; then
+  python3 -m pip install absl-py tensorflow
+  result=$(python3 \
+    "${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/benchmarking/analyze_model.py" \
+    --model_file="${MODEL_FILE}" \
+  )
+  substitute_strings model_analysis_strings "${result}"
+
+  result=$(shasum -b "${MODEL_FILE}" | cut -f 1 -d ' ')
+  substitute_strings model_sha1_strings "${result}"
+fi
+
+# compile date
+result=$(date)
+substitute_strings compilation_date_strings "${result}"
+
+GIT_TENSORFLOW_ROOT="${TENSORFLOW_ROOT:-./}"
+set +e
+# Git repo commit information
+result=$(cd ${GIT_TENSORFLOW_ROOT} && git rev-parse --verify HEAD)
+if [[ $? != 0 ]]; then
+  result=""
+fi
+substitute_strings git_commit_strings "${result}"
+
+# Git repo status information
+result=$(cd ${GIT_TENSORFLOW_ROOT} && git status)
+if [[ $?
!= 0 ]]; then + result="" +fi +substitute_strings git_status_strings "${result}" +set -e + +# Compiler information +result="${CC}" +substitute_strings cc_name_strings "${result}" +result=$("${CC}" --version) +substitute_strings cc_version_strings "${result}" +result="${CC_FLAGS}" +substitute_strings cc_flags_strings "${result}" + +result="${CXX}" +substitute_strings cxx_name_strings "${result}" +result=$("${CXX}" --version) +substitute_strings cxx_version_strings "${result}" +result="${CXX_FLAGS}" +substitute_strings cxx_flags_strings "${result}" + +result="kernel= ${KERNEL_OPTIMIZATION}" +result+=" core= ${CORE_OPTIMIZATION}" +result+=" third-party-kernel= ${THIRD_PARTY_KERNEL_OPTIMIZATION}" +substitute_strings optimization_flag_strings "${result}" + +# Target information +TARGET="${TARGET:-linux}" +TARGET_ARCH="${TARGET_ARCH:-x86}" +OPTIMIZED_KERNEL="${OPTIMIZED_KERNEL:-none}" +BUILD_TYPE="${BUILD_TYPE:-default}" +result=$(printf 'TARGET=%s\nTARGET_ARCH=%s\nOPTIMIZATION=%s\nBUILD_TYPE=%s\n' \ + "${TARGET}" \ + "${TARGET_ARCH}" \ + "${OPTIMIZED_KERNEL}" \ + "${BUILD_TYPE}" \ +) +if [[ ${XTENSA_CORE} ]]; then + result+=$(printf '\nXTENSA_CORE=%s' "${XTENSA_CORE}") + result+=$(printf '\nXTENSA_BASE=%s' "${XTENSA_BASE}") + result+=$(printf '\nXTENSA_TOOLS_VERSION=%s' "${XTENSA_TOOLS_VERSION}") +fi +substitute_strings target_info_strings "${result}" + +download_scripts=() +download_script_args=( "--no-downloads" ) +if [[ ${OPTIMIZED_KERNEL} == "cmsis_nn" ]]; then + download_scripts+=( "${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh" ) + download_script_args+=( "${TENSORFLOW_ROOT}" ) +elif [[ ${OPTIMIZED_KERNEL} == "xtensa" ]]; then + download_script_args+=( "${TARGET_ARCH}" "${TENSORFLOW_ROOT}" ) + if [[ ${TARGET_ARCH} =~ ^(vision_p6)$ ]]; then + download_scripts+=( "${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh" ) + elif [[ ${TARGET_ARCH} =~ ^(hifi3|hifi4|hifi5)$ ]]; then + download_scripts+=( "${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh" ) + download_scripts+=( "${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/ext_libs/xtensa_ndsp_download.sh" ) + fi +fi + +if [[ ${#download_scripts[@]} -gt 0 ]]; then + results_url= + results_md5= + for script in "${download_scripts[@]}"; do + results=$("${script}" "${download_script_args[@]}" 2>&1) + url=$(sed -rn 's/^LIBRARY_URL=(.*)$/\1/p' <<< "${results}") + results_url+=$(printf '\n%s' "${url}") + md5=$(sed -rn 's/^LIBRARY_MD5=(.*)$/\1/p' <<< "${results}") + results_md5+=$(printf '\n%s' "${md5}") + done + substitute_strings nn_library_url_strings "${results_url}" + substitute_strings nn_library_md5_strings "${results_md5}" +fi diff --git a/tensorflow/lite/micro/tools/benchmarking/generic_model_benchmark.cc b/tensorflow/lite/micro/tools/benchmarking/generic_model_benchmark.cc new file mode 100644 index 00000000000..9874a631464 --- /dev/null +++ b/tensorflow/lite/micro/tools/benchmarking/generic_model_benchmark.cc @@ -0,0 +1,223 @@ +/* Copyright 2024 The TensorFlow Authors. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <cstdint>
+#include <memory>
+#include <random>
+#include <type_traits>
+
+#include "tensorflow/lite/c/c_api_types.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_log.h"
+#include "tensorflow/lite/micro/micro_mutable_op_resolver.h"
+#include "tensorflow/lite/micro/micro_op_resolver.h"
+#include "tensorflow/lite/micro/micro_profiler.h"
+#include "tensorflow/lite/micro/recording_micro_allocator.h"
+#include "tensorflow/lite/micro/recording_micro_interpreter.h"
+#include "tensorflow/lite/micro/system_setup.h"
+#include "tensorflow/lite/micro/tools/benchmarking/metrics.h"
+#include "tensorflow/lite/micro/tools/benchmarking/op_resolver.h"
+#include "tensorflow/lite/micro/tools/benchmarking/show_meta_data.h"
+#include "tensorflow/lite/schema/schema_generated.h"
+
+#if defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+#if !defined(GENERIC_BENCHMARK_MODEL_HEADER_PATH)
+#error "GENERIC_BENCHMARK_MODEL_HEADER_PATH missing from CXXFLAGS"
+#endif  // !defined(GENERIC_BENCHMARK_MODEL_HEADER_PATH)
+#if !defined(GENERIC_BENCHMARK_MODEL_NAME)
+#error "GENERIC_BENCHMARK_MODEL_NAME missing from CXXFLAGS"
+#endif  // !defined(GENERIC_BENCHMARK_MODEL_NAME)
+
+#include GENERIC_BENCHMARK_MODEL_HEADER_PATH
+
+#define __MODEL_DATA(x) g_##x##_model_data
+#define _MODEL_DATA(x) __MODEL_DATA(x)
+#define MODEL_DATA _MODEL_DATA(GENERIC_BENCHMARK_MODEL_NAME)
+#define __MODEL_SIZE(x) g_##x##_model_data_size
+#define _MODEL_SIZE(x) __MODEL_SIZE(x)
+#define MODEL_SIZE _MODEL_SIZE(GENERIC_BENCHMARK_MODEL_NAME)
+
+#endif  // defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+
+/*
+ * Generic model benchmark. Evaluates runtime performance of a provided model
+ * with random inputs.
+ */
+
+namespace tflite {
+
+namespace {
+
+using Profiler = ::tflite::MicroProfiler;
+
+// Seed used for the random input. Input data shouldn't affect invocation
+// timing, so randomness isn't really needed.
+constexpr uint32_t kRandomSeed = 0xFB;
+
+#if !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+constexpr size_t kTensorArenaSize = 3e6;
+constexpr size_t kModelSize = 2e6;
+#elif defined(GENERIC_BENCHMARK_TENSOR_ARENA_SIZE)
+constexpr size_t kTensorArenaSize = GENERIC_BENCHMARK_TENSOR_ARENA_SIZE;
+#else
+constexpr size_t kTensorArenaSize = 5e6 - MODEL_SIZE;
+#endif  // !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+
+constexpr int kNumResourceVariable = 100;
+
+void SetRandomInput(const uint32_t random_seed,
+                    tflite::MicroInterpreter& interpreter) {
+  std::mt19937 eng(random_seed);
+  std::uniform_int_distribution<uint32_t> dist(0, 255);
+
+  for (size_t i = 0; i < interpreter.inputs_size(); ++i) {
+    TfLiteTensor* input = interpreter.input_tensor(i);
+
+    // Pre-populate input tensor with random values.
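+    // The input buffer is filled one byte at a time regardless of the
+    // tensor's element type; arbitrary values are sufficient here because
+    // only invocation timing is being measured, not model accuracy.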
+    int8_t* input_values = tflite::GetTensorData<int8_t>(input);
+    for (size_t j = 0; j < input->bytes; ++j) {
+      input_values[j] = dist(eng);
+    }
+  }
+}
+
+#if !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+bool ReadFile(const char* file_name, void* buffer, size_t buffer_size) {
+  std::unique_ptr<FILE, decltype(&fclose)> file(fopen(file_name, "rb"),
+                                                fclose);
+
+  const size_t bytes_read =
+      fread(buffer, sizeof(char), buffer_size, file.get());
+  if (ferror(file.get())) {
+    MicroPrintf("Unable to read model file: %d\n", ferror(file.get()));
+    return false;
+  }
+  if (!feof(file.get())) {
+    // Note that http://b/297592546 can mean that this error message is
+    // confusing.
+    MicroPrintf(
+        "Model buffer (%d bytes) is too small for the model (%d bytes).\n",
+        buffer_size, bytes_read);
+    return false;
+  }
+  if (bytes_read == 0) {
+    MicroPrintf("No bytes read from model file.\n");
+    return false;
+  }
+
+  return true;
+}
+#endif  // !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+
+int Benchmark(const uint8_t* model_data, tflite::PrettyPrintType print_type) {
+  Profiler profiler;
+  alignas(16) static uint8_t tensor_arena[kTensorArenaSize];
+
+  uint32_t event_handle = profiler.BeginEvent("TfliteGetModel");
+  const tflite::Model* model = tflite::GetModel(model_data);
+  profiler.EndEvent(event_handle);
+
+  TflmOpResolver op_resolver;
+  TF_LITE_ENSURE_STATUS(CreateOpResolver(op_resolver));
+
+  tflite::RecordingMicroAllocator* allocator(
+      tflite::RecordingMicroAllocator::Create(tensor_arena, kTensorArenaSize));
+  tflite::RecordingMicroInterpreter interpreter(
+      model, op_resolver, allocator,
+      tflite::MicroResourceVariables::Create(allocator, kNumResourceVariable),
+      &profiler);
+  TF_LITE_ENSURE_STATUS(interpreter.AllocateTensors());
+
+  profiler.Log();
+  profiler.ClearEvents();
+
+  MicroPrintf("");  // null MicroPrintf serves as a newline.
+
+  // For streaming models, the interpreter will return kTfLiteAbort if the
+  // model does not yet have enough data to make an inference. As such, we
+  // need to invoke the interpreter multiple times until we either receive an
+  // error or kTfLiteOk. This loop also works for non-streaming models, as
+  // they'll just return kTfLiteOk after the first invocation.
+  uint32_t seed = kRandomSeed;
+  while (true) {
+    SetRandomInput(seed++, interpreter);
+    TfLiteStatus status = interpreter.Invoke();
+    if ((status != kTfLiteOk) && (static_cast<int>(status) != kTfLiteAbort)) {
+      MicroPrintf("Model interpreter invocation failed: %d\n", status);
+      return -1;
+    }
+
+    profiler.Log();
+    MicroPrintf("");  // null MicroPrintf serves as a newline.
+    profiler.LogTicksPerTagCsv();
+    MicroPrintf("");  // null MicroPrintf serves as a newline.
+    profiler.ClearEvents();
+
+    if (status == kTfLiteOk) {
+      break;
+    }
+  }
+
+  LogAllocatorEvents(*allocator, print_type);
+
+  return 0;
+}
+}  // namespace
+}  // namespace tflite
+
+#if !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+void usage(const char* prog_name) {
+  MicroPrintf("usage: %s filename [--csv]", prog_name);
+}
+#endif  // !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+
+int main(int argc, char** argv) {
+  // Which format should be used to output debug information.
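+  // Defaults to the human-readable table format. When the model is loaded
+  // from a file (no builtin model), a "--csv" command-line argument switches
+  // the allocation report to CSV output.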
+  tflite::PrettyPrintType print_type = tflite::PrettyPrintType::kTable;
+  tflite::InitializeTarget();
+
+#if !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+  if (argc < 2 || argc > 3) {
+    usage(argv[0]);
+    return -1;
+  }
+  const char* model_filename = argv[1];
+
+  if (argc == 3) {
+    if (std::strcmp(argv[2], "--csv") == 0) {
+      print_type = tflite::PrettyPrintType::kCsv;
+    } else {
+      usage(argv[0]);
+      return -1;
+    }
+  }
+
+  alignas(16) static uint8_t model_data[tflite::kModelSize];
+
+  if (!tflite::ReadFile(model_filename, model_data, tflite::kModelSize)) {
+    return -1;
+  }
+#else
+  const uint8_t* model_data = MODEL_DATA;
+#endif  // !defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+
+  MicroPrintf("\nConfigured arena size = %d\n", tflite::kTensorArenaSize);
+  tflite::GenericBenchmarkShowMetaData();
+  return tflite::Benchmark(model_data, print_type);
+}
diff --git a/tensorflow/lite/micro/tools/benchmarking/metrics.cc b/tensorflow/lite/micro/tools/benchmarking/metrics.cc
new file mode 100644
index 00000000000..3a4bf7e4917
--- /dev/null
+++ b/tensorflow/lite/micro/tools/benchmarking/metrics.cc
@@ -0,0 +1,318 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include "tensorflow/lite/micro/tools/benchmarking/metrics.h"
+
+#include <algorithm>
+#include <cstdint>
+#include <cstring>
+#include <tuple>
+#include <type_traits>
+
+#include "tensorflow/lite/kernels/internal/compatibility.h"
+#include "tensorflow/lite/micro/micro_log.h"
+
+namespace tflite {
+
+namespace {
+
+struct LogArenaRecord {
+  const char* title;
+  int allocations;
+  float percentage;
+};
+
+struct LogAllocationRecord {
+  const char* title;
+  int type;
+  int used_bytes;
+  int requested_bytes;
+  int count;
+  float percentage;
+};
+
+constexpr int kArenaRows = 3;
+constexpr int kArenaColumns = 3;
+
+constexpr int kAllocationTypes = 7;
+constexpr int kAllocationColumns = 6;
+
+constexpr int kMaxBufSize = 100;
+
+LogArenaRecord GetLogArenaRecord(
+    const tflite::RecordingSingleArenaBufferAllocator* allocator,
+    int row_index) {
+  TFLITE_DCHECK(row_index < kArenaRows);
+
+  const size_t total_bytes = allocator->GetUsedBytes();
+  const size_t allocations[] = {total_bytes,
+                                allocator->GetNonPersistentUsedBytes(),
+                                allocator->GetPersistentUsedBytes()};
+  static_assert(std::extent<decltype(allocations)>::value == kArenaRows,
+                "kArenaRows mismatch");
+  const char* titles[] = {"Total", "NonPersistent", "Persistent"};
+  static_assert(std::extent<decltype(titles)>::value == kArenaRows,
+                "kArenaRows mismatch");
+
+  LogArenaRecord record = {};
+  record.title = titles[row_index];
+  record.allocations = allocations[row_index];
+  record.percentage = record.allocations * 100.0f / total_bytes;
+
+  return record;
+}
+
+LogAllocationRecord GetLogAllocationRecord(
+    const tflite::RecordingMicroAllocator& allocator, int row_index) {
+  TFLITE_DCHECK(row_index < kAllocationTypes);
+
+  const tflite::RecordedAllocationType types[] = {
+      tflite::RecordedAllocationType::kTfLiteEvalTensorData,
+      tflite::RecordedAllocationType::kPersistentTfLiteTensorData,
+      tflite::RecordedAllocationType::kPersistentTfLiteTensorQuantizationData,
+      tflite::RecordedAllocationType::kPersistentBufferData,
+      tflite::RecordedAllocationType::kTfLiteTensorVariableBufferData,
+      tflite::RecordedAllocationType::kNodeAndRegistrationArray,
+      tflite::RecordedAllocationType::kOpData};
+  static_assert(std::extent<decltype(types)>::value == kAllocationTypes,
+                "kAllocationTypes mismatch");
+  const char* titles[] = {"Eval tensor data",
+                          "Persistent tensor data",
+                          "Persistent quantization data",
+                          "Persistent buffer data",
+                          "Tensor variable buffer data",
+                          "Node and registration array",
+                          "Operation data"};
+  static_assert(std::extent<decltype(titles)>::value == kAllocationTypes,
+                "kAllocationTypes mismatch");
+  const size_t total_bytes =
+      allocator.GetSimpleMemoryAllocator()->GetUsedBytes();
+  tflite::RecordedAllocation allocation =
+      allocator.GetRecordedAllocation(types[row_index]);
+
+  LogAllocationRecord record = {};
+  record.title = titles[row_index];
+  record.type = static_cast<int>(types[row_index]);
+  record.used_bytes = allocation.used_bytes;
+  record.requested_bytes = allocation.requested_bytes;
+  record.count = allocation.count;
+  record.percentage = allocation.used_bytes * 100.0f / total_bytes;
+
+  return record;
+}
+
+template <int kColumns>
+void UpdateColumnWidths(int (&widths)[kColumns], const char* s[kColumns]) {
+  for (int i = 0; i < kColumns; i++) {
+    widths[i] = std::max(widths[i], static_cast<int>(std::strlen(s[i])));
+  }
+}
+
+void UpdateColumnWidths(int (&widths)[kArenaColumns],
+                        const LogArenaRecord& record) {
+  char buf[kMaxBufSize];
+  int count;
+
+  count = MicroSnprintf(buf, kMaxBufSize, "%s", record.title);
+  widths[0] = std::max(widths[0], count);
+  count = MicroSnprintf(buf, kMaxBufSize, "%d", record.allocations);
+  widths[1] = std::max(widths[1], count);
+  count = MicroSnprintf(buf, kMaxBufSize, "%.2f",
+                        static_cast<double>(record.percentage));
+  widths[2] = std::max(widths[2], count);
+}
+
+void UpdateColumnWidths(int (&widths)[kAllocationColumns],
+                        const LogAllocationRecord& record) {
+  char buf[kMaxBufSize];
+  int count;
+
+  count = MicroSnprintf(buf, kMaxBufSize, "%s", record.title);
+  widths[0] = std::max(widths[0], count);
+  count = MicroSnprintf(buf, kMaxBufSize, "%d", record.type);
+  widths[1] = std::max(widths[1], count);
+  count = MicroSnprintf(buf, kMaxBufSize, "%d", record.used_bytes);
+  widths[2] = std::max(widths[2], count);
+  count = MicroSnprintf(buf, kMaxBufSize, "%d", record.requested_bytes);
+  widths[3] = std::max(widths[3], count);
+  count = MicroSnprintf(buf, kMaxBufSize, "%d", record.count);
+  widths[4] = std::max(widths[4], count);
+  count = MicroSnprintf(buf, kMaxBufSize, "%.2f",
+                        static_cast<double>(record.percentage));
+  widths[5] = std::max(widths[5], count);
+}
+
+using BufferDatum = std::tuple<char*, char*>;
+
+template <typename T>
+BufferDatum AddTableColumnValue(const BufferDatum& buffer, const char* format,
+                                int column_width, T value,
+                                const char* separator = nullptr) {
+  char* p;
+  char* p_end;
+  std::tie(p, p_end) = buffer;
+  int count = MicroSnprintf(p, p_end - p, format, column_width, value);
+  p += count;
+  if (separator != nullptr && p < p_end) {
+    count = MicroSnprintf(p, p_end - p, separator);
+    p += count;
+  }
+
+  if (p > p_end) {
+    p = p_end;
+  }
+
+  return std::make_tuple(p, p_end);
+}
+
+}  // namespace
+
+void LogArenaAllocations(
+    const tflite::RecordingSingleArenaBufferAllocator* allocator,
+    const PrettyPrintType type) {
+  const char* headers[] = {"Arena", "Bytes", "%% Arena"};
+  static_assert(std::extent<decltype(headers)>::value == kArenaColumns,
+                "kArenaColumns mismatch");
+  char buffer[kMaxBufSize];
+  BufferDatum buffer_datum =
+      std::make_tuple(std::begin(buffer), std::end(buffer));
+  int column_widths[kArenaColumns] = {};
+
+  const char* output_type;
+  const char* string_format;
+  if (type == PrettyPrintType::kCsv) {
+    output_type = "CSV";
+    string_format = "\"%*s\"";
+  } else {
+    output_type = "Table";
+    string_format = "%*s";
+
+    UpdateColumnWidths(column_widths, headers);
+    for (int i = 0; i < kArenaRows; i++) {
+      LogArenaRecord record = GetLogArenaRecord(allocator, i);
+      UpdateColumnWidths(column_widths, record);
+    }
+  }
+
+  MicroPrintf("[[ %s ]]: Arena", output_type);
+
+  for (int i = 0; i < kArenaColumns; i++) {
+    // create header
+    const char* separator = nullptr;
+    if (i != kArenaColumns - 1) {
+      // separator for all but last column value
+      if (type == PrettyPrintType::kCsv) {
+        separator = ",";
+      } else {
+        separator = " ";
+      }
+    }
+    buffer_datum = AddTableColumnValue(buffer_datum, string_format,
+                                       column_widths[i], headers[i], separator);
+  }
+  MicroPrintf(buffer);
+
+  for (int i = 0; i < kArenaRows; ++i) {
+    // create rows
+    const char* separator = (type == PrettyPrintType::kCsv) ? "," : " | ";
+    buffer_datum = std::make_tuple(std::begin(buffer), std::end(buffer));
+    LogArenaRecord record = GetLogArenaRecord(allocator, i);
+    buffer_datum = AddTableColumnValue(
+        buffer_datum, string_format, column_widths[0], record.title, separator);
+    buffer_datum = AddTableColumnValue(buffer_datum, "%*d", column_widths[1],
+                                       record.allocations, separator);
+    buffer_datum = AddTableColumnValue(buffer_datum, "%*.2f", column_widths[2],
+                                       static_cast<double>(record.percentage));
+    MicroPrintf(buffer);
+  }
+
+  MicroPrintf("");  // output newline
+}
+
+void LogAllocations(const tflite::RecordingMicroAllocator& allocator,
+                    const PrettyPrintType type) {
+  const char* headers[] = {"Allocation", "Id",    "Used",
+                           "Requested",  "Count", "%% Memory"};
+  static_assert(std::extent<decltype(headers)>::value == kAllocationColumns,
+                "kAllocationColumns mismatch");
+  char buffer[kMaxBufSize];
+  BufferDatum buffer_datum =
+      std::make_tuple(std::begin(buffer), std::end(buffer));
+  int column_widths[kAllocationColumns] = {};
+
+  const char* output_type;
+  const char* string_format;
+  if (type == PrettyPrintType::kCsv) {
+    output_type = "CSV";
+    string_format = "\"%*s\"";
+  } else {
+    output_type = "Table";
+    string_format = "%*s";
+
+    UpdateColumnWidths(column_widths, headers);
+    for (int i = 0; i < kAllocationTypes; i++) {
+      LogAllocationRecord record = GetLogAllocationRecord(allocator, i);
+      UpdateColumnWidths(column_widths, record);
+    }
+  }
+
+  MicroPrintf("[[ %s ]]: Allocations", output_type);
+
+  for (int i = 0; i < kAllocationColumns; i++) {
+    // create header
+    const char* separator = nullptr;
+    if (i != kAllocationColumns - 1) {
+      // separator for all but last column value
+      if (type == PrettyPrintType::kCsv) {
+        separator = ",";
+      } else {
+        separator = " ";
+      }
+    }
+    buffer_datum = AddTableColumnValue(buffer_datum, string_format,
+                                       column_widths[i], headers[i], separator);
+  }
+  MicroPrintf(buffer);
+
+  for (int i = 0; i < kAllocationTypes; ++i) {
+    // create rows
+    const char* separator = (type == PrettyPrintType::kCsv) ? "," : " | ";
+    buffer_datum = std::make_tuple(std::begin(buffer), std::end(buffer));
+    LogAllocationRecord record = GetLogAllocationRecord(allocator, i);
+    buffer_datum = AddTableColumnValue(
+        buffer_datum, string_format, column_widths[0], record.title, separator);
+    buffer_datum = AddTableColumnValue(buffer_datum, "%*d", column_widths[1],
+                                       record.type, separator);
+    buffer_datum = AddTableColumnValue(buffer_datum, "%*d", column_widths[2],
+                                       record.used_bytes, separator);
+    buffer_datum = AddTableColumnValue(buffer_datum, "%*d", column_widths[3],
+                                       record.requested_bytes, separator);
+    buffer_datum = AddTableColumnValue(buffer_datum, "%*d", column_widths[4],
+                                       record.count, separator);
+    buffer_datum = AddTableColumnValue(buffer_datum, "%*.2f", column_widths[5],
+                                       static_cast<double>(record.percentage));
+    MicroPrintf(buffer);
+  }
+
+  MicroPrintf("");  // output newline
+}
+
+void LogAllocatorEvents(const tflite::RecordingMicroAllocator& allocator,
+                        const PrettyPrintType type) {
+  LogArenaAllocations(allocator.GetSimpleMemoryAllocator(), type);
+  LogAllocations(allocator, type);
+}
+
+}  // namespace tflite
diff --git a/tensorflow/lite/micro/tools/benchmarking/metrics.h b/tensorflow/lite/micro/tools/benchmarking/metrics.h
new file mode 100644
index 00000000000..996cde186eb
--- /dev/null
+++ b/tensorflow/lite/micro/tools/benchmarking/metrics.h
@@ -0,0 +1,43 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#ifndef TFLM_BENCHMARK_INTERNAL_METRICS_H_
+#define TFLM_BENCHMARK_INTERNAL_METRICS_H_
+
+#include "tensorflow/lite/micro/micro_profiler.h"
+#include "tensorflow/lite/micro/recording_micro_allocator.h"
+
+namespace tflite {
+
+// Defines how formatted data is printed to stdout.
+enum class PrettyPrintType {
+  // Prints as a CSV file.
+  kCsv,
+  // Prints as a formatted table.
+  kTable,
+};
+
+// Logs the allocation events. Prints out two tables: one for the arena
+// allocations, and one broken down by TFLM allocation type.
+// Args:
+//   - allocator: The recording micro allocator used during the invocation
+//     process.
+//   - type: Which print format should be used to output the allocation data
+//     to stdout.
+void LogAllocatorEvents(const tflite::RecordingMicroAllocator& allocator,
+                        PrettyPrintType type);
+}  // namespace tflite
+
+#endif  // TFLM_BENCHMARK_INTERNAL_METRICS_H_
diff --git a/tensorflow/lite/micro/tools/benchmarking/op_resolver.h b/tensorflow/lite/micro/tools/benchmarking/op_resolver.h
new file mode 100644
index 00000000000..9b98849c472
--- /dev/null
+++ b/tensorflow/lite/micro/tools/benchmarking/op_resolver.h
@@ -0,0 +1,147 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#ifndef TFLM_BENCHMARK_OP_RESOLVER_H_ +#define TFLM_BENCHMARK_OP_RESOLVER_H_ + +#include + +#include "tensorflow/lite/micro/micro_mutable_op_resolver.h" +#include "tensorflow/lite/micro/micro_op_resolver.h" + +namespace tflite { + +using TflmOpResolver = MicroMutableOpResolver<113>; + +inline TfLiteStatus CreateOpResolver(TflmOpResolver& op_resolver) { + TF_LITE_ENSURE_STATUS(op_resolver.AddAbs()); + TF_LITE_ENSURE_STATUS(op_resolver.AddAdd()); + TF_LITE_ENSURE_STATUS(op_resolver.AddAddN()); + TF_LITE_ENSURE_STATUS(op_resolver.AddArgMax()); + TF_LITE_ENSURE_STATUS(op_resolver.AddArgMin()); + TF_LITE_ENSURE_STATUS(op_resolver.AddAssignVariable()); + TF_LITE_ENSURE_STATUS(op_resolver.AddAveragePool2D()); + TF_LITE_ENSURE_STATUS(op_resolver.AddBatchMatMul()); + TF_LITE_ENSURE_STATUS(op_resolver.AddBatchToSpaceNd()); + TF_LITE_ENSURE_STATUS(op_resolver.AddBroadcastArgs()); + TF_LITE_ENSURE_STATUS(op_resolver.AddBroadcastTo()); + TF_LITE_ENSURE_STATUS(op_resolver.AddCallOnce()); + TF_LITE_ENSURE_STATUS(op_resolver.AddCast()); + TF_LITE_ENSURE_STATUS(op_resolver.AddCeil()); + TF_LITE_ENSURE_STATUS(op_resolver.AddCircularBuffer()); + TF_LITE_ENSURE_STATUS(op_resolver.AddConcatenation()); + TF_LITE_ENSURE_STATUS(op_resolver.AddConv2D()); + TF_LITE_ENSURE_STATUS(op_resolver.AddCos()); + TF_LITE_ENSURE_STATUS(op_resolver.AddCumSum()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDelay()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDepthToSpace()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDepthwiseConv2D()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDequantize()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDetectionPostprocess()); + TF_LITE_ENSURE_STATUS(op_resolver.AddDiv()); + TF_LITE_ENSURE_STATUS(op_resolver.AddElu()); + TF_LITE_ENSURE_STATUS(op_resolver.AddEmbeddingLookup()); + TF_LITE_ENSURE_STATUS(op_resolver.AddEnergy()); + TF_LITE_ENSURE_STATUS(op_resolver.AddEqual()); + TF_LITE_ENSURE_STATUS(op_resolver.AddEthosU()); + TF_LITE_ENSURE_STATUS(op_resolver.AddExp()); + TF_LITE_ENSURE_STATUS(op_resolver.AddExpandDims()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFftAutoScale()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFill()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBank()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBankLog()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBankSpectralSubtraction()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFilterBankSquareRoot()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFloor()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFloorDiv()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFloorMod()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFramer()); + TF_LITE_ENSURE_STATUS(op_resolver.AddFullyConnected()); + TF_LITE_ENSURE_STATUS(op_resolver.AddGather()); + TF_LITE_ENSURE_STATUS(op_resolver.AddGatherNd()); + TF_LITE_ENSURE_STATUS(op_resolver.AddGreater()); + TF_LITE_ENSURE_STATUS(op_resolver.AddGreaterEqual()); + TF_LITE_ENSURE_STATUS(op_resolver.AddHardSwish()); + TF_LITE_ENSURE_STATUS(op_resolver.AddIf()); + TF_LITE_ENSURE_STATUS(op_resolver.AddIrfft()); + TF_LITE_ENSURE_STATUS(op_resolver.AddL2Normalization()); + 
TF_LITE_ENSURE_STATUS(op_resolver.AddL2Pool2D()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLeakyRelu()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLess()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLessEqual()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLog()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLogSoftmax()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLogicalAnd()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLogicalNot()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLogicalOr()); + TF_LITE_ENSURE_STATUS(op_resolver.AddLogistic()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMaxPool2D()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMaximum()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMean()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMinimum()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMirrorPad()); + TF_LITE_ENSURE_STATUS(op_resolver.AddMul()); + TF_LITE_ENSURE_STATUS(op_resolver.AddNeg()); + TF_LITE_ENSURE_STATUS(op_resolver.AddNotEqual()); + TF_LITE_ENSURE_STATUS(op_resolver.AddOverlapAdd()); + TF_LITE_ENSURE_STATUS(op_resolver.AddPCAN()); + TF_LITE_ENSURE_STATUS(op_resolver.AddPack()); + TF_LITE_ENSURE_STATUS(op_resolver.AddPad()); + TF_LITE_ENSURE_STATUS(op_resolver.AddPadV2()); + TF_LITE_ENSURE_STATUS(op_resolver.AddPrelu()); + TF_LITE_ENSURE_STATUS(op_resolver.AddQuantize()); + TF_LITE_ENSURE_STATUS(op_resolver.AddReadVariable()); + TF_LITE_ENSURE_STATUS(op_resolver.AddReduceMax()); + TF_LITE_ENSURE_STATUS(op_resolver.AddRelu()); + TF_LITE_ENSURE_STATUS(op_resolver.AddRelu6()); + TF_LITE_ENSURE_STATUS(op_resolver.AddReshape()); + TF_LITE_ENSURE_STATUS(op_resolver.AddResizeBilinear()); + TF_LITE_ENSURE_STATUS(op_resolver.AddResizeNearestNeighbor()); + TF_LITE_ENSURE_STATUS(op_resolver.AddRfft()); + TF_LITE_ENSURE_STATUS(op_resolver.AddRound()); + TF_LITE_ENSURE_STATUS(op_resolver.AddRsqrt()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSelectV2()); + TF_LITE_ENSURE_STATUS(op_resolver.AddShape()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSin()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSlice()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSoftmax()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSpaceToBatchNd()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSpaceToDepth()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSplit()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSplitV()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSqrt()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSquare()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSquaredDifference()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSqueeze()); + TF_LITE_ENSURE_STATUS(op_resolver.AddStacker()); + TF_LITE_ENSURE_STATUS(op_resolver.AddStridedSlice()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSub()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSum()); + TF_LITE_ENSURE_STATUS(op_resolver.AddSvdf()); + TF_LITE_ENSURE_STATUS(op_resolver.AddTanh()); + TF_LITE_ENSURE_STATUS(op_resolver.AddTranspose()); + TF_LITE_ENSURE_STATUS(op_resolver.AddTransposeConv()); + TF_LITE_ENSURE_STATUS(op_resolver.AddUnidirectionalSequenceLSTM()); + TF_LITE_ENSURE_STATUS(op_resolver.AddUnpack()); + TF_LITE_ENSURE_STATUS(op_resolver.AddVarHandle()); + TF_LITE_ENSURE_STATUS(op_resolver.AddWhile()); + TF_LITE_ENSURE_STATUS(op_resolver.AddWindow()); + TF_LITE_ENSURE_STATUS(op_resolver.AddZerosLike()); + + return kTfLiteOk; +} + +} // namespace tflite +#endif // TFLM_BENCHMARK_OP_RESOLVER_H_ diff --git a/tensorflow/lite/micro/tools/benchmarking/show_meta_data.cc.template b/tensorflow/lite/micro/tools/benchmarking/show_meta_data.cc.template new file mode 100644 index 00000000000..a2102a48e1c --- /dev/null +++ 
b/tensorflow/lite/micro/tools/benchmarking/show_meta_data.cc.template
@@ -0,0 +1,177 @@
+/* Copyright 2024 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <cstddef>
+#include <cstring>
+#include <type_traits>
+
+#include "tensorflow/lite/micro/micro_log.h"
+#include "tensorflow/lite/micro/tools/benchmarking/show_meta_data.h"
+
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+#include "NatureDSP_Signal_id.h"
+#include "xa_nnlib_standards.h"
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+
+namespace tflite {
+namespace {
+
+#if defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+const char* model_analysis_strings[] = {
+    // %%%_model_analysis_strings_%%%
+};
+
+const char* model_sha1_strings[] = {
+    // %%%_model_sha1_strings_%%%
+};
+#endif  // defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+
+const char* compilation_date_strings[] = {
+    // %%%_compilation_date_strings_%%%
+};
+
+const char* git_commit_strings[] = {
+    // %%%_git_commit_strings_%%%
+};
+
+const char* git_status_strings[] = {
+    // %%%_git_status_strings_%%%
+};
+
+const char* cc_name_strings[] = {
+    // %%%_cc_name_strings_%%%
+};
+
+const char* cc_version_strings[] = {
+    // %%%_cc_version_strings_%%%
+};
+
+const char* cc_flags_strings[] = {
+    // %%%_cc_flags_strings_%%%
+};
+
+const char* cxx_name_strings[] = {
+    // %%%_cxx_name_strings_%%%
+};
+
+const char* cxx_version_strings[] = {
+    // %%%_cxx_version_strings_%%%
+};
+
+const char* cxx_flags_strings[] = {
+    // %%%_cxx_flags_strings_%%%
+};
+
+const char* optimization_flag_strings[] = {
+    // %%%_optimization_flag_strings_%%%
+};
+
+const char* target_info_strings[] = {
+    // %%%_target_info_strings_%%%
+};
+
+#if defined(CMSIS_NN) || defined(HIFI3) || defined(HIFI4) || defined(HIFI5) || \
+    defined(VISION_P6)
+const char* nn_library_url_strings[] = {
+    // %%%_nn_library_url_strings_%%%
+};
+
+const char* nn_library_md5_strings[] = {
+    // %%%_nn_library_md5_strings_%%%
+};
+#endif  // defined(CMSIS_NN) || defined(HIFI3) || defined(HIFI4) ||
+        // defined(HIFI5) || defined(VISION_P6)
+
+void ShowStrings(const char* title, const char** str, const size_t count) {
+  MicroPrintf("%s%s", title, str[0]);
+  for (size_t i = 1; i < count; i++) {
+    MicroPrintf("%s", str[i]);
+  }
+}
+
+void ShowSeparator() { MicroPrintf("--------------------"); }
+
+}  // namespace
+
+void GenericBenchmarkShowMetaData() {
+  ShowSeparator();
+  ShowStrings("Compiled on:\n\n", compilation_date_strings,
+              std::extent<decltype(compilation_date_strings)>::value);
+
+  ShowSeparator();
+  ShowStrings("Git SHA: ", git_commit_strings,
+              std::extent<decltype(git_commit_strings)>::value);
+  ShowStrings("\nGit status:\n\n", git_status_strings,
+              std::extent<decltype(git_status_strings)>::value);
+
+  ShowSeparator();
+  ShowStrings("C compiler: ", cc_name_strings,
+              std::extent<decltype(cc_name_strings)>::value);
+  ShowStrings("Version:\n\n", cc_version_strings,
+              std::extent<decltype(cc_version_strings)>::value);
+  ShowStrings("\nFlags:\n\n", cc_flags_strings,
+              std::extent<decltype(cc_flags_strings)>::value);
+  ShowStrings("\nC++ compiler: ", cxx_name_strings,
+              std::extent<decltype(cxx_name_strings)>::value);
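+  // Each of the *_strings arrays printed here is populated at build time by
+  // collect_meta_data.sh, which substitutes the %%%_..._%%% markers in this
+  // template.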
+  ShowStrings("Version:\n\n", cxx_version_strings,
+              std::extent<decltype(cxx_version_strings)>::value);
+  ShowStrings("\nFlags:\n\n", cxx_flags_strings,
+              std::extent<decltype(cxx_flags_strings)>::value);
+  ShowStrings("\nOptimization: ", optimization_flag_strings,
+              std::extent<decltype(optimization_flag_strings)>::value);
+
+  ShowSeparator();
+  ShowStrings("Target information:\n\n", target_info_strings,
+              std::extent<decltype(target_info_strings)>::value);
+
+#if defined(CMSIS_NN) || defined(HIFI3) || defined(HIFI4) || defined(HIFI5) || \
+    defined(VISION_P6)
+  ShowSeparator();
+  ShowStrings("NN library download URLs:\n\n", nn_library_url_strings,
+              std::extent<decltype(nn_library_url_strings)>::value);
+  ShowStrings("\nNN library MD5 checksums:\n\n", nn_library_md5_strings,
+              std::extent<decltype(nn_library_md5_strings)>::value);
+#endif  // defined(CMSIS_NN) || defined(HIFI3) || defined(HIFI4) ||
+        // defined(HIFI5) || defined(VISION_P6)
+
+#if defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+  ShowSeparator();
+
+  char version_buffer[30 + 1];
+  memset(version_buffer, 0, sizeof(version_buffer));
+  NatureDSP_Signal_get_library_version(version_buffer);
+  MicroPrintf("NatureDSP library version: %s", version_buffer);
+  memset(version_buffer, 0, sizeof(version_buffer));
+  NatureDSP_Signal_get_library_api_version(version_buffer);
+  MicroPrintf("NatureDSP API version: %s", version_buffer);
+
+  const char* nnlib_library_version = xa_nnlib_get_lib_version_string();
+  const char* nnlib_api_version = xa_nnlib_get_lib_api_version_string();
+  MicroPrintf("NNLIB library version: %s", nnlib_library_version);
+  MicroPrintf("NNLIB API version: %s", nnlib_api_version);
+#endif  // defined(HIFI3) || defined(HIFI4) || defined(HIFI5)
+
+#if defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+  ShowSeparator();
+  ShowStrings("Model SHA1:\n\n", model_sha1_strings,
+              std::extent<decltype(model_sha1_strings)>::value);
+  ShowStrings("\nModel analysis:\n\n", model_analysis_strings,
+              std::extent<decltype(model_analysis_strings)>::value);
+#endif  // defined(GENERIC_BENCHMARK_USING_BUILTIN_MODEL)
+
+  ShowSeparator();
+}
+
+}  // namespace tflite
diff --git a/tensorflow/lite/micro/tools/benchmarking/show_meta_data.h b/tensorflow/lite/micro/tools/benchmarking/show_meta_data.h
new file mode 100644
index 00000000000..37cf6168b39
--- /dev/null
+++ b/tensorflow/lite/micro/tools/benchmarking/show_meta_data.h
@@ -0,0 +1,24 @@
+/* Copyright 2024 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/ + +namespace tflite { + +#if !defined(GENERIC_BENCHMARK_NO_META_DATA) +void GenericBenchmarkShowMetaData(); +#else +inline void GenericBenchmarkShowMetaData() {} +#endif // defined(GENERIC_BENCHMARK_NO_META_DATA) + +} // namespace tflite diff --git a/tensorflow/lite/micro/tools/ci_build/test_code_style.sh b/tensorflow/lite/micro/tools/ci_build/test_code_style.sh index 0d25c9f3cc5..d49f42938e5 100755 --- a/tensorflow/lite/micro/tools/ci_build/test_code_style.sh +++ b/tensorflow/lite/micro/tools/ci_build/test_code_style.sh @@ -77,6 +77,8 @@ else FIX_FORMAT_OPTIONS="" fi +EXCLUDE_SHARED_TFL_CODE=$(sed 's/^/-e /' ci/tflite_files.txt) + tensorflow/lite/micro/tools/make/downloads/pigweed/pw_presubmit/py/pw_presubmit/format_code.py \ ${FIX_FORMAT_OPTIONS} \ -e "\.github" \ @@ -84,6 +86,8 @@ tensorflow/lite/micro/tools/make/downloads/pigweed/pw_presubmit/py/pw_presubmit/ -e third_party/xtensa \ -e ci \ -e c/common.c \ + -e codegen/preprocessor/preprocessor_schema_generated.h \ + -e codegen/preprocessor/preprocessor_schema_py_generated.py \ -e core/api/error_reporter.cc \ -e kernels/internal/reference/integer_ops/ \ -e kernels/internal/reference/reference_ops.h \ @@ -93,8 +97,10 @@ tensorflow/lite/micro/tools/make/downloads/pigweed/pw_presubmit/py/pw_presubmit/ -e experimental \ -e schema/schema_generated.h \ -e schema/schema_utils.h \ + -e tensorflow/lite/micro/tools/layer_by_layer_schema_generated.h \ -e "\.inc" \ - -e "\.md" + -e "\.md" \ + ${EXCLUDE_SHARED_TFL_CODE} CODE_FORMAT_RESULT=$? diff --git a/tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh index 516c1816202..ab136e518fa 100755 --- a/tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh +++ b/tensorflow/lite/micro/tools/ci_build/test_cortex_m_corstone_300.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -42,3 +42,14 @@ readable_run make -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=etho readable_run make -f tensorflow/lite/micro/tools/make/Makefile clean readable_run make -j$(nproc) -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=${TARGET_ARCH} TOOLCHAIN=${TOOLCHAIN} build readable_run make -f tensorflow/lite/micro/tools/make/Makefile CO_PROCESSOR=ethos_u OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} TARGET=${TARGET} TARGET_ARCH=${TARGET_ARCH} TOOLCHAIN=${TOOLCHAIN} test + +# Run generic benchmark. 
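+# Builds the benchmark with the person_detect_vela model compiled in, then
+# runs it on the Corstone-300 FVP with a 150 KiB (150 * 1024 bytes) arena.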
+readable_run make -j$(nproc) -f tensorflow/lite/micro/tools/make/Makefile \ + CO_PROCESSOR=ethos_u \ + OPTIMIZED_KERNEL_DIR=${OPTIMIZED_KERNEL_DIR} \ + TARGET=${TARGET} \ + TARGET_ARCH=${TARGET_ARCH} \ + TOOLCHAIN=${TOOLCHAIN} \ + GENERIC_BENCHMARK_MODEL_PATH=tensorflow/lite/micro/models/person_detect_vela.tflite \ + GENERIC_BENCHMARK_ARENA_SIZE=`expr 150 \* 1024` \ + run_tflm_benchmark diff --git a/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh b/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh index 623238ed093..998827f24de 100755 --- a/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh +++ b/tensorflow/lite/micro/tools/ci_build/test_x86_default.sh @@ -40,3 +40,10 @@ readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile build TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile test TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} readable_run make -s -j8 -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile integration_tests TENSORFLOW_ROOT=${TENSORFLOW_ROOT} EXTERNAL_DIR=${EXTERNAL_DIR} + +# run generic benchmark +readable_run make -j$(nproc) -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + GENERIC_BENCHMARK_MODEL_PATH=${TENSORFLOW_ROOT}tensorflow/lite/micro/models/person_detect.tflite \ + run_tflm_benchmark diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh index 84167922943..2fd6bf89cab 100755 --- a/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_fusion_f1.sh @@ -35,7 +35,7 @@ readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile if [[ ${1} == "INTERNAL" ]]; then readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=F1_190305_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -44,7 +44,7 @@ readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=F1_190305_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -53,7 +53,7 @@ readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile else readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=F1_190305_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -62,10 +62,21 @@ readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=F1_190305_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ EXTERNAL_DIR=${EXTERNAL_DIR} \ test -j$(nproc) + +# run generic benchmark +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi3 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + 
XTENSA_CORE=F1_190305_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + GENERIC_BENCHMARK_MODEL_PATH=${TENSORFLOW_ROOT}tensorflow/lite/micro/models/person_detect.tflite \ + run_tflm_benchmark -j$(nproc) fi diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh index 1ddf0d912ac..ff3d600476a 100755 --- a/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi3z.sh @@ -35,7 +35,7 @@ readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile if [[ ${1} == "INTERNAL" ]]; then readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=HIFI_190304_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -44,7 +44,7 @@ if [[ ${1} == "INTERNAL" ]]; then readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=HIFI_190304_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -53,7 +53,7 @@ if [[ ${1} == "INTERNAL" ]]; then readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=HIFI_190304_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -62,7 +62,7 @@ if [[ ${1} == "INTERNAL" ]]; then readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=HIFI_190304_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -71,7 +71,7 @@ if [[ ${1} == "INTERNAL" ]]; then readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=HIFI_190304_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -80,7 +80,7 @@ if [[ ${1} == "INTERNAL" ]]; then else readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=HIFI_190304_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ @@ -89,10 +89,21 @@ else readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ TARGET=xtensa \ - TARGET_ARCH=hifi4 \ + TARGET_ARCH=hifi3 \ OPTIMIZED_KERNEL_DIR=xtensa \ XTENSA_CORE=HIFI_190304_swupgrade \ TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ EXTERNAL_DIR=${EXTERNAL_DIR} \ test -j$(nproc) + + # run generic benchmark + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi3 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=HIFI_190304_swupgrade \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + GENERIC_BENCHMARK_MODEL_PATH=${TENSORFLOW_ROOT}tensorflow/lite/micro/models/person_detect.tflite \ + run_tflm_benchmark -j$(nproc) fi diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh index 82a04a91735..0ad29e8db23 100755 --- a/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_hifi5.sh @@ -47,3 +47,14 @@ readable_run make -f 
${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ EXTERNAL_DIR=${EXTERNAL_DIR} \ test -j$(nproc) + +# run generic benchmark +readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=hifi5 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=PRD_H5_RDO_07_01_2022 \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + GENERIC_BENCHMARK_MODEL_PATH=${TENSORFLOW_ROOT}tensorflow/lite/micro/models/person_detect.tflite \ + run_tflm_benchmark -j$(nproc) \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh b/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh index a2744b5addf..1c6de9338e4 100755 --- a/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh +++ b/tensorflow/lite/micro/tools/ci_build/test_xtensa_vision_p6.sh @@ -54,4 +54,15 @@ if [[ ${1} == "RUN_TESTS" ]]; then TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ EXTERNAL_DIR=${EXTERNAL_DIR} \ test -j$(nproc) + + # run generic benchmark + readable_run make -f ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/Makefile \ + TARGET=xtensa \ + TARGET_ARCH=vision_p6 \ + OPTIMIZED_KERNEL_DIR=xtensa \ + XTENSA_CORE=P6_200528 \ + TENSORFLOW_ROOT=${TENSORFLOW_ROOT} \ + EXTERNAL_DIR=${EXTERNAL_DIR} \ + GENERIC_BENCHMARK_MODEL_PATH=${TENSORFLOW_ROOT}tensorflow/lite/micro/models/person_detect.tflite \ + run_tflm_benchmark -j$(nproc) fi diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/BUILD b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/BUILD index 276e9c2bbc7..e59093901e4 100644 --- a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/BUILD +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/BUILD @@ -1,3 +1,4 @@ +load("@rules_python//python:defs.bzl", "py_binary") load("@tflm_pip_deps//:requirements.bzl", "requirement") package( @@ -18,7 +19,6 @@ py_binary( deps = [ "@absl_py//absl:app", "@absl_py//absl/flags", - requirement("tensorflow-cpu"), requirement("mako"), "//tensorflow/lite/python:schema_py", "//tensorflow/lite/python:schema_util", @@ -42,7 +42,7 @@ py_binary( deps = [ "@absl_py//absl:app", "@absl_py//absl/flags", - requirement("tensorflow-cpu"), + requirement("tensorflow"), requirement("mako"), "//tensorflow/lite/micro/tools:generate_test_for_model", "//tensorflow/lite/python:schema_py", diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/README.md b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/README.md index 95a0c43a517..1837995ba37 100644 --- a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/README.md +++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/README.md @@ -4,6 +4,8 @@ The MicroMutableOpResolver includes the operators explictly specified in source This generally requires manually finding out which operators are used in the model through the use of a visualization tool, which may be impractical in some cases. This script will automatically generate a MicroMutableOpResolver with only the used operators for a given model or set of models. +Note: Check ci/Dockerfile.micro for supported python version. 
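+
+For example, the Python version(s) used by CI can be inspected with (command
+shown for illustration only):
+
+    grep -i python ci/Dockerfile.micro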
+
 ## How to run
 
 bazel run tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver:generate_micro_mutable_op_resolver_from_model -- \
diff --git a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model.py b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model.py
index de583dae78c..d7e6140bf8f 100644
--- a/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model.py
+++ b/tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver/generate_micro_mutable_op_resolver_from_model.py
@@ -23,7 +23,7 @@
 from absl import flags
 from mako import template
 
-from tflite_micro.tensorflow.lite.tools import visualize as visualize
+from tensorflow.lite.tools import visualize
 
 TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), 'templates')
 TEMPLATE_DIR = os.path.abspath(TEMPLATE_DIR)
@@ -69,8 +69,9 @@ def ParseString(word):
     else:
       formated_op_string += part.upper()
 
-  # Edge case for AddUnidirectionalSequenceLSTM().
+  # Edge cases, e.g. AddUnidirectionalSequenceLSTM() and AddBatchMatMul().
   formated_op_string = formated_op_string.replace('Lstm', 'LSTM')
+  formated_op_string = formated_op_string.replace('BatchMatmul', 'BatchMatMul')
 
   return 'Add' + formated_op_string
diff --git a/tensorflow/lite/micro/tools/generate_cc_arrays.py b/tensorflow/lite/micro/tools/generate_cc_arrays.py
index 4d1e54cf67d..16d72c12016 100644
--- a/tensorflow/lite/micro/tools/generate_cc_arrays.py
+++ b/tensorflow/lite/micro/tools/generate_cc_arrays.py
@@ -35,8 +35,6 @@ def generate_file(out_fname, array_name, array_type, array_contents, size):
     out_cc_file.write('#include <cstdint>\n\n')
     out_cc_file.write('#include "{}"\n\n'.format(
         out_fname.split('genfiles/')[-1].replace('.cc', '.h')))
-    out_cc_file.write('const unsigned int {}_size = {};\n'.format(
-        array_name, str(size)))
     out_cc_file.write('alignas(16) const {} {}[] = {{'.format(
         array_type, array_name))
     out_cc_file.write(array_contents)
@@ -45,8 +43,8 @@ def generate_file(out_fname, array_name, array_type, array_contents, size):
   elif out_fname.endswith('.h'):
     out_hdr_file = open(out_fname, 'w')
     out_hdr_file.write('#include <cstdint>\n\n')
-    out_hdr_file.write(
-        'extern const unsigned int {}_size;\n'.format(array_name))
+    out_hdr_file.write('constexpr unsigned int {}_size = {};\n'.format(
+        array_name, str(size)))
     out_hdr_file.write('extern const {} {}[];\n'.format(
         array_type, array_name))
     out_hdr_file.close()
diff --git a/tensorflow/lite/micro/tools/layer_by_layer.cc b/tensorflow/lite/micro/tools/layer_by_layer.cc
new file mode 100644
index 00000000000..91d325e51c7
--- /dev/null
+++ b/tensorflow/lite/micro/tools/layer_by_layer.cc
@@ -0,0 +1,332 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include <cstdarg>
+#include <cstddef>
+#include <cstdint>
+#include <cstdlib>
+#include <cstring>
+#include <fstream>
+#include <memory>
+#include <random>
+
+#include "flatbuffers/flatbuffer_builder.h"
+#include "flatbuffers/util.h"
+#include "tensorflow/lite/c/c_api_types.h"
+#include "tensorflow/lite/c/common.h"
+#include "tensorflow/lite/kernels/op_macros.h"
+#include "tensorflow/lite/micro/kernels/kernel_util.h"
+#include "tensorflow/lite/micro/micro_allocator.h"
+#include "tensorflow/lite/micro/micro_context.h"
+#include "tensorflow/lite/micro/micro_interpreter.h"
+#include "tensorflow/lite/micro/micro_log.h"
+#include "tensorflow/lite/micro/micro_mutable_op_resolver.h"
+#include "tensorflow/lite/micro/micro_resource_variable.h"
+#include "tensorflow/lite/micro/micro_utils.h"
+#include "tensorflow/lite/micro/tools/benchmarking/op_resolver.h"
+#include "tensorflow/lite/micro/tools/layer_by_layer_schema_generated.h"
+#include "tensorflow/lite/schema/schema_generated.h"
+
+namespace tflite {
+
+namespace {
+
+// Seed used for the random input. Input data shouldn't affect invocation
+// timing, so randomness isn't really needed.
+constexpr uint32_t kRandomSeed = 0xFB;
+
+constexpr size_t kTensorArenaSize = 3e6;
+constexpr int kNumResourceVariable = 100;
+
+bool SaveFile(const char* name, const char* buf, size_t len) {
+  std::ofstream ofs(name, std::ofstream::binary);
+  if (!ofs.is_open()) return false;
+  ofs.write(buf, len);
+  return !ofs.bad();
+}
+
+TfLiteStatus ConvertTensorType(TfLiteType type, TensorTypes& tensor_type) {
+  switch (type) {
+    case kTfLiteFloat16:
+      tensor_type = TensorTypes_FLOAT16;
+      return kTfLiteOk;
+    case kTfLiteBFloat16:
+      tensor_type = TensorTypes_BFLOAT16;
+      return kTfLiteOk;
+    case kTfLiteFloat32:
+      tensor_type = TensorTypes_FLOAT32;
+      return kTfLiteOk;
+    case kTfLiteFloat64:
+      tensor_type = TensorTypes_FLOAT64;
+      return kTfLiteOk;
+    case kTfLiteInt16:
+      tensor_type = TensorTypes_INT16;
+      return kTfLiteOk;
+    case kTfLiteUInt16:
+      tensor_type = TensorTypes_UINT16;
+      return kTfLiteOk;
+    case kTfLiteInt32:
+      tensor_type = TensorTypes_INT32;
+      return kTfLiteOk;
+    case kTfLiteUInt32:
+      tensor_type = TensorTypes_UINT32;
+      return kTfLiteOk;
+    case kTfLiteUInt8:
+      tensor_type = TensorTypes_UINT8;
+      return kTfLiteOk;
+    case kTfLiteInt8:
+      tensor_type = TensorTypes_INT8;
+      return kTfLiteOk;
+    case kTfLiteInt64:
+      tensor_type = TensorTypes_INT64;
+      return kTfLiteOk;
+    case kTfLiteUInt64:
+      tensor_type = TensorTypes_UINT64;
+      return kTfLiteOk;
+    case kTfLiteString:
+      tensor_type = TensorTypes_STRING;
+      return kTfLiteOk;
+    case kTfLiteBool:
+      tensor_type = TensorTypes_BOOL;
+      return kTfLiteOk;
+    case kTfLiteComplex64:
+      tensor_type = TensorTypes_COMPLEX64;
+      return kTfLiteOk;
+    case kTfLiteComplex128:
+      tensor_type = TensorTypes_COMPLEX128;
+      return kTfLiteOk;
+    case kTfLiteResource:
+      tensor_type = TensorTypes_RESOURCE;
+      return kTfLiteOk;
+    case kTfLiteVariant:
+      tensor_type = TensorTypes_VARIANT;
+      return kTfLiteOk;
+    case kTfLiteInt4:
+      tensor_type = TensorTypes_INT4;
+      return kTfLiteOk;
+    case kTfLiteNoType:
+      MicroPrintf("Unsupported data type %d in tensor\n", type);
+      return kTfLiteError;
+  }
+  return kTfLiteOk;
+}
+
+TfLiteStatus SetRandomInput(const uint32_t random_seed,
+                            const ModelT& unpacked_model,
+                            MicroInterpreter& interpreter,
+                            ModelTestDataT& output_data) {
+  std::mt19937 eng(random_seed);
+  std::uniform_int_distribution<uint32_t> dist(0, 255);
+  for (size_t i = 0; i < interpreter.inputs_size(); ++i) {
+    TfLiteTensor* input = interpreter.input_tensor(i);
+    std::unique_ptr<TensorDataT> test_data(new TensorDataT());
+    test_data->input_index = i;
+    test_data->layer_number = -1;
+    test_data->tensor_index = -1;
+    test_data->num_bytes = input->bytes;
+    // TODO: make this share TensorTypes with the tflite schema later.
+    TF_LITE_ENSURE_STATUS(ConvertTensorType(input->type, test_data->dtype));
+    for (int x = 0; x < input->dims->size; ++x) {
+      test_data->shape.push_back(input->dims->data[x]);
+    }
+
+    // Pre-populate input tensor with random values.
+    uint8_t* input_values = GetTensorData<uint8_t>(input);
+    for (size_t j = 0; j < input->bytes; ++j) {
+      input_values[j] = dist(eng);
+      test_data->data.push_back(input_values[j]);
+    }
+    output_data.input_data.push_back(std::move(test_data));
+  }
+
+  // Get tensor indices for all model input tensors.
+  for (size_t i = 0; i < unpacked_model.subgraphs[0]->inputs.size(); ++i) {
+    output_data.input_data[i]->tensor_index =
+        unpacked_model.subgraphs[0]->inputs[i];
+  }
+  return kTfLiteOk;
+}
+
+std::unique_ptr<char[]> ReadModelFile(const char* model_file_name) {
+  std::ifstream model_file(model_file_name, std::ios::binary);
+  if (!model_file.is_open()) {
+    MicroPrintf("could not open model file\n");
+    return nullptr;
+  }
+
+  model_file.seekg(0, std::ios::end);
+  size_t num_bytes = model_file.tellg();
+  model_file.seekg(0, std::ios::beg);
+  std::unique_ptr<char[]> model_data(new char[num_bytes]);
+  model_file.read(model_data.get(), num_bytes);
+
+  return model_data;
+}
+
+// Stores the intermediate tensor data for each layer into the unpacked
+// ModelTestDataT object, which is then packed into the flatbuffer.
+TfLiteStatus StoreLayerByLayerData(MicroInterpreter& interpreter,
+                                   const ModelT& tflite_model,
+                                   ModelTestDataT& output_data) {
+  for (size_t i = 0; i < tflite_model.subgraphs.size(); ++i) {
+    std::unique_ptr<SubgraphDataT> subgraph_data(new SubgraphDataT());
+    subgraph_data->subgraph_index = i;
+
+    for (size_t j = 0; j < tflite_model.subgraphs[i]->operators.size(); ++j) {
+      for (size_t k = 0;
+           k < tflite_model.subgraphs[i]->operators[j]->outputs.size(); ++k) {
+        subgraph_data->outputs.emplace_back(new TensorDataT());
+        std::unique_ptr<TensorDataT>& tensor_data =
+            subgraph_data->outputs.back();
+
+        // input_index
+        tensor_data->input_index = -1;
+
+        // tensor index
+        tensor_data->tensor_index =
+            tflite_model.subgraphs[i]->operators[j]->outputs[k];
+
+        TfLiteEvalTensor* layer_output_tensor =
+            interpreter.GetTensor(subgraph_data->outputs.back()->tensor_index,
+                                  subgraph_data->subgraph_index);
+
+        // dims
+        tensor_data->shape.assign(
+            layer_output_tensor->dims->data,
+            layer_output_tensor->dims->data + layer_output_tensor->dims->size);
+
+        // dtype
+        TF_LITE_ENSURE_STATUS(
+            ConvertTensorType(layer_output_tensor->type, tensor_data->dtype));
+
+        // num_bytes
+        tensor_data->num_bytes = EvalTensorBytes(layer_output_tensor);
+
+        uint8_t* tensor_values =
+            micro::GetTensorData<uint8_t>(layer_output_tensor);
+
+        // data
+        tensor_data->data.assign(
+            tensor_values,
+            tensor_values + EvalTensorBytes(layer_output_tensor));
+
+        // layer_number
+        tensor_data->layer_number = j;
+      }
+    }
+    output_data.subgraph_data.push_back(std::move(subgraph_data));
+  }
+
+  return kTfLiteOk;
+}
+
+bool WriteToFile(const char* output_file_name, ModelTestDataT& output_data) {
+  flatbuffers::DefaultAllocator allocator;
+  flatbuffers::FlatBufferBuilder fbb{2048, &allocator};
+  auto new_model = ModelTestData::Pack(fbb, &output_data);
+  fbb.Finish(new_model);
+  return SaveFile(output_file_name,
+                  reinterpret_cast<const char*>(fbb.GetBufferPointer()),
+                  fbb.GetSize());
+}
+
+TfLiteStatus Invoke(const Model* model, ModelTestDataT& output_data) {
+  const tflite::ModelT unpacked_model = *model->UnPack();
+  alignas(16) static uint8_t tensor_arena[kTensorArenaSize];
+
+  TflmOpResolver op_resolver;
+  TF_LITE_ENSURE_STATUS(CreateOpResolver(op_resolver));
+
+  MicroAllocator* allocator = MicroAllocator::Create(
+      tensor_arena, kTensorArenaSize, MemoryPlannerType::kLinear);
+
+  MicroInterpreter interpreter(
+      model, op_resolver, allocator,
+      MicroResourceVariables::Create(allocator, kNumResourceVariable), nullptr);
+  TF_LITE_ENSURE_STATUS(interpreter.AllocateTensors());
+
+  TF_LITE_ASSERT(interpreter.preserve_all_tensors());
+
+  MicroPrintf("");  // null MicroPrintf serves as a newline.
+
+  // For streaming models, the interpreter will return kTfLiteAbort if the
+  // model does not yet have enough data to make an inference. As such, we
+  // need to invoke the interpreter multiple times until we either receive an
+  // error or kTfLiteOk. This loop also works for non-streaming models, as
+  // they'll just return kTfLiteOk after the first invocation.
+  uint32_t seed = kRandomSeed;
+  while (true) {
+    TF_LITE_ENSURE_STATUS(
+        SetRandomInput(seed++, unpacked_model, interpreter, output_data));
+    TfLiteStatus status = interpreter.Invoke();
+    if ((status != kTfLiteOk) && (static_cast<int>(status) != kTfLiteAbort)) {
+      MicroPrintf("Model interpreter invocation failed: %d\n", status);
+      return kTfLiteError;
+    }
+
+    if (status == kTfLiteOk) {
+      break;
+    }
+  }
+  TF_LITE_ENSURE_STATUS(
+      StoreLayerByLayerData(interpreter, unpacked_model, output_data));
+
+  return kTfLiteOk;
+}
+}  // namespace
+}  // namespace tflite
+
+/* Usage information:
+   This binary will write a debugging flatbuffer to the path provided in the
+   2nd arg, using the tflite model provided in the 1st arg:
+   `bazel run tensorflow/lite/micro/tools:layer_by_layer_output_tool -- \
+      <path to input tflite file> <path to output file>` */
+
+int main(int argc, char** argv) {
+  if (argc < 3) {
+    MicroPrintf("layer_by_layer: invalid usage!\n");
+    MicroPrintf(
+        "usage: layer_by_layer_output_tool "
+        "<path to input tflite file> <path to output file>");
+    return EXIT_FAILURE;
+  }
+
+  const char* model_file_name = argv[1];
+  const char* output_file_name = argv[2];
+
+  const auto model_file_content = tflite::ReadModelFile(model_file_name);
+
+  if (!model_file_content) {
+    MicroPrintf("Could not read model from file: %s", model_file_name);
+    return EXIT_FAILURE;
+  }
+
+  const tflite::Model* model = tflite::GetModel(model_file_content.get());
+
+  ModelTestDataT output_data;
+
+  TF_LITE_ENSURE_STATUS(tflite::Invoke(model, output_data));
+
+  if (!tflite::WriteToFile(output_file_name, output_data)) {
+    MicroPrintf("Could not write to %s", output_file_name);
+    return EXIT_FAILURE;
+  }
+
+  return EXIT_SUCCESS;
+}
diff --git a/tensorflow/lite/micro/tools/layer_by_layer_debugger.py b/tensorflow/lite/micro/tools/layer_by_layer_debugger.py
new file mode 100644
index 00000000000..e41e55132cc
--- /dev/null
+++ b/tensorflow/lite/micro/tools/layer_by_layer_debugger.py
@@ -0,0 +1,270 @@
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+"""Compares TFLM layer-by-layer outputs for a .tflite model against TFLite or
+against golden data generated by the C++ layer_by_layer tool."""
+
+import sys
+import unittest
+
+from absl import app
+from absl import flags
+from absl import logging
+import numpy as np
+import tensorflow as tf
+
+from tflite_micro.tensorflow.lite.tools import flatbuffer_utils
+from tensorflow.python.platform import gfile
+from tflite_micro.python.tflite_micro import runtime
+from tflite_micro.tensorflow.lite.micro.tools import layer_by_layer_schema_py_generated as layer_schema_fb
+from tflite_micro.tensorflow.lite.micro.tools import model_transforms_utils
+
+np.set_printoptions(threshold=sys.maxsize)
+
+# Usage information:
+# This Python tool/script can first be used to compare TFLM vs TFLite outputs
+# for random inputs by only providing a TfLite file.
+
+# TfLite vs TFLM command:
+# `bazel run tensorflow/lite/micro/tools:layer_by_layer_debugger -- \
+#    --input_tflite_file=<path to .tflite file>`
+
+# This Python tool/script can also be used to compare TFLM vs an expected
+# output/flatbuffer provided by the C++ tool/binary.
+
+# TFLM vs expected command:
+# `bazel run tensorflow/lite/micro/tools:layer_by_layer_debugger -- \
+#    --input_tflite_file=<path to .tflite file> \
+#    --layer_by_layer_data_file=<path to debug flatbuffer from the C++ tool>`
+
+# Optional flags:
+# --print_dump
+#    When this flag is set, a preview of the TFLM output and of the output it
+#    is compared against is printed for each layer.
+
+# --rng
+#    Integer flag that only works in the TfLite vs TFLM comparison (when only
+#    a TfLite model is provided). It can be used to set the rng seed to a
+#    different value than its default of 42.
+
+_INPUT_TFLITE_FILE = flags.DEFINE_string(
+    "input_tflite_file",
+    None,
+    "Full path name to the input TFLite file.",
+    required=True)
+
+_RNG = flags.DEFINE_integer(
+    "rng",
+    42,
+    "This flag defines the rng seed used to generate random test data for the"
+    " provided model. It only takes effect when no input/golden data are"
+    " provided, and defaults to 42.",
+)
+
+_DEBUG_FILE = flags.DEFINE_string(
+    "layer_by_layer_data_file",
+    None,
+    "Full path to the debug file generated by the C++ layer_by_layer tool.",
+    required=False,
+)
+
+_PRINT_PREVIEW = flags.DEFINE_bool(
+    "print_dump",
+    False,
+    "When this flag is set to True, it prints a preview of elements of the"
+    " TFLM output and of the output it is being compared with.",
+    required=False,
+)
+
+
+def numpy_from_tensor_type(tensor_type_idx):
+  """Gives the equivalent numpy dtype based on TensorTypes class (schema) number."""
+  tensor_type_idx_to_numpy = {
+      layer_schema_fb.TensorTypes.FLOAT32: np.float32,
+      layer_schema_fb.TensorTypes.FLOAT16: np.float16,
+      layer_schema_fb.TensorTypes.INT32: np.int32,
+      layer_schema_fb.TensorTypes.UINT8: np.uint8,
+      layer_schema_fb.TensorTypes.INT64: np.int64,
+      layer_schema_fb.TensorTypes.STRING: np.string_,
+      layer_schema_fb.TensorTypes.BOOL: np.bool_,
+      layer_schema_fb.TensorTypes.INT16: np.int16,
+      layer_schema_fb.TensorTypes.COMPLEX64: np.complex64,
+      layer_schema_fb.TensorTypes.INT8: np.int8,
+      layer_schema_fb.TensorTypes.FLOAT64: np.float64,
+      layer_schema_fb.TensorTypes.COMPLEX128: np.complex128,
+      layer_schema_fb.TensorTypes.UINT64: np.uint64,
+      layer_schema_fb.TensorTypes.RESOURCE: "RESOURCE",
+      layer_schema_fb.TensorTypes.VARIANT: "VARIANT",
+      layer_schema_fb.TensorTypes.UINT32: np.uint32,
+      layer_schema_fb.TensorTypes.UINT16: np.uint16,
+      # INT4 is mapped to INT8, b/246806634
+      layer_schema_fb.TensorTypes.INT4: np.int8,
+  }
+  return tensor_type_idx_to_numpy.get(tensor_type_idx)
+
+
+def GenerateRandomInputTfLiteComparison(tflm_interpreter, tflite_interpreter,
+                                        model, rng_value):
+  subgraph_info = layer_schema_fb.ModelTestDataT()
+  subgraph_info.subgraphData = []
+  rng = np.random.default_rng(seed=rng_value)
+
+  for subgraph_index, subgraph in enumerate(model.subgraphs):
+    subgraph_data = layer_schema_fb.SubgraphDataT()
+    subgraph_data.subgraphIndex = subgraph_index
+    subgraph_data.outputs = []
+
+    for op_index, operator in enumerate(subgraph.operators):
+      for output in operator.outputs:
+        tensor_data = layer_schema_fb.TensorDataT()
+        tensor_data.layerNumber = op_index
+        tensor_data.tensorIndex = output
+        subgraph_data.outputs.append(tensor_data)
+    subgraph_info.subgraphData.append(subgraph_data)
+
+  for index, input_tensor_index in enumerate(model.subgraphs[0].inputs):
+    input_tensor = model.subgraphs[0].tensors[input_tensor_index]
+    random_data = model_transforms_utils.generate_random_input_data(
+        model, input_tensor, rng)
+    tflm_interpreter.set_input(random_data, index)
+    tflite_interpreter.set_tensor(input_tensor_index, random_data)
+  return subgraph_info, tflm_interpreter, tflite_interpreter
+
+
+def ReadDebugFile():
+  with gfile.GFile(_DEBUG_FILE.value, "rb") as debug_file_handle:
+    debug_bytearray = bytearray(debug_file_handle.read())
+  flatbuffer_root_object = layer_schema_fb.ModelTestData.GetRootAs(
+      debug_bytearray, 0)
+  debug_obj = layer_schema_fb.ModelTestDataT.InitFromObj(
+      flatbuffer_root_object)
+  return debug_obj
+
+
+def SetDebugFileInterpreterInput(tflm_interpreter, tflite_interpreter,
+                                 debug_obj):
+  for inputs in debug_obj.inputData:
+    input_array = np.frombuffer(bytearray(inputs.data),
+                                dtype=numpy_from_tensor_type(inputs.dtype))
+    input_array = np.reshape(input_array, inputs.shape)
+    tflm_interpreter.set_input(input_array, inputs.inputIndex)
+    tflite_interpreter.set_tensor(inputs.tensorIndex, input_array)
+
+  return tflm_interpreter, tflite_interpreter
+
+
+def main(_) -> None:
+  logging.info(
+      "\n--Running TFLM vs TfLite layer by layer debugger on: %s",
+      _INPUT_TFLITE_FILE.value,
+  )
+
+  model = flatbuffer_utils.read_model(_INPUT_TFLITE_FILE.value)
+
+  tflm_interpreter = runtime.Interpreter.from_file(
+      _INPUT_TFLITE_FILE.value,
+      intrepreter_config=runtime.InterpreterConfig.kPreserveAllTensors,
+  )
+
+  tflite_interpreter = tf.lite.Interpreter(
+      model_path=_INPUT_TFLITE_FILE.value,
+      experimental_preserve_all_tensors=True,
+  )
+
+  tflite_interpreter.allocate_tensors()
+
+  debug_obj = None
+
+  # Set inputs either randomly or using the provided debug file.
+  if _DEBUG_FILE.value is None:
+    debug_obj, tflm_interpreter, tflite_interpreter = (
+        GenerateRandomInputTfLiteComparison(tflm_interpreter,
+                                            tflite_interpreter, model,
+                                            _RNG.value))
+    tflite_interpreter.invoke()
+  else:
+    debug_obj = ReadDebugFile()
+    tflm_interpreter, tflite_interpreter = SetDebugFileInterpreterInput(
+        tflm_interpreter, tflite_interpreter, debug_obj)
+
+  tflm_interpreter.invoke()
+  comparison = ""
+
+  for subgraph in debug_obj.subgraphData:
+    for output in subgraph.outputs:
+      tflm_output = tflm_interpreter.GetTensor(
+          output.tensorIndex, subgraph.subgraphIndex)["tensor_data"]
+
+      comparison_output = None
+
+      if _DEBUG_FILE.value is None:
+        tflite_output = tflite_interpreter.get_tensor(output.tensorIndex,
+                                                      subgraph.subgraphIndex)
+        comparison_output = tflite_output
+        comparison = "TfLite"
+      else:
+        expected_output_data = np.frombuffer(bytearray(output.data),
+                                             dtype=numpy_from_tensor_type(
+                                                 output.dtype))
+        expected_output_data = np.reshape(expected_output_data, output.shape)
+        comparison = "Expected Golden Data"
+        comparison_output = expected_output_data
+
+      error_message = (
+          "\n\nTFLM output does not match {comparison} output.\n Subgraph"
+          " number is {subgraph_index} \n Layer number is {layer_number} \n The"
+          " tensor index where this output does not match is {tensor_index}"
+          " \n\n\n".format(
+              comparison=comparison,
+              subgraph_index=subgraph.subgraphIndex,
+              layer_number=output.layerNumber,
+              tensor_index=output.tensorIndex,
+          ))
+      if _PRINT_PREVIEW.value:
+        print("layer number ", output.layerNumber)
+        print("tensor index ", output.tensorIndex, "\n\n")
+        print("TFLM output \n ", tflm_output[:10])
+        print(
+            "{comparison} output \n".format(comparison=comparison),
+            comparison_output[:10],
+        )
+        print("--------------\n\n\n")
+      np.testing.assert_array_equal(tflm_output,
+                                    comparison_output,
+                                    err_msg=error_message,
+                                    verbose=True)
+
+  print(
+      "\n\nTFLM output matched {comparison} output for all layers in the model."
+      .format(comparison=comparison))
+
+
+if __name__ == "__main__":
+  app.run(main)
diff --git a/tensorflow/lite/micro/tools/layer_by_layer_schema.fbs b/tensorflow/lite/micro/tools/layer_by_layer_schema.fbs
new file mode 100644
index 00000000000..4183c9cf52c
--- /dev/null
+++ b/tensorflow/lite/micro/tools/layer_by_layer_schema.fbs
@@ -0,0 +1,62 @@
+// Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+enum TensorTypes : byte {
+  FLOAT32 = 0,
+  FLOAT16 = 1,
+  INT32 = 2,
+  UINT8 = 3,
+  INT64 = 4,
+  STRING = 5,
+  BOOL = 6,
+  INT16 = 7,
+  COMPLEX64 = 8,
+  INT8 = 9,
+  FLOAT64 = 10,
+  COMPLEX128 = 11,
+  UINT64 = 12,
+  // Experimental: Resource and variant types are experimental and subject to
+  // change. Do not implement custom kernels using resource & variant types
+  // now.
+  RESOURCE = 13,
+  VARIANT = 14,
+  UINT32 = 15,
+  UINT16 = 16,
+  INT4 = 17,
+  BFLOAT16 = 18,
+}
+
+table TensorData {
+  // input_index will be set to -1 for non-input tensors.
+  input_index:uint;
+  tensor_index:uint;
+  shape:[int];
+  dtype:TensorTypes;
+  data:[ubyte];
+  num_bytes:uint;
+  // layer_number will be set to -1 for input tensors.
+  layer_number:uint;
+}
+
+table SubgraphData {
+  outputs:[TensorData];
+  subgraph_index:uint;
+}
+
+table ModelTestData {
+  input_data:[TensorData];
+  subgraph_data:[SubgraphData];
+}
+
+root_type ModelTestData;
diff --git a/tensorflow/lite/micro/tools/layer_by_layer_schema_generated.h b/tensorflow/lite/micro/tools/layer_by_layer_schema_generated.h
new file mode 100644
index 00000000000..67a2caa7850
--- /dev/null
+++ b/tensorflow/lite/micro/tools/layer_by_layer_schema_generated.h
@@ -0,0 +1,597 @@
+/* Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+// automatically generated by the FlatBuffers compiler, do not modify
+
+
+#ifndef FLATBUFFERS_GENERATED_LAYERBYLAYERSCHEMA_H_
+#define FLATBUFFERS_GENERATED_LAYERBYLAYERSCHEMA_H_
+
+#include "flatbuffers/flatbuffers.h"
+
+// Ensure the included flatbuffers.h is the same version as when this file was
+// generated, otherwise it may not be compatible.
+static_assert(FLATBUFFERS_VERSION_MAJOR == 23 &&
+              FLATBUFFERS_VERSION_MINOR == 5 &&
+              FLATBUFFERS_VERSION_REVISION == 26,
+             "Non-compatible flatbuffers version included");
+
+struct TensorData;
+struct TensorDataBuilder;
+struct TensorDataT;
+
+struct SubgraphData;
+struct SubgraphDataBuilder;
+struct SubgraphDataT;
+
+struct ModelTestData;
+struct ModelTestDataBuilder;
+struct ModelTestDataT;
+
+enum TensorTypes : int8_t {
+  TensorTypes_FLOAT32 = 0,
+  TensorTypes_FLOAT16 = 1,
+  TensorTypes_INT32 = 2,
+  TensorTypes_UINT8 = 3,
+  TensorTypes_INT64 = 4,
+  TensorTypes_STRING = 5,
+  TensorTypes_BOOL = 6,
+  TensorTypes_INT16 = 7,
+  TensorTypes_COMPLEX64 = 8,
+  TensorTypes_INT8 = 9,
+  TensorTypes_FLOAT64 = 10,
+  TensorTypes_COMPLEX128 = 11,
+  TensorTypes_UINT64 = 12,
+  TensorTypes_RESOURCE = 13,
+  TensorTypes_VARIANT = 14,
+  TensorTypes_UINT32 = 15,
+  TensorTypes_UINT16 = 16,
+  TensorTypes_INT4 = 17,
+  TensorTypes_BFLOAT16 = 18,
+  TensorTypes_MIN = TensorTypes_FLOAT32,
+  TensorTypes_MAX = TensorTypes_BFLOAT16
+};
+
+inline const TensorTypes (&EnumValuesTensorTypes())[19] {
+  static const TensorTypes values[] = {
+    TensorTypes_FLOAT32,
+    TensorTypes_FLOAT16,
+    TensorTypes_INT32,
+    TensorTypes_UINT8,
+    TensorTypes_INT64,
+    TensorTypes_STRING,
+    TensorTypes_BOOL,
+    TensorTypes_INT16,
+    TensorTypes_COMPLEX64,
+    TensorTypes_INT8,
+    TensorTypes_FLOAT64,
+    TensorTypes_COMPLEX128,
+    TensorTypes_UINT64,
+    TensorTypes_RESOURCE,
+    TensorTypes_VARIANT,
+    TensorTypes_UINT32,
+    TensorTypes_UINT16,
+    TensorTypes_INT4,
+    TensorTypes_BFLOAT16
+  };
+  return values;
+}
+
+inline const char * const *EnumNamesTensorTypes() {
+  static const char * const names[20] = {
+    "FLOAT32",
+    "FLOAT16",
+    "INT32",
+    "UINT8",
+    "INT64",
+    "STRING",
+    "BOOL",
+    "INT16",
+    "COMPLEX64",
+    "INT8",
+    "FLOAT64",
+    "COMPLEX128",
+    "UINT64",
+    "RESOURCE",
+    "VARIANT",
+    "UINT32",
+    "UINT16",
+    "INT4",
+    "BFLOAT16",
+    nullptr
+  };
+  return names;
+}
+
+inline const char *EnumNameTensorTypes(TensorTypes e) {
+  if (::flatbuffers::IsOutRange(e, TensorTypes_FLOAT32, TensorTypes_BFLOAT16)) return "";
+  const size_t index = static_cast<size_t>(e);
+  return EnumNamesTensorTypes()[index];
+}
+
+struct TensorDataT : public ::flatbuffers::NativeTable {
+  typedef TensorData TableType;
+  uint32_t input_index = 0;
+  uint32_t tensor_index = 0;
+  std::vector<int32_t> shape{};
+  TensorTypes dtype = TensorTypes_FLOAT32;
+  std::vector<uint8_t> data{};
+  uint32_t num_bytes = 0;
+  uint32_t layer_number = 0;
+};
+
+struct TensorData FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef TensorDataT NativeTableType;
+  typedef TensorDataBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_INPUT_INDEX = 4,
+    VT_TENSOR_INDEX = 6,
+    VT_SHAPE = 8,
+    VT_DTYPE = 10,
+    VT_DATA = 12,
+    VT_NUM_BYTES = 14,
+    VT_LAYER_NUMBER = 16
+  };
+  uint32_t input_index() const {
+    return GetField<uint32_t>(VT_INPUT_INDEX, 0);
+  }
+  uint32_t tensor_index() const {
+    return GetField<uint32_t>(VT_TENSOR_INDEX, 0);
+  }
+  const ::flatbuffers::Vector<int32_t> *shape() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_SHAPE);
+  }
+  TensorTypes dtype() const {
+    return static_cast<TensorTypes>(GetField<int8_t>(VT_DTYPE, 0));
+  }
+  const ::flatbuffers::Vector<uint8_t> *data() const {
+    return GetPointer<const ::flatbuffers::Vector<uint8_t> *>(VT_DATA);
+  }
+  uint32_t num_bytes() const {
+    return GetField<uint32_t>(VT_NUM_BYTES, 0);
+  }
+  uint32_t layer_number() const {
+    return GetField<uint32_t>(VT_LAYER_NUMBER, 0);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyField<uint32_t>(verifier, VT_INPUT_INDEX, 4) &&
+           VerifyField<uint32_t>(verifier, VT_TENSOR_INDEX, 4) &&
+           VerifyOffset(verifier, VT_SHAPE) &&
+           verifier.VerifyVector(shape()) &&
+           VerifyField<int8_t>(verifier, VT_DTYPE, 1) &&
+           VerifyOffset(verifier, VT_DATA) &&
+           verifier.VerifyVector(data()) &&
+           VerifyField<uint32_t>(verifier, VT_NUM_BYTES, 4) &&
+           VerifyField<uint32_t>(verifier, VT_LAYER_NUMBER, 4) &&
+           verifier.EndTable();
+  }
+  TensorDataT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(TensorDataT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<TensorData> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TensorDataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct TensorDataBuilder {
+  typedef TensorData Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_input_index(uint32_t input_index) {
+    fbb_.AddElement<uint32_t>(TensorData::VT_INPUT_INDEX, input_index, 0);
+  }
+  void add_tensor_index(uint32_t tensor_index) {
+    fbb_.AddElement<uint32_t>(TensorData::VT_TENSOR_INDEX, tensor_index, 0);
+  }
+  void add_shape(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape) {
+    fbb_.AddOffset(TensorData::VT_SHAPE, shape);
+  }
+  void add_dtype(TensorTypes dtype) {
+    fbb_.AddElement<int8_t>(TensorData::VT_DTYPE, static_cast<int8_t>(dtype), 0);
+  }
+  void add_data(::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> data) {
+    fbb_.AddOffset(TensorData::VT_DATA, data);
+  }
+  void add_num_bytes(uint32_t num_bytes) {
+    fbb_.AddElement<uint32_t>(TensorData::VT_NUM_BYTES, num_bytes, 0);
+  }
+  void add_layer_number(uint32_t layer_number) {
+    fbb_.AddElement<uint32_t>(TensorData::VT_LAYER_NUMBER, layer_number, 0);
+  }
+  explicit TensorDataBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
+  }
+  ::flatbuffers::Offset<TensorData> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<TensorData>(end);
+    return o;
+  }
+};
+
+inline ::flatbuffers::Offset<TensorData> CreateTensorData(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    uint32_t input_index = 0,
+    uint32_t tensor_index = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape = 0,
+    TensorTypes dtype = TensorTypes_FLOAT32,
+    ::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> data = 0,
+    uint32_t num_bytes = 0,
+    uint32_t layer_number = 0) {
+  TensorDataBuilder builder_(_fbb);
+  builder_.add_layer_number(layer_number);
+  builder_.add_num_bytes(num_bytes);
+  builder_.add_data(data);
+  builder_.add_shape(shape);
+  builder_.add_tensor_index(tensor_index);
+  builder_.add_input_index(input_index);
+  builder_.add_dtype(dtype);
+  return builder_.Finish();
+}
+
+inline ::flatbuffers::Offset<TensorData> CreateTensorDataDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    uint32_t input_index = 0,
+    uint32_t tensor_index = 0,
+    const std::vector<int32_t> *shape = nullptr,
+    TensorTypes dtype = TensorTypes_FLOAT32,
+    const std::vector<uint8_t> *data = nullptr,
+    uint32_t num_bytes = 0,
+    uint32_t layer_number = 0) {
+  auto shape__ = shape ? _fbb.CreateVector<int32_t>(*shape) : 0;
+  auto data__ = data ? _fbb.CreateVector<uint8_t>(*data) : 0;
+  return CreateTensorData(
+      _fbb,
+      input_index,
+      tensor_index,
+      shape__,
+      dtype,
+      data__,
+      num_bytes,
+      layer_number);
+}
+
+::flatbuffers::Offset<TensorData> CreateTensorData(::flatbuffers::FlatBufferBuilder &_fbb, const TensorDataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct SubgraphDataT : public ::flatbuffers::NativeTable {
+  typedef SubgraphData TableType;
+  std::vector<std::unique_ptr<TensorDataT>> outputs{};
+  uint32_t subgraph_index = 0;
+  SubgraphDataT() = default;
+  SubgraphDataT(const SubgraphDataT &o);
+  SubgraphDataT(SubgraphDataT&&) FLATBUFFERS_NOEXCEPT = default;
+  SubgraphDataT &operator=(SubgraphDataT o) FLATBUFFERS_NOEXCEPT;
+};
+
+struct SubgraphData FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SubgraphDataT NativeTableType;
+  typedef SubgraphDataBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_OUTPUTS = 4,
+    VT_SUBGRAPH_INDEX = 6
+  };
+  const ::flatbuffers::Vector<::flatbuffers::Offset<TensorData>> *outputs() const {
+    return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<TensorData>> *>(VT_OUTPUTS);
+  }
+  uint32_t subgraph_index() const {
+    return GetField<uint32_t>(VT_SUBGRAPH_INDEX, 0);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyOffset(verifier, VT_OUTPUTS) &&
+           verifier.VerifyVector(outputs()) &&
+           verifier.VerifyVectorOfTables(outputs()) &&
+           VerifyField<uint32_t>(verifier, VT_SUBGRAPH_INDEX, 4) &&
+           verifier.EndTable();
+  }
+  SubgraphDataT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SubgraphDataT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SubgraphData> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SubgraphDataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct SubgraphDataBuilder {
+  typedef SubgraphData Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_outputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<TensorData>>> outputs) {
+    fbb_.AddOffset(SubgraphData::VT_OUTPUTS, outputs);
+  }
+  void add_subgraph_index(uint32_t subgraph_index) {
+    fbb_.AddElement<uint32_t>(SubgraphData::VT_SUBGRAPH_INDEX, subgraph_index, 0);
+  }
+  explicit SubgraphDataBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
+  }
+  ::flatbuffers::Offset<SubgraphData> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<SubgraphData>(end);
+    return o;
+  }
+};
+
+inline ::flatbuffers::Offset<SubgraphData> CreateSubgraphData(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<TensorData>>> outputs = 0,
+    uint32_t subgraph_index = 0) {
+  SubgraphDataBuilder builder_(_fbb);
+  builder_.add_subgraph_index(subgraph_index);
+  builder_.add_outputs(outputs);
+  return builder_.Finish();
+}
+
+inline ::flatbuffers::Offset<SubgraphData> CreateSubgraphDataDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<::flatbuffers::Offset<TensorData>> *outputs = nullptr,
+    uint32_t subgraph_index = 0) {
+  auto outputs__ = outputs ? _fbb.CreateVector<::flatbuffers::Offset<TensorData>>(*outputs) : 0;
+  return CreateSubgraphData(
+      _fbb,
+      outputs__,
+      subgraph_index);
+}
+
+::flatbuffers::Offset<SubgraphData> CreateSubgraphData(::flatbuffers::FlatBufferBuilder &_fbb, const SubgraphDataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct ModelTestDataT : public ::flatbuffers::NativeTable {
+  typedef ModelTestData TableType;
+  std::vector<std::unique_ptr<TensorDataT>> input_data{};
+  std::vector<std::unique_ptr<SubgraphDataT>> subgraph_data{};
+  ModelTestDataT() = default;
+  ModelTestDataT(const ModelTestDataT &o);
+  ModelTestDataT(ModelTestDataT&&) FLATBUFFERS_NOEXCEPT = default;
+  ModelTestDataT &operator=(ModelTestDataT o) FLATBUFFERS_NOEXCEPT;
+};
+
+struct ModelTestData FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ModelTestDataT NativeTableType;
+  typedef ModelTestDataBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_INPUT_DATA = 4,
+    VT_SUBGRAPH_DATA = 6
+  };
+  const ::flatbuffers::Vector<::flatbuffers::Offset<TensorData>> *input_data() const {
+    return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<TensorData>> *>(VT_INPUT_DATA);
+  }
+  const ::flatbuffers::Vector<::flatbuffers::Offset<SubgraphData>> *subgraph_data() const {
+    return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<SubgraphData>> *>(VT_SUBGRAPH_DATA);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyOffset(verifier, VT_INPUT_DATA) &&
+           verifier.VerifyVector(input_data()) &&
+           verifier.VerifyVectorOfTables(input_data()) &&
+           VerifyOffset(verifier, VT_SUBGRAPH_DATA) &&
+           verifier.VerifyVector(subgraph_data()) &&
+           verifier.VerifyVectorOfTables(subgraph_data()) &&
+           verifier.EndTable();
+  }
+  ModelTestDataT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ModelTestDataT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ModelTestData> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ModelTestDataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct ModelTestDataBuilder {
+  typedef ModelTestData Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_input_data(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<TensorData>>> input_data) {
+    fbb_.AddOffset(ModelTestData::VT_INPUT_DATA, input_data);
+  }
+  void add_subgraph_data(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<SubgraphData>>> subgraph_data) {
+    fbb_.AddOffset(ModelTestData::VT_SUBGRAPH_DATA, subgraph_data);
+  }
+  explicit ModelTestDataBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
+  }
+  ::flatbuffers::Offset<ModelTestData> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<ModelTestData>(end);
+    return o;
+  }
+};
+
+inline ::flatbuffers::Offset<ModelTestData> CreateModelTestData(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<TensorData>>> input_data = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<SubgraphData>>> subgraph_data = 0) {
+  ModelTestDataBuilder builder_(_fbb);
+  builder_.add_subgraph_data(subgraph_data);
+  builder_.add_input_data(input_data);
+  return builder_.Finish();
+}
+
+inline ::flatbuffers::Offset<ModelTestData> CreateModelTestDataDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<::flatbuffers::Offset<TensorData>> *input_data = nullptr,
+    const std::vector<::flatbuffers::Offset<SubgraphData>> *subgraph_data = nullptr) {
+  auto input_data__ = input_data ? _fbb.CreateVector<::flatbuffers::Offset<TensorData>>(*input_data) : 0;
+  auto subgraph_data__ = subgraph_data ? _fbb.CreateVector<::flatbuffers::Offset<SubgraphData>>(*subgraph_data) : 0;
+  return CreateModelTestData(
+      _fbb,
+      input_data__,
+      subgraph_data__);
+}
+
+::flatbuffers::Offset<ModelTestData> CreateModelTestData(::flatbuffers::FlatBufferBuilder &_fbb, const ModelTestDataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+inline TensorDataT *TensorData::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<TensorDataT>(new TensorDataT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
+}
+
+inline void TensorData::UnPackTo(TensorDataT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = input_index(); _o->input_index = _e; }
+  { auto _e = tensor_index(); _o->tensor_index = _e; }
+  { auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } else { _o->shape.resize(0); } }
+  { auto _e = dtype(); _o->dtype = _e; }
+  { auto _e = data(); if (_e) { _o->data.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->data.begin()); } }
+  { auto _e = num_bytes(); _o->num_bytes = _e; }
+  { auto _e = layer_number(); _o->layer_number = _e; }
+}
+
+inline ::flatbuffers::Offset<TensorData> TensorData::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TensorDataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateTensorData(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<TensorData> CreateTensorData(::flatbuffers::FlatBufferBuilder &_fbb, const TensorDataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const TensorDataT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _input_index = _o->input_index;
+  auto _tensor_index = _o->tensor_index;
+  auto _shape = _o->shape.size() ? _fbb.CreateVector(_o->shape) : 0;
+  auto _dtype = _o->dtype;
+  auto _data = _o->data.size() ? _fbb.CreateVector(_o->data) : 0;
+  auto _num_bytes = _o->num_bytes;
+  auto _layer_number = _o->layer_number;
+  return CreateTensorData(
+      _fbb,
+      _input_index,
+      _tensor_index,
+      _shape,
+      _dtype,
+      _data,
+      _num_bytes,
+      _layer_number);
+}
+
+inline SubgraphDataT::SubgraphDataT(const SubgraphDataT &o)
+      : subgraph_index(o.subgraph_index) {
+  outputs.reserve(o.outputs.size());
+  for (const auto &outputs_ : o.outputs) { outputs.emplace_back((outputs_) ? new TensorDataT(*outputs_) : nullptr); }
+}
+
+inline SubgraphDataT &SubgraphDataT::operator=(SubgraphDataT o) FLATBUFFERS_NOEXCEPT {
+  std::swap(outputs, o.outputs);
+  std::swap(subgraph_index, o.subgraph_index);
+  return *this;
+}
+
+inline SubgraphDataT *SubgraphData::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<SubgraphDataT>(new SubgraphDataT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
+}
+
+inline void SubgraphData::UnPackTo(SubgraphDataT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->outputs[_i]) { _e->Get(_i)->UnPackTo(_o->outputs[_i].get(), _resolver); } else { _o->outputs[_i] = std::unique_ptr<TensorDataT>(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->outputs.resize(0); } }
+  { auto _e = subgraph_index(); _o->subgraph_index = _e; }
+}
+
+inline ::flatbuffers::Offset<SubgraphData> SubgraphData::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SubgraphDataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateSubgraphData(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<SubgraphData> CreateSubgraphData(::flatbuffers::FlatBufferBuilder &_fbb, const SubgraphDataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SubgraphDataT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _outputs = _o->outputs.size() ? _fbb.CreateVector<::flatbuffers::Offset<TensorData>> (_o->outputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorData(*__va->__fbb, __va->__o->outputs[i].get(), __va->__rehasher); }, &_va ) : 0;
+  auto _subgraph_index = _o->subgraph_index;
+  return CreateSubgraphData(
+      _fbb,
+      _outputs,
+      _subgraph_index);
+}
+
+inline ModelTestDataT::ModelTestDataT(const ModelTestDataT &o) {
+  input_data.reserve(o.input_data.size());
+  for (const auto &input_data_ : o.input_data) { input_data.emplace_back((input_data_) ? new TensorDataT(*input_data_) : nullptr); }
+  subgraph_data.reserve(o.subgraph_data.size());
+  for (const auto &subgraph_data_ : o.subgraph_data) { subgraph_data.emplace_back((subgraph_data_) ? new SubgraphDataT(*subgraph_data_) : nullptr); }
+}
+
+inline ModelTestDataT &ModelTestDataT::operator=(ModelTestDataT o) FLATBUFFERS_NOEXCEPT {
+  std::swap(input_data, o.input_data);
+  std::swap(subgraph_data, o.subgraph_data);
+  return *this;
+}
+
+inline ModelTestDataT *ModelTestData::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<ModelTestDataT>(new ModelTestDataT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
+}
+
+inline void ModelTestData::UnPackTo(ModelTestDataT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = input_data(); if (_e) { _o->input_data.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->input_data[_i]) { _e->Get(_i)->UnPackTo(_o->input_data[_i].get(), _resolver); } else { _o->input_data[_i] = std::unique_ptr<TensorDataT>(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->input_data.resize(0); } }
+  { auto _e = subgraph_data(); if (_e) { _o->subgraph_data.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->subgraph_data[_i]) { _e->Get(_i)->UnPackTo(_o->subgraph_data[_i].get(), _resolver); } else { _o->subgraph_data[_i] = std::unique_ptr<SubgraphDataT>(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->subgraph_data.resize(0); } }
+}
+
+inline ::flatbuffers::Offset<ModelTestData> ModelTestData::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ModelTestDataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateModelTestData(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<ModelTestData> CreateModelTestData(::flatbuffers::FlatBufferBuilder &_fbb, const ModelTestDataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ModelTestDataT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _input_data = _o->input_data.size() ? _fbb.CreateVector<::flatbuffers::Offset<TensorData>> (_o->input_data.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorData(*__va->__fbb, __va->__o->input_data[i].get(), __va->__rehasher); }, &_va ) : 0;
+  auto _subgraph_data = _o->subgraph_data.size() ? _fbb.CreateVector<::flatbuffers::Offset<SubgraphData>> (_o->subgraph_data.size(), [](size_t i, _VectorArgs *__va) { return CreateSubgraphData(*__va->__fbb, __va->__o->subgraph_data[i].get(), __va->__rehasher); }, &_va ) : 0;
+  return CreateModelTestData(
+      _fbb,
+      _input_data,
+      _subgraph_data);
+}
+
+inline const ModelTestData *GetModelTestData(const void *buf) {
+  return ::flatbuffers::GetRoot<ModelTestData>(buf);
+}
+
+inline const ModelTestData *GetSizePrefixedModelTestData(const void *buf) {
+  return ::flatbuffers::GetSizePrefixedRoot<ModelTestData>(buf);
+}
+
+inline bool VerifyModelTestDataBuffer(
+    ::flatbuffers::Verifier &verifier) {
+  return verifier.VerifyBuffer<ModelTestData>(nullptr);
+}
+
+inline bool VerifySizePrefixedModelTestDataBuffer(
+    ::flatbuffers::Verifier &verifier) {
+  return verifier.VerifySizePrefixedBuffer<ModelTestData>(nullptr);
+}
+
+inline void FinishModelTestDataBuffer(
+    ::flatbuffers::FlatBufferBuilder &fbb,
+    ::flatbuffers::Offset<ModelTestData> root) {
+  fbb.Finish(root);
+}
+
+inline void FinishSizePrefixedModelTestDataBuffer(
+    ::flatbuffers::FlatBufferBuilder &fbb,
+    ::flatbuffers::Offset<ModelTestData> root) {
+  fbb.FinishSizePrefixed(root);
+}
+
+inline std::unique_ptr<ModelTestDataT> UnPackModelTestData(
+    const void *buf,
+    const ::flatbuffers::resolver_function_t *res = nullptr) {
+  return std::unique_ptr<ModelTestDataT>(GetModelTestData(buf)->UnPack(res));
+}
+
+inline std::unique_ptr<ModelTestDataT> UnPackSizePrefixedModelTestData(
+    const void *buf,
+    const ::flatbuffers::resolver_function_t *res = nullptr) {
+  return std::unique_ptr<ModelTestDataT>(GetSizePrefixedModelTestData(buf)->UnPack(res));
+}
+
+#endif  // FLATBUFFERS_GENERATED_LAYERBYLAYERSCHEMA_H_
diff --git a/tensorflow/lite/micro/tools/make/Makefile b/tensorflow/lite/micro/tools/make/Makefile
index 25ebe9df94e..90b0c2945ff 100644
--- a/tensorflow/lite/micro/tools/make/Makefile
+++ b/tensorflow/lite/micro/tools/make/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -60,9 +60,21 @@ endif
 # Specify which specialized kernel implementation should be pulled in.
 OPTIMIZED_KERNEL_DIR :=
 
+# Optimize kernels for speed or memory. This is similar to, but not the same
+# as, KERNEL_OPTIMIZATION_LEVEL and CORE_OPTIMIZATION_LEVEL, which specify the
+# compiler optimization level. Instead, this lets a kernel provide multiple
+# implementations that are selected at build time. For example, a kernel may
+# require a bigger scratch buffer for certain use cases: it would use a
+# smaller scratch buffer when building for size, and a larger scratch buffer
+# (with better performance) when building for speed. This is optional; a
+# kernel with a single implementation needs no changes. OPTIMIZE_KERNELS_FOR
+# has only two valid values, KERNELS_OPTIMIZED_FOR_SIZE and
+# KERNELS_OPTIMIZED_FOR_SPEED, where the latter is the default.
+OPTIMIZE_KERNELS_FOR := KERNELS_OPTIMIZED_FOR_SPEED
+
 # Override this variable from the command line in case the optimized kernels are
 # in a different directory.
 OPTIMIZED_KERNEL_DIR_PREFIX := $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels
+OPTIMIZED_SIGNAL_KERNEL_DIR_PREFIX := $(TENSORFLOW_ROOT)signal/micro/kernels
 
 # Specify which co-processor's kernel implementation should be pulled in.
 # If the same kernel is implemented in both kernels/OPTIMIZED_KERNEL_DIR and
@@ -80,6 +92,7 @@ DOWNLOADS_DIR := $(MAKEFILE_DIR)/downloads
 
 INCLUDES := \
 -I. \
+-I$(DOWNLOADS_DIR) \
 -I$(DOWNLOADS_DIR)/gemmlowp \
 -I$(DOWNLOADS_DIR)/flatbuffers/include \
 -I$(DOWNLOADS_DIR)/kissfft \
@@ -97,7 +110,7 @@ TEST_SCRIPT :=
 
 MICROLITE_LIBS := -lm
 
-# For the optimized_kernel_dir, and co-processor as specified on the
+# For the optimized_kernel_dir, co-processor and optimize_kernels_for as specified on the
 # command line we add -D to the cflags to allow for #ifdefs in the code.
 #
 # We apply the following transformations (via the tr command):
@@ -111,6 +124,10 @@ ifneq ($(CO_PROCESSOR),)
   ADDITIONAL_DEFINES += -D$(shell echo $(CO_PROCESSOR) | tr [a-z] [A-Z])
 endif
 
+ifneq ($(OPTIMIZE_KERNELS_FOR),)
+  ADDITIONAL_DEFINES += -D$(shell echo $(OPTIMIZE_KERNELS_FOR) | tr [a-z] [A-Z])
+endif
+
 ifeq ($(TOOLCHAIN), armclang)
   CORE_OPTIMIZATION_LEVEL := -Oz
 else
@@ -129,7 +146,6 @@ endif
 CC_WARNINGS := \
   -Wsign-compare \
   -Wdouble-promotion \
-  -Wshadow \
   -Wunused-variable \
   -Wunused-function \
   -Wswitch \
@@ -140,6 +156,14 @@ CC_WARNINGS := \
   -Wstrict-aliasing \
   -Wno-unused-parameter
 
+ifneq ($(TOOLCHAIN), gcc)
+  # GCC can be overly aggressive with shadow warnings, such as warning when a
+  # lambda has a variable with the same name as a non-captured variable from
+  # the enclosing scope. As such, we don't enable shadow warnings on gcc.
+  # https://stackoverflow.com/q/66404751
+  CC_WARNINGS += -Wshadow
+endif
+
 COMMON_FLAGS := \
   -Werror \
   -fno-unwind-tables \
@@ -248,7 +272,15 @@ MICROLITE_LIB_NAME := libtensorflow-microlite.a
 
 # Where compiled objects are stored.
 BASE_GENDIR := gen
-GENDIR := $(BASE_GENDIR)/$(TARGET)_$(TARGET_ARCH)_$(BUILD_TYPE)/
+GENDIR := $(BASE_GENDIR)/$(TARGET)_$(TARGET_ARCH)_$(BUILD_TYPE)
+ifneq ($(OPTIMIZED_KERNEL_DIR),)
+  GENDIR := $(GENDIR)_$(OPTIMIZED_KERNEL_DIR)
+endif
+ifneq ($(CO_PROCESSOR),)
+  GENDIR := $(GENDIR)_$(CO_PROCESSOR)
+endif
+GENDIR := $(GENDIR)_$(TOOLCHAIN)/
+
 CORE_OBJDIR := $(GENDIR)obj/core/
 KERNEL_OBJDIR := $(GENDIR)obj/kernels/
 THIRD_PARTY_KERNEL_OBJDIR := $(GENDIR)obj/third_party_kernels/
@@ -280,11 +312,15 @@ MICRO_LITE_INTEGRATION_TESTS += $(shell find $(TENSORFLOW_ROOT)tensorflow/lite/m
 MICRO_LITE_GEN_MUTABLE_OP_RESOLVER_TEST += \
   $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/gen_micro_mutable_op_resolver_test/person_detect/Makefile.inc)
 
-MICRO_LITE_BENCHMARKS := $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/Makefile.inc)
+OLD_MICRO_LITE_BENCHMARKS := $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/Makefile.inc)
+MICRO_LITE_BENCHMARKS := $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/benchmarking/Makefile.inc)
+
+MICRO_LITE_LAYER_BY_LAYER_OUTPUT := $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/Makefile.inc)
 
 # TODO(b/152645559): move all benchmarks to benchmarks directory.
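+# A minimal sketch (illustrative only, not a checked-in kernel) of how a
+# kernel can key off the OPTIMIZE_KERNELS_FOR define that this Makefile adds
+# to ADDITIONAL_DEFINES further above:
+#
+#   #if defined(KERNELS_OPTIMIZED_FOR_SPEED)
+#   constexpr int kScratchBufferSize = 4 * 1024;  // bigger scratch, faster path
+#   #else  // KERNELS_OPTIMIZED_FOR_SIZE
+#   constexpr int kScratchBufferSize = 1 * 1024;  // smaller scratch, smaller footprint
+#   #endif
+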
MICROLITE_BENCHMARK_SRCS := \ -$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/*benchmark.cc) +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/benchmarks/*benchmark.cc) \ +$(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/tools/benchmarking/*benchmark.cc) MICROLITE_TEST_SRCS := \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/fake_micro_context_test.cc \ @@ -293,12 +329,11 @@ $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_helpers_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_allocator_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_allocation_info_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_context_test.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_interpreter_context_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_log_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_interpreter_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_mutable_op_resolver_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_resource_variable_test.cc \ -$(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_string_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_time_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_utils_test.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/recording_micro_allocator_test.cc \ @@ -312,12 +347,6 @@ $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_planner/linear_memory_planner_tes $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_planner/non_persistent_buffer_planner_shim_test.cc MICROLITE_CC_KERNEL_SRCS := \ -$(TENSORFLOW_ROOT)signal/micro/kernels/rfft.cc \ -$(TENSORFLOW_ROOT)signal/micro/kernels/window.cc \ -$(TENSORFLOW_ROOT)signal/src/rfft_float.cc \ -$(TENSORFLOW_ROOT)signal/src/rfft_int16.cc \ -$(TENSORFLOW_ROOT)signal/src/rfft_int32.cc \ -$(TENSORFLOW_ROOT)signal/src/window.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/activations.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/activations_common.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add.cc \ @@ -325,6 +354,7 @@ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_common.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/add_n.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/arg_min_max.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/assign_variable.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/batch_matmul.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/batch_to_space_nd.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_args.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/broadcast_to.cc \ @@ -409,6 +439,7 @@ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/split_v.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/squared_difference.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/squeeze.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/strided_slice.cc \ +$(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/strided_slice_common.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/sub.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/sub_common.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/svdf.cc \ @@ -422,6 +453,46 @@ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/var_handle.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/while.cc \ $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/zeros_like.cc +MICROLITE_CC_SIGNAL_KERNEL_SRCS := \ +$(TENSORFLOW_ROOT)signal/micro/kernels/delay.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/energy.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/fft_auto_scale_kernel.cc \ 
+$(TENSORFLOW_ROOT)signal/micro/kernels/fft_auto_scale_common.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_log.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_square_root.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_square_root_common.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/filter_bank_spectral_subtraction.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/framer.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/irfft.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/rfft.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/stacker.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/overlap_add.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/pcan.cc \ +$(TENSORFLOW_ROOT)signal/micro/kernels/window.cc \ +$(TENSORFLOW_ROOT)signal/src/circular_buffer.cc \ +$(TENSORFLOW_ROOT)signal/src/energy.cc \ +$(TENSORFLOW_ROOT)signal/src/fft_auto_scale.cc \ +$(TENSORFLOW_ROOT)signal/src/filter_bank.cc \ +$(TENSORFLOW_ROOT)signal/src/filter_bank_log.cc \ +$(TENSORFLOW_ROOT)signal/src/filter_bank_square_root.cc \ +$(TENSORFLOW_ROOT)signal/src/filter_bank_spectral_subtraction.cc \ +$(TENSORFLOW_ROOT)signal/src/irfft_float.cc \ +$(TENSORFLOW_ROOT)signal/src/irfft_int16.cc \ +$(TENSORFLOW_ROOT)signal/src/irfft_int32.cc \ +$(TENSORFLOW_ROOT)signal/src/log.cc \ +$(TENSORFLOW_ROOT)signal/src/max_abs.cc \ +$(TENSORFLOW_ROOT)signal/src/msb_32.cc \ +$(TENSORFLOW_ROOT)signal/src/msb_64.cc \ +$(TENSORFLOW_ROOT)signal/src/overlap_add.cc \ +$(TENSORFLOW_ROOT)signal/src/pcan_argc_fixed.cc \ +$(TENSORFLOW_ROOT)signal/src/rfft_float.cc \ +$(TENSORFLOW_ROOT)signal/src/rfft_int16.cc \ +$(TENSORFLOW_ROOT)signal/src/rfft_int32.cc \ +$(TENSORFLOW_ROOT)signal/src/square_root_32.cc \ +$(TENSORFLOW_ROOT)signal/src/square_root_64.cc \ +$(TENSORFLOW_ROOT)signal/src/window.cc + MICROLITE_TEST_HDRS := \ $(wildcard $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/*.h) @@ -435,11 +506,11 @@ $(shell find $(TENSORFLOW_ROOT)tensorflow/lite -type d \( -path $(TENSORFLOW_ROO ifneq ($(BUILD_TYPE), no_tf_lite_static_memory) EXCLUDED_TFL_CC_SRCS := \ - $(TENSORFLOW_ROOT)tensorflow/lite/array.cc + $(TENSORFLOW_ROOT)tensorflow/lite/array.cc TFL_CC_SRCS := $(filter-out $(EXCLUDED_TFL_CC_SRCS), $(TFL_CC_SRCS)) EXCLUDED_TFL_CC_HDRS := \ - $(TENSORFLOW_ROOT)tensorflow/lite/array.h + $(TENSORFLOW_ROOT)tensorflow/lite/array.h TFL_CC_HDRS := $(filter-out $(EXCLUDED_TFL_CC_HDRS), $(TFL_CC_HDRS)) endif @@ -473,20 +544,31 @@ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/array.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/base.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/buffer.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/buffer_ref.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/code_generator.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/code_generators.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/default_allocator.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/detached_buffer.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/file_manager.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/flatbuffer_builder.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/flatbuffers.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/flex_flat_util.h \ $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/flexbuffers.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/grpc.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/hash.h \ +$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/idl.h \ 
+$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/minireflect.h \
+$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/reflection.h \
+$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/reflection_generated.h \
+$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/registry.h \
 $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/stl_emulation.h \
 $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/string.h \
 $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/struct.h \
 $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/table.h \
+$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/util.h \
 $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/vector.h \
 $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/vector_downward.h \
 $(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/verifier.h \
-$(DOWNLOADS_DIR)/flatbuffers/include/flatbuffers/util.h \
-$(DOWNLOADS_DIR)/flatbuffers/LICENSE.txt \
+$(DOWNLOADS_DIR)/flatbuffers/LICENSE \
 $(DOWNLOADS_DIR)/gemmlowp/fixedpoint/fixedpoint.h \
 $(DOWNLOADS_DIR)/gemmlowp/fixedpoint/fixedpoint_neon.h \
 $(DOWNLOADS_DIR)/gemmlowp/fixedpoint/fixedpoint_sse.h \
@@ -555,8 +637,14 @@ ifeq ($(findstring $(TARGET),$(TARGETS_WITHOUT_MAKEFILES)),)
   include $(MAKEFILE_DIR)/targets/$(TARGET)_makefile.inc
 endif
 
+# Validate options.
+ifeq (,$(filter $(OPTIMIZE_KERNELS_FOR),KERNELS_OPTIMIZED_FOR_SPEED KERNELS_OPTIMIZED_FOR_SIZE))
+  $(error Incorrect OPTIMIZE_KERNELS_FOR: $(OPTIMIZE_KERNELS_FOR))
+endif
+
 ifneq ($(OPTIMIZED_KERNEL_DIR),)
   PATH_TO_OPTIMIZED_KERNELS := $(OPTIMIZED_KERNEL_DIR_PREFIX)/$(OPTIMIZED_KERNEL_DIR)
+  PATH_TO_SIGNAL_OPTIMIZED_KERNELS := $(OPTIMIZED_SIGNAL_KERNEL_DIR_PREFIX)/$(OPTIMIZED_KERNEL_DIR)
 
   # Check that OPTIMIZED_KERNEL_DIR is valid to avoid unexpected fallback to
   # reference kernels. See http://b/183546742 for more context.
@@ -571,6 +659,22 @@ ifneq ($(OPTIMIZED_KERNEL_DIR),)
     --base_files "$(MICROLITE_CC_KERNEL_SRCS)" \
     --specialize_directory $(PATH_TO_OPTIMIZED_KERNELS))
 
+  ifneq ($(filter $(OPTIMIZED_KERNEL_DIR), xtensa),)
+    # Check that OPTIMIZED_KERNEL_DIR is valid to avoid unexpected fallback to
+    # reference kernels. See http://b/183546742 for more context.
+    RESULT := $(shell $(MAKEFILE_DIR)/check_optimized_kernel_dir.sh $(PATH_TO_SIGNAL_OPTIMIZED_KERNELS))
+    ifneq ($(RESULT), SUCCESS)
+      $(error Incorrect SIGNAL OPTIMIZED_KERNEL_DIR: $(RESULT))
+    endif
+
+    # Specialize for the optimized kernels
+    MICROLITE_CC_SIGNAL_KERNEL_SRCS := $(shell python3 $(MAKEFILE_DIR)/specialize_files.py \
+    --base_files "$(MICROLITE_CC_SIGNAL_KERNEL_SRCS)" \
+    --specialize_directory $(PATH_TO_SIGNAL_OPTIMIZED_KERNELS))
+    MICROLITE_CC_KERNEL_SRCS += $(wildcard $(PATH_TO_SIGNAL_OPTIMIZED_KERNELS)/*.S)
+    MICROLITE_CC_HDRS += $(wildcard $(PATH_TO_SIGNAL_OPTIMIZED_KERNELS)/*.h)
+  endif
+
   # The first ifneq is needed to be compatible with make versions prior to 4.2
   # which do not support .SHELLSTATUS. While make 4.2 was released in 2016,
   # Ubuntu 18.04 only has version 4.1
@@ -586,6 +690,8 @@ ifneq ($(OPTIMIZED_KERNEL_DIR),)
   MICROLITE_CC_HDRS += $(wildcard $(PATH_TO_OPTIMIZED_KERNELS)/*.h)
 endif
 
+MICROLITE_CC_KERNEL_SRCS += $(MICROLITE_CC_SIGNAL_KERNEL_SRCS)
+
 # If a co-processor is specified on the command line with
 # CO_PROCESSOR=<co_processor> then we will include ext_libs/<co_processor>.inc
 # and find additional kernel sources in kernels/<co_processor>/
@@ -654,7 +760,14 @@ include $(MICRO_LITE_INTEGRATION_TESTS)
 
 # Load generated micro mutable op resolver test.
include ${MICRO_LITE_GEN_MUTABLE_OP_RESOLVER_TEST}
 
+# Load the layer_by_layer output tool.
+ifneq ($(TARGET), $(filter $(TARGET), \
+  bluepill cortex_m_corstone_300 riscv32_generic hexagon))
+include ${MICRO_LITE_LAYER_BY_LAYER_OUTPUT}
+endif
+
 # Load the benchmarks.
+include $(OLD_MICRO_LITE_BENCHMARKS)
 include $(MICRO_LITE_BENCHMARKS)
 
 # Load custom kernel tests.
@@ -747,6 +860,7 @@ $(BINDIR)%.test_target: $(BINDIR)%_test
 # These are microcontroller-specific rules for converting the ELF output
 # of the linker into a binary image that can be loaded directly.
 ifeq ($(TOOLCHAIN), armclang)
+  CXXFLAGS += -ffp-mode=full
   FROMELF := ${TARGET_TOOLCHAIN_ROOT}$(TARGET_TOOLCHAIN_PREFIX)fromelf
   $(BINDIR)%.bin: $(BINDIR)%
 	@mkdir -p $(dir $@)
@@ -822,6 +936,9 @@ endif
 # Just build the test targets
 build: $(MICROLITE_BUILD_TARGETS)
 
+list_gendir:
+	@echo $(GENDIR)
+
 list_library_sources:
 	@echo $(MICROLITE_CC_SRCS) $(MICROLITE_CC_KERNEL_SRCS)
 
diff --git a/tensorflow/lite/micro/tools/make/arm_gcc_download.sh b/tensorflow/lite/micro/tools/make/arm_gcc_download.sh
index 8e6d632d8c7..68d2ca15b3d 100755
--- a/tensorflow/lite/micro/tools/make/arm_gcc_download.sh
+++ b/tensorflow/lite/micro/tools/make/arm_gcc_download.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2023 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -63,19 +63,26 @@ else
   # host architechture
   UNAME_M=`uname -m`
   if [ "${UNAME_M}" == "x86_64" ]; then
-    GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-x86_64-linux.tar.bz2"
-    EXPECTED_MD5="2383e4eb4ea23f248d33adc70dc3227e"
+    GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu/13.2.rel1/binrel/arm-gnu-toolchain-13.2.rel1-x86_64-arm-none-eabi.tar.xz"
+    EXPECTED_MD5="791754852f8c18ea04da7139f153a5b7"
   elif [ "${UNAME_M}" == "aarch64" ]; then
-    GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-aarch64-linux.tar.bz2"
-    EXPECTED_MD5="3fe3d8bb693bd0a6e4615b6569443d0d"
+    GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu/13.2.rel1/binrel/arm-gnu-toolchain-13.2.rel1-aarch64-arm-none-eabi.tar.xz"
+    EXPECTED_MD5="5a08122e6d4caf97c6ccd1d29e62599c"
   fi
 elif [ "${HOST_OS}" == "osx" ]; then
-  GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-mac.tar.bz2"
-  EXPECTED_MD5="7f2a7b7b23797302a9d6182c6e482449"
+  # host architecture
+  UNAME_M=`uname -m`
+  if [ "${UNAME_M}" == "arm64" ]; then
+    GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu/13.2.rel1/binrel/arm-gnu-toolchain-13.2.rel1-darwin-arm64-arm-none-eabi.tar.xz"
+    EXPECTED_MD5="2c43e9d72206c1f81227b0a685df5ea6"
+  else
+    GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu/13.2.rel1/binrel/arm-gnu-toolchain-13.2.rel1-darwin-x86_64-arm-none-eabi.tar.xz"
+    EXPECTED_MD5="41d49840b0fc676d2ae35aab21a58693"
+  fi
 elif [ "${HOST_OS}" == "windows" ]; then
-  GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu-rm/10.3-2021.10/gcc-arm-none-eabi-10.3-2021.10-win32.zip"
-  EXPECTED_MD5="2bc8f0c4c4659f8259c8176223eeafc1"
+  GCC_URL="https://developer.arm.com/-/media/Files/downloads/gnu/13.2.rel1/binrel/arm-gnu-toolchain-13.2.rel1-mingw-w64-i686-arm-none-eabi.zip"
+  EXPECTED_MD5="7fd677088038cdf82f33f149e2e943ee"
 else
   echo "OS type ${HOST_OS} not supported."
exit 1
@@ -92,7 +99,7 @@ else
     unzip -q ${TEMPFILE} -d ${TEMPDIR} >&2
     mv ${TEMPDIR}/*/* ${DOWNLOADED_GCC_PATH}
   else
-    tar -C ${DOWNLOADED_GCC_PATH} --strip-components=1 -xjf ${TEMPFILE} >&2
+    tar -C ${DOWNLOADED_GCC_PATH} --strip-components=1 -xJf ${TEMPFILE} >&2
   fi
   echo >&2 "Unpacked to directory: ${DOWNLOADED_GCC_PATH}"
 fi
diff --git a/tensorflow/lite/micro/tools/make/bash_helpers.sh b/tensorflow/lite/micro/tools/make/bash_helpers.sh
index f29a64150e5..e5446dec9a4 100755
--- a/tensorflow/lite/micro/tools/make/bash_helpers.sh
+++ b/tensorflow/lite/micro/tools/make/bash_helpers.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,6 +14,32 @@
 # limitations under the License.
 # ==============================================================================
 
+# Check the download path argument
+#
+# Parameter(s):
+#   ${1} - path to the download directory or --no-downloads
+#
+# Outputs:
+#   "yes" or "no"
+function check_should_download() {
+  if [[ ${1} == "--no-downloads" ]]; then
+    echo "no"
+  else
+    echo "yes"
+  fi
+}
+
+# Show the download URL and MD5 checksum
+#
+# Parameter(s):
+#   ${1} - download URL
+#   ${2} - download MD5 checksum
+#
+# Download scripts require that informational output be on stderr.
+function show_download_url_md5() {
+  echo >&2 "LIBRARY_URL=${1}"
+  echo >&2 "LIBRARY_MD5=${2}"
+}
 
 # Compute the MD5 sum.
 #
diff --git a/tensorflow/lite/micro/tools/make/corstone_300_download.sh b/tensorflow/lite/micro/tools/make/corstone_300_download.sh
index aa0a762ad78..c485d3d114e 100755
--- a/tensorflow/lite/micro/tools/make/corstone_300_download.sh
+++ b/tensorflow/lite/micro/tools/make/corstone_300_download.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,6 +17,7 @@
 # Called with following arguments:
 # 1 - Path to the downloads folder which is typically
 #     tensorflow/lite/micro/tools/make/downloads
+# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called).
 #
 # This script is called from the Makefile and uses the following convention to
 # enable determination of sucess/failure:
@@ -31,11 +32,8 @@
 
 set -e
 
-SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
-ROOT_DIR=${SCRIPT_DIR}/../../../../..
-cd "${ROOT_DIR}"
-
-source tensorflow/lite/micro/tools/make/bash_helpers.sh
+TENSORFLOW_ROOT=${2}
+source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh
 
 DOWNLOADS_DIR=${1}
 if [ ! -d ${DOWNLOADS_DIR} ]; then
@@ -48,10 +46,19 @@ DOWNLOADED_CORSTONE_PATH=${DOWNLOADS_DIR}/corstone300
 
 if [ -d ${DOWNLOADED_CORSTONE_PATH} ]; then
   echo >&2 "${DOWNLOADED_CORSTONE_PATH} already exists, skipping the download."
else
-  UNAME_S=`uname -s`
+  UNAME_S=$(uname -s)
+  UNAME_M=$(uname -m)
   if [ ${UNAME_S} == Linux ]; then
-    CORSTONE_URL=https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.16_26.tgz
-    EXPECTED_MD5=29d9208127b24a0d83356efb8343162d
+    if [ ${UNAME_M} == x86_64 ]; then
+      CORSTONE_URL=https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.24_13_Linux64.tgz
+      EXPECTED_MD5=42500e49a4b9e8e0f633d1bad9b7c052
+    elif [ ${UNAME_M} == aarch64 ]; then
+      CORSTONE_URL=https://developer.arm.com/-/media/Arm%20Developer%20Community/Downloads/OSS/FVP/Corstone-300/FVP_Corstone_SSE-300_11.24_13_Linux64_armv8l.tgz
+      EXPECTED_MD5=89904e875c863235635e1570c4f6459e
+    else
+      echo "CPU type ${UNAME_M} with OS type ${UNAME_S} not supported."
+      exit 1
+    fi
   else
     echo "OS type ${UNAME_S} not supported."
     exit 1
diff --git a/tensorflow/lite/micro/tools/make/download_and_extract.sh b/tensorflow/lite/micro/tools/make/download_and_extract.sh
index 974dca100c1..dcd60dacce7 100755
--- a/tensorflow/lite/micro/tools/make/download_and_extract.sh
+++ b/tensorflow/lite/micro/tools/make/download_and_extract.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
+# Copyright 2024 The TensorFlow Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -123,9 +123,9 @@ download_and_extract() {
     sleep 2
   done
 
-  # Check that the file was downloaded correctly using a checksum.
+  # Check that the file was downloaded correctly using a checksum. Set expected_md5 to "SKIP_MD5_CHECK" to skip this check.
   DOWNLOADED_MD5=$(openssl dgst -md5 ${tempfile} | sed 's/.* //g')
-  if [ ${expected_md5} != ${DOWNLOADED_MD5} ]; then
+  if [ ${expected_md5} != ${DOWNLOADED_MD5} ] && [ ${expected_md5} != "SKIP_MD5_CHECK" ]; then
     echo "Checksum error for '${url}'. Expected ${expected_md5} but found ${DOWNLOADED_MD5}"
     exit 1
   fi
@@ -134,28 +134,30 @@ download_and_extract() {
   url=$(echo "${url}" | sed "s/\?.*//")
 
   if [[ "${url}" == *gz ]]; then
-    tar -C "${dir}" --strip-components=1 -xzf ${tempfile}
+    tar -C "${tempdir2}" -xzf ${tempfile}
   elif [[ "${url}" == *tar.xz ]]; then
-    tar -C "${dir}" --strip-components=1 -xf ${tempfile}
+    tar -C "${tempdir2}" -xf ${tempfile}
   elif [[ "${url}" == *bz2 ]]; then
     curl -Ls "${url}" > ${tempdir}/tarred.bz2
-    tar -C "${dir}" --strip-components=1 -xjf ${tempfile}
+    tar -C "${tempdir2}" -xjf ${tempfile}
   elif [[ "${url}" == *zip ]]; then
     unzip ${tempfile} -d ${tempdir2} 2>&1 1>/dev/null
-    # If the zip file contains nested directories, extract the files from the
-    # inner directory.
-    if [ $(find $tempdir2/* -maxdepth 0 | wc -l) = 1 ] && [ -d $tempdir2/* ]; then
-      # unzip has no strip components, so unzip to a temp dir, and move the
-      # files we want from the tempdir to destination.
-      cp -R ${tempdir2}/*/* ${dir}/
-    else
-      cp -R ${tempdir2}/* ${dir}/
-    fi
   else
     echo "Error unsupported archive type. Failed to extract tool after download."
     exit 1
   fi
-  rm -rf ${tempdir2} ${tempdir}
+
+  # If the archive contains nested directories, extract the files from the
+  # inner directory.
+  if [ $(find $tempdir2/* -maxdepth 0 | wc -l) = 1 ] && [ -d $tempdir2/* ]; then
+    # Unzip to a temp dir, and move the files we want from the tempdir to destination.
+    # We want this to depend on the folder structure of the archive, so --strip-components cannot be used.
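+    # As a concrete illustration (hypothetical archive layouts): a GitHub
+    # source zip such as CMSIS-NN-<sha>.zip unpacks into one top-level folder,
+    #
+    #   ${tempdir2}/CMSIS-NN-<sha>/Include/...      -> first branch: copy inner contents
+    #   ${tempdir2}/Include/... ${tempdir2}/src/... -> second branch: copy as-is
+    #
+    # so the find test above selects the right source directory in both cases.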
+ cp -R ${tempdir2}/*/* ${dir}/ + else + cp -R ${tempdir2}/* ${dir}/ + fi + + rm -rf ${tempdir} ${tempdir2} # Delete any potential BUILD files, which would interfere with Bazel builds. find "${dir}" -type f -name '*BUILD' -delete diff --git a/tensorflow/lite/micro/tools/make/increase-stack-size-and-switch-DTCM-SRAM.patch b/tensorflow/lite/micro/tools/make/ethos_u_core_platform.patch similarity index 72% rename from tensorflow/lite/micro/tools/make/increase-stack-size-and-switch-DTCM-SRAM.patch rename to tensorflow/lite/micro/tools/make/ethos_u_core_platform.patch index 57c50c1d938..957c9c28bdd 100644 --- a/tensorflow/lite/micro/tools/make/increase-stack-size-and-switch-DTCM-SRAM.patch +++ b/tensorflow/lite/micro/tools/make/ethos_u_core_platform.patch @@ -1,16 +1,16 @@ -From 470dee13bffc0adb9a778d56fab3028031f71e80 Mon Sep 17 00:00:00 2001 +From 70e504abb13fe56244250ac7ac58b1b5232481c7 Mon Sep 17 00:00:00 2001 From: TFLM -Date: Fri, 28 Oct 2022 11:01:15 +0200 +Date: Mon, 28 Aug 2023 16:07:22 +0000 Subject: [PATCH] TFLM patch --- - targets/corstone-300/platform.ld | 8 +++----- - targets/corstone-300/platform.scatter | 5 +++-- + targets/corstone-300/platform.ld | 10 ++++------ + targets/corstone-300/platform.scatter | 7 ++++--- targets/corstone-300/retarget.c | 16 ++++++++-------- - 3 files changed, 14 insertions(+), 15 deletions(-) + 3 files changed, 16 insertions(+), 17 deletions(-) diff --git a/targets/corstone-300/platform.ld b/targets/corstone-300/platform.ld -index ec58acc..21316a4 100644 +index ec58acc..51c93ca 100644 --- a/targets/corstone-300/platform.ld +++ b/targets/corstone-300/platform.ld @@ -75,7 +75,7 @@ @@ -22,6 +22,15 @@ index ec58acc..21316a4 100644 __HEAP_SIZE = 0x00008000; MEMORY +@@ -83,7 +83,7 @@ MEMORY + ITCM (rx) : ORIGIN = 0x10000000, LENGTH = 0x00080000 + BRAM (rw) : ORIGIN = 0x11000000, LENGTH = 0x00400000 + DTCM (rw) : ORIGIN = 0x30000000, LENGTH = 0x00080000 +- SRAM (rw) : ORIGIN = 0x31000000, LENGTH = 0x00200000 ++ SRAM (rw) : ORIGIN = 0x31000000, LENGTH = 0x02000000 + DDR (rwx) : ORIGIN = 0x70000000, LENGTH = 0x60000000 + } + @@ -150,9 +150,6 @@ SECTIONS *(EXCLUDE_FILE(*crtend?.o *crtend.o) .dtors) *(SORT(.dtors.*)) @@ -50,7 +59,7 @@ index ec58acc..21316a4 100644 .heap (COPY) : { diff --git a/targets/corstone-300/platform.scatter b/targets/corstone-300/platform.scatter -index fab12d1..be5c227 100644 +index fab12d1..2180aca 100644 --- a/targets/corstone-300/platform.scatter +++ b/targets/corstone-300/platform.scatter @@ -1,3 +1,4 @@ @@ -67,6 +76,15 @@ index fab12d1..be5c227 100644 #endif #ifndef HEAP_SIZE +@@ -108,7 +109,7 @@ + #define DTCM_SIZE 0x00080000 + + #define SRAM_START 0x31000000 +-#define SRAM_SIZE 0x00200000 ++#define SRAM_SIZE 0x02000000 + + #define DDR_START 0x70000000 + #define DDR_SIZE 0x02000000 @@ -136,7 +137,6 @@ APP_IMAGE LR_START LR_SIZE ; Make sure reset_handler ends up in root segment, when split across ; ITCM and DTCM @@ -115,5 +133,5 @@ index 4bde44d..b510ad8 100644 +} +#endif -- -2.17.1 +2.42.0.rc2.253.gd59a3bf2b4-goog diff --git a/tensorflow/lite/micro/tools/make/ethos_u_core_platform_download.sh b/tensorflow/lite/micro/tools/make/ethos_u_core_platform_download.sh index 76223db9137..3a1cd33f279 100755 --- a/tensorflow/lite/micro/tools/make/ethos_u_core_platform_download.sh +++ b/tensorflow/lite/micro/tools/make/ethos_u_core_platform_download.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -31,11 +31,8 @@ set -e -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -ROOT_DIR=${SCRIPT_DIR}/../../../../.. -cd "${ROOT_DIR}" - -source tensorflow/lite/micro/tools/make/bash_helpers.sh +TENSORFLOW_ROOT=${2} +source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh DOWNLOADS_DIR=${1} if [ ! -d ${DOWNLOADS_DIR} ]; then @@ -54,24 +51,22 @@ else exit 1 fi - git clone https://git.mlplatform.org/ml/ethos-u/ethos-u-core-platform.git ${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} >&2 - cd ${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} + git clone "https://review.mlplatform.org/ml/ethos-u/ethos-u-core-platform" ${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} >&2 + pushd ${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH} > /dev/null git checkout e25a89dec1cf990f3168dbd6c565e3b0d51cb151 >&2 rm -rf .git create_git_repo ./ - - apply_patch_to_folder ./ ../../increase-stack-size-and-switch-DTCM-SRAM.patch "TFLM patch" - - cd "${ROOT_DIR}" + apply_patch_to_folder ./ ../../ethos_u_core_platform.patch "TFLM patch" + popd > /dev/null LINKER_PATH=${DOWNLOADED_ETHOS_U_CORE_PLATFORM_PATH}/targets/corstone-300 # Run C preprocessor on linker file to get rid of ifdefs and make sure compiler is downloaded first. COMPILER=${DOWNLOADS_DIR}/gcc_embedded/bin/arm-none-eabi-gcc if [ ! -f ${COMPILER} ]; then - RETURN_VALUE=`./tensorflow/lite/micro/tools/make/arm_gcc_download.sh ${DOWNLOADS_DIR}` + RETURN_VALUE=`${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/arm_gcc_download.sh ${DOWNLOADS_DIR} ${TENSORFLOW_ROOT}` if [ "SUCCESS" != "${RETURN_VALUE}" ]; then - echo "The script ./tensorflow/lite/micro/tools/make/arm_gcc_download.sh failed." + echo "The script ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/arm_gcc_download.sh failed." exit 1 fi fi diff --git a/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn.inc b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn.inc index e9ae5fceb6a..a78aa492750 100644 --- a/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn.inc +++ b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn.inc @@ -43,7 +43,7 @@ THIRD_PARTY_CC_HDRS += $(shell find $(CMSIS_NN_PATH)/Include -name "*.h") # the various intrinisics. THIRD_PARTY_CC_HDRS += \ $(CMSIS_PATH)/LICENSE.txt \ - $(CMSIS_NN_PATH)/LICENSE.txt \ + $(CMSIS_NN_PATH)/LICENSE \ $(wildcard $(CMSIS_PATH)/CMSIS/Core/Include/*.h) # We add -I$(CMSIS_PATH) to enable the code in the TFLM repo (mostly in the diff --git a/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh index bc8e87b14d4..fb0ad928bd6 100755 --- a/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh +++ b/tensorflow/lite/micro/tools/make/ext_libs/cmsis_nn_download.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,21 +36,22 @@ TENSORFLOW_ROOT=${2} source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh DOWNLOADS_DIR=${1} -if [ ! -d ${DOWNLOADS_DIR} ]; then - echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." 
- exit 1 -fi - DOWNLOADED_CMSIS_NN_PATH=${DOWNLOADS_DIR}/cmsis_nn -if [ -d ${DOWNLOADED_CMSIS_NN_PATH} ]; then - echo >&2 "${DOWNLOADED_CMSIS_NN_PATH} already exists, skipping the download." -else +ZIP_PREFIX_NN="01dee38e6d6bfbbf202f0cd425bbea1731747d51" +CMSIS_NN_URL="http://github.com/ARM-software/CMSIS-NN/archive/${ZIP_PREFIX_NN}.zip" +CMSIS_NN_MD5="f20be93ededf42bb704c19f699a24313" - ZIP_PREFIX_NN="dc64e488f6655aa2792d2aceca316c896f78b4db" - CMSIS_NN_URL="http://github.com/ARM-software/CMSIS-NN/archive/${ZIP_PREFIX_NN}.zip" - CMSIS_NN_MD5="80f9cf0bcc10a4aefb6531ae53942044" +should_download=$(check_should_download ${DOWNLOADS_DIR}) +if [[ ${should_download} == "no" ]]; then + show_download_url_md5 ${CMSIS_NN_URL} ${CMSIS_NN_MD5} +elif [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +elif [ -d ${DOWNLOADED_CMSIS_NN_PATH} ]; then + echo >&2 "${DOWNLOADED_CMSIS_NN_PATH} already exists, skipping the download." +else # wget is much faster than git clone of the entire repo. So we wget a specific # version and can then apply a patch, as needed. wget ${CMSIS_NN_URL} -O /tmp/${ZIP_PREFIX_NN}.zip >&2 diff --git a/tensorflow/lite/micro/tools/make/ext_libs/eyalroz_printf.inc b/tensorflow/lite/micro/tools/make/ext_libs/eyalroz_printf.inc new file mode 100644 index 00000000000..63112c12029 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/eyalroz_printf.inc @@ -0,0 +1,28 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/eyalroz_printf_download.sh $(DOWNLOADS_DIR) $(TENSORFLOW_ROOT)) +ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the printf download: $(DOWNLOAD_RESULT)) +endif + +PRINTF_PATH := $(DOWNLOADS_DIR)/eyalroz_printf +THIRD_PARTY_CC_SRCS += \ + $(PRINTF_PATH)/src/printf/printf.c +THIRD_PARTY_CC_HDRS += \ + $(PRINTF_PATH)/src/printf/printf.h + +INCLUDES += \ + -I$(PRINTF_PATH)/src diff --git a/tensorflow/lite/micro/tools/make/ext_libs/eyalroz_printf_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/eyalroz_printf_download.sh new file mode 100755 index 00000000000..79c5ba0ec84 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/eyalroz_printf_download.sh @@ -0,0 +1,63 @@ +#!/bin/bash +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+#
+# Called with following arguments:
+# 1 - Path to the downloads folder which is typically
+#     ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads
+# 2 - (optional) TENSORFLOW_ROOT: path to root of the TFLM tree (relative to directory from where the script is called).
+#
+# This script is called from the Makefile and uses the following convention to
+# enable determination of success/failure:
+#
+# - If the script is successful, the only output on stdout should be SUCCESS.
+#   The makefile checks for this particular string.
+#
+# - Any string on stdout that is not SUCCESS will be shown in the makefile as
+#   the cause for the script to have failed.
+#
+# - Any other informational prints should be on stderr.
+
+set -e
+
+TENSORFLOW_ROOT=${2}
+source ${TENSORFLOW_ROOT}tensorflow/lite/micro/tools/make/bash_helpers.sh
+
+DOWNLOADS_DIR=${1}
+if [ ! -d ${DOWNLOADS_DIR} ]; then
+  echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist."
+  exit 1
+fi
+
+DOWNLOADED_PRINTF_PATH=${DOWNLOADS_DIR}/eyalroz_printf
+
+if [ -d ${DOWNLOADED_PRINTF_PATH} ]; then
+  echo >&2 "${DOWNLOADED_PRINTF_PATH} already exists, skipping the download."
+else
+
+  ZIP_PREFIX="f8ed5a9bd9fa8384430973465e94aa14c925872d"
+  PRINTF_URL="https://github.com/eyalroz/printf/archive/${ZIP_PREFIX}.zip"
+  PRINTF_MD5="5772534c1d6f718301bca1fefaba28f3"
+
+  # wget is much faster than git clone of the entire repo. So we wget a specific
+  # version and can then apply a patch, as needed.
+  wget ${PRINTF_URL} -O /tmp/${ZIP_PREFIX}.zip >&2
+  check_md5 /tmp/${ZIP_PREFIX}.zip ${PRINTF_MD5}
+
+  unzip -qo /tmp/${ZIP_PREFIX}.zip -d /tmp >&2
+  mv /tmp/printf-${ZIP_PREFIX} ${DOWNLOADED_PRINTF_PATH}
+fi
+
+echo "SUCCESS"
\ No newline at end of file
diff --git a/tensorflow/lite/micro/tools/make/ext_libs/ndsplib-hifi3.patch b/tensorflow/lite/micro/tools/make/ext_libs/ndsplib-hifi3.patch
new file mode 100644
index 00000000000..f216ce21128
--- /dev/null
+++ b/tensorflow/lite/micro/tools/make/ext_libs/ndsplib-hifi3.patch
@@ -0,0 +1,140 @@
+diff --git a/library/include/NatureDSP_Signal_math.h b/library/include/NatureDSP_Signal_math.h
+index 2ffea63..adf08ab 100644
+--- a/library/include/NatureDSP_Signal_math.h
++++ b/library/include/NatureDSP_Signal_math.h
+@@ -38,7 +38,7 @@ extern "C" {
+   vec_log            Logarithm
+   vec_antilog        Antilogarithm
+   vec_sqrt           Square Root
+-  vec_rsqrt	     Reciprocal Square Root
++  vec_rsqrt          Reciprocal Square Root
+   vec_sine,vec_cosine Sine/Cosine
+   vec_tan            Tangent
+   vec_atan           Arctangent
+@@ -384,9 +384,9 @@ int32_t scl_sqrt64x32(int64_t x);
+   part left by exponent value.
+ 
+   Mantissa accuracy is 1 LSB, so relative accuracy is:
+-  vec_rsqrt16x16, scl_rsqrt16x16	6.2e-5
+-  scl_rsqrt32x32	2.4e-7
+-  vec_rsqrt32x32	9.2e-10
++  vec_rsqrt16x16, scl_rsqrt16x16  6.2e-5
++  scl_rsqrt32x32                  2.4e-7
++  vec_rsqrt32x32                  9.2e-10
+ 
+   Precision: 
+   16x16  16-bit inputs, 16-bit output.
Accuracy: 2LSB
+@@ -406,11 +406,11 @@ int32_t scl_sqrt64x32(int64_t x);
+   ----------------
+   Returned packed value: 
+   scl_rsqrt32x32():
+-  bits 23…0 fractional part
+-  bits 31…24 exponent
++  bits 23…0 fractional part
++  bits 31…24 exponent
+   scl_rsqrt16x16():
+-  bits 15…0 fractional part
+-  bits 31…16 exponent
++  bits 15…0 fractional part
++  bits 31…16 exponent
+ 
+ -------------------------------------------------------------------------*/
+ void vec_rsqrt32x32 ( int32_t * frac, int16_t * exp, const int32_t * x, int N);
+@@ -635,6 +635,7 @@ float32_t scl_atan2f (float32_t y, float32_t x);
+   return result, Q16.15
+ -------------------------------------------------------------------------*/
+ void vec_tanh32x32(int32_t * y, const int32_t * x, int N);
++void vec_tanhf (float32_t * y, const float32_t * x,int N);
+ int32_t scl_tanh32x32(int32_t x);
+ 
+ /*-------------------------------------------------------------------------
+@@ -659,7 +660,12 @@ int32_t scl_tanh32x32(int32_t x);
+   return result, Q16.15
+ -------------------------------------------------------------------------*/
+ void vec_sigmoid32x32(int32_t * y, const int32_t * x, int N);
++void vec_sigmoidf (float32_t * y, const float32_t * x, int N);
+ int32_t scl_sigmoid32x32(int32_t x);
++float32_t scl_sigmoidf(float32_t x);
++
++void vec_relu32x32 (int32_t * y, const int32_t * x, int32_t K, int N);
++void vec_reluf (float32_t * y, const float32_t * x, float32_t K, int N);
+ 
+ /*-------------------------------------------------------------------------
+   Softmax
+@@ -685,6 +691,7 @@ int32_t scl_sigmoid32x32(int32_t x);
+ 
+ -------------------------------------------------------------------------*/
+ void vec_softmax32x32(int32_t * y, const int32_t * x, int N);
++void vec_softmaxf (float32_t * y, const float32_t * x,int N);
+ 
+ /*-------------------------------------------------------------------------
+   Integer to float conversion
+diff --git a/library/include/NatureDSP_types.h b/library/include/NatureDSP_types.h
+index a38b334..6322852 100644
+--- a/library/include/NatureDSP_types.h
++++ b/library/include/NatureDSP_types.h
+@@ -332,7 +332,9 @@ typedef struct tagComplex32_t
+ #define NASSERT(x)
+ #define restrict
+ #elif defined (COMPILER_XTENSA)
++#if !defined restrict
+ #define restrict __restrict
++#endif
+ #define onchip
+ #define NASSERT(x) {(void)__builtin_expect((x)!=0,1);}
+ #else
+diff --git a/library/include_private/common.h b/library/include_private/common.h
+index 20206e4..74d4d42 100644
+--- a/library/include_private/common.h
++++ b/library/include_private/common.h
+@@ -157,6 +157,12 @@ __pragma (warning(pop))
+ __asm__(".type "#name", @object\n\t.global "#name"\n\t.align 4\n\t"#name":\n\t.long 0x49438B96,0x4D73F192\n\t");
+ #endif
+ 
++#if defined(COMPILER_XTENSA)
++#define DISCARD_FUN_FOR_NONVOID_RETURN(retval_type,name,arglist) \
++__attribute__ ((section ("/DISCARD/"))) \
++retval_type name arglist \
++{ return (retval_type) 0; }
++#endif
+ 
+ #ifdef __cplusplus
+ #define externC extern "C"
+diff --git a/library/include_private/fft_real_twiddles.h b/library/include_private/fft_real_twiddles.h
+old mode 100644
+new mode 100755
+index 10cd4b2..6f5c80b
+--- a/library/include_private/fft_real_twiddles.h
++++ b/library/include_private/fft_real_twiddles.h
+@@ -44,8 +44,12 @@
+   7         128
+ */
+ 
++#if !defined MAX_RFFT_PWR
+ #define MAX_RFFT_PWR 13
++#endif
++#if !defined MAX_RFFT_LEN
+ #define MAX_RFFT_LEN (1<<MAX_RFFT_PWR)
++#endif
 >&2
+ set -e + if [ $retVal -ne 0 ]; then + TEMPFILE=$(mktemp -d)/ + python3 -m venv $TEMPFILE + source $TEMPFILE/bin/activate + python3 -m pip install --upgrade pip >&2 + pip install --upgrade cython >&2 + pip install --prefer-binary ethos-u-vela >&2 + fi + # Compile an optimized .tflite version for Ethos-U. - TEMPFILE=$(mktemp -d)/ - python3 -m venv $TEMPFILE - source $TEMPFILE/bin/activate - python3 -m pip install --upgrade pip >&2 - pip install --upgrade cython >&2 - pip install --prefer-binary ethos-u-vela >&2 vela --accelerator-config=ethos-u55-256 ${DOWNLOADS_DIR}/../../../models/person_detect.tflite \ --output-dir ${MODEL_DIR} >&2 - deactivate + + if [ $retVal -ne 0 ]; then + deactivate + fi # Convert .tflite back to C array. echo "// This file is generated by $0." > ${CONVERTED_PERSON_MODEL_INT8} @@ -66,11 +79,12 @@ if [ ! -f ${CONVERTED_PERSON_MODEL_INT8} ]; then ${CONVERTED_PERSON_MODEL_INT8} echo -n "const " >> ${CONVERTED_PERSON_MODEL_INT8} xxd -i ${MODEL_DIR}/person_detect_vela.tflite >> ${CONVERTED_PERSON_MODEL_INT8} - sed -i 's/gen_cortex_m_corstone_300_cortex_m55_default_genfiles_tensorflow_lite_micro_models_person_detect_vela_tflite/g_person_detect_model_data/' \ + sed -i 's/gen_cortex_m_corstone_300_cortex_m55_.*genfiles_tensorflow_lite_micro_models_person_detect_vela_tflite/g_person_detect_model_data/' \ ${CONVERTED_PERSON_MODEL_INT8} - sed -i 's/^const unsigned char g_person_detect_model_data/alignas\(16\) &/' ${CONVERTED_PERSON_MODEL_INT8} - sed -i 's/g_person_detect_model_data_len/g_person_detect_model_data_size/' ${CONVERTED_PERSON_MODEL_INT8} - sed -i 's/unsigned int/const unsigned int/' ${CONVERTED_PERSON_MODEL_INT8} + sed -i 's/^const unsigned char g_person_detect_model_data/alignas\(16\) &/' ${CONVERTED_PERSON_MODEL_INT8} + SIZE=$(sed -E -n -e 's/^.*g_person_detect_model_data_len = ([0-9]+);/\1/p' ${CONVERTED_PERSON_MODEL_INT8}) + sed -i 's/^.*g_person_detect_model_data_len.*$//' ${CONVERTED_PERSON_MODEL_INT8} + sed -E -i "s/(^constexpr.*g_person_detect_model_data_size = )([0-9]+);/\1$SIZE;/" ${PERSON_MODEL_HEADER} fi echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi4.patch b/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi4.patch index 227ee92a2be..1bb15aa88fc 100644 --- a/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi4.patch +++ b/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi4.patch @@ -1,207 +1,5 @@ -From 0a68f2ffa640d1b52314278cec838384722eb1d0 Mon Sep 17 00:00:00 2001 -From: William Huang -Date: Tue, 16 May 2023 09:18:55 +0000 -Subject: [PATCH] Optimize Xtensa transpose convolution for more kernel sizes - and input channels. - -Previously, there were three code paths, in decreasing performance: - -1. Kernel size (H*W) multiple of 4, input channels multiple of 16 -2. Kernel size (H*W) multiple of 4, input channels multiple of 4 -3. Others (unoptimized case) - -This patch reduces them to the follow two cases: - -1. Input channels multiple of 4 -2. Others (unoptimized case) - -Original CL=cl/516144094 - -BUG=227374718 - -Signed-off-by: William Huang - -Optimize Xtensa CONV2D circular buffer copy. - -In Xtensa's CONV2D kernel, data is shuffled around and padded so the 2D -convolution turns into sequential vector products. Unfortunately, this -process is somewhat slow, and the overhead is especially high for small -vector lengths. 
- -This patch introduces the following: - -- Faster code path for no padding (since our models use VALID padding, - i.e., no padding at all) -- Manual loop if array is small and memcpy if array is large -- Skip memset on padded channels as the corresponding kernels are - already zero - -BUG=249796929 - -Signed-off-by: William Huang - -Add implementation for zero-copy CONV2D kernels. - -The previous `xa_nn_conv2d_std_sym8sxsym16s` implementation shuffles the -input tensor into a circular buffer, flattening the dimensions, so that -the 2D convolution turns into sequential vector products. However, this -created significant overhead for layers where the resulting vector -lengths are small. - -This patch implements an alternative zero-copy method that takes -advantage of two facts: - -1. If `x_padding == 0`, the width dimension is automatically flattened - with the channel dimension, and we need only `kernel_height` - sequential vector products, even without the data shuffling -2. Similar to the loop tiling done in - `xa_nn_matXvec_sym8sxsym16s_sym16s_circ`, we can tile the `out_width` - and `out_channels` dimensions, achieving the throughput of - `_xa_nn_dot_product_2row_4vec_mat_vecs_4bytes_aligned` (i.e., 1.6 - MULAAAAQs/cycle), even when `out_height < 2` - -As a result, the patch significantly benefits layers where the kernel -and output heights are small, leading to 25%+ cycle reductions in some -use cases. - -Signed-off-by: William Huang ---- - .../cnn/hifi4/xa_nn_conv2d_std_circ_buf.c | 84 +++++++- - .../cnn/hifi4/xa_nn_conv2d_std_state.h | 15 ++ - .../cnn/hifi4/xa_nn_conv2d_std_sym8sxsym16s.c | 203 +++++++++++++++--- - .../hifi4/xa_nn_transpose_conv_sym8sxsym16s.c | 36 +--- - 4 files changed, 275 insertions(+), 63 deletions(-) - -diff --git a/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c b/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c -index f8adba2..1a5f186 100644 ---- a/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c -+++ b/algo/kernels/cnn/hifi4/xa_nn_conv2d_std_circ_buf.c -@@ -642,7 +642,8 @@ VOID conv2d_std_init_cir_buf( - } - - // Add x_stride (but not more than kernel_width) x (input_height x input_channels) new planes to circular buffer --VOID conv2d_std_update_cir_buf( -+// Slow version of conv2d_std_update_cir_buf with fewer requirements -+VOID conv2d_std_update_cir_buf_slow( - WORD32 input_channels, - WORD32 input_channels_pad, - WORD32 input_bytewidth, -@@ -742,6 +743,87 @@ VOID conv2d_std_update_cir_buf( - *pp_inp = (VOID *)p_inp; - } - -+// Add x_stride (but not more than kernel_width) x (input_height x input_channels) new planes to circular buffer -+VOID conv2d_std_update_cir_buf( -+ WORD32 input_channels, -+ WORD32 input_channels_pad, -+ WORD32 input_bytewidth, -+ WORD32 input_width, -+ WORD32 input_height, -+ WORD32 y_padding, -+ WORD32 y_b_pad, -+ WORD32 x_padding, -+ WORD32 kernel_width, -+ WORD32 x_stride, -+ VOID **pp_inp, -+ WORD32 idx_beg_inp_width_pad, -+ xa_nn_conv_state_t *p_state) -+{ -+ if (y_padding != 0 || y_b_pad != 0 || x_padding != 0) { -+ conv2d_std_update_cir_buf_slow( -+ input_channels, -+ input_channels_pad, -+ input_bytewidth, -+ input_width, -+ input_height, -+ y_padding, -+ y_b_pad, -+ x_padding, -+ kernel_width, -+ x_stride, -+ pp_inp, -+ idx_beg_inp_width_pad, -+ p_state -+ ); -+ return; -+ } -+ -+ WORD32 i,k; -+ WORD8 *p_inp = (WORD8 *)*pp_inp; -+ WORD32 planes_to_add = x_stride > kernel_width ? 
kernel_width : x_stride; -+ WORD32 planes_to_keep = kernel_width - planes_to_add; -+ -+ // Copy 'planes_to_add' planes of data to circular buffer -+ AE_ADDCIRC16X4_XC((ae_int16x4 *)p_state->cir_buf.p_curr, planes_to_add * input_channels_pad * input_bytewidth); -+ WORD8 *p_dst = (WORD8 *)p_state->cir_buf.p_curr; -+ AE_ADDCIRC16X4_XC((ae_int16x4 *)p_dst, planes_to_keep * input_channels_pad * input_bytewidth); -+ -+ WORD32 copy_inp_width = planes_to_add; -+ WORD32 to_skip_inp_width = x_stride - planes_to_add; // Non-zero for x_stride > kernel_width -+ -+ int size = input_channels * input_bytewidth; -+ if (size <= 32) { -+ for(i=0;icir_buf.p_curr/* matrix: rows x cols */ -- ,p_state->p_kernel_padded /* vec: cols */ -- ,p_bias /* bias */ -- ,out_height /* rows */ -- ,input_channels_pad * kernel_width * kernel_height /* cols */ -- ,input_channels_pad * kernel_width * y_stride/* row_offset */ -- ,out_channels /* vec_count */ -- ,input_channels_pad * kernel_width * kernel_height /* vec_stride */ -- ,out_channels_offset /* out_col_offset */ -- ,out_height_offset /* out_row_offset */ -- ,input_zero_bias -- ,p_out_multiplier -- ,p_out_shift -- ,out_zero_bias -- ); -- p_out += out_width_offset; -+ // Convolution using matXvec with matrix as circular buffer -+ xa_nn_matXvec_sym8sxsym16s_sym16s_circ -+ (p_out /* output */ -+ ,p_state->cir_buf.p_curr/* matrix: rows x cols */ -+ ,p_state->p_kernel_padded /* vec: cols */ -+ ,p_bias /* bias */ -+ ,out_height /* rows */ -+ ,input_channels_pad * kernel_width * kernel_height /* cols */ -+ ,input_channels_pad * kernel_width * y_stride/* row_offset */ -+ ,out_channels /* vec_count */ -+ ,input_channels_pad * kernel_width * kernel_height /* vec_stride */ -+ ,out_channels_offset /* out_col_offset */ -+ ,out_height_offset /* out_row_offset */ -+ ,input_zero_bias -+ ,p_out_multiplier -+ ,p_out_shift -+ ,out_zero_bias -+ ); -+ p_out += out_width_offset; -+ } -+ } else { ++ + const WORD16 *p_dst0_0 = p_out + 0; + const WORD16 *p_dst0_1 = p_out + 1; + const WORD16 *p_dst0_2 = p_out + 2; @@ -310,8 +65,8 @@ index 92721bc..6f868be 100644 + const WORD16 *p_dst1_2 = p_out + out_channels + 2; + const WORD16 *p_dst1_3 = p_out + out_channels + 3; + int kernel_out_ch_offset = kernel_height * kernel_width * input_channels; -+ int input_x_offset = input_channels * x_stride / 4; -+ int p_inp_vec_stride = input_width * input_channels / 4; ++ int input_x_offset = (input_channels * x_stride) / 4; ++ int p_inp_vec_stride = (input_width * input_channels) / 4; + int p_kern_vec_stride = kernel_width * input_channels; + int vec_len = kernel_width * input_channels; + for (int out_y = 0; out_y < out_height; ++out_y) { @@ -325,6 +80,7 @@ index 92721bc..6f868be 100644 + ae_int64 out1_1 = p_bias[out_ch + 1]; + ae_int64 out1_2 = p_bias[out_ch + 2]; + ae_int64 out1_3 = p_bias[out_ch + 3]; ++ + out0_0 = AE_SLAI64(out0_0, 8); + out0_1 = AE_SLAI64(out0_1, 8); + out0_2 = AE_SLAI64(out0_2, 8); @@ -333,10 +89,11 @@ index 92721bc..6f868be 100644 + out1_1 = AE_SLAI64(out1_1, 8); + out1_2 = AE_SLAI64(out1_2, 8); + out1_3 = AE_SLAI64(out1_3, 8); ++ + int in_x_o = out_x * x_stride; + int in_y_o = out_y * y_stride - y_padding; + int k_y_min = -in_y_o; -+ int k_y_max = input_width - in_y_o; ++ int k_y_max = input_height - in_y_o; + k_y_min = (k_y_min < 0) ? 0 : k_y_min; + k_y_min = (k_y_min < kernel_height) ? k_y_min : kernel_height; + k_y_max = (k_y_max < 0) ? 
0 : k_y_max; @@ -382,6 +139,7 @@ index 92721bc..6f868be 100644 + AE_MULAAAAQ16(out1_3, d_inp1, d_kern3); + } + } ++ + out0_0 = AE_SRAI64(out0_0, 8); + out0_1 = AE_SRAI64(out0_1, 8); + out0_2 = AE_SRAI64(out0_2, 8); @@ -390,6 +148,7 @@ index 92721bc..6f868be 100644 + out1_1 = AE_SRAI64(out1_1, 8); + out1_2 = AE_SRAI64(out1_2, 8); + out1_3 = AE_SRAI64(out1_3, 8); ++ + ae_int32x2 acc_vec0 = MultiplyByQuantizedMultiplier_x2_opt( + out0_0, out1_0, p_out_multiplier[out_ch + 0], + p_out_shift[out_ch + 0]); @@ -423,70 +182,45 @@ index 92721bc..6f868be 100644 + p_dst1_3 += out_channels; + } + } ++ return 0; ++} ++ + WORD32 xa_nn_conv2d_std_per_chan_sym8sxsym16s( + WORD16* __restrict__ p_out, + const WORD16* __restrict__ p_inp, +@@ -180,6 +352,35 @@ WORD32 xa_nn_conv2d_std_per_chan_sym8sxsym16s( + XA_NNLIB_ARG_CHK_COND((p_out_shift[itr] < -31 || p_out_shift[itr] > 31), -1); } - return 0; -diff --git a/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c b/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c -index 7f31b75..a010d45 100644 ---- a/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c -+++ b/algo/kernels/cnn/hifi4/xa_nn_transpose_conv_sym8sxsym16s.c -@@ -157,7 +157,7 @@ int xa_nn_transpose_conv_sym8sxsym16s(WORD16* output_data, - */ - if(input_data && filter_data && output_data && scratch_buffer && - (((unsigned int)input_data&0x7)==0) && (((unsigned int)filter_data&0x3)==0) && (((unsigned int)output_data&0x7) == 0) && -- (((unsigned int)scratch_buffer&0x7) == 0) && ((input_depth&0xF)==0) && ((filter_height*filter_width&0x3)==0)) -+ (((unsigned int)scratch_buffer&0x7) == 0) && ((input_depth&0x3)==0)) - { - { - //tbd : batch = 1, need to handle other values and in_x_min/max= 0 .. need toc heck for other values -@@ -180,7 +180,8 @@ int xa_nn_transpose_conv_sym8sxsym16s(WORD16* output_data, - filt_y_max = (filt_y_max < filter_height) ? filt_y_max : filter_height; - filt_y_max = (filt_y_max < 0) ? 0 : filt_y_max; - pinp = (WORD16*)&input_data[in_y*input_width*input_depth+in_x*input_depth]; -- for (int in_channel = 0; in_channel < input_depth; in_channel+=16) -+ int in_channel = 0; -+ for (; in_channel + 15 < input_depth; in_channel+=16) - { - ae_int16x4 d_inp, d_inp1, d_inp2, d_inp3; - AE_L16X4_IP(d_inp, (ae_int16x4*)pinp, sizeof(WORD64)); -@@ -235,36 +236,7 @@ int xa_nn_transpose_conv_sym8sxsym16s(WORD16* output_data, - } - } - } -- } -- } -- } -- } -- else if(input_data && filter_data && output_data && scratch_buffer && -- (((unsigned int)input_data&0x7)==0) && (((unsigned int)filter_data&0x3)==0) && (((unsigned int)output_data&0x7) == 0) && -- (((unsigned int)scratch_buffer&0x7) == 0) && ((input_depth&0x3)==0) && ((filter_height*filter_width&0x3)==0)) -- { -- { -- //tbd : batch = 1, need to handle other values and in_x_min/max= 0 .. need toc heck for other values -- for (int in_y = 0; in_y < input_height; ++in_y) -- { -- for (int in_x = 0; in_x < input_width; ++in_x) -- { -- const int out_x_orig = in_x*stride_width - pad_width; -- const int out_y_orig = in_y*stride_height - pad_height; -- int filt_x_min = -out_x_orig; -- int filt_x_max = output_width - out_x_orig; -- int filt_y_min = -out_y_orig; -- int filt_y_max = output_height - out_y_orig; -- filt_x_min = (filt_x_min < filter_width) ? filt_x_min : filter_width; -- filt_x_min = (filt_x_min < 0) ? 0 : filt_x_min; -- filt_x_max = (filt_x_max < filter_width) ? filt_x_max : filter_width; -- filt_x_max = (filt_x_max < 0) ? 0 : filt_x_max; -- filt_y_min = (filt_y_min < filter_height) ? 
filt_y_min : filter_height; -- filt_y_min = (filt_y_min < 0) ? 0 : filt_y_min; -- filt_y_max = (filt_y_max < filter_height) ? filt_y_max : filter_height; -- filt_y_max = (filt_y_max < 0) ? 0 : filt_y_max; -- pinp = (WORD16*)&input_data[in_y*input_width*input_depth+in_x*input_depth]; -- for (int in_channel = 0; in_channel < input_depth; in_channel+=4) -+ for (; in_channel + 3 < input_depth; in_channel+=4) - { - ae_int16x4 d_inp; - AE_L16X4_IP(d_inp, (ae_int16x4*)pinp, sizeof(WORD64)); --- -2.41.0.162.gfafddb0af9-goog - ++ if ( !(x_padding) && !(input_channels & 0x3) && !(out_channels & 0x3) && !(out_width & 0x1) && (out_data_format == 0) && ((out_width-1)*x_stride <=(input_width-kernel_width) ) ) ++ { ++ int ret_val=0; ++ ret_val=xa_nn_conv2d_std_per_chan_sym8sxsym16s_no_circ_buf(p_out, ++ p_inp, ++ p_kernel, ++ p_bias, ++ input_height, ++ input_width, ++ input_channels, ++ kernel_height, ++ kernel_width, ++ out_channels, ++ x_stride, ++ y_stride, ++ x_padding, ++ y_padding, ++ out_height, ++ out_width, ++ input_zero_bias, ++ p_out_multiplier, ++ p_out_shift, ++ out_zero_bias, ++ out_data_format ++ ); ++ ++ return ret_val; ++ } ++ + WORD32 j; + WORD32 input_bytewidth = 2; + VOID *pp_inp = (VOID *)p_inp; diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi5.patch b/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi5.patch deleted file mode 100644 index 9d95c63a0ad..00000000000 --- a/tensorflow/lite/micro/tools/make/ext_libs/xa_nnlib_hifi5.patch +++ /dev/null @@ -1,36 +0,0 @@ -diff --git a/algo/kernels/fc/hifi4/xa_nn_fully_connected.c b/algo/kernels/fc/hifi4/xa_nn_fully_connected.c -index 26a2b73..61f0a64 100644 ---- a/algo/kernels/fc/hifi4/xa_nn_fully_connected.c -+++ b/algo/kernels/fc/hifi4/xa_nn_fully_connected.c -@@ -298,7 +298,6 @@ WORD32 xa_nn_fully_connected_sym8sxasym8s_asym8s - XA_NNLIB_ARG_CHK_PTR(p_out, -1); - XA_NNLIB_ARG_CHK_PTR(p_weight, -1); - XA_NNLIB_ARG_CHK_PTR(p_inp, -1); -- XA_NNLIB_ARG_CHK_PTR(p_bias, -1); - /* Pointer alignment checks */ - #if 0 - XA_NNLIB_ARG_CHK_ALIGN(p_out, ALIGNMENT, -1); -@@ -310,7 +309,8 @@ WORD32 xa_nn_fully_connected_sym8sxasym8s_asym8s - XA_NNLIB_ARG_CHK_ALIGN(p_out, sizeof(WORD8), -1); - XA_NNLIB_ARG_CHK_ALIGN(p_weight, sizeof(WORD8), -1); - XA_NNLIB_ARG_CHK_ALIGN(p_inp, sizeof(WORD8), -1); -- XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); -+ if (p_bias != NULL) -+ XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); - #endif - /* Basic Parameter checks */ - XA_NNLIB_ARG_CHK_COND((out_depth <= 0), -1); -diff --git a/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c b/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c -index 5350cbe..a91e043 100644 ---- a/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c -+++ b/algo/kernels/matXvec/hifi5/xa_nn_matXvec_sym8sxasym8s.c -@@ -704,7 +704,8 @@ WORD32 xa_nn_matXvec_sym8sxasym8s_asym8s( - XA_NNLIB_ARG_CHK_PTR(p_mat1, -1); - XA_NNLIB_ARG_CHK_PTR(p_vec1, -1); - /* Pointer alignment checks */ -- XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); -+ if (p_bias != NULL) -+ XA_NNLIB_ARG_CHK_ALIGN(p_bias, sizeof(WORD32), -1); - /* Basic Parameter checks */ - XA_NNLIB_ARG_CHK_COND((rows <= 0), -1); - XA_NNLIB_ARG_CHK_COND((cols1 <= 0), -1); diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xtensa.inc b/tensorflow/lite/micro/tools/make/ext_libs/xtensa.inc index 3b282676acb..70e1880c800 100644 --- a/tensorflow/lite/micro/tools/make/ext_libs/xtensa.inc +++ b/tensorflow/lite/micro/tools/make/ext_libs/xtensa.inc @@ -1,9 +1,12 @@ + # Explicitly add kernel sources 
specific to the Xtensa optimized
 # implementations.
 MICROLITE_CC_KERNEL_SRCS += \
   $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/add_vision.cc \
+  $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_common_xtensa.cc \
   $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_hifi.cc \
   $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_int16_reference.cc \
+  $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_int8_int16.cc \
   $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_int8_reference.cc \
   $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/conv_vision.cc \
   $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/depthwise_conv_hifi.cc \
@@ -25,12 +28,25 @@ ifeq ($(TARGET_ARCH), hifimini)
     $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/hifimini/svdf.cc \
     $(TENSORFLOW_ROOT)tensorflow/lite/micro/kernels/xtensa/hifimini/fully_connected.cc
 
+  FFT_PATH := $(MAKEFILE_DIR)/downloads/hifi_fft
+  INCLUDES += -I$(FFT_PATH)/
+
+  THIRD_PARTY_KERNEL_CC_SRCS += \
+    $(shell find $(FFT_PATH)/hifi2_fft -name "*.c")
+  THIRD_PARTY_CC_HDRS += \
+    $(shell find $(FFT_PATH)/hifi2_fft -name "*.h")
+
 else ifeq ($(TARGET_ARCH), hifi5)
   DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_download.sh ${DOWNLOADS_DIR} hifi5 $(TENSORFLOW_ROOT))
   ifneq ($(DOWNLOAD_RESULT), SUCCESS)
    $(error Something went wrong with the xtensa download: $(DOWNLOAD_RESULT))
  endif
 
+  DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_ndsp_download.sh ${DOWNLOADS_DIR} hifi5 $(TENSORFLOW_ROOT))
+  ifneq ($(DOWNLOAD_RESULT), SUCCESS)
+    $(error Something went wrong with the xtensa ndsp download: $(DOWNLOAD_RESULT))
+  endif
+
   # TODO(b/161489252): -Wno-shadow is only needed for xannlib. But since we do
   # not have separate cflags (or the concept of modular build targets) with the
   # Makefile, -Wno-shadow will be used for everything.
@@ -43,10 +59,19 @@ else ifeq ($(TARGET_ARCH), hifi5)
   CXXFLAGS += $(PLATFORM_FLAGS)
 
   NNLIB_PATH := $(MAKEFILE_DIR)/downloads/xa_nnlib_hifi5
+  NDSPLIB_PATH := $(MAKEFILE_DIR)/downloads/ndsplib-hifi5
 
   THIRD_PARTY_KERNEL_CC_SRCS += \
     $(shell find $(NNLIB_PATH) -name "*.c")
 
+  # The NDSP library has a lot of files. Add more as needed.
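+  # For instance, another module could later be pulled in the same way (the
+  # module directory here is hypothetical, not part of this change):
+  #
+  #   THIRD_PARTY_KERNEL_CC_SRCS += \
+  #     $(shell find $(NDSPLIB_PATH)/library/mathf -name "*.c")
+  #
+  # The block below adds the FFT-related subset used by the signal kernels.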
+ THIRD_PARTY_KERNEL_CC_SRCS += \ + $(shell find $(NDSPLIB_PATH)/library/fft/fft -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library/fft/fft_ie -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library/fft/fft_ief -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library/twiddles -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library -name "version.c") + EXCLUDED_NNLIB_SRCS = \ $(NNLIB_PATH)/algo/layers/cnn/src/xa_nn_cnn_api.c \ $(NNLIB_PATH)/algo/layers/gru/src/xa_nn_gru_api.c \ @@ -55,7 +80,8 @@ else ifeq ($(TARGET_ARCH), hifi5) THIRD_PARTY_KERNEL_CC_SRCS := $(filter-out $(EXCLUDED_NNLIB_SRCS), $(THIRD_PARTY_KERNEL_CC_SRCS)) THIRD_PARTY_CC_HDRS += \ - $(shell find $(NNLIB_PATH) -name "*.h") + $(shell find $(NNLIB_PATH) -name "*.h") \ + $(shell find $(NDSPLIB_PATH) -name "*.h") INCLUDES += \ -I$(NNLIB_PATH)/ \ @@ -63,15 +89,20 @@ else ifeq ($(TARGET_ARCH), hifi5) -I$(NNLIB_PATH)/include/nnlib/ \ -I$(NNLIB_PATH)/include/ \ -I$(NNLIB_PATH)/algo/common/include/ \ - -I$(NNLIB_PATH)/algo/ndsp/hifi5/include/ - -else ifeq ($(TARGET_ARCH), hifi4) - + -I$(NDSPLIB_PATH)/library/include/ \ + -I$(NDSPLIB_PATH)/library/include_private/ +else ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifi3 hifi4)) + # NNLib hifi4 also supports hifi3 DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_download.sh ${DOWNLOADS_DIR} hifi4 $(TENSORFLOW_ROOT)) ifneq ($(DOWNLOAD_RESULT), SUCCESS) $(error Something went wrong with the xtensa download: $(DOWNLOAD_RESULT)) endif + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_ndsp_download.sh ${DOWNLOADS_DIR} $(TARGET_ARCH) $(TENSORFLOW_ROOT)) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the xtensa ndsp download: $(DOWNLOAD_RESULT)) + endif + # TODO(b/161489252): -Wno-shadow is only needed for xannlib. But since we do # not have separate cflags (or the concept of modular build targets) with the # Makefile, -Wno-shadow will be used for everything. @@ -83,20 +114,44 @@ else ifeq ($(TARGET_ARCH), hifi4) CCFLAGS += $(PLATFORM_FLAGS) CXXFLAGS += $(PLATFORM_FLAGS) + # NNLib for hifi4 also supports hifi3 NNLIB_PATH := $(MAKEFILE_DIR)/downloads/xa_nnlib_hifi4 + NDSPLIB_PATH := $(MAKEFILE_DIR)/downloads/ndsplib-$(TARGET_ARCH) THIRD_PARTY_KERNEL_CC_SRCS += \ $(shell find $(NNLIB_PATH) -name "*.c") + # The NDSP library has a lot of files. Add them as needed.
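+ # (This is the same FFT subset that the hifi5 build above compiles.)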
+ THIRD_PARTY_KERNEL_CC_SRCS += \ + $(shell find $(NDSPLIB_PATH)/library/fft/fft -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library/fft/fft_ie -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library/fft/fft_ief -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library/twiddles -name "*.c") \ + $(shell find $(NDSPLIB_PATH)/library -name "version.c") + EXCLUDED_NNLIB_SRCS = \ $(NNLIB_PATH)/algo/layers/cnn/src/xa_nn_cnn_api.c \ $(NNLIB_PATH)/algo/layers/gru/src/xa_nn_gru_api.c \ $(NNLIB_PATH)/algo/layers/lstm/src/xa_nn_lstm_api.c + ifeq ($(TARGET_ARCH), hifi3) + EXCLUDED_NNLIB_SRCS += \ + $(NNLIB_PATH)/algo/ndsp/hifi4/src/pow2f_tbl.c \ + $(NNLIB_PATH)/algo/ndsp/hifi4/src/scl_tanhf_hifi4.c \ + $(NNLIB_PATH)/algo/ndsp/hifi4/src/vec_tanhf_hifi4.c \ + $(NNLIB_PATH)/algo/ndsp/hifi4/src/tanhf_tbl.c + endif + + ifeq ($(TARGET_ARCH), hifi4) + EXCLUDED_NNLIB_SRCS += \ + $(NNLIB_PATH)/algo/kernels/activations/hifi4/xa_nn_activations_asym8_asym8.c + endif + THIRD_PARTY_KERNEL_CC_SRCS := $(filter-out $(EXCLUDED_NNLIB_SRCS), $(THIRD_PARTY_KERNEL_CC_SRCS)) THIRD_PARTY_CC_HDRS += \ - $(shell find $(NNLIB_PATH) -name "*.h") + $(shell find $(NNLIB_PATH) -name "*.h") \ + $(shell find $(NDSPLIB_PATH) -name "*.h") INCLUDES += \ -I$(NNLIB_PATH)/ \ @@ -104,7 +159,8 @@ else ifeq ($(TARGET_ARCH), hifi4) -I$(NNLIB_PATH)/include/nnlib/ \ -I$(NNLIB_PATH)/include/ \ -I$(NNLIB_PATH)/algo/common/include/ \ - -I$(NNLIB_PATH)/algo/ndsp/hifi4/include/ + -I$(NDSPLIB_PATH)/library/include/ \ + -I$(NDSPLIB_PATH)/library/include_private/ else ifeq ($(TARGET_ARCH), vision_p6) DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/xtensa_download.sh ${DOWNLOADS_DIR} vision_p6 $(TENSORFLOW_ROOT)) @@ -139,21 +195,3 @@ else ifeq ($(TARGET_ARCH), vision_p6) else $(error Unsupported TARGET_ARCH=$(TARGET_ARCH)) endif - -FFT_PATH := $(MAKEFILE_DIR)/downloads/hifi_fft - -INCLUDES += -I$(FFT_PATH)/ - -ifeq ($(TARGET_ARCH), $(filter $(TARGET_ARCH), hifi3 hifi4 hifi5)) -THIRD_PARTY_KERNEL_CC_SRCS += \ - $(shell find $(FFT_PATH)/hifi3_fft -name "*.c") - -THIRD_PARTY_CC_HDRS += \ - $(shell find $(FFT_PATH)/hifi3_fft -name "*.h") -else ifeq ($(TARGET_ARCH), hifimini) -THIRD_PARTY_KERNEL_CC_SRCS += \ - $(shell find $(FFT_PATH)/hifi2_fft -name "*.c") - -THIRD_PARTY_CC_HDRS += \ - $(shell find $(FFT_PATH)/hifi2_fft -name "*.h") -endif diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh index fb4512310ae..2c81710459a 100755 --- a/tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh +++ b/tensorflow/lite/micro/tools/make/ext_libs/xtensa_download.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2019 The TensorFlow Authors. All Rights Reserved. +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -38,19 +38,15 @@ set -e source ${3}tensorflow/lite/micro/tools/make/bash_helpers.sh DOWNLOADS_DIR=${1} -if [ ! -d ${DOWNLOADS_DIR} ]; then - echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." 
- exit 1 -fi -if [[ ${2} == "hifi4" ]]; then - LIBRARY_URL="http://github.com/foss-xtensa/nnlib-hifi4/raw/master/archive/xa_nnlib_hifi4_10_14_2022.zip" +if [[ ${2} == "hifi4" || ${2} == "hifi3" ]]; then + LIBRARY_URL="http://github.com/foss-xtensa/nnlib-hifi4/raw/master/archive/xa_nnlib_hifi4_09_05_2023.zip" LIBRARY_DIRNAME="xa_nnlib_hifi4" - LIBRARY_MD5="2bf3c1c7fd5a23f157babc8e24fd2c55" + LIBRARY_MD5="2a54e056aef73a4fcffde4643998501a" elif [[ ${2} == "hifi5" ]]; then - LIBRARY_URL="http://github.com/foss-xtensa/nnlib-hifi5/raw/master/archive/xa_nnlib_hifi5_12_19_2022.zip" + LIBRARY_URL="http://github.com/foss-xtensa/nnlib-hifi5/raw/master/archive/xa_nnlib_hifi5_09_05_2023.zip" LIBRARY_DIRNAME="xa_nnlib_hifi5" - LIBRARY_MD5="83306809191f42a064bde688b94e1eb1" + LIBRARY_MD5="1deb55ef200bf5dbedc70b99b02140c0" elif [[ ${2} == "vision_p6" ]]; then LIBRARY_URL="https://github.com/foss-xtensa/tflmlib_vision/raw/main/archive/xi_tflmlib_vision_p6_22_06_29.zip" LIBRARY_DIRNAME="xi_tflmlib_vision_p6" @@ -62,35 +58,30 @@ fi LIBRARY_INSTALL_PATH=${DOWNLOADS_DIR}/${LIBRARY_DIRNAME} -if [ -d ${LIBRARY_INSTALL_PATH} ]; then +should_download=$(check_should_download ${DOWNLOADS_DIR}) + +if [[ ${should_download} == "no" ]]; then + show_download_url_md5 ${LIBRARY_URL} ${LIBRARY_MD5} +elif [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +elif [ -d ${LIBRARY_INSTALL_PATH} ]; then echo >&2 "${LIBRARY_INSTALL_PATH} already exists, skipping the download." else TEMPDIR="$(mktemp -d)" TEMPFILE="${TEMPDIR}/${LIBRARY_DIRNAME}.zip" wget ${LIBRARY_URL} -O "$TEMPFILE" >&2 - MD5=`md5sum "$TEMPFILE" | awk '{print $1}'` - - if [[ ${MD5} != ${LIBRARY_MD5} ]] - then - echo "Bad checksum. Expected: ${LIBRARY_MD5}, Got: ${MD5}" - exit 1 - fi + check_md5 "${TEMPFILE}" ${LIBRARY_MD5} - # Check if another make process has already extracted the downloaded files. - # If so, skip extracting and patching. - if [ -d ${LIBRARY_INSTALL_PATH} ]; then - echo >&2 "${LIBRARY_INSTALL_PATH} already exists, skipping the extraction." - else - unzip -qo "$TEMPFILE" -d ${DOWNLOADS_DIR} >&2 + unzip -qo "$TEMPFILE" -d ${DOWNLOADS_DIR} >&2 - rm -rf "${TEMPDIR}" + rm -rf "${TEMPDIR}" - pushd "${LIBRARY_INSTALL_PATH}" > /dev/null - chmod -R +w ./ - if [[ -f "../../ext_libs/xa_nnlib_${2}.patch" ]]; then - create_git_repo ./ - apply_patch_to_folder ./ "../../ext_libs/xa_nnlib_${2}.patch" "TFLM patch" - fi + pushd "${LIBRARY_INSTALL_PATH}" > /dev/null + chmod -R +w ./ + if [[ -f "../../ext_libs/xa_nnlib_${2}.patch" ]]; then + create_git_repo ./ + apply_patch_to_folder ./ "../../ext_libs/xa_nnlib_${2}.patch" "TFLM patch" fi fi diff --git a/tensorflow/lite/micro/tools/make/ext_libs/xtensa_ndsp_download.sh b/tensorflow/lite/micro/tools/make/ext_libs/xtensa_ndsp_download.sh new file mode 100755 index 00000000000..71fe1d157e5 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/ext_libs/xtensa_ndsp_download.sh @@ -0,0 +1,100 @@ +#!/bin/bash +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +# +# Downloads necessary to build with OPTIMIZED_KERNEL_DIR=xtensa. +# +# Called with three arguments: +# 1 - Path to the downloads folder which is typically +# ${TENSORFLOW_ROOT}/tensorflow/lite/micro/tools/make/downloads +# 2 - Xtensa variant to download for (e.g. hifi4) +# 3 - (optional) TENSORFLOW_ROOT: path to the root of the TFLM tree (relative to the directory from which the script is called). +# +# This script is called from the Makefile and uses the following convention to +# enable determination of success/failure: +# +# - If the script is successful, the only output on stdout should be SUCCESS. +# The makefile checks for this particular string. +# +# - Any string on stdout that is not SUCCESS will be shown in the makefile as +# the cause for the script to have failed. +# +# - Any other informational prints should be on stderr. + +set -e + +source ${3}tensorflow/lite/micro/tools/make/bash_helpers.sh + +DOWNLOADS_DIR=${1} + +if [[ ${2} == "hifi3" ]]; then + COMMIT="d17bf205dc530a9e1a1d979249520f4401529db1" + LIBRARY_DIRNAME="ndsplib-hifi3" + LIBRARY_URL="https://github.com/foss-xtensa/${LIBRARY_DIRNAME}/archive/${COMMIT}.zip" + LIBRARY_MD5="5572b27361736c1f773474ebaf42c5d4" + CORE_NAME="HiFi3" +elif [[ ${2} == "hifi4" ]]; then + COMMIT="aba2485ba12d9851fa398bcb5c18c05cc3731a17" + LIBRARY_DIRNAME="ndsplib-hifi4" + LIBRARY_URL="https://github.com/foss-xtensa/${LIBRARY_DIRNAME}/archive/${COMMIT}.zip" + LIBRARY_MD5="062b8f957c662b6ab834bbe284237b6c" + CORE_NAME="HiFi4" +elif [[ ${2} == "hifi5" ]]; then + COMMIT="01c92ceb26cc0a598c6d83d17c3d88363bd8f7fc" + LIBRARY_DIRNAME="ndsplib-hifi5" + LIBRARY_URL="https://github.com/foss-xtensa/${LIBRARY_DIRNAME}/archive/${COMMIT}.zip" + LIBRARY_MD5="94b372d608781c13be2fb2d1a8fd3b58" + CORE_NAME="HiFi5" +else + echo "Attempting to download an unsupported xtensa variant: ${2}" + exit 1 +fi + +LIBRARY_INSTALL_PATH=${DOWNLOADS_DIR}/${LIBRARY_DIRNAME} + +should_download=$(check_should_download ${DOWNLOADS_DIR}) + +if [[ ${should_download} == "no" ]]; then + show_download_url_md5 ${LIBRARY_URL} ${LIBRARY_MD5} +elif [ ! -d ${DOWNLOADS_DIR} ]; then + echo "The top-level downloads directory: ${DOWNLOADS_DIR} does not exist." + exit 1 +elif [ -d ${LIBRARY_INSTALL_PATH} ]; then + echo >&2 "${LIBRARY_INSTALL_PATH} already exists, skipping the download." +else + TEMPDIR="$(mktemp -d)" + TEMPFILE="${TEMPDIR}/${LIBRARY_DIRNAME}.zip" + wget ${LIBRARY_URL} -O "$TEMPFILE" >&2 + check_md5 "${TEMPFILE}" ${LIBRARY_MD5} + + unzip -qo "$TEMPFILE" -d ${TEMPDIR} >&2 + unzip -qo ${TEMPDIR}/${LIBRARY_DIRNAME}-${COMMIT}/NDSP_${CORE_NAME}/NDSP_${CORE_NAME}*.zip -d ${TEMPDIR}/${LIBRARY_DIRNAME}-${COMMIT}/NDSP_${CORE_NAME}/ >&2 + find ${TEMPDIR}/${LIBRARY_DIRNAME}-${COMMIT}/NDSP_${CORE_NAME}/* -maxdepth 0 -type d -exec mv {} ${LIBRARY_INSTALL_PATH} \; + rm -rf "${TEMPDIR}" + # NDSP sources on GitHub currently use DOS-style newlines, which cause compiler errors. + find ${LIBRARY_INSTALL_PATH} -type f -exec sed -i.bak 's/\r$//g' {} \; + + pushd "${LIBRARY_INSTALL_PATH}" > /dev/null + chmod -R +w ./ + if [[ -f "../../ext_libs/ndsplib-${2}.patch" ]]; then + create_git_repo ./ + apply_patch_to_folder ./ "../../ext_libs/ndsplib-${2}.patch" "TFLM patch" + fi + # Rename the strings in __renaming__.h to names that are traceable to TFLM.
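+ # The sed below rewrites each NatureDSP_ prefix in that header to NatureDSP_TFLM_.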
+ # Note that renaming is disabled by default and must be enabled with -D__RENAMING__ + sed -i 's/NatureDSP_/NatureDSP_TFLM_/' library/include_private/__renaming__.h +fi + +echo "SUCCESS" diff --git a/tensorflow/lite/micro/tools/make/flatbuffers.patch b/tensorflow/lite/micro/tools/make/flatbuffers.patch index cb22cf0b1af..2017775aaca 100644 --- a/tensorflow/lite/micro/tools/make/flatbuffers.patch +++ b/tensorflow/lite/micro/tools/make/flatbuffers.patch @@ -1,5 +1,5 @@ diff --git a/include/flatbuffers/base.h b/include/flatbuffers/base.h -index a5ac10d..371b6fd 100644 +index 5c4cae79..1a631641 100644 --- a/include/flatbuffers/base.h +++ b/include/flatbuffers/base.h @@ -1,6 +1,16 @@ @@ -20,10 +20,10 @@ index a5ac10d..371b6fd 100644 // If activate should be declared and included first. diff --git a/include/flatbuffers/default_allocator.h b/include/flatbuffers/default_allocator.h -index 8b173af..975d938 100644 +index d4724122..975d9380 100644 --- a/include/flatbuffers/default_allocator.h +++ b/include/flatbuffers/default_allocator.h -@@ -39,26 +39,20 @@ class DefaultAllocator : public Allocator { +@@ -39,24 +39,18 @@ class DefaultAllocator : public Allocator { // This is to avoid having a statically or dynamically allocated default // allocator, or having to move it between the classes that may own it. inline uint8_t *Allocate(Allocator *allocator, size_t size) { @@ -52,15 +52,11 @@ index 8b173af..975d938 100644 } } // namespace flatbuffers - --#endif // FLATBUFFERS_DEFAULT_ALLOCATOR_H_ -\ No newline at end of file -+#endif // FLATBUFFERS_DEFAULT_ALLOCATOR_H_ diff --git a/include/flatbuffers/flexbuffers.h b/include/flatbuffers/flexbuffers.h -index 89f3f30..6e6d0b3 100644 +index 8e8cac14..52dae316 100644 --- a/include/flatbuffers/flexbuffers.h +++ b/include/flatbuffers/flexbuffers.h -@@ -496,9 +496,24 @@ class Reference { +@@ -495,9 +495,24 @@ class Reference { return static_cast(ReadUInt64(Indirect(), byte_width_)); case FBT_NULL: return 0.0; case FBT_STRING: { @@ -86,10 +82,10 @@ index 89f3f30..6e6d0b3 100644 case FBT_VECTOR: return static_cast(AsVector().size()); case FBT_BOOL: diff --git a/include/flatbuffers/util.h b/include/flatbuffers/util.h -index 93a39de..1cd4e8f 100644 +index 1ccf3517..34a75193 100644 --- a/include/flatbuffers/util.h +++ b/include/flatbuffers/util.h -@@ -24,6 +24,12 @@ +@@ -23,6 +23,12 @@ #include "flatbuffers/base.h" #include "flatbuffers/stl_emulation.h" @@ -102,4 +98,3 @@ index 93a39de..1cd4e8f 100644 #ifndef FLATBUFFERS_PREFER_PRINTF # include # include - \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/make/flatbuffers_download.sh b/tensorflow/lite/micro/tools/make/flatbuffers_download.sh index af5e80f6f88..52accccd243 100755 --- a/tensorflow/lite/micro/tools/make/flatbuffers_download.sh +++ b/tensorflow/lite/micro/tools/make/flatbuffers_download.sh @@ -54,9 +54,9 @@ DOWNLOADED_FLATBUFFERS_PATH=${DOWNLOADS_DIR}/flatbuffers if [ -d ${DOWNLOADED_FLATBUFFERS_PATH} ]; then echo >&2 "${DOWNLOADED_FLATBUFFERS_PATH} already exists, skipping the download." 
else - ZIP_PREFIX="a66de58af9565586832c276fbb4251fc416bf07f" + ZIP_PREFIX="v23.5.26" FLATBUFFERS_URL="https://github.com/google/flatbuffers/archive/${ZIP_PREFIX}.zip" - FLATBUFFERS_MD5="51a7a96747e1c33eb4aac6d52513a02f" + FLATBUFFERS_MD5="e87e8acd8e2d53653387ad78720316e2" TEMPDIR="$(mktemp -d)" TEMPFILE="${TEMPDIR}/${ZIP_PREFIX}.zip" @@ -64,7 +64,7 @@ else check_md5 "${TEMPFILE}" ${FLATBUFFERS_MD5} unzip -qo "$TEMPFILE" -d "${TEMPDIR}" >&2 - mv "${TEMPDIR}/flatbuffers-${ZIP_PREFIX}" ${DOWNLOADED_FLATBUFFERS_PATH} + mv "${TEMPDIR}/flatbuffers-${ZIP_PREFIX#v}" ${DOWNLOADED_FLATBUFFERS_PATH} rm -rf "${TEMPDIR}" pushd ${DOWNLOADED_FLATBUFFERS_PATH} > /dev/null diff --git a/tensorflow/lite/micro/tools/make/targets/arc_emsdp_makefile.inc b/tensorflow/lite/micro/tools/make/targets/arc_emsdp_makefile.inc index f7c3066ee70..7fce24b927e 100644 --- a/tensorflow/lite/micro/tools/make/targets/arc_emsdp_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/arc_emsdp_makefile.inc @@ -1,4 +1,4 @@ -# Copyright 2021 The TensorFlow Authors. All Rights Reserved. +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -26,4 +26,6 @@ $(eval $(call add_third_party_download,$(EMBARC_MLI_PRE_COMPILED_URL),$(EMBARC_M TCF_FILE = $(PWD)/$(MAKEFILE_DIR)/downloads/$(MLI_LIB_DIR)/hw/emsdp_em11d_em9d_dfss.tcf -include $(MAKEFILE_DIR)/targets/arc/arc_common.inc \ No newline at end of file +include $(MAKEFILE_DIR)/targets/arc/arc_common.inc + +include $(MAKEFILE_DIR)/ext_libs/eyalroz_printf.inc diff --git a/tensorflow/lite/micro/tools/make/targets/bluepill_makefile.inc b/tensorflow/lite/micro/tools/make/targets/bluepill_makefile.inc index c14bda496e7..ae3b0b0a155 100644 --- a/tensorflow/lite/micro/tools/make/targets/bluepill_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/bluepill_makefile.inc @@ -1,3 +1,18 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + export PATH := $(DOWNLOADS_DIR)/gcc_embedded/bin/:$(PATH) TARGET_ARCH := cortex-m3 TARGET_TOOLCHAIN_PREFIX := arm-none-eabi- @@ -43,6 +58,7 @@ CCFLAGS += $(PLATFORM_FLAGS) LDFLAGS += \ -T $(MAKEFILE_DIR)/targets/bluepill/bluepill.lds \ + -Wl,--no-warn-rwx-segment \ -Wl,-Map=gen/$(TARGET).map,--cref # Additional include paths needed for the stm_32_bare_lib only. @@ -72,8 +88,7 @@ EXCLUDED_TESTS += $(TENSORFLOW_ROOT)tensorflow/lite/micro/flatbuffer_utils_test. 
MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) -EXCLUDED_EXAMPLE_TESTS := \ - $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/Makefile.inc +EXCLUDED_EXAMPLE_TESTS := MICRO_LITE_EXAMPLE_TESTS := $(filter-out $(EXCLUDED_EXAMPLE_TESTS), $(MICRO_LITE_EXAMPLE_TESTS)) @@ -87,3 +102,5 @@ TEST_TARGET_BINARIES = $(shell ls -1 $(BINDIR)/*_test) test: build $(TEST_SCRIPT) "$(TEST_TARGET_BINARIES)" $(TEST_PASS_STRING) $(TARGET) + +include $(MAKEFILE_DIR)/ext_libs/eyalroz_printf.inc diff --git a/tensorflow/lite/micro/tools/make/targets/chre_makefile.inc b/tensorflow/lite/micro/tools/make/targets/chre_makefile.inc index 3665b264aa9..d2e5892a761 100644 --- a/tensorflow/lite/micro/tools/make/targets/chre_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/chre_makefile.inc @@ -1,4 +1,4 @@ -# Copyright 2020 The TensorFlow Authors. All Rights Reserved. +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -32,3 +32,5 @@ MICROLITE_CC_KERNEL_SRCS := $(filter-out $(EXCLUDED_CC_SRCS),$(MICROLITE_CC_KERN MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS),$(MICROLITE_TEST_SRCS)) THIRD_PARTY_CC_HDRS := $(filter-out $(EXCLUDED_HDRS),$(THIRD_PARTY_CC_HDRS)) MICROLITE_CC_HDRS := $(filter-out $(EXCLUDED_KERNEL_HDRS),$(MICROLITE_CC_HDRS)) + +include $(MAKEFILE_DIR)/ext_libs/eyalroz_printf.inc diff --git a/tensorflow/lite/micro/tools/make/targets/cortex_a_generic_makefile.inc b/tensorflow/lite/micro/tools/make/targets/cortex_a_generic_makefile.inc new file mode 100644 index 00000000000..ad272cc65d0 --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/cortex_a_generic_makefile.inc @@ -0,0 +1,66 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# Generic Makefile target for ARM Cortex A builds. 
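+# +# A typical invocation, assuming this target is wired up like the other +# generic targets (the TFLM Makefile includes targets/$(TARGET)_makefile.inc, +# so TARGET=cortex_a_generic selects this file and TARGET_ARCH picks one of +# the architecture branches below; microlite is the usual library goal): +# +# make -f tensorflow/lite/micro/tools/make/Makefile \ +# TARGET=cortex_a_generic TARGET_ARCH=armv7-a microlite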
+ +FPU := neon +FLOAT_ABI := softfp + +GCC_TARGET_ARCH := +GCC_TARGET_CPU := + +ifeq ($(TARGET_ARCH), armv8.2-a) + GCC_TARGET_ARCH := armv8.2-a + +else ifeq ($(TARGET_ARCH), armv7-a) + GCC_TARGET_ARCH := armv7-a + +else ifeq ($(TARGET_ARCH), cortex-a32) + GCC_TARGET_CPU := cortex-a32 + +else + $(error "TARGET_ARCH=$(TARGET_ARCH) is not supported") +endif + +TARGET_TOOLCHAIN_PREFIX := arm-none-eabi- + +ifneq ($(GCC_TARGET_ARCH),) + FLAGS_GCC += -march=$(GCC_TARGET_ARCH) +else ifneq ($(GCC_TARGET_CPU),) + FLAGS_GCC += -mcpu=$(GCC_TARGET_CPU) +endif + +CXXFLAGS += $(FLAGS_GCC) +CCFLAGS += $(FLAGS_GCC) + +PLATFORM_FLAGS = \ + -DTF_LITE_MCU_DEBUG_LOG \ + -mfloat-abi=$(FLOAT_ABI) \ + -mfpu=$(FPU) \ + -funsigned-char \ + -mlittle-endian \ + -Wno-type-limits \ + -Wno-unused-private-field \ + -fomit-frame-pointer \ + -MD + +ifneq ($(PIC),) + PLATFORM_FLAGS += -fpic +endif + +# Common + C/C++ flags +CXXFLAGS += $(PLATFORM_FLAGS) +CCFLAGS += $(PLATFORM_FLAGS) + diff --git a/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc b/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc index 0ffe5a31988..0c483ac9195 100644 --- a/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/cortex_m_corstone_300_makefile.inc @@ -1,4 +1,4 @@ -# Copyright 2023 The TensorFlow Authors. All Rights Reserved. +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,23 +16,29 @@ # ARM Cortex M makefile targeted for a FVP based on Arm Corstone-300 software. # For more info see: tensorflow/lite/micro/cortex_m_corstone_300/README.md -export PATH := $(MAKEFILE_DIR)/downloads/corstone300/models/Linux64_GCC-6.4:$(PATH) -DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/corstone_300_download.sh ${MAKEFILE_DIR}/downloads) +UNAME_M := $(shell uname -m) +ifeq ($(UNAME_M), aarch64) + export PATH := $(DOWNLOADS_DIR)/corstone300/models/Linux64_armv8l_GCC-9.3:$(PATH) +else + export PATH := $(DOWNLOADS_DIR)/corstone300/models/Linux64_GCC-9.3:$(PATH) +endif + +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/corstone_300_download.sh $(DOWNLOADS_DIR) $(TENSORFLOW_ROOT)) ifneq ($(DOWNLOAD_RESULT), SUCCESS) $(error Something went wrong with the Arm Corstone-300 software download: $(DOWNLOAD_RESULT)) endif -ETHOS_U_CORE_PLATFORM := ${PWD}/$(MAKEFILE_DIR)/downloads/ethos_u_core_platform/targets/corstone-300 -DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ethos_u_core_platform_download.sh ${MAKEFILE_DIR}/downloads) +ETHOS_U_CORE_PLATFORM := $(DOWNLOADS_DIR)/ethos_u_core_platform/targets/corstone-300 +DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ethos_u_core_platform_download.sh $(DOWNLOADS_DIR) $(TENSORFLOW_ROOT)) ifneq ($(DOWNLOAD_RESULT), SUCCESS) $(error Something went wrong with the Ethos-U Core Platform software download: $(DOWNLOAD_RESULT)) endif # This target has dependencies to CMSIS-Device so just in case running without OPTIMIZED_KERNEL_DIR=cmsis_nn. 
-CMSIS_DEFAULT_DOWNLOAD_PATH := $(MAKEFILE_DIR)/downloads/cmsis +CMSIS_DEFAULT_DOWNLOAD_PATH := $(DOWNLOADS_DIR)/cmsis CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH) ifeq ($(CMSIS_PATH), $(CMSIS_DEFAULT_DOWNLOAD_PATH)) - DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_download.sh ${MAKEFILE_DIR}/downloads) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_download.sh $(DOWNLOADS_DIR) $(TENSORFLOW_ROOT)) ifneq ($(DOWNLOAD_RESULT), SUCCESS) $(error Something went wrong with the CMSIS download: $(DOWNLOAD_RESULT)) endif @@ -97,14 +103,13 @@ ifeq ($(TOOLCHAIN), armclang) FLAGS_ARMC = \ --target=arm-arm-none-eabi \ -Wno-unused-private-field \ - -mcpu=$(MCPU_OPTION) \ - -ffp-mode=full + -mcpu=$(MCPU_OPTION) # Pass comma separated linker options to armlink ARMC6_LDFLAGS += -Wl,--strict,--summary_stderr,--info,summarysizes,--map ARMC6_LDFLAGS += -Wl,--load_addr_map_info,--xref,--callgraph,--symbols ARMC6_LDFLAGS += -Wl,--info,sizes,--info,totals,--info,unused,--info,veneers - ARMC6_LDFLAGS += -Wl,--list=${TENSORFLOW_ROOT}gen/$(TARGET).map + ARMC6_LDFLAGS += -Wl,--list=gen/$(TARGET).map ARMC6_LDFLAGS += -Wl,--entry=Reset_Handler --verbose ARMC6_LDFLAGS += -Wl,--scatter=$(ETHOS_U_CORE_PLATFORM)/platform.scatter @@ -130,11 +135,16 @@ ifeq ($(TOOLCHAIN), armclang) # https://developer.arm.com/documentation/100891/0611/troubleshooting/general-troubleshooting-advice MICROLITE_LIBS := $(filter-out -lm,$(MICROLITE_LIBS)) + # This does not build with armclang and is not used by this target anyway. + EXCLUDED_TESTS := \ + tensorflow/lite/micro/tools/benchmarking/Makefile.inc + MICRO_LITE_BENCHMARKS := $(filter-out $(EXCLUDED_TESTS), $(MICRO_LITE_BENCHMARKS)) + else ifeq ($(TOOLCHAIN), gcc) - TARGET_DEFAULT_TOOLCHAIN_ROOT := $(MAKEFILE_DIR)/downloads/gcc_embedded/bin/ + TARGET_DEFAULT_TOOLCHAIN_ROOT := $(DOWNLOADS_DIR)/gcc_embedded/bin/ TARGET_TOOLCHAIN_ROOT := $(TARGET_DEFAULT_TOOLCHAIN_ROOT) ifeq ($(TARGET_TOOLCHAIN_ROOT), $(TARGET_DEFAULT_TOOLCHAIN_ROOT)) - DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/arm_gcc_download.sh ${MAKEFILE_DIR}/downloads) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/arm_gcc_download.sh $(DOWNLOADS_DIR) $(TENSORFLOW_ROOT)) ifneq ($(DOWNLOAD_RESULT), SUCCESS) $(error Something went wrong with the GCC download: $(DOWNLOAD_RESULT)) endif @@ -148,7 +158,7 @@ else ifeq ($(TOOLCHAIN), gcc) LDFLAGS += \ --specs=nosys.specs \ -T $(ETHOS_U_CORE_PLATFORM)/platform_parsed.ld \ - -Wl,-Map=${TENSORFLOW_ROOT}gen/$(TARGET).map,--cref \ + -Wl,-Map=gen/$(TARGET).map,--cref \ -Wl,--gc-sections \ --entry Reset_Handler @@ -186,7 +196,7 @@ ifeq ($(CO_PROCESSOR), ethos_u) ETHOSU_ARCH=u55 endif -CMSIS_DEFAULT_DOWNLOAD_PATH := $(MAKEFILE_DIR)/downloads/cmsis +CMSIS_DEFAULT_DOWNLOAD_PATH := $(DOWNLOADS_DIR)/cmsis CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH) THIRD_PARTY_CC_SRCS += \ $(CMSIS_PATH)/Device/ARM/$(ARM_CPU)/Source/system_$(ARM_CPU).c \ @@ -197,8 +207,13 @@ INCLUDES += \ # TODO(#274): Examine why some tests fail here. EXCLUDED_TESTS := \ - tensorflow/lite/micro/memory_arena_threshold_test.cc \ - tensorflow/lite/micro/recording_micro_allocator_test.cc + $(TENSORFLOW_ROOT)tensorflow/lite/micro/memory_arena_threshold_test.cc \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/recording_micro_allocator_test.cc +ifeq ($(CO_PROCESSOR), ethos_u) +# This does not work with Ethos-U enabled, since the NPU PMU counters are then used instead for the sake of the benchmark example.
+EXCLUDED_TESTS += \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/micro_time_test.cc +endif MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) -TEST_SCRIPT := tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh +TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_with_arm_corstone_300.sh diff --git a/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc b/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc index 0ed14fcb8bb..5aa5d256c91 100644 --- a/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/cortex_m_generic_makefile.inc @@ -1,4 +1,4 @@ -# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# Copyright 2024 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,6 +16,16 @@ # Generic Makefile target for ARM Cortex M builds. # For more info see: tensorflow/lite/micro/cortex_m_generic/README.md +# Needed in case running without OPTIMIZED_KERNEL_DIR=cmsis_nn. +CMSIS_DEFAULT_DOWNLOAD_PATH := $(DOWNLOADS_DIR)/cmsis +CMSIS_PATH := $(CMSIS_DEFAULT_DOWNLOAD_PATH) +ifeq ($(CMSIS_PATH), $(CMSIS_DEFAULT_DOWNLOAD_PATH)) + DOWNLOAD_RESULT := $(shell $(MAKEFILE_DIR)/ext_libs/cmsis_download.sh $(DOWNLOADS_DIR) $(TENSORFLOW_ROOT)) + ifneq ($(DOWNLOAD_RESULT), SUCCESS) + $(error Something went wrong with the CMSIS download: $(DOWNLOAD_RESULT)) + endif +endif + FLOAT := soft GCC_TARGET_ARCH := $(TARGET_ARCH) @@ -92,8 +102,6 @@ else ifeq ($(TARGET_ARCH), cortex-m85) CORE=M85 ARM_LDFLAGS := -Wl,--cpu=8.1-M.Main.mve.fp FLOAT=hard - # GCC does not yet support cortex-m85 option hence go with cortex-m55 for now. - GCC_TARGET_ARCH := cortex-m55 else ifeq ($(TARGET_ARCH), project_generation) # No flags needed here as project_generation does not build anything. diff --git a/tensorflow/lite/micro/tools/make/targets/hexagon_makefile.inc b/tensorflow/lite/micro/tools/make/targets/hexagon_makefile.inc index 2e7d18ca1cd..6fe7be796a3 100644 --- a/tensorflow/lite/micro/tools/make/targets/hexagon_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/hexagon_makefile.inc @@ -80,6 +80,10 @@ PLATFORM_ARGS = \ -mcpu=$(HEXAGON_CPU_VER) \ -m$(HEXAGON_CPU_VER) +ifeq ($(HEXAGON_PIC_BUILD), true) + PLATFORM_ARGS += -fPIC +endif + # See http://b/183462077 for more details on why we need -G0 for an LPI build. ifeq ($(HEXAGON_LPI_BUILD), true) PLATFORM_ARGS += -G0 @@ -112,5 +116,10 @@ EXCLUDED_TESTS := \ MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) +# TODO(b/302404477): micro_speech_test example does not pass due to misprediction +EXCLUDED_EXAMPLE_TESTS := \ + $(TENSORFLOW_ROOT)tensorflow/lite/micro/examples/micro_speech/Makefile.inc +MICRO_LITE_EXAMPLE_TESTS := $(filter-out $(EXCLUDED_EXAMPLE_TESTS), $(MICRO_LITE_EXAMPLE_TESTS)) + TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_hexagon_binary.sh SIZE_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/size_hexagon_binary.sh diff --git a/tensorflow/lite/micro/tools/make/targets/mips_makefile.inc b/tensorflow/lite/micro/tools/make/targets/mips_makefile.inc new file mode 100644 index 00000000000..cf0d38eff2b --- /dev/null +++ b/tensorflow/lite/micro/tools/make/targets/mips_makefile.inc @@ -0,0 +1,41 @@ +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +TARGET_ARCH := mips +TARGET_TOOLCHAIN_PREFIX := mips-elf- + + +# Allow additional flags on the command line for debugging. +MIPS_EXTRA_CFLAGS := + +export PATH := $(TARGET_TOOLCHAIN_ROOT):$(PATH) + +PLATFORM_FLAGS = \ + -fno-builtin-printf \ + -DTF_LITE_MCU_DEBUG_LOG \ + -DTF_LITE_USE_GLOBAL_CMATH_FUNCTIONS \ + -fno-delete-null-pointer-checks \ + -fomit-frame-pointer + +CXXFLAGS += $(PLATFORM_FLAGS) \ + -fpermissive \ + -fno-use-cxa-atexit \ + -DTF_LITE_USE_GLOBAL_MIN \ + -DTF_LITE_USE_GLOBAL_MAX + +CCFLAGS += $(PLATFORM_FLAGS) + +CCFLAGS += $(MIPS_EXTRA_CFLAGS) +CXXFLAGS += $(MIPS_EXTRA_CFLAGS) diff --git a/tensorflow/lite/micro/tools/make/targets/riscv32_generic_makefile.inc b/tensorflow/lite/micro/tools/make/targets/riscv32_generic_makefile.inc index ce5f0eba504..453e9d0beaa 100644 --- a/tensorflow/lite/micro/tools/make/targets/riscv32_generic_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/riscv32_generic_makefile.inc @@ -2,6 +2,13 @@ TARGET_ARCH := riscv32 TARGET_TOOLCHAIN_PREFIX := riscv64-unknown-elf- +RISCV_ARCH := rv32imc +RISCV_ABI := ilp32 +RISCV_CODE_MODEL := medany + +# Allow additional flags on the command line for debugging. +RISCV_EXTRA_CFLAGS := + TARGET_DEFAULT_TOOLCHAIN_ROOT := $(DOWNLOADS_DIR)/riscv_toolchain/bin/ TARGET_TOOLCHAIN_ROOT := $(TARGET_DEFAULT_TOOLCHAIN_ROOT) ifeq ($(TARGET_TOOLCHAIN_ROOT), $(TARGET_DEFAULT_TOOLCHAIN_ROOT)) @@ -11,9 +18,9 @@ endif export PATH := $(TARGET_TOOLCHAIN_ROOT):$(PATH) PLATFORM_FLAGS = \ - -march=rv32imac \ - -mabi=ilp32 \ - -mcmodel=medany \ + -march=$(RISCV_ARCH) \ + -mabi=$(RISCV_ABI) \ + -mcmodel=$(RISCV_CODE_MODEL) \ -mexplicit-relocs \ -fno-builtin-printf \ -DTF_LITE_MCU_DEBUG_LOG \ @@ -40,9 +47,13 @@ EXCLUDED_TESTS := \ MICROLITE_TEST_SRCS := $(filter-out $(EXCLUDED_TESTS), $(MICROLITE_TEST_SRCS)) +CCFLAGS += $(RISCV_EXTRA_CFLAGS) +CXXFLAGS += $(RISCV_EXTRA_CFLAGS) + # This disables the "linker relaxation" optimization, which produced incorrect code. # TODO(b/279805615): Check whether this is fixed in newer versions of the toolchain. 
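# ("Linker relaxation" lets the RISC-V linker rewrite call and address sequences into shorter forms at link time; the flag below turns it off.)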
LDFLAGS += -mno-relax TEST_SCRIPT := $(TENSORFLOW_ROOT)tensorflow/lite/micro/testing/test_with_qemu.sh riscv32 rv32 SIZE_SCRIPT := ${TENSORFLOW_ROOT}tensorflow/lite/micro/testing/size_riscv32_binary.sh +include $(MAKEFILE_DIR)/ext_libs/eyalroz_printf.inc diff --git a/tensorflow/lite/micro/tools/make/targets/xtensa_makefile.inc b/tensorflow/lite/micro/tools/make/targets/xtensa_makefile.inc index 8d970c72bb9..0fba9061102 100644 --- a/tensorflow/lite/micro/tools/make/targets/xtensa_makefile.inc +++ b/tensorflow/lite/micro/tools/make/targets/xtensa_makefile.inc @@ -39,8 +39,6 @@ PLATFORM_FLAGS = \ -DTF_LITE_USE_CTIME \ --xtensa-core=$(XTENSA_CORE) \ -mcoproc \ - -DMAX_RFFT_PWR=9 \ - -DMIN_RFFT_PWR=MAX_RFFT_PWR \ $(TARGET_ARCH_DEFINES) \ -mlongcalls diff --git a/tensorflow/lite/micro/tools/make/third_party_downloads.inc b/tensorflow/lite/micro/tools/make/third_party_downloads.inc index a8e63e11826..6bee8ce822d 100644 --- a/tensorflow/lite/micro/tools/make/third_party_downloads.inc +++ b/tensorflow/lite/micro/tools/make/third_party_downloads.inc @@ -39,5 +39,6 @@ endif EMBARC_MLI_PRE_COMPILED_URL := "https://github.com/foss-for-synopsys-dwc-arc-processors/embarc_mli/releases/download/Release_1.1/embARC_MLI_package.zip" EMBARC_MLI_PRE_COMPILED_MD5 := "173990c2dde4efef6a2c95b92d1f0244" -ETHOSU_URL := "https://git.mlplatform.org/ml/ethos-u/ethos-u-core-driver.git/snapshot/ethos-u-core-driver-24455eedb9e8939f8a28ca0101a6f2d171e1b2f9.tar.gz" -ETHOSU_MD5 := "14b5712525d4af612d35217f0bc53fcc" +# Skip md5sum-check since ethos-u-core-driver download link is non-deterministic, see https://github.com/google/gitiles/issues/84 +ETHOSU_URL := "https://review.mlplatform.org/plugins/gitiles/ml/ethos-u/ethos-u-core-driver/+archive/24455eedb9e8939f8a28ca0101a6f2d171e1b2f9.tar.gz" +ETHOSU_MD5 := "SKIP_MD5_CHECK" \ No newline at end of file diff --git a/tensorflow/lite/micro/tools/model_transforms_utils.py b/tensorflow/lite/micro/tools/model_transforms_utils.py index 2b5c6a78563..c713f01e234 100644 --- a/tensorflow/lite/micro/tools/model_transforms_utils.py +++ b/tensorflow/lite/micro/tools/model_transforms_utils.py @@ -168,7 +168,7 @@ def _zero_bias_buffer(model, buffer_idx, zero_point): if buffer.data is None: buffer.data = [] return - if buffer.data == []: + if len(buffer.data) == 0: return # For now this assumes that zero_point is int8 and hence all the buffer diff --git a/tensorflow/lite/micro/tools/project_generation/Makefile b/tensorflow/lite/micro/tools/project_generation/Makefile index 092ba7e07f4..f14bd90a44b 100644 --- a/tensorflow/lite/micro/tools/project_generation/Makefile +++ b/tensorflow/lite/micro/tools/project_generation/Makefile @@ -1,4 +1,4 @@ -# Copyright 2022 The TensorFlow Authors. All Rights Reserved. +# Copyright 2023 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -90,8 +90,16 @@ BINDIR := $(GENDIR)/bin LIB := $(GENDIR)/libtflm.a TFLM_CC_SRCS := $(shell find $(TENSORFLOW_ROOT)tensorflow -name "*.cc" -o -name "*.c") +TFLM_CC_SRCS += $(shell find $(TENSORFLOW_ROOT)signal -name "*.cc" -o -name "*.c") OBJS := $(addprefix $(OBJDIR)/, $(patsubst %.c,%.o,$(patsubst %.cc,%.o,$(TFLM_CC_SRCS)))) +# if the third party printf library is present, add the include paths +TFLM_PRINTF_PATH := $(shell find third_party -name eyalroz_printf) +ifneq ($(TFLM_PRINTF_PATH),) + INCLUDES += \ + -I./third_party +endif + $(OBJDIR)/%.o: %.cc @mkdir -p $(dir $@) $(CXX) $(CXXFLAGS) $(INCLUDES) -c $< -o $@ diff --git a/tensorflow/lite/micro/tools/requantize_flatbuffer_test.py b/tensorflow/lite/micro/tools/requantize_flatbuffer_test.py index 4d80991b0b1..342f893bf7a 100644 --- a/tensorflow/lite/micro/tools/requantize_flatbuffer_test.py +++ b/tensorflow/lite/micro/tools/requantize_flatbuffer_test.py @@ -24,7 +24,7 @@ from tflite_micro.tensorflow.lite.tools import flatbuffer_utils -#TODO(b/248061370): replace the keras model creation process with flatbuffer manipulation to speed up test +# TODO(b/248061370): replace the keras model creation process with flatbuffer manipulation to speed up test def create_simple_fc_model(): '''Create a simple model with two fully connected(fc) layers''' model = tf.keras.models.Sequential([ @@ -60,6 +60,8 @@ def convert_tfl_converter(keras_model, EXPERIMENTAL_TFLITE_BUILTINS_ACTIVATIONS_INT16_WEIGHTS_INT8 ] converter.representative_dataset = representative_dataset_gen + # TODO(b/324385802): Support per-channel quantization for FullyConnected. + converter._experimental_disable_per_channel_quantization_for_dense_layers = True return converter.convert() diff --git a/tensorflow/lite/micro/tools/tflm_model_transforms_lib.py b/tensorflow/lite/micro/tools/tflm_model_transforms_lib.py index 60530ffd964..2bf90cfdc24 100644 --- a/tensorflow/lite/micro/tools/tflm_model_transforms_lib.py +++ b/tensorflow/lite/micro/tools/tflm_model_transforms_lib.py @@ -29,7 +29,7 @@ from tflite_micro.tensorflow.lite.tools import flatbuffer_utils from tflite_micro.tensorflow.lite.micro.tools import model_transforms_utils -from tflite_micro.tensorflow.lite.micro.python.interpreter.src import runtime +from tflite_micro.python.tflite_micro import runtime def _save_and_align_flatbuffer(model, model_path): diff --git a/tensorflow/lite/python/BUILD b/tensorflow/lite/python/BUILD index 3dc723273d0..6ca2c578480 100644 --- a/tensorflow/lite/python/BUILD +++ b/tensorflow/lite/python/BUILD @@ -20,6 +20,6 @@ py_library( visibility = ["//:__subpackages__"], deps = [ requirement("flatbuffers"), - requirement("tensorflow-cpu"), + requirement("tensorflow"), ], ) diff --git a/tensorflow/lite/python/schema_py_generated.py b/tensorflow/lite/python/schema_py_generated.py index 914340e7d91..52999cb1155 100755 --- a/tensorflow/lite/python/schema_py_generated.py +++ b/tensorflow/lite/python/schema_py_generated.py @@ -7,1682 +7,403 @@ from flatbuffers.compat import import_numpy np = import_numpy() -class ATan2Options(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ATan2Options() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsATan2Options(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def ATan2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) +class TensorType(object): + FLOAT32 = 0 + FLOAT16 = 1 + INT32 = 2 + UINT8 = 3 + INT64 = 4 + STRING = 5 + BOOL = 6 + INT16 = 7 + COMPLEX64 = 8 + INT8 = 9 + FLOAT64 = 10 + COMPLEX128 = 11 + UINT64 = 12 + RESOURCE = 13 + VARIANT = 14 + UINT32 = 15 + UINT16 = 16 + INT4 = 17 + BFLOAT16 = 18 - # ATan2Options - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) -def ATan2OptionsStart(builder): builder.StartObject(0) -def Start(builder): - return ATan2OptionsStart(builder) -def ATan2OptionsEnd(builder): return builder.EndObject() -def End(builder): - return ATan2OptionsEnd(builder) +class QuantizationDetails(object): + NONE = 0 + CustomQuantization = 1 -class ATan2OptionsT(object): +def QuantizationDetailsCreator(unionType, table): + from flatbuffers.table import Table + if not isinstance(table, Table): + return None + if unionType == QuantizationDetails().CustomQuantization: + return CustomQuantizationT.InitFromBuf(table.Bytes, table.Pos) + return None - # ATan2OptionsT - def __init__(self): - pass - @classmethod - def InitFromBuf(cls, buf, pos): - atan2options = ATan2Options() - atan2options.Init(buf, pos) - return cls.InitFromObj(atan2options) +class DimensionType(object): + DENSE = 0 + SPARSE_CSR = 1 - @classmethod - def InitFromObj(cls, atan2options): - x = ATan2OptionsT() - x._UnPack(atan2options) - return x - # ATan2OptionsT - def _UnPack(self, atan2options): - if atan2options is None: - return +class SparseIndexVector(object): + NONE = 0 + Int32Vector = 1 + Uint16Vector = 2 + Uint8Vector = 3 - # ATan2OptionsT - def Pack(self, builder): - ATan2OptionsStart(builder) - atan2options = ATan2OptionsEnd(builder) - return atan2options -# automatically generated by the FlatBuffers compiler, do not modify +def SparseIndexVectorCreator(unionType, table): + from flatbuffers.table import Table + if not isinstance(table, Table): + return None + if unionType == SparseIndexVector().Int32Vector: + return Int32VectorT.InitFromBuf(table.Bytes, table.Pos) + if unionType == SparseIndexVector().Uint16Vector: + return Uint16VectorT.InitFromBuf(table.Bytes, table.Pos) + if unionType == SparseIndexVector().Uint8Vector: + return Uint8VectorT.InitFromBuf(table.Bytes, table.Pos) + return None -# namespace: tflite -from flatbuffers.compat import import_numpy -np = import_numpy() +class BuiltinOperator(object): + ADD = 0 + AVERAGE_POOL_2D = 1 + CONCATENATION = 2 + CONV_2D = 3 + DEPTHWISE_CONV_2D = 4 + DEPTH_TO_SPACE = 5 + DEQUANTIZE = 6 + EMBEDDING_LOOKUP = 7 + FLOOR = 8 + FULLY_CONNECTED = 9 + HASHTABLE_LOOKUP = 10 + L2_NORMALIZATION = 11 + L2_POOL_2D = 12 + LOCAL_RESPONSE_NORMALIZATION = 13 + LOGISTIC = 14 + LSH_PROJECTION = 15 + LSTM = 16 + MAX_POOL_2D = 17 + MUL = 18 + RELU = 19 + RELU_N1_TO_1 = 20 + RELU6 = 21 + RESHAPE = 22 + RESIZE_BILINEAR = 23 + RNN = 24 + SOFTMAX = 25 + SPACE_TO_DEPTH = 26 + SVDF = 27 + TANH = 28 + CONCAT_EMBEDDINGS = 29 + SKIP_GRAM = 30 + CALL = 31 + CUSTOM = 32 + EMBEDDING_LOOKUP_SPARSE = 33 + PAD = 34 + UNIDIRECTIONAL_SEQUENCE_RNN = 35 + GATHER = 36 + BATCH_TO_SPACE_ND = 37 + SPACE_TO_BATCH_ND = 38 + TRANSPOSE = 39 + MEAN = 40 + SUB = 41 + DIV = 42 + SQUEEZE = 43 + UNIDIRECTIONAL_SEQUENCE_LSTM = 44 + STRIDED_SLICE = 45 + BIDIRECTIONAL_SEQUENCE_RNN = 46 + EXP = 47 + TOPK_V2 = 48 + SPLIT = 49 + 
LOG_SOFTMAX = 50 + DELEGATE = 51 + BIDIRECTIONAL_SEQUENCE_LSTM = 52 + CAST = 53 + PRELU = 54 + MAXIMUM = 55 + ARG_MAX = 56 + MINIMUM = 57 + LESS = 58 + NEG = 59 + PADV2 = 60 + GREATER = 61 + GREATER_EQUAL = 62 + LESS_EQUAL = 63 + SELECT = 64 + SLICE = 65 + SIN = 66 + TRANSPOSE_CONV = 67 + SPARSE_TO_DENSE = 68 + TILE = 69 + EXPAND_DIMS = 70 + EQUAL = 71 + NOT_EQUAL = 72 + LOG = 73 + SUM = 74 + SQRT = 75 + RSQRT = 76 + SHAPE = 77 + POW = 78 + ARG_MIN = 79 + FAKE_QUANT = 80 + REDUCE_PROD = 81 + REDUCE_MAX = 82 + PACK = 83 + LOGICAL_OR = 84 + ONE_HOT = 85 + LOGICAL_AND = 86 + LOGICAL_NOT = 87 + UNPACK = 88 + REDUCE_MIN = 89 + FLOOR_DIV = 90 + REDUCE_ANY = 91 + SQUARE = 92 + ZEROS_LIKE = 93 + FILL = 94 + FLOOR_MOD = 95 + RANGE = 96 + RESIZE_NEAREST_NEIGHBOR = 97 + LEAKY_RELU = 98 + SQUARED_DIFFERENCE = 99 + MIRROR_PAD = 100 + ABS = 101 + SPLIT_V = 102 + UNIQUE = 103 + CEIL = 104 + REVERSE_V2 = 105 + ADD_N = 106 + GATHER_ND = 107 + COS = 108 + WHERE = 109 + RANK = 110 + ELU = 111 + REVERSE_SEQUENCE = 112 + MATRIX_DIAG = 113 + QUANTIZE = 114 + MATRIX_SET_DIAG = 115 + ROUND = 116 + HARD_SWISH = 117 + IF = 118 + WHILE = 119 + NON_MAX_SUPPRESSION_V4 = 120 + NON_MAX_SUPPRESSION_V5 = 121 + SCATTER_ND = 122 + SELECT_V2 = 123 + DENSIFY = 124 + SEGMENT_SUM = 125 + BATCH_MATMUL = 126 + PLACEHOLDER_FOR_GREATER_OP_CODES = 127 + CUMSUM = 128 + CALL_ONCE = 129 + BROADCAST_TO = 130 + RFFT2D = 131 + CONV_3D = 132 + IMAG = 133 + REAL = 134 + COMPLEX_ABS = 135 + HASHTABLE = 136 + HASHTABLE_FIND = 137 + HASHTABLE_IMPORT = 138 + HASHTABLE_SIZE = 139 + REDUCE_ALL = 140 + CONV_3D_TRANSPOSE = 141 + VAR_HANDLE = 142 + READ_VARIABLE = 143 + ASSIGN_VARIABLE = 144 + BROADCAST_ARGS = 145 + RANDOM_STANDARD_NORMAL = 146 + BUCKETIZE = 147 + RANDOM_UNIFORM = 148 + MULTINOMIAL = 149 + GELU = 150 + DYNAMIC_UPDATE_SLICE = 151 + RELU_0_TO_1 = 152 + UNSORTED_SEGMENT_PROD = 153 + UNSORTED_SEGMENT_MAX = 154 + UNSORTED_SEGMENT_SUM = 155 + ATAN2 = 156 + UNSORTED_SEGMENT_MIN = 157 + SIGN = 158 + BITCAST = 159 + BITWISE_XOR = 160 + RIGHT_SHIFT = 161 + STABLEHLO_LOGISTIC = 162 + STABLEHLO_ADD = 163 + STABLEHLO_DIVIDE = 164 + STABLEHLO_MULTIPLY = 165 + STABLEHLO_MAXIMUM = 166 + STABLEHLO_RESHAPE = 167 + STABLEHLO_CLAMP = 168 + STABLEHLO_CONCATENATE = 169 + STABLEHLO_BROADCAST_IN_DIM = 170 + STABLEHLO_CONVOLUTION = 171 + STABLEHLO_SLICE = 172 + STABLEHLO_CUSTOM_CALL = 173 + STABLEHLO_REDUCE = 174 + STABLEHLO_ABS = 175 + STABLEHLO_AND = 176 + STABLEHLO_COSINE = 177 + STABLEHLO_EXPONENTIAL = 178 + STABLEHLO_FLOOR = 179 + STABLEHLO_LOG = 180 + STABLEHLO_MINIMUM = 181 + STABLEHLO_NEGATE = 182 + STABLEHLO_OR = 183 + STABLEHLO_POWER = 184 + STABLEHLO_REMAINDER = 185 + STABLEHLO_RSQRT = 186 + STABLEHLO_SELECT = 187 + STABLEHLO_SUBTRACT = 188 + STABLEHLO_TANH = 189 + STABLEHLO_SCATTER = 190 + STABLEHLO_COMPARE = 191 + STABLEHLO_CONVERT = 192 + STABLEHLO_DYNAMIC_SLICE = 193 + STABLEHLO_DYNAMIC_UPDATE_SLICE = 194 + STABLEHLO_PAD = 195 + STABLEHLO_IOTA = 196 + STABLEHLO_DOT_GENERAL = 197 + STABLEHLO_REDUCE_WINDOW = 198 + STABLEHLO_SORT = 199 + STABLEHLO_WHILE = 200 + STABLEHLO_GATHER = 201 + STABLEHLO_TRANSPOSE = 202 + DILATE = 203 + STABLEHLO_RNG_BIT_GENERATOR = 204 + REDUCE_WINDOW = 205 + STABLEHLO_COMPOSITE = 206 -class AbsOptions(object): - __slots__ = ['_tab'] - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = AbsOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsAbsOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def AbsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # AbsOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - -def AbsOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return AbsOptionsStart(builder) -def AbsOptionsEnd(builder): return builder.EndObject() -def End(builder): - return AbsOptionsEnd(builder) - -class AbsOptionsT(object): - - # AbsOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - absOptions = AbsOptions() - absOptions.Init(buf, pos) - return cls.InitFromObj(absOptions) - - @classmethod - def InitFromObj(cls, absOptions): - x = AbsOptionsT() - x._UnPack(absOptions) - return x - - # AbsOptionsT - def _UnPack(self, absOptions): - if absOptions is None: - return - - # AbsOptionsT - def Pack(self, builder): - AbsOptionsStart(builder) - absOptions = AbsOptionsEnd(builder) - return absOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class ActivationFunctionType(object): +class BuiltinOptions(object): NONE = 0 - RELU = 1 - RELU_N1_TO_1 = 2 - RELU6 = 3 - TANH = 4 - SIGN_BIT = 5 -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class AddNOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = AddNOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsAddNOptions(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def AddNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # AddNOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - -def AddNOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return AddNOptionsStart(builder) -def AddNOptionsEnd(builder): return builder.EndObject() -def End(builder): - return AddNOptionsEnd(builder) - -class AddNOptionsT(object): - - # AddNOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - addNoptions = AddNOptions() - addNoptions.Init(buf, pos) - return cls.InitFromObj(addNoptions) - - @classmethod - def InitFromObj(cls, addNoptions): - x = AddNOptionsT() - x._UnPack(addNoptions) - return x - - # AddNOptionsT - def _UnPack(self, addNoptions): - if addNoptions is None: - return - - # AddNOptionsT - def Pack(self, builder): - AddNOptionsStart(builder) - addNoptions = AddNOptionsEnd(builder) - return addNoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class AddOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = AddOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsAddOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def AddOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # AddOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # AddOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # AddOptions - def PotScaleInt16(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return True - -def AddOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return AddOptionsStart(builder) -def AddOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return AddOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def AddOptionsAddPotScaleInt16(builder, potScaleInt16): builder.PrependBoolSlot(1, potScaleInt16, 1) -def AddPotScaleInt16(builder, potScaleInt16): - return AddOptionsAddPotScaleInt16(builder, potScaleInt16) -def AddOptionsEnd(builder): return builder.EndObject() -def End(builder): - return AddOptionsEnd(builder) - -class AddOptionsT(object): - - # AddOptionsT - def __init__(self): - self.fusedActivationFunction = 0 # type: int - self.potScaleInt16 = True # type: bool - - @classmethod - def InitFromBuf(cls, buf, pos): - addOptions = AddOptions() - addOptions.Init(buf, pos) - return cls.InitFromObj(addOptions) - - @classmethod - def InitFromObj(cls, addOptions): - x = AddOptionsT() - x._UnPack(addOptions) - return x - - # AddOptionsT - def _UnPack(self, addOptions): - if addOptions is None: - return - self.fusedActivationFunction = addOptions.FusedActivationFunction() - self.potScaleInt16 = addOptions.PotScaleInt16() - - # AddOptionsT - def Pack(self, builder): - AddOptionsStart(builder) - AddOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - AddOptionsAddPotScaleInt16(builder, self.potScaleInt16) - addOptions = AddOptionsEnd(builder) - return addOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class ArgMaxOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArgMaxOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsArgMaxOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def ArgMaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # ArgMaxOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArgMaxOptions - def OutputType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - -def ArgMaxOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return ArgMaxOptionsStart(builder) -def ArgMaxOptionsAddOutputType(builder, outputType): builder.PrependInt8Slot(0, outputType, 0) -def AddOutputType(builder, outputType): - return ArgMaxOptionsAddOutputType(builder, outputType) -def ArgMaxOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ArgMaxOptionsEnd(builder) - -class ArgMaxOptionsT(object): - - # ArgMaxOptionsT - def __init__(self): - self.outputType = 0 # type: int - - @classmethod - def InitFromBuf(cls, buf, pos): - argMaxOptions = ArgMaxOptions() - argMaxOptions.Init(buf, pos) - return cls.InitFromObj(argMaxOptions) - - @classmethod - def InitFromObj(cls, argMaxOptions): - x = ArgMaxOptionsT() - x._UnPack(argMaxOptions) - return x - - # ArgMaxOptionsT - def _UnPack(self, argMaxOptions): - if argMaxOptions is None: - return - self.outputType = argMaxOptions.OutputType() - - # ArgMaxOptionsT - def Pack(self, builder): - ArgMaxOptionsStart(builder) - ArgMaxOptionsAddOutputType(builder, self.outputType) - argMaxOptions = ArgMaxOptionsEnd(builder) - return argMaxOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class ArgMinOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ArgMinOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsArgMinOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def ArgMinOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # ArgMinOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # ArgMinOptions - def OutputType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - -def ArgMinOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return ArgMinOptionsStart(builder) -def ArgMinOptionsAddOutputType(builder, outputType): builder.PrependInt8Slot(0, outputType, 0) -def AddOutputType(builder, outputType): - return ArgMinOptionsAddOutputType(builder, outputType) -def ArgMinOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ArgMinOptionsEnd(builder) - -class ArgMinOptionsT(object): - - # ArgMinOptionsT - def __init__(self): - self.outputType = 0 # type: int - - @classmethod - def InitFromBuf(cls, buf, pos): - argMinOptions = ArgMinOptions() - argMinOptions.Init(buf, pos) - return cls.InitFromObj(argMinOptions) - - @classmethod - def InitFromObj(cls, argMinOptions): - x = ArgMinOptionsT() - x._UnPack(argMinOptions) - return x - - # ArgMinOptionsT - def _UnPack(self, argMinOptions): - if argMinOptions is None: - return - self.outputType = argMinOptions.OutputType() - - # ArgMinOptionsT - def Pack(self, builder): - ArgMinOptionsStart(builder) - ArgMinOptionsAddOutputType(builder, self.outputType) - argMinOptions = ArgMinOptionsEnd(builder) - return argMinOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class AssignVariableOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = AssignVariableOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsAssignVariableOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def AssignVariableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # AssignVariableOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - -def AssignVariableOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return AssignVariableOptionsStart(builder) -def AssignVariableOptionsEnd(builder): return builder.EndObject() -def End(builder): - return AssignVariableOptionsEnd(builder) - -class AssignVariableOptionsT(object): - - # AssignVariableOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - assignVariableOptions = AssignVariableOptions() - assignVariableOptions.Init(buf, pos) - return cls.InitFromObj(assignVariableOptions) - - @classmethod - def InitFromObj(cls, assignVariableOptions): - x = AssignVariableOptionsT() - x._UnPack(assignVariableOptions) - return x - - # AssignVariableOptionsT - def _UnPack(self, assignVariableOptions): - if assignVariableOptions is None: - return - - # AssignVariableOptionsT - def Pack(self, builder): - AssignVariableOptionsStart(builder) - assignVariableOptions = AssignVariableOptionsEnd(builder) - return assignVariableOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BatchMatMulOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BatchMatMulOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBatchMatMulOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BatchMatMulOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BatchMatMulOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # BatchMatMulOptions - def AdjX(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - - # BatchMatMulOptions - def AdjY(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - - # BatchMatMulOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - -def BatchMatMulOptionsStart(builder): builder.StartObject(3) -def Start(builder): - return BatchMatMulOptionsStart(builder) -def BatchMatMulOptionsAddAdjX(builder, adjX): builder.PrependBoolSlot(0, adjX, 0) -def AddAdjX(builder, adjX): - return BatchMatMulOptionsAddAdjX(builder, adjX) -def BatchMatMulOptionsAddAdjY(builder, adjY): builder.PrependBoolSlot(1, adjY, 0) -def AddAdjY(builder, adjY): - return BatchMatMulOptionsAddAdjY(builder, adjY) -def BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def BatchMatMulOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BatchMatMulOptionsEnd(builder) - -class BatchMatMulOptionsT(object): - - # BatchMatMulOptionsT - def __init__(self): - self.adjX = False # type: bool - self.adjY = False # type: bool - self.asymmetricQuantizeInputs = False # type: bool - - @classmethod - def InitFromBuf(cls, buf, pos): - batchMatMulOptions = BatchMatMulOptions() - batchMatMulOptions.Init(buf, pos) - return cls.InitFromObj(batchMatMulOptions) - - @classmethod - def InitFromObj(cls, batchMatMulOptions): - x = BatchMatMulOptionsT() - x._UnPack(batchMatMulOptions) - return x - - # BatchMatMulOptionsT - def _UnPack(self, batchMatMulOptions): - if batchMatMulOptions is None: - return - self.adjX = batchMatMulOptions.AdjX() - self.adjY = batchMatMulOptions.AdjY() - self.asymmetricQuantizeInputs = batchMatMulOptions.AsymmetricQuantizeInputs() - - # BatchMatMulOptionsT - def Pack(self, builder): - BatchMatMulOptionsStart(builder) - BatchMatMulOptionsAddAdjX(builder, self.adjX) - BatchMatMulOptionsAddAdjY(builder, self.adjY) - BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - batchMatMulOptions = BatchMatMulOptionsEnd(builder) - return batchMatMulOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BatchToSpaceNDOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BatchToSpaceNDOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def 
GetRootAsBatchToSpaceNDOptions(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BatchToSpaceNDOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BatchToSpaceNDOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - -def BatchToSpaceNDOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return BatchToSpaceNDOptionsStart(builder) -def BatchToSpaceNDOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BatchToSpaceNDOptionsEnd(builder) - -class BatchToSpaceNDOptionsT(object): - - # BatchToSpaceNDOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - batchToSpaceNdoptions = BatchToSpaceNDOptions() - batchToSpaceNdoptions.Init(buf, pos) - return cls.InitFromObj(batchToSpaceNdoptions) - - @classmethod - def InitFromObj(cls, batchToSpaceNdoptions): - x = BatchToSpaceNDOptionsT() - x._UnPack(batchToSpaceNdoptions) - return x - - # BatchToSpaceNDOptionsT - def _UnPack(self, batchToSpaceNdoptions): - if batchToSpaceNdoptions is None: - return - - # BatchToSpaceNDOptionsT - def Pack(self, builder): - BatchToSpaceNDOptionsStart(builder) - batchToSpaceNdoptions = BatchToSpaceNDOptionsEnd(builder) - return batchToSpaceNdoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BidirectionalSequenceLSTMOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BidirectionalSequenceLSTMOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBidirectionalSequenceLSTMOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BidirectionalSequenceLSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BidirectionalSequenceLSTMOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # BidirectionalSequenceLSTMOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # BidirectionalSequenceLSTMOptions - def CellClip(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 - - # BidirectionalSequenceLSTMOptions - def ProjClip(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 - - # BidirectionalSequenceLSTMOptions - def MergeOutputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - - # BidirectionalSequenceLSTMOptions - def TimeMajor(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return True - - # BidirectionalSequenceLSTMOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - -def BidirectionalSequenceLSTMOptionsStart(builder): builder.StartObject(6) -def Start(builder): - return BidirectionalSequenceLSTMOptionsStart(builder) -def BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def BidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip): builder.PrependFloat32Slot(1, cellClip, 0.0) -def AddCellClip(builder, cellClip): - return BidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip) -def BidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip): builder.PrependFloat32Slot(2, projClip, 0.0) -def AddProjClip(builder, projClip): - return BidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip) -def BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, mergeOutputs): builder.PrependBoolSlot(3, mergeOutputs, 0) -def AddMergeOutputs(builder, mergeOutputs): - return BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, mergeOutputs) -def BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(4, timeMajor, 1) -def AddTimeMajor(builder, timeMajor): - return BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor) -def BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(5, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return 
BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def BidirectionalSequenceLSTMOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BidirectionalSequenceLSTMOptionsEnd(builder) - -class BidirectionalSequenceLSTMOptionsT(object): - - # BidirectionalSequenceLSTMOptionsT - def __init__(self): - self.fusedActivationFunction = 0 # type: int - self.cellClip = 0.0 # type: float - self.projClip = 0.0 # type: float - self.mergeOutputs = False # type: bool - self.timeMajor = True # type: bool - self.asymmetricQuantizeInputs = False # type: bool - - @classmethod - def InitFromBuf(cls, buf, pos): - bidirectionalSequenceLstmoptions = BidirectionalSequenceLSTMOptions() - bidirectionalSequenceLstmoptions.Init(buf, pos) - return cls.InitFromObj(bidirectionalSequenceLstmoptions) - - @classmethod - def InitFromObj(cls, bidirectionalSequenceLstmoptions): - x = BidirectionalSequenceLSTMOptionsT() - x._UnPack(bidirectionalSequenceLstmoptions) - return x - - # BidirectionalSequenceLSTMOptionsT - def _UnPack(self, bidirectionalSequenceLstmoptions): - if bidirectionalSequenceLstmoptions is None: - return - self.fusedActivationFunction = bidirectionalSequenceLstmoptions.FusedActivationFunction() - self.cellClip = bidirectionalSequenceLstmoptions.CellClip() - self.projClip = bidirectionalSequenceLstmoptions.ProjClip() - self.mergeOutputs = bidirectionalSequenceLstmoptions.MergeOutputs() - self.timeMajor = bidirectionalSequenceLstmoptions.TimeMajor() - self.asymmetricQuantizeInputs = bidirectionalSequenceLstmoptions.AsymmetricQuantizeInputs() - - # BidirectionalSequenceLSTMOptionsT - def Pack(self, builder): - BidirectionalSequenceLSTMOptionsStart(builder) - BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - BidirectionalSequenceLSTMOptionsAddCellClip(builder, self.cellClip) - BidirectionalSequenceLSTMOptionsAddProjClip(builder, self.projClip) - BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, self.mergeOutputs) - BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, self.timeMajor) - BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - bidirectionalSequenceLstmoptions = BidirectionalSequenceLSTMOptionsEnd(builder) - return bidirectionalSequenceLstmoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BidirectionalSequenceRNNOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BidirectionalSequenceRNNOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBidirectionalSequenceRNNOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BidirectionalSequenceRNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BidirectionalSequenceRNNOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # BidirectionalSequenceRNNOptions - def TimeMajor(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - - # BidirectionalSequenceRNNOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # BidirectionalSequenceRNNOptions - def MergeOutputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - - # BidirectionalSequenceRNNOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - -def BidirectionalSequenceRNNOptionsStart(builder): builder.StartObject(4) -def Start(builder): - return BidirectionalSequenceRNNOptionsStart(builder) -def BidirectionalSequenceRNNOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(0, timeMajor, 0) -def AddTimeMajor(builder, timeMajor): - return BidirectionalSequenceRNNOptionsAddTimeMajor(builder, timeMajor) -def BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, mergeOutputs): builder.PrependBoolSlot(2, mergeOutputs, 0) -def AddMergeOutputs(builder, mergeOutputs): - return BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, mergeOutputs) -def BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(3, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def BidirectionalSequenceRNNOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BidirectionalSequenceRNNOptionsEnd(builder) - -class BidirectionalSequenceRNNOptionsT(object): - - # BidirectionalSequenceRNNOptionsT - def __init__(self): - self.timeMajor = False # type: bool - self.fusedActivationFunction = 0 # type: int - self.mergeOutputs = False # type: bool - self.asymmetricQuantizeInputs = False # type: bool - - @classmethod - def InitFromBuf(cls, buf, pos): - bidirectionalSequenceRnnoptions = BidirectionalSequenceRNNOptions() - bidirectionalSequenceRnnoptions.Init(buf, pos) - return cls.InitFromObj(bidirectionalSequenceRnnoptions) - - @classmethod - def InitFromObj(cls, bidirectionalSequenceRnnoptions): - x = BidirectionalSequenceRNNOptionsT() - x._UnPack(bidirectionalSequenceRnnoptions) - return x - - # 
BidirectionalSequenceRNNOptionsT - def _UnPack(self, bidirectionalSequenceRnnoptions): - if bidirectionalSequenceRnnoptions is None: - return - self.timeMajor = bidirectionalSequenceRnnoptions.TimeMajor() - self.fusedActivationFunction = bidirectionalSequenceRnnoptions.FusedActivationFunction() - self.mergeOutputs = bidirectionalSequenceRnnoptions.MergeOutputs() - self.asymmetricQuantizeInputs = bidirectionalSequenceRnnoptions.AsymmetricQuantizeInputs() - - # BidirectionalSequenceRNNOptionsT - def Pack(self, builder): - BidirectionalSequenceRNNOptionsStart(builder) - BidirectionalSequenceRNNOptionsAddTimeMajor(builder, self.timeMajor) - BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, self.mergeOutputs) - BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - bidirectionalSequenceRnnoptions = BidirectionalSequenceRNNOptionsEnd(builder) - return bidirectionalSequenceRnnoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BitcastOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BitcastOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBitcastOptions(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BitcastOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BitcastOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - -def BitcastOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return BitcastOptionsStart(builder) -def BitcastOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BitcastOptionsEnd(builder) - -class BitcastOptionsT(object): - - # BitcastOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - bitcastOptions = BitcastOptions() - bitcastOptions.Init(buf, pos) - return cls.InitFromObj(bitcastOptions) - - @classmethod - def InitFromObj(cls, bitcastOptions): - x = BitcastOptionsT() - x._UnPack(bitcastOptions) - return x - - # BitcastOptionsT - def _UnPack(self, bitcastOptions): - if bitcastOptions is None: - return - - # BitcastOptionsT - def Pack(self, builder): - BitcastOptionsStart(builder) - bitcastOptions = BitcastOptionsEnd(builder) - return bitcastOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BitwiseXorOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BitwiseXorOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBitwiseXorOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BitwiseXorOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BitwiseXorOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - -def BitwiseXorOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return BitwiseXorOptionsStart(builder) -def BitwiseXorOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BitwiseXorOptionsEnd(builder) - -class BitwiseXorOptionsT(object): - - # BitwiseXorOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - bitwiseXorOptions = BitwiseXorOptions() - bitwiseXorOptions.Init(buf, pos) - return cls.InitFromObj(bitwiseXorOptions) - - @classmethod - def InitFromObj(cls, bitwiseXorOptions): - x = BitwiseXorOptionsT() - x._UnPack(bitwiseXorOptions) - return x - - # BitwiseXorOptionsT - def _UnPack(self, bitwiseXorOptions): - if bitwiseXorOptions is None: - return - - # BitwiseXorOptionsT - def Pack(self, builder): - BitwiseXorOptionsStart(builder) - bitwiseXorOptions = BitwiseXorOptionsEnd(builder) - return bitwiseXorOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BroadcastToOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BroadcastToOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBroadcastToOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BroadcastToOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BroadcastToOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - -def BroadcastToOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return BroadcastToOptionsStart(builder) -def BroadcastToOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BroadcastToOptionsEnd(builder) - -class BroadcastToOptionsT(object): - - # BroadcastToOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - broadcastToOptions = BroadcastToOptions() - broadcastToOptions.Init(buf, pos) - return cls.InitFromObj(broadcastToOptions) - - @classmethod - def InitFromObj(cls, broadcastToOptions): - x = BroadcastToOptionsT() - x._UnPack(broadcastToOptions) - return x - - # BroadcastToOptionsT - def _UnPack(self, broadcastToOptions): - if broadcastToOptions is None: - return - - # BroadcastToOptionsT - def Pack(self, builder): - BroadcastToOptionsStart(builder) - broadcastToOptions = BroadcastToOptionsEnd(builder) - return broadcastToOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - -class BucketizeOptions(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = BucketizeOptions() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBucketizeOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BucketizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # BucketizeOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # BucketizeOptions - def Boundaries(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # BucketizeOptions - def BoundariesAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) - return 0 - - # BucketizeOptions - def BoundariesLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # BucketizeOptions - def BoundariesIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - -def BucketizeOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return BucketizeOptionsStart(builder) -def BucketizeOptionsAddBoundaries(builder, boundaries): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(boundaries), 0) -def AddBoundaries(builder, boundaries): - return BucketizeOptionsAddBoundaries(builder, boundaries) -def BucketizeOptionsStartBoundariesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartBoundariesVector(builder, numElems): - return BucketizeOptionsStartBoundariesVector(builder, numElems) -def BucketizeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return BucketizeOptionsEnd(builder) -try: - from typing import List -except: - pass - -class BucketizeOptionsT(object): - - # BucketizeOptionsT - def __init__(self): - self.boundaries = None # type: List[float] - - @classmethod - def InitFromBuf(cls, buf, pos): - bucketizeOptions = BucketizeOptions() - bucketizeOptions.Init(buf, pos) - return cls.InitFromObj(bucketizeOptions) - - @classmethod - def InitFromObj(cls, bucketizeOptions): - x = BucketizeOptionsT() - x._UnPack(bucketizeOptions) - return x - - # BucketizeOptionsT - def _UnPack(self, bucketizeOptions): - if bucketizeOptions is None: - return - if not bucketizeOptions.BoundariesIsNone(): - if np is None: - self.boundaries = [] - for i in range(bucketizeOptions.BoundariesLength()): - self.boundaries.append(bucketizeOptions.Boundaries(i)) - else: - self.boundaries = bucketizeOptions.BoundariesAsNumpy() - - # BucketizeOptionsT - def Pack(self, builder): - if self.boundaries is not None: - if np is not None and type(self.boundaries) is np.ndarray: - boundaries = builder.CreateNumpyVector(self.boundaries) - else: - BucketizeOptionsStartBoundariesVector(builder, len(self.boundaries)) - for i in reversed(range(len(self.boundaries))): - builder.PrependFloat32(self.boundaries[i]) - boundaries = builder.EndVector() - BucketizeOptionsStart(builder) - if self.boundaries is not None: - BucketizeOptionsAddBoundaries(builder, boundaries) - bucketizeOptions = BucketizeOptionsEnd(builder) - return bucketizeOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() - 
-class Buffer(object): - __slots__ = ['_tab'] - - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Buffer() - x.Init(buf, n + offset) - return x - - @classmethod - def GetRootAsBuffer(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def BufferBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - - # Buffer - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) - - # Buffer - def Data(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) - return 0 - - # Buffer - def DataAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) - return 0 - - # Buffer - def DataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Buffer - def DataIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - # Buffer - def Offset(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) - return 0 - - # Buffer - def Size(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) - return 0 - -def BufferStart(builder): builder.StartObject(3) -def Start(builder): - return BufferStart(builder) -def BufferAddData(builder, data): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) -def AddData(builder, data): - return BufferAddData(builder, data) -def BufferStartDataVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def StartDataVector(builder, numElems): - return BufferStartDataVector(builder, numElems) -def BufferAddOffset(builder, offset): builder.PrependUint64Slot(1, offset, 0) -def AddOffset(builder, offset): - return BufferAddOffset(builder, offset) -def BufferAddSize(builder, size): builder.PrependUint64Slot(2, size, 0) -def AddSize(builder, size): - return BufferAddSize(builder, size) -def BufferEnd(builder): return builder.EndObject() -def End(builder): - return BufferEnd(builder) -try: - from typing import List -except: - pass - -class BufferT(object): - - # BufferT - def __init__(self): - self.data = None # type: List[int] - self.offset = 0 # type: int - self.size = 0 # type: int - - @classmethod - def InitFromBuf(cls, buf, pos): - buffer = Buffer() - buffer.Init(buf, pos) - return cls.InitFromObj(buffer) - - @classmethod - def InitFromObj(cls, buffer): - x = BufferT() - x._UnPack(buffer) - return x - - # BufferT - def _UnPack(self, buffer): - if buffer is None: - return - if not buffer.DataIsNone(): - if np is None: - self.data = [] - for i in range(buffer.DataLength()): - self.data.append(buffer.Data(i)) - else: - self.data = buffer.DataAsNumpy() - self.offset = buffer.Offset() - self.size = buffer.Size() - - # BufferT - def Pack(self, builder): - if self.data is not 
None: - if np is not None and type(self.data) is np.ndarray: - data = builder.CreateNumpyVector(self.data) - else: - BufferStartDataVector(builder, len(self.data)) - for i in reversed(range(len(self.data))): - builder.PrependUint8(self.data[i]) - data = builder.EndVector() - BufferStart(builder) - if self.data is not None: - BufferAddData(builder, data) - BufferAddOffset(builder, self.offset) - BufferAddSize(builder, self.size) - buffer = BufferEnd(builder) - return buffer -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class BuiltinOperator(object): - ADD = 0 - AVERAGE_POOL_2D = 1 - CONCATENATION = 2 - CONV_2D = 3 - DEPTHWISE_CONV_2D = 4 - DEPTH_TO_SPACE = 5 - DEQUANTIZE = 6 - EMBEDDING_LOOKUP = 7 - FLOOR = 8 - FULLY_CONNECTED = 9 - HASHTABLE_LOOKUP = 10 - L2_NORMALIZATION = 11 - L2_POOL_2D = 12 - LOCAL_RESPONSE_NORMALIZATION = 13 - LOGISTIC = 14 - LSH_PROJECTION = 15 - LSTM = 16 - MAX_POOL_2D = 17 - MUL = 18 - RELU = 19 - RELU_N1_TO_1 = 20 - RELU6 = 21 - RESHAPE = 22 - RESIZE_BILINEAR = 23 - RNN = 24 - SOFTMAX = 25 - SPACE_TO_DEPTH = 26 - SVDF = 27 - TANH = 28 - CONCAT_EMBEDDINGS = 29 - SKIP_GRAM = 30 - CALL = 31 - CUSTOM = 32 - EMBEDDING_LOOKUP_SPARSE = 33 - PAD = 34 - UNIDIRECTIONAL_SEQUENCE_RNN = 35 - GATHER = 36 - BATCH_TO_SPACE_ND = 37 - SPACE_TO_BATCH_ND = 38 - TRANSPOSE = 39 - MEAN = 40 - SUB = 41 - DIV = 42 - SQUEEZE = 43 - UNIDIRECTIONAL_SEQUENCE_LSTM = 44 - STRIDED_SLICE = 45 - BIDIRECTIONAL_SEQUENCE_RNN = 46 - EXP = 47 - TOPK_V2 = 48 - SPLIT = 49 - LOG_SOFTMAX = 50 - DELEGATE = 51 - BIDIRECTIONAL_SEQUENCE_LSTM = 52 - CAST = 53 - PRELU = 54 - MAXIMUM = 55 - ARG_MAX = 56 - MINIMUM = 57 - LESS = 58 - NEG = 59 - PADV2 = 60 - GREATER = 61 - GREATER_EQUAL = 62 - LESS_EQUAL = 63 - SELECT = 64 - SLICE = 65 - SIN = 66 - TRANSPOSE_CONV = 67 - SPARSE_TO_DENSE = 68 - TILE = 69 - EXPAND_DIMS = 70 - EQUAL = 71 - NOT_EQUAL = 72 - LOG = 73 - SUM = 74 - SQRT = 75 - RSQRT = 76 - SHAPE = 77 - POW = 78 - ARG_MIN = 79 - FAKE_QUANT = 80 - REDUCE_PROD = 81 - REDUCE_MAX = 82 - PACK = 83 - LOGICAL_OR = 84 - ONE_HOT = 85 - LOGICAL_AND = 86 - LOGICAL_NOT = 87 - UNPACK = 88 - REDUCE_MIN = 89 - FLOOR_DIV = 90 - REDUCE_ANY = 91 - SQUARE = 92 - ZEROS_LIKE = 93 - FILL = 94 - FLOOR_MOD = 95 - RANGE = 96 - RESIZE_NEAREST_NEIGHBOR = 97 - LEAKY_RELU = 98 - SQUARED_DIFFERENCE = 99 - MIRROR_PAD = 100 - ABS = 101 - SPLIT_V = 102 - UNIQUE = 103 - CEIL = 104 - REVERSE_V2 = 105 - ADD_N = 106 - GATHER_ND = 107 - COS = 108 - WHERE = 109 - RANK = 110 - ELU = 111 - REVERSE_SEQUENCE = 112 - MATRIX_DIAG = 113 - QUANTIZE = 114 - MATRIX_SET_DIAG = 115 - ROUND = 116 - HARD_SWISH = 117 - IF = 118 - WHILE = 119 - NON_MAX_SUPPRESSION_V4 = 120 - NON_MAX_SUPPRESSION_V5 = 121 - SCATTER_ND = 122 - SELECT_V2 = 123 - DENSIFY = 124 - SEGMENT_SUM = 125 - BATCH_MATMUL = 126 - PLACEHOLDER_FOR_GREATER_OP_CODES = 127 - CUMSUM = 128 - CALL_ONCE = 129 - BROADCAST_TO = 130 - RFFT2D = 131 - CONV_3D = 132 - IMAG = 133 - REAL = 134 - COMPLEX_ABS = 135 - HASHTABLE = 136 - HASHTABLE_FIND = 137 - HASHTABLE_IMPORT = 138 - HASHTABLE_SIZE = 139 - REDUCE_ALL = 140 - CONV_3D_TRANSPOSE = 141 - VAR_HANDLE = 142 - READ_VARIABLE = 143 - ASSIGN_VARIABLE = 144 - BROADCAST_ARGS = 145 - RANDOM_STANDARD_NORMAL = 146 - BUCKETIZE = 147 - RANDOM_UNIFORM = 148 - MULTINOMIAL = 149 - GELU = 150 - DYNAMIC_UPDATE_SLICE = 151 - RELU_0_TO_1 = 152 - UNSORTED_SEGMENT_PROD = 153 - UNSORTED_SEGMENT_MAX = 154 - UNSORTED_SEGMENT_SUM = 155 - ATAN2 = 156 - UNSORTED_SEGMENT_MIN = 157 - SIGN = 158 - BITCAST = 159 - BITWISE_XOR = 160 - 
RIGHT_SHIFT = 161 -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class BuiltinOptions(object): - NONE = 0 - Conv2DOptions = 1 - DepthwiseConv2DOptions = 2 - ConcatEmbeddingsOptions = 3 - LSHProjectionOptions = 4 - Pool2DOptions = 5 - SVDFOptions = 6 - RNNOptions = 7 - FullyConnectedOptions = 8 - SoftmaxOptions = 9 - ConcatenationOptions = 10 - AddOptions = 11 - L2NormOptions = 12 - LocalResponseNormalizationOptions = 13 - LSTMOptions = 14 - ResizeBilinearOptions = 15 - CallOptions = 16 - ReshapeOptions = 17 - SkipGramOptions = 18 - SpaceToDepthOptions = 19 - EmbeddingLookupSparseOptions = 20 - MulOptions = 21 - PadOptions = 22 - GatherOptions = 23 - BatchToSpaceNDOptions = 24 - SpaceToBatchNDOptions = 25 - TransposeOptions = 26 - ReducerOptions = 27 - SubOptions = 28 - DivOptions = 29 - SqueezeOptions = 30 - SequenceRNNOptions = 31 - StridedSliceOptions = 32 - ExpOptions = 33 - TopKV2Options = 34 - SplitOptions = 35 - LogSoftmaxOptions = 36 - CastOptions = 37 - DequantizeOptions = 38 - MaximumMinimumOptions = 39 - ArgMaxOptions = 40 - LessOptions = 41 - NegOptions = 42 - PadV2Options = 43 - GreaterOptions = 44 - GreaterEqualOptions = 45 - LessEqualOptions = 46 - SelectOptions = 47 - SliceOptions = 48 - TransposeConvOptions = 49 - SparseToDenseOptions = 50 - TileOptions = 51 - ExpandDimsOptions = 52 - EqualOptions = 53 - NotEqualOptions = 54 - ShapeOptions = 55 - PowOptions = 56 - ArgMinOptions = 57 - FakeQuantOptions = 58 - PackOptions = 59 - LogicalOrOptions = 60 - OneHotOptions = 61 - LogicalAndOptions = 62 - LogicalNotOptions = 63 - UnpackOptions = 64 - FloorDivOptions = 65 - SquareOptions = 66 - ZerosLikeOptions = 67 - FillOptions = 68 - BidirectionalSequenceLSTMOptions = 69 - BidirectionalSequenceRNNOptions = 70 - UnidirectionalSequenceLSTMOptions = 71 - FloorModOptions = 72 - RangeOptions = 73 - ResizeNearestNeighborOptions = 74 - LeakyReluOptions = 75 - SquaredDifferenceOptions = 76 - MirrorPadOptions = 77 - AbsOptions = 78 - SplitVOptions = 79 - UniqueOptions = 80 - ReverseV2Options = 81 - AddNOptions = 82 - GatherNdOptions = 83 - CosOptions = 84 - WhereOptions = 85 - RankOptions = 86 - ReverseSequenceOptions = 87 - MatrixDiagOptions = 88 - QuantizeOptions = 89 - MatrixSetDiagOptions = 90 - HardSwishOptions = 91 - IfOptions = 92 - WhileOptions = 93 - DepthToSpaceOptions = 94 - NonMaxSuppressionV4Options = 95 - NonMaxSuppressionV5Options = 96 - ScatterNdOptions = 97 - SelectV2Options = 98 - DensifyOptions = 99 - SegmentSumOptions = 100 - BatchMatMulOptions = 101 - CumsumOptions = 102 - CallOnceOptions = 103 - BroadcastToOptions = 104 - Rfft2dOptions = 105 - Conv3DOptions = 106 - HashtableOptions = 107 - HashtableFindOptions = 108 - HashtableImportOptions = 109 - HashtableSizeOptions = 110 - VarHandleOptions = 111 - ReadVariableOptions = 112 - AssignVariableOptions = 113 - RandomOptions = 114 - BucketizeOptions = 115 - GeluOptions = 116 - DynamicUpdateSliceOptions = 117 - UnsortedSegmentProdOptions = 118 - UnsortedSegmentMaxOptions = 119 - UnsortedSegmentMinOptions = 120 - UnsortedSegmentSumOptions = 121 - ATan2Options = 122 - SignOptions = 123 - BitcastOptions = 124 - BitwiseXorOptions = 125 - RightShiftOptions = 126 + Conv2DOptions = 1 + DepthwiseConv2DOptions = 2 + ConcatEmbeddingsOptions = 3 + LSHProjectionOptions = 4 + Pool2DOptions = 5 + SVDFOptions = 6 + RNNOptions = 7 + FullyConnectedOptions = 8 + SoftmaxOptions = 9 + ConcatenationOptions = 10 + AddOptions = 11 + L2NormOptions = 12 + LocalResponseNormalizationOptions = 13 + 
LSTMOptions = 14 + ResizeBilinearOptions = 15 + CallOptions = 16 + ReshapeOptions = 17 + SkipGramOptions = 18 + SpaceToDepthOptions = 19 + EmbeddingLookupSparseOptions = 20 + MulOptions = 21 + PadOptions = 22 + GatherOptions = 23 + BatchToSpaceNDOptions = 24 + SpaceToBatchNDOptions = 25 + TransposeOptions = 26 + ReducerOptions = 27 + SubOptions = 28 + DivOptions = 29 + SqueezeOptions = 30 + SequenceRNNOptions = 31 + StridedSliceOptions = 32 + ExpOptions = 33 + TopKV2Options = 34 + SplitOptions = 35 + LogSoftmaxOptions = 36 + CastOptions = 37 + DequantizeOptions = 38 + MaximumMinimumOptions = 39 + ArgMaxOptions = 40 + LessOptions = 41 + NegOptions = 42 + PadV2Options = 43 + GreaterOptions = 44 + GreaterEqualOptions = 45 + LessEqualOptions = 46 + SelectOptions = 47 + SliceOptions = 48 + TransposeConvOptions = 49 + SparseToDenseOptions = 50 + TileOptions = 51 + ExpandDimsOptions = 52 + EqualOptions = 53 + NotEqualOptions = 54 + ShapeOptions = 55 + PowOptions = 56 + ArgMinOptions = 57 + FakeQuantOptions = 58 + PackOptions = 59 + LogicalOrOptions = 60 + OneHotOptions = 61 + LogicalAndOptions = 62 + LogicalNotOptions = 63 + UnpackOptions = 64 + FloorDivOptions = 65 + SquareOptions = 66 + ZerosLikeOptions = 67 + FillOptions = 68 + BidirectionalSequenceLSTMOptions = 69 + BidirectionalSequenceRNNOptions = 70 + UnidirectionalSequenceLSTMOptions = 71 + FloorModOptions = 72 + RangeOptions = 73 + ResizeNearestNeighborOptions = 74 + LeakyReluOptions = 75 + SquaredDifferenceOptions = 76 + MirrorPadOptions = 77 + AbsOptions = 78 + SplitVOptions = 79 + UniqueOptions = 80 + ReverseV2Options = 81 + AddNOptions = 82 + GatherNdOptions = 83 + CosOptions = 84 + WhereOptions = 85 + RankOptions = 86 + ReverseSequenceOptions = 87 + MatrixDiagOptions = 88 + QuantizeOptions = 89 + MatrixSetDiagOptions = 90 + HardSwishOptions = 91 + IfOptions = 92 + WhileOptions = 93 + DepthToSpaceOptions = 94 + NonMaxSuppressionV4Options = 95 + NonMaxSuppressionV5Options = 96 + ScatterNdOptions = 97 + SelectV2Options = 98 + DensifyOptions = 99 + SegmentSumOptions = 100 + BatchMatMulOptions = 101 + CumsumOptions = 102 + CallOnceOptions = 103 + BroadcastToOptions = 104 + Rfft2dOptions = 105 + Conv3DOptions = 106 + HashtableOptions = 107 + HashtableFindOptions = 108 + HashtableImportOptions = 109 + HashtableSizeOptions = 110 + VarHandleOptions = 111 + ReadVariableOptions = 112 + AssignVariableOptions = 113 + RandomOptions = 114 + BucketizeOptions = 115 + GeluOptions = 116 + DynamicUpdateSliceOptions = 117 + UnsortedSegmentProdOptions = 118 + UnsortedSegmentMaxOptions = 119 + UnsortedSegmentMinOptions = 120 + UnsortedSegmentSumOptions = 121 + ATan2Options = 122 + SignOptions = 123 + BitcastOptions = 124 + BitwiseXorOptions = 125 + RightShiftOptions = 126 def BuiltinOptionsCreator(unionType, table): from flatbuffers.table import Table @@ -1941,443 +662,6838 @@ def BuiltinOptionsCreator(unionType, table): if unionType == BuiltinOptions().RightShiftOptions: return RightShiftOptionsT.InitFromBuf(table.Bytes, table.Pos) return None -# automatically generated by the FlatBuffers compiler, do not modify -# namespace: tflite -from flatbuffers.compat import import_numpy -np = import_numpy() +class BuiltinOptions2(object): + NONE = 0 + StablehloConcatenateOptions = 1 + StablehloBroadcastInDimOptions = 2 + StablehloSliceOptions = 3 + StablehloConvolutionOptions = 4 + StablehloCustomCallOptions = 5 + StablehloReduceOptions = 6 + StablehloScatterOptions = 7 + StablehloCompareOptions = 8 + StablehloDynamicSliceOptions = 9 + StablehloPadOptions = 10 + 
StablehloIotaOptions = 11 + StablehloDotGeneralOptions = 12 + StablehloReduceWindowOptions = 13 + StablehloSortOptions = 14 + StablehloWhileOptions = 15 + StablehloGatherOptions = 16 + StablehloTransposeOptions = 17 + DilateOptions = 18 + StablehloRngBitGeneratorOptions = 19 + ReduceWindowOptions = 20 + StableHLOCompositeOptions = 21 + +def BuiltinOptions2Creator(unionType, table): + from flatbuffers.table import Table + if not isinstance(table, Table): + return None + if unionType == BuiltinOptions2().StablehloConcatenateOptions: + return StablehloConcatenateOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloBroadcastInDimOptions: + return StablehloBroadcastInDimOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloSliceOptions: + return StablehloSliceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloConvolutionOptions: + return StablehloConvolutionOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloCustomCallOptions: + return StablehloCustomCallOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloReduceOptions: + return StablehloReduceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloScatterOptions: + return StablehloScatterOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloCompareOptions: + return StablehloCompareOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloDynamicSliceOptions: + return StablehloDynamicSliceOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloPadOptions: + return StablehloPadOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloIotaOptions: + return StablehloIotaOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloDotGeneralOptions: + return StablehloDotGeneralOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloReduceWindowOptions: + return StablehloReduceWindowOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloSortOptions: + return StablehloSortOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloWhileOptions: + return StablehloWhileOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloGatherOptions: + return StablehloGatherOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloTransposeOptions: + return StablehloTransposeOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().DilateOptions: + return DilateOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StablehloRngBitGeneratorOptions: + return StablehloRngBitGeneratorOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().ReduceWindowOptions: + return ReduceWindowOptionsT.InitFromBuf(table.Bytes, table.Pos) + if unionType == BuiltinOptions2().StableHLOCompositeOptions: + return StableHLOCompositeOptionsT.InitFromBuf(table.Bytes, table.Pos) + return None + + +class StablehloPrecisionConfig(object): + DEFAULT = 0 + HIGH = 1 + HIGHEST = 2 + + +class StablehloComparisonDirection(object): + STABLEHLO_COMPARISON_DIRECTION_EQ = 0 + STABLEHLO_COMPARISON_DIRECTION_NE = 1 + STABLEHLO_COMPARISON_DIRECTION_GE = 2 + STABLEHLO_COMPARISON_DIRECTION_GT = 
3 + STABLEHLO_COMPARISON_DIRECTION_LE = 4 + STABLEHLO_COMPARISON_DIRECTION_LT = 5 + + +class StablehloComparisonType(object): + STABLEHLO_COMPARISON_TYPE_NOTYPE = 0 + STABLEHLO_COMPARISON_TYPE_FLOAT = 1 + STABLEHLO_COMPARISON_TYPE_FLOAT_TOTAL_ORDER = 2 + STABLEHLO_COMPARISON_TYPE_SIGNED = 3 + STABLEHLO_COMPARISON_TYPE_UNSIGNED = 4 + + +class RngAlgorithm(object): + DEFAULT = 0 + PHILOX = 1 + THREEFRY = 2 + + +class Padding(object): + SAME = 0 + VALID = 1 + + +class ActivationFunctionType(object): + NONE = 0 + RELU = 1 + RELU_N1_TO_1 = 2 + RELU6 = 3 + TANH = 4 + SIGN_BIT = 5 + + +class LSHProjectionType(object): + UNKNOWN = 0 + SPARSE = 1 + DENSE = 2 + + +class FullyConnectedOptionsWeightsFormat(object): + DEFAULT = 0 + SHUFFLED4x16INT8 = 1 + + +class LSTMKernelType(object): + FULL = 0 + BASIC = 1 + + +class CombinerType(object): + SUM = 0 + MEAN = 1 + SQRTN = 2 + + +class MirrorPadMode(object): + REFLECT = 0 + SYMMETRIC = 1 + + +class ReduceWindowFunction(object): + UNSUPPORTED = 0 + ADD = 1 + MUL = 2 + MINIMUM = 3 + MAXIMUM = 4 + ALL = 5 + ANY = 6 + + +class CustomOptionsFormat(object): + FLEXBUFFERS = 0 + + +class CustomQuantization(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = CustomQuantization() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsCustomQuantization(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def CustomQuantizationBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # CustomQuantization + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CustomQuantization + def Custom(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # CustomQuantization + def CustomAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # CustomQuantization + def CustomLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # CustomQuantization + def CustomIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def CustomQuantizationStart(builder): + builder.StartObject(1) + +def CustomQuantizationAddCustom(builder, custom): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(custom), 0) + +def CustomQuantizationStartCustomVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + +def CustomQuantizationEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class CustomQuantizationT(object): + + # CustomQuantizationT + def __init__(self): + self.custom = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + customQuantization = CustomQuantization() + customQuantization.Init(buf, pos) + return cls.InitFromObj(customQuantization) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, customQuantization): + x = CustomQuantizationT() + x._UnPack(customQuantization) + return x + + # CustomQuantizationT + def _UnPack(self, customQuantization): + if customQuantization is None: + return + if not customQuantization.CustomIsNone(): + if np is None: + self.custom = [] + for i in range(customQuantization.CustomLength()): + self.custom.append(customQuantization.Custom(i)) + else: + self.custom = customQuantization.CustomAsNumpy() + + # CustomQuantizationT + def Pack(self, builder): + if self.custom is not None: + if np is not None and type(self.custom) is np.ndarray: + custom = builder.CreateNumpyVector(self.custom) + else: + CustomQuantizationStartCustomVector(builder, len(self.custom)) + for i in reversed(range(len(self.custom))): + builder.PrependUint8(self.custom[i]) + custom = builder.EndVector() + CustomQuantizationStart(builder) + if self.custom is not None: + CustomQuantizationAddCustom(builder, custom) + customQuantization = CustomQuantizationEnd(builder) + return customQuantization + + +class QuantizationParameters(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = QuantizationParameters() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsQuantizationParameters(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def QuantizationParametersBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # QuantizationParameters + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # QuantizationParameters + def Min(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # QuantizationParameters + def MinAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # QuantizationParameters + def MinLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def MinIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # QuantizationParameters + def Max(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # QuantizationParameters + def MaxAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # QuantizationParameters + def MaxLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def MaxIsNone(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # QuantizationParameters + def Scale(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # QuantizationParameters + def ScaleAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 + + # QuantizationParameters + def ScaleLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def ScaleIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # QuantizationParameters + def ZeroPoint(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # QuantizationParameters + def ZeroPointAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # QuantizationParameters + def ZeroPointLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # QuantizationParameters + def ZeroPointIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # QuantizationParameters + def DetailsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # QuantizationParameters + def Details(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # QuantizationParameters + def QuantizedDimension(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def QuantizationParametersStart(builder): + builder.StartObject(7) + +def QuantizationParametersAddMin(builder, min): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(min), 0) + +def QuantizationParametersStartMinVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def QuantizationParametersAddMax(builder, max): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(max), 0) + +def QuantizationParametersStartMaxVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def QuantizationParametersAddScale(builder, scale): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(scale), 0) + +def QuantizationParametersStartScaleVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def QuantizationParametersAddZeroPoint(builder, zeroPoint): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(zeroPoint), 0) 
+
+def QuantizationParametersStartZeroPointVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def QuantizationParametersAddDetailsType(builder, detailsType):
+    builder.PrependUint8Slot(4, detailsType, 0)
+
+def QuantizationParametersAddDetails(builder, details):
+    builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(details), 0)
+
+def QuantizationParametersAddQuantizedDimension(builder, quantizedDimension):
+    builder.PrependInt32Slot(6, quantizedDimension, 0)
+
+def QuantizationParametersEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List, Union
+except:
+    pass
+
+class QuantizationParametersT(object):
+
+    # QuantizationParametersT
+    def __init__(self):
+        self.min = None # type: List[float]
+        self.max = None # type: List[float]
+        self.scale = None # type: List[float]
+        self.zeroPoint = None # type: List[int]
+        self.detailsType = 0 # type: int
+        self.details = None # type: Union[None, CustomQuantizationT]
+        self.quantizedDimension = 0 # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        quantizationParameters = QuantizationParameters()
+        quantizationParameters.Init(buf, pos)
+        return cls.InitFromObj(quantizationParameters)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, quantizationParameters):
+        x = QuantizationParametersT()
+        x._UnPack(quantizationParameters)
+        return x
+
+    # QuantizationParametersT
+    def _UnPack(self, quantizationParameters):
+        if quantizationParameters is None:
+            return
+        if not quantizationParameters.MinIsNone():
+            if np is None:
+                self.min = []
+                for i in range(quantizationParameters.MinLength()):
+                    self.min.append(quantizationParameters.Min(i))
+            else:
+                self.min = quantizationParameters.MinAsNumpy()
+        if not quantizationParameters.MaxIsNone():
+            if np is None:
+                self.max = []
+                for i in range(quantizationParameters.MaxLength()):
+                    self.max.append(quantizationParameters.Max(i))
+            else:
+                self.max = quantizationParameters.MaxAsNumpy()
+        if not quantizationParameters.ScaleIsNone():
+            if np is None:
+                self.scale = []
+                for i in range(quantizationParameters.ScaleLength()):
+                    self.scale.append(quantizationParameters.Scale(i))
+            else:
+                self.scale = quantizationParameters.ScaleAsNumpy()
+        if not quantizationParameters.ZeroPointIsNone():
+            if np is None:
+                self.zeroPoint = []
+                for i in range(quantizationParameters.ZeroPointLength()):
+                    self.zeroPoint.append(quantizationParameters.ZeroPoint(i))
+            else:
+                self.zeroPoint = quantizationParameters.ZeroPointAsNumpy()
+        self.detailsType = quantizationParameters.DetailsType()
+        self.details = QuantizationDetailsCreator(self.detailsType, quantizationParameters.Details())
+        self.quantizedDimension = quantizationParameters.QuantizedDimension()
+
+    # QuantizationParametersT
+    def Pack(self, builder):
+        if self.min is not None:
+            if np is not None and type(self.min) is np.ndarray:
+                min = builder.CreateNumpyVector(self.min)
+            else:
+                QuantizationParametersStartMinVector(builder, len(self.min))
+                for i in reversed(range(len(self.min))):
+                    builder.PrependFloat32(self.min[i])
+                min = builder.EndVector()
+        if self.max is not None:
+            if np is not None and type(self.max) is np.ndarray:
+                max = builder.CreateNumpyVector(self.max)
+            else:
+                QuantizationParametersStartMaxVector(builder, len(self.max))
+                for i in reversed(range(len(self.max))):
+                    builder.PrependFloat32(self.max[i])
+                max = builder.EndVector()
+        if self.scale is not None:
+            if np is not None and type(self.scale) is np.ndarray:
+                scale = builder.CreateNumpyVector(self.scale)
+            else:
+                QuantizationParametersStartScaleVector(builder, len(self.scale))
+                for i in reversed(range(len(self.scale))):
+                    builder.PrependFloat32(self.scale[i])
+                scale = builder.EndVector()
+        if self.zeroPoint is not None:
+            if np is not None and type(self.zeroPoint) is np.ndarray:
+                zeroPoint = builder.CreateNumpyVector(self.zeroPoint)
+            else:
+                QuantizationParametersStartZeroPointVector(builder, len(self.zeroPoint))
+                for i in reversed(range(len(self.zeroPoint))):
+                    builder.PrependInt64(self.zeroPoint[i])
+                zeroPoint = builder.EndVector()
+        if self.details is not None:
+            details = self.details.Pack(builder)
+        QuantizationParametersStart(builder)
+        if self.min is not None:
+            QuantizationParametersAddMin(builder, min)
+        if self.max is not None:
+            QuantizationParametersAddMax(builder, max)
+        if self.scale is not None:
+            QuantizationParametersAddScale(builder, scale)
+        if self.zeroPoint is not None:
+            QuantizationParametersAddZeroPoint(builder, zeroPoint)
+        QuantizationParametersAddDetailsType(builder, self.detailsType)
+        if self.details is not None:
+            QuantizationParametersAddDetails(builder, details)
+        QuantizationParametersAddQuantizedDimension(builder, self.quantizedDimension)
+        quantizationParameters = QuantizationParametersEnd(builder)
+        return quantizationParameters
+
+
+class Int32Vector(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = Int32Vector()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsInt32Vector(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def Int32VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # Int32Vector
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # Int32Vector
+    def Values(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
+        return 0
+
+    # Int32Vector
+    def ValuesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
+        return 0
+
+    # Int32Vector
+    def ValuesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # Int32Vector
+    def ValuesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+def Int32VectorStart(builder):
+    builder.StartObject(1)
+
+def Int32VectorAddValues(builder, values):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0)
+
+def Int32VectorStartValuesVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
+
+def Int32VectorEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class Int32VectorT(object):
+
+    # Int32VectorT
+    def __init__(self):
+        self.values = None # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        int32Vector = Int32Vector()
+        int32Vector.Init(buf, pos)
+        return cls.InitFromObj(int32Vector)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, int32Vector):
+        x = Int32VectorT()
+        x._UnPack(int32Vector)
+        return x
+
+    # Int32VectorT
+    def _UnPack(self, int32Vector):
+        if int32Vector is None:
+            return
+        if not int32Vector.ValuesIsNone():
+            if np is None:
+                self.values = []
+                for i in range(int32Vector.ValuesLength()):
+                    self.values.append(int32Vector.Values(i))
+            else:
+                self.values = int32Vector.ValuesAsNumpy()
+
+    # Int32VectorT
+    def Pack(self, builder):
+        if self.values is not None:
+            if np is not None and type(self.values) is np.ndarray:
+                values = builder.CreateNumpyVector(self.values)
+            else:
+                Int32VectorStartValuesVector(builder, len(self.values))
+                for i in reversed(range(len(self.values))):
+                    builder.PrependInt32(self.values[i])
+                values = builder.EndVector()
+        Int32VectorStart(builder)
+        if self.values is not None:
+            Int32VectorAddValues(builder, values)
+        int32Vector = Int32VectorEnd(builder)
+        return int32Vector
+
+
+class Uint16Vector(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = Uint16Vector()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsUint16Vector(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def Uint16VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # Uint16Vector
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # Uint16Vector
+    def Values(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2))
+        return 0
+
+    # Uint16Vector
+    def ValuesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o)
+        return 0
+
+    # Uint16Vector
+    def ValuesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # Uint16Vector
+    def ValuesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+def Uint16VectorStart(builder):
+    builder.StartObject(1)
+
+def Uint16VectorAddValues(builder, values):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0)
+
+def Uint16VectorStartValuesVector(builder, numElems):
+    return builder.StartVector(2, numElems, 2)
+
+def Uint16VectorEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class Uint16VectorT(object):
+
+    # Uint16VectorT
+    def __init__(self):
+        self.values = None # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        uint16Vector = Uint16Vector()
+        uint16Vector.Init(buf, pos)
+        return cls.InitFromObj(uint16Vector)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
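Every object-API class (*T) generated in this file follows the same contract: InitFromPackedBuf reads the root table out of a finished buffer, _UnPack copies each field into a plain Python attribute (a numpy array when numpy is importable, otherwise a list), and Pack re-serializes the object through a flatbuffers.Builder. A minimal round-trip sketch using Int32VectorT, assuming this generated module has been imported:

```python
import flatbuffers

# Pack a plain object into a buffer, then unpack it back.
obj = Int32VectorT()
obj.values = [1, 2, 3]

builder = flatbuffers.Builder(0)
builder.Finish(obj.Pack(builder))
buf = builder.Output()

decoded = Int32VectorT.InitFromPackedBuf(buf, 0)
assert list(decoded.values) == [1, 2, 3]
```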
@classmethod + def InitFromObj(cls, uint16Vector): + x = Uint16VectorT() + x._UnPack(uint16Vector) + return x + + # Uint16VectorT + def _UnPack(self, uint16Vector): + if uint16Vector is None: + return + if not uint16Vector.ValuesIsNone(): + if np is None: + self.values = [] + for i in range(uint16Vector.ValuesLength()): + self.values.append(uint16Vector.Values(i)) + else: + self.values = uint16Vector.ValuesAsNumpy() + + # Uint16VectorT + def Pack(self, builder): + if self.values is not None: + if np is not None and type(self.values) is np.ndarray: + values = builder.CreateNumpyVector(self.values) + else: + Uint16VectorStartValuesVector(builder, len(self.values)) + for i in reversed(range(len(self.values))): + builder.PrependUint16(self.values[i]) + values = builder.EndVector() + Uint16VectorStart(builder) + if self.values is not None: + Uint16VectorAddValues(builder, values) + uint16Vector = Uint16VectorEnd(builder) + return uint16Vector + + +class Uint8Vector(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Uint8Vector() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsUint8Vector(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def Uint8VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Uint8Vector + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Uint8Vector + def Values(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Uint8Vector + def ValuesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # Uint8Vector + def ValuesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Uint8Vector + def ValuesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def Uint8VectorStart(builder): + builder.StartObject(1) + +def Uint8VectorAddValues(builder, values): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) + +def Uint8VectorStartValuesVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + +def Uint8VectorEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class Uint8VectorT(object): + + # Uint8VectorT + def __init__(self): + self.values = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + uint8Vector = Uint8Vector() + uint8Vector.Init(buf, pos) + return cls.InitFromObj(uint8Vector) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, uint8Vector): + x = Uint8VectorT() + x._UnPack(uint8Vector) + return x + + # Uint8VectorT + def _UnPack(self, uint8Vector): + if uint8Vector is None: + return + if not uint8Vector.ValuesIsNone(): + if np is None: + 
self.values = [] + for i in range(uint8Vector.ValuesLength()): + self.values.append(uint8Vector.Values(i)) + else: + self.values = uint8Vector.ValuesAsNumpy() + + # Uint8VectorT + def Pack(self, builder): + if self.values is not None: + if np is not None and type(self.values) is np.ndarray: + values = builder.CreateNumpyVector(self.values) + else: + Uint8VectorStartValuesVector(builder, len(self.values)) + for i in reversed(range(len(self.values))): + builder.PrependUint8(self.values[i]) + values = builder.EndVector() + Uint8VectorStart(builder) + if self.values is not None: + Uint8VectorAddValues(builder, values) + uint8Vector = Uint8VectorEnd(builder) + return uint8Vector + + +class DimensionMetadata(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DimensionMetadata() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDimensionMetadata(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DimensionMetadataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DimensionMetadata + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DimensionMetadata + def Format(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def DenseSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def ArraySegmentsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def ArraySegments(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # DimensionMetadata + def ArrayIndicesType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # DimensionMetadata + def ArrayIndices(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + +def DimensionMetadataStart(builder): + builder.StartObject(6) + +def DimensionMetadataAddFormat(builder, format): + builder.PrependInt8Slot(0, format, 0) + +def DimensionMetadataAddDenseSize(builder, denseSize): + builder.PrependInt32Slot(1, denseSize, 0) + +def DimensionMetadataAddArraySegmentsType(builder, arraySegmentsType): + builder.PrependUint8Slot(2, arraySegmentsType, 0) + +def DimensionMetadataAddArraySegments(builder, arraySegments): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(arraySegments), 0) + +def DimensionMetadataAddArrayIndicesType(builder, arrayIndicesType): + builder.PrependUint8Slot(4, arrayIndicesType, 0) + +def 
DimensionMetadataAddArrayIndices(builder, arrayIndices): + builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(arrayIndices), 0) + +def DimensionMetadataEnd(builder): + return builder.EndObject() + + +try: + from typing import Union +except: + pass + +class DimensionMetadataT(object): + + # DimensionMetadataT + def __init__(self): + self.format = 0 # type: int + self.denseSize = 0 # type: int + self.arraySegmentsType = 0 # type: int + self.arraySegments = None # type: Union[None, Int32VectorT, Uint16VectorT, Uint8VectorT] + self.arrayIndicesType = 0 # type: int + self.arrayIndices = None # type: Union[None, Int32VectorT, Uint16VectorT, Uint8VectorT] + + @classmethod + def InitFromBuf(cls, buf, pos): + dimensionMetadata = DimensionMetadata() + dimensionMetadata.Init(buf, pos) + return cls.InitFromObj(dimensionMetadata) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, dimensionMetadata): + x = DimensionMetadataT() + x._UnPack(dimensionMetadata) + return x + + # DimensionMetadataT + def _UnPack(self, dimensionMetadata): + if dimensionMetadata is None: + return + self.format = dimensionMetadata.Format() + self.denseSize = dimensionMetadata.DenseSize() + self.arraySegmentsType = dimensionMetadata.ArraySegmentsType() + self.arraySegments = SparseIndexVectorCreator(self.arraySegmentsType, dimensionMetadata.ArraySegments()) + self.arrayIndicesType = dimensionMetadata.ArrayIndicesType() + self.arrayIndices = SparseIndexVectorCreator(self.arrayIndicesType, dimensionMetadata.ArrayIndices()) + + # DimensionMetadataT + def Pack(self, builder): + if self.arraySegments is not None: + arraySegments = self.arraySegments.Pack(builder) + if self.arrayIndices is not None: + arrayIndices = self.arrayIndices.Pack(builder) + DimensionMetadataStart(builder) + DimensionMetadataAddFormat(builder, self.format) + DimensionMetadataAddDenseSize(builder, self.denseSize) + DimensionMetadataAddArraySegmentsType(builder, self.arraySegmentsType) + if self.arraySegments is not None: + DimensionMetadataAddArraySegments(builder, arraySegments) + DimensionMetadataAddArrayIndicesType(builder, self.arrayIndicesType) + if self.arrayIndices is not None: + DimensionMetadataAddArrayIndices(builder, arrayIndices) + dimensionMetadata = DimensionMetadataEnd(builder) + return dimensionMetadata + + +class SparsityParameters(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SparsityParameters() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSparsityParameters(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SparsityParametersBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SparsityParameters + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SparsityParameters + def TraversalOrder(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SparsityParameters + def TraversalOrderAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SparsityParameters + def TraversalOrderLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SparsityParameters + def TraversalOrderIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # SparsityParameters + def BlockMap(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SparsityParameters + def BlockMapAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SparsityParameters + def BlockMapLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SparsityParameters + def BlockMapIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # SparsityParameters + def DimMetadata(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = DimensionMetadata() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SparsityParameters + def DimMetadataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SparsityParameters + def DimMetadataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + +def SparsityParametersStart(builder): + builder.StartObject(3) + +def SparsityParametersAddTraversalOrder(builder, traversalOrder): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(traversalOrder), 0) + +def SparsityParametersStartTraversalOrderVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def SparsityParametersAddBlockMap(builder, blockMap): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(blockMap), 0) + +def SparsityParametersStartBlockMapVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def SparsityParametersAddDimMetadata(builder, dimMetadata): + builder.PrependUOffsetTRelativeSlot(2, 
flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0) + +def SparsityParametersStartDimMetadataVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def SparsityParametersEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class SparsityParametersT(object): + + # SparsityParametersT + def __init__(self): + self.traversalOrder = None # type: List[int] + self.blockMap = None # type: List[int] + self.dimMetadata = None # type: List[DimensionMetadataT] + + @classmethod + def InitFromBuf(cls, buf, pos): + sparsityParameters = SparsityParameters() + sparsityParameters.Init(buf, pos) + return cls.InitFromObj(sparsityParameters) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, sparsityParameters): + x = SparsityParametersT() + x._UnPack(sparsityParameters) + return x + + # SparsityParametersT + def _UnPack(self, sparsityParameters): + if sparsityParameters is None: + return + if not sparsityParameters.TraversalOrderIsNone(): + if np is None: + self.traversalOrder = [] + for i in range(sparsityParameters.TraversalOrderLength()): + self.traversalOrder.append(sparsityParameters.TraversalOrder(i)) + else: + self.traversalOrder = sparsityParameters.TraversalOrderAsNumpy() + if not sparsityParameters.BlockMapIsNone(): + if np is None: + self.blockMap = [] + for i in range(sparsityParameters.BlockMapLength()): + self.blockMap.append(sparsityParameters.BlockMap(i)) + else: + self.blockMap = sparsityParameters.BlockMapAsNumpy() + if not sparsityParameters.DimMetadataIsNone(): + self.dimMetadata = [] + for i in range(sparsityParameters.DimMetadataLength()): + if sparsityParameters.DimMetadata(i) is None: + self.dimMetadata.append(None) + else: + dimensionMetadata_ = DimensionMetadataT.InitFromObj(sparsityParameters.DimMetadata(i)) + self.dimMetadata.append(dimensionMetadata_) + + # SparsityParametersT + def Pack(self, builder): + if self.traversalOrder is not None: + if np is not None and type(self.traversalOrder) is np.ndarray: + traversalOrder = builder.CreateNumpyVector(self.traversalOrder) + else: + SparsityParametersStartTraversalOrderVector(builder, len(self.traversalOrder)) + for i in reversed(range(len(self.traversalOrder))): + builder.PrependInt32(self.traversalOrder[i]) + traversalOrder = builder.EndVector() + if self.blockMap is not None: + if np is not None and type(self.blockMap) is np.ndarray: + blockMap = builder.CreateNumpyVector(self.blockMap) + else: + SparsityParametersStartBlockMapVector(builder, len(self.blockMap)) + for i in reversed(range(len(self.blockMap))): + builder.PrependInt32(self.blockMap[i]) + blockMap = builder.EndVector() + if self.dimMetadata is not None: + dimMetadatalist = [] + for i in range(len(self.dimMetadata)): + dimMetadatalist.append(self.dimMetadata[i].Pack(builder)) + SparsityParametersStartDimMetadataVector(builder, len(self.dimMetadata)) + for i in reversed(range(len(self.dimMetadata))): + builder.PrependUOffsetTRelative(dimMetadatalist[i]) + dimMetadata = builder.EndVector() + SparsityParametersStart(builder) + if self.traversalOrder is not None: + SparsityParametersAddTraversalOrder(builder, traversalOrder) + if self.blockMap is not None: + SparsityParametersAddBlockMap(builder, blockMap) + if self.dimMetadata is not None: + SparsityParametersAddDimMetadata(builder, dimMetadata) + sparsityParameters = SparsityParametersEnd(builder) 
+ return sparsityParameters + + +class VariantSubType(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = VariantSubType() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsVariantSubType(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def VariantSubTypeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # VariantSubType + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # VariantSubType + def Shape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # VariantSubType + def ShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # VariantSubType + def ShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # VariantSubType + def ShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # VariantSubType + def Type(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # VariantSubType + def HasRank(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def VariantSubTypeStart(builder): + builder.StartObject(3) + +def VariantSubTypeAddShape(builder, shape): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) + +def VariantSubTypeStartShapeVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def VariantSubTypeAddType(builder, type): + builder.PrependInt8Slot(1, type, 0) + +def VariantSubTypeAddHasRank(builder, hasRank): + builder.PrependBoolSlot(2, hasRank, 0) + +def VariantSubTypeEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class VariantSubTypeT(object): + + # VariantSubTypeT + def __init__(self): + self.shape = None # type: List[int] + self.type = 0 # type: int + self.hasRank = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + variantSubType = VariantSubType() + variantSubType.Init(buf, pos) + return cls.InitFromObj(variantSubType) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, variantSubType): + x = VariantSubTypeT() + x._UnPack(variantSubType) + return x + + # VariantSubTypeT + def _UnPack(self, variantSubType): + if variantSubType is None: + return + if not variantSubType.ShapeIsNone(): + if np is None: + self.shape = [] + for i in range(variantSubType.ShapeLength()): + self.shape.append(variantSubType.Shape(i)) + else: + self.shape = variantSubType.ShapeAsNumpy() + self.type 
= variantSubType.Type() + self.hasRank = variantSubType.HasRank() + + # VariantSubTypeT + def Pack(self, builder): + if self.shape is not None: + if np is not None and type(self.shape) is np.ndarray: + shape = builder.CreateNumpyVector(self.shape) + else: + VariantSubTypeStartShapeVector(builder, len(self.shape)) + for i in reversed(range(len(self.shape))): + builder.PrependInt32(self.shape[i]) + shape = builder.EndVector() + VariantSubTypeStart(builder) + if self.shape is not None: + VariantSubTypeAddShape(builder, shape) + VariantSubTypeAddType(builder, self.type) + VariantSubTypeAddHasRank(builder, self.hasRank) + variantSubType = VariantSubTypeEnd(builder) + return variantSubType + + +class Tensor(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Tensor() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsTensor(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def TensorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Tensor + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Tensor + def Shape(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Tensor + def ShapeAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Tensor + def ShapeLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Tensor + def ShapeIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # Tensor + def Type(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Tensor + def Buffer(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + + # Tensor + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Tensor + def Quantization(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + x = self._tab.Indirect(o + self._tab.Pos) + obj = QuantizationParameters() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Tensor + def IsVariable(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # Tensor + def Sparsity(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + x = self._tab.Indirect(o + self._tab.Pos) + obj = SparsityParameters() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Tensor + def ShapeSignature(self, j): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Tensor + def ShapeSignatureAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Tensor + def ShapeSignatureLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Tensor + def ShapeSignatureIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 + + # Tensor + def HasRank(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # Tensor + def VariantTensors(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = VariantSubType() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Tensor + def VariantTensorsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Tensor + def VariantTensorsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) + return o == 0 + +def TensorStart(builder): + builder.StartObject(10) + +def TensorAddShape(builder, shape): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) + +def TensorStartShapeVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def TensorAddType(builder, type): + builder.PrependInt8Slot(1, type, 0) + +def TensorAddBuffer(builder, buffer): + builder.PrependUint32Slot(2, buffer, 0) + +def TensorAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def TensorAddQuantization(builder, quantization): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(quantization), 0) + +def TensorAddIsVariable(builder, isVariable): + builder.PrependBoolSlot(5, isVariable, 0) + +def TensorAddSparsity(builder, sparsity): + builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(sparsity), 0) + +def TensorAddShapeSignature(builder, shapeSignature): + builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(shapeSignature), 0) + +def TensorStartShapeSignatureVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def TensorAddHasRank(builder, hasRank): + builder.PrependBoolSlot(8, hasRank, 0) + +def TensorAddVariantTensors(builder, variantTensors): + builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(variantTensors), 0) + +def TensorStartVariantTensorsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def TensorEnd(builder): + return builder.EndObject() + + +try: + from typing import List, Optional +except: + pass + +class TensorT(object): + + # TensorT + def __init__(self): + self.shape = None # type: List[int] + self.type = 0 # type: int + self.buffer = 0 # type: int + self.name = None # type: str + 
self.quantization = None # type: Optional[QuantizationParametersT] + self.isVariable = False # type: bool + self.sparsity = None # type: Optional[SparsityParametersT] + self.shapeSignature = None # type: List[int] + self.hasRank = False # type: bool + self.variantTensors = None # type: List[VariantSubTypeT] + + @classmethod + def InitFromBuf(cls, buf, pos): + tensor = Tensor() + tensor.Init(buf, pos) + return cls.InitFromObj(tensor) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, tensor): + x = TensorT() + x._UnPack(tensor) + return x + + # TensorT + def _UnPack(self, tensor): + if tensor is None: + return + if not tensor.ShapeIsNone(): + if np is None: + self.shape = [] + for i in range(tensor.ShapeLength()): + self.shape.append(tensor.Shape(i)) + else: + self.shape = tensor.ShapeAsNumpy() + self.type = tensor.Type() + self.buffer = tensor.Buffer() + self.name = tensor.Name() + if tensor.Quantization() is not None: + self.quantization = QuantizationParametersT.InitFromObj(tensor.Quantization()) + self.isVariable = tensor.IsVariable() + if tensor.Sparsity() is not None: + self.sparsity = SparsityParametersT.InitFromObj(tensor.Sparsity()) + if not tensor.ShapeSignatureIsNone(): + if np is None: + self.shapeSignature = [] + for i in range(tensor.ShapeSignatureLength()): + self.shapeSignature.append(tensor.ShapeSignature(i)) + else: + self.shapeSignature = tensor.ShapeSignatureAsNumpy() + self.hasRank = tensor.HasRank() + if not tensor.VariantTensorsIsNone(): + self.variantTensors = [] + for i in range(tensor.VariantTensorsLength()): + if tensor.VariantTensors(i) is None: + self.variantTensors.append(None) + else: + variantSubType_ = VariantSubTypeT.InitFromObj(tensor.VariantTensors(i)) + self.variantTensors.append(variantSubType_) + + # TensorT + def Pack(self, builder): + if self.shape is not None: + if np is not None and type(self.shape) is np.ndarray: + shape = builder.CreateNumpyVector(self.shape) + else: + TensorStartShapeVector(builder, len(self.shape)) + for i in reversed(range(len(self.shape))): + builder.PrependInt32(self.shape[i]) + shape = builder.EndVector() + if self.name is not None: + name = builder.CreateString(self.name) + if self.quantization is not None: + quantization = self.quantization.Pack(builder) + if self.sparsity is not None: + sparsity = self.sparsity.Pack(builder) + if self.shapeSignature is not None: + if np is not None and type(self.shapeSignature) is np.ndarray: + shapeSignature = builder.CreateNumpyVector(self.shapeSignature) + else: + TensorStartShapeSignatureVector(builder, len(self.shapeSignature)) + for i in reversed(range(len(self.shapeSignature))): + builder.PrependInt32(self.shapeSignature[i]) + shapeSignature = builder.EndVector() + if self.variantTensors is not None: + variantTensorslist = [] + for i in range(len(self.variantTensors)): + variantTensorslist.append(self.variantTensors[i].Pack(builder)) + TensorStartVariantTensorsVector(builder, len(self.variantTensors)) + for i in reversed(range(len(self.variantTensors))): + builder.PrependUOffsetTRelative(variantTensorslist[i]) + variantTensors = builder.EndVector() + TensorStart(builder) + if self.shape is not None: + TensorAddShape(builder, shape) + TensorAddType(builder, self.type) + TensorAddBuffer(builder, self.buffer) + if self.name is not None: + TensorAddName(builder, name) + if self.quantization is not None: + TensorAddQuantization(builder, 
quantization) + TensorAddIsVariable(builder, self.isVariable) + if self.sparsity is not None: + TensorAddSparsity(builder, sparsity) + if self.shapeSignature is not None: + TensorAddShapeSignature(builder, shapeSignature) + TensorAddHasRank(builder, self.hasRank) + if self.variantTensors is not None: + TensorAddVariantTensors(builder, variantTensors) + tensor = TensorEnd(builder) + return tensor + + +class StablehloGatherOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = StablehloGatherOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsStablehloGatherOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def StablehloGatherOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # StablehloGatherOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # StablehloGatherOptions + def OffsetDims(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloGatherOptions + def OffsetDimsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloGatherOptions + def OffsetDimsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloGatherOptions + def OffsetDimsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # StablehloGatherOptions + def CollapsedSliceDims(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloGatherOptions + def CollapsedSliceDimsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloGatherOptions + def CollapsedSliceDimsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloGatherOptions + def CollapsedSliceDimsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # StablehloGatherOptions + def StartIndexMap(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloGatherOptions + def StartIndexMapAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloGatherOptions + def StartIndexMapLength(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloGatherOptions + def StartIndexMapIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # StablehloGatherOptions + def IndexVectorDim(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # StablehloGatherOptions + def SliceSizes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloGatherOptions + def SliceSizesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloGatherOptions + def SliceSizesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloGatherOptions + def SliceSizesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # StablehloGatherOptions + def IndicesAreSorted(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def StablehloGatherOptionsStart(builder): + builder.StartObject(6) + +def StablehloGatherOptionsAddOffsetDims(builder, offsetDims): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(offsetDims), 0) + +def StablehloGatherOptionsStartOffsetDimsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StablehloGatherOptionsAddCollapsedSliceDims(builder, collapsedSliceDims): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(collapsedSliceDims), 0) + +def StablehloGatherOptionsStartCollapsedSliceDimsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StablehloGatherOptionsAddStartIndexMap(builder, startIndexMap): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(startIndexMap), 0) + +def StablehloGatherOptionsStartStartIndexMapVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StablehloGatherOptionsAddIndexVectorDim(builder, indexVectorDim): + builder.PrependInt64Slot(3, indexVectorDim, 0) + +def StablehloGatherOptionsAddSliceSizes(builder, sliceSizes): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(sliceSizes), 0) + +def StablehloGatherOptionsStartSliceSizesVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StablehloGatherOptionsAddIndicesAreSorted(builder, indicesAreSorted): + builder.PrependBoolSlot(5, indicesAreSorted, 0) + +def StablehloGatherOptionsEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class StablehloGatherOptionsT(object): + + # StablehloGatherOptionsT + def __init__(self): + self.offsetDims = None # type: List[int] + self.collapsedSliceDims = None # type: List[int] + self.startIndexMap = None # type: List[int] + self.indexVectorDim = 0 # type: int + self.sliceSizes = 
None # type: List[int] + self.indicesAreSorted = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + stablehloGatherOptions = StablehloGatherOptions() + stablehloGatherOptions.Init(buf, pos) + return cls.InitFromObj(stablehloGatherOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, stablehloGatherOptions): + x = StablehloGatherOptionsT() + x._UnPack(stablehloGatherOptions) + return x + + # StablehloGatherOptionsT + def _UnPack(self, stablehloGatherOptions): + if stablehloGatherOptions is None: + return + if not stablehloGatherOptions.OffsetDimsIsNone(): + if np is None: + self.offsetDims = [] + for i in range(stablehloGatherOptions.OffsetDimsLength()): + self.offsetDims.append(stablehloGatherOptions.OffsetDims(i)) + else: + self.offsetDims = stablehloGatherOptions.OffsetDimsAsNumpy() + if not stablehloGatherOptions.CollapsedSliceDimsIsNone(): + if np is None: + self.collapsedSliceDims = [] + for i in range(stablehloGatherOptions.CollapsedSliceDimsLength()): + self.collapsedSliceDims.append(stablehloGatherOptions.CollapsedSliceDims(i)) + else: + self.collapsedSliceDims = stablehloGatherOptions.CollapsedSliceDimsAsNumpy() + if not stablehloGatherOptions.StartIndexMapIsNone(): + if np is None: + self.startIndexMap = [] + for i in range(stablehloGatherOptions.StartIndexMapLength()): + self.startIndexMap.append(stablehloGatherOptions.StartIndexMap(i)) + else: + self.startIndexMap = stablehloGatherOptions.StartIndexMapAsNumpy() + self.indexVectorDim = stablehloGatherOptions.IndexVectorDim() + if not stablehloGatherOptions.SliceSizesIsNone(): + if np is None: + self.sliceSizes = [] + for i in range(stablehloGatherOptions.SliceSizesLength()): + self.sliceSizes.append(stablehloGatherOptions.SliceSizes(i)) + else: + self.sliceSizes = stablehloGatherOptions.SliceSizesAsNumpy() + self.indicesAreSorted = stablehloGatherOptions.IndicesAreSorted() + + # StablehloGatherOptionsT + def Pack(self, builder): + if self.offsetDims is not None: + if np is not None and type(self.offsetDims) is np.ndarray: + offsetDims = builder.CreateNumpyVector(self.offsetDims) + else: + StablehloGatherOptionsStartOffsetDimsVector(builder, len(self.offsetDims)) + for i in reversed(range(len(self.offsetDims))): + builder.PrependInt64(self.offsetDims[i]) + offsetDims = builder.EndVector() + if self.collapsedSliceDims is not None: + if np is not None and type(self.collapsedSliceDims) is np.ndarray: + collapsedSliceDims = builder.CreateNumpyVector(self.collapsedSliceDims) + else: + StablehloGatherOptionsStartCollapsedSliceDimsVector(builder, len(self.collapsedSliceDims)) + for i in reversed(range(len(self.collapsedSliceDims))): + builder.PrependInt64(self.collapsedSliceDims[i]) + collapsedSliceDims = builder.EndVector() + if self.startIndexMap is not None: + if np is not None and type(self.startIndexMap) is np.ndarray: + startIndexMap = builder.CreateNumpyVector(self.startIndexMap) + else: + StablehloGatherOptionsStartStartIndexMapVector(builder, len(self.startIndexMap)) + for i in reversed(range(len(self.startIndexMap))): + builder.PrependInt64(self.startIndexMap[i]) + startIndexMap = builder.EndVector() + if self.sliceSizes is not None: + if np is not None and type(self.sliceSizes) is np.ndarray: + sliceSizes = builder.CreateNumpyVector(self.sliceSizes) + else: + StablehloGatherOptionsStartSliceSizesVector(builder, len(self.sliceSizes)) + for i in 
reversed(range(len(self.sliceSizes))): + builder.PrependInt64(self.sliceSizes[i]) + sliceSizes = builder.EndVector() + StablehloGatherOptionsStart(builder) + if self.offsetDims is not None: + StablehloGatherOptionsAddOffsetDims(builder, offsetDims) + if self.collapsedSliceDims is not None: + StablehloGatherOptionsAddCollapsedSliceDims(builder, collapsedSliceDims) + if self.startIndexMap is not None: + StablehloGatherOptionsAddStartIndexMap(builder, startIndexMap) + StablehloGatherOptionsAddIndexVectorDim(builder, self.indexVectorDim) + if self.sliceSizes is not None: + StablehloGatherOptionsAddSliceSizes(builder, sliceSizes) + StablehloGatherOptionsAddIndicesAreSorted(builder, self.indicesAreSorted) + stablehloGatherOptions = StablehloGatherOptionsEnd(builder) + return stablehloGatherOptions + + +class StablehloTransposeOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = StablehloTransposeOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsStablehloTransposeOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def StablehloTransposeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # StablehloTransposeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # StablehloTransposeOptions + def Permutation(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloTransposeOptions + def PermutationAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloTransposeOptions + def PermutationLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloTransposeOptions + def PermutationIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def StablehloTransposeOptionsStart(builder): + builder.StartObject(1) + +def StablehloTransposeOptionsAddPermutation(builder, permutation): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(permutation), 0) + +def StablehloTransposeOptionsStartPermutationVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def StablehloTransposeOptionsEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class StablehloTransposeOptionsT(object): + + # StablehloTransposeOptionsT + def __init__(self): + self.permutation = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + stablehloTransposeOptions = StablehloTransposeOptions() + stablehloTransposeOptions.Init(buf, pos) + return cls.InitFromObj(stablehloTransposeOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, stablehloTransposeOptions): + x = 
StablehloTransposeOptionsT() + x._UnPack(stablehloTransposeOptions) + return x + + # StablehloTransposeOptionsT + def _UnPack(self, stablehloTransposeOptions): + if stablehloTransposeOptions is None: + return + if not stablehloTransposeOptions.PermutationIsNone(): + if np is None: + self.permutation = [] + for i in range(stablehloTransposeOptions.PermutationLength()): + self.permutation.append(stablehloTransposeOptions.Permutation(i)) + else: + self.permutation = stablehloTransposeOptions.PermutationAsNumpy() + + # StablehloTransposeOptionsT + def Pack(self, builder): + if self.permutation is not None: + if np is not None and type(self.permutation) is np.ndarray: + permutation = builder.CreateNumpyVector(self.permutation) + else: + StablehloTransposeOptionsStartPermutationVector(builder, len(self.permutation)) + for i in reversed(range(len(self.permutation))): + builder.PrependInt64(self.permutation[i]) + permutation = builder.EndVector() + StablehloTransposeOptionsStart(builder) + if self.permutation is not None: + StablehloTransposeOptionsAddPermutation(builder, permutation) + stablehloTransposeOptions = StablehloTransposeOptionsEnd(builder) + return stablehloTransposeOptions + + +class StablehloDotGeneralOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = StablehloDotGeneralOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsStablehloDotGeneralOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def StablehloDotGeneralOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # StablehloDotGeneralOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # StablehloDotGeneralOptions + def LhsBatchingDimensions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloDotGeneralOptions + def LhsBatchingDimensionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloDotGeneralOptions + def LhsBatchingDimensionsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloDotGeneralOptions + def LhsBatchingDimensionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # StablehloDotGeneralOptions + def RhsBatchingDimensions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloDotGeneralOptions + def RhsBatchingDimensionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloDotGeneralOptions + def RhsBatchingDimensionsLength(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloDotGeneralOptions + def RhsBatchingDimensionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # StablehloDotGeneralOptions + def LhsContractingDimensions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloDotGeneralOptions + def LhsContractingDimensionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloDotGeneralOptions + def LhsContractingDimensionsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloDotGeneralOptions + def LhsContractingDimensionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # StablehloDotGeneralOptions + def RhsContractingDimensions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8)) + return 0 + + # StablehloDotGeneralOptions + def RhsContractingDimensionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o) + return 0 + + # StablehloDotGeneralOptions + def RhsContractingDimensionsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloDotGeneralOptions + def RhsContractingDimensionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # StablehloDotGeneralOptions + def PrecisionConfig(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # StablehloDotGeneralOptions + def PrecisionConfigAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o) + return 0 + + # StablehloDotGeneralOptions + def PrecisionConfigLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # StablehloDotGeneralOptions + def PrecisionConfigIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + +def StablehloDotGeneralOptionsStart(builder): + builder.StartObject(5) + +def StablehloDotGeneralOptionsAddLhsBatchingDimensions(builder, lhsBatchingDimensions): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(lhsBatchingDimensions), 0) + +def StablehloDotGeneralOptionsStartLhsBatchingDimensionsVector(builder, numElems): + return builder.StartVector(8, numElems, 8) + +def 
+def StablehloDotGeneralOptionsAddRhsBatchingDimensions(builder, rhsBatchingDimensions):
+    builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(rhsBatchingDimensions), 0)
+
+def StablehloDotGeneralOptionsStartRhsBatchingDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloDotGeneralOptionsAddLhsContractingDimensions(builder, lhsContractingDimensions):
+    builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(lhsContractingDimensions), 0)
+
+def StablehloDotGeneralOptionsStartLhsContractingDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloDotGeneralOptionsAddRhsContractingDimensions(builder, rhsContractingDimensions):
+    builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(rhsContractingDimensions), 0)
+
+def StablehloDotGeneralOptionsStartRhsContractingDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloDotGeneralOptionsAddPrecisionConfig(builder, precisionConfig):
+    builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(precisionConfig), 0)
+
+def StablehloDotGeneralOptionsStartPrecisionConfigVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
+
+def StablehloDotGeneralOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloDotGeneralOptionsT(object):
+
+    # StablehloDotGeneralOptionsT
+    def __init__(self):
+        self.lhsBatchingDimensions = None  # type: List[int]
+        self.rhsBatchingDimensions = None  # type: List[int]
+        self.lhsContractingDimensions = None  # type: List[int]
+        self.rhsContractingDimensions = None  # type: List[int]
+        self.precisionConfig = None  # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloDotGeneralOptions = StablehloDotGeneralOptions()
+        stablehloDotGeneralOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloDotGeneralOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloDotGeneralOptions):
+        x = StablehloDotGeneralOptionsT()
+        x._UnPack(stablehloDotGeneralOptions)
+        return x
+
+    # StablehloDotGeneralOptionsT
+    def _UnPack(self, stablehloDotGeneralOptions):
+        if stablehloDotGeneralOptions is None:
+            return
+        if not stablehloDotGeneralOptions.LhsBatchingDimensionsIsNone():
+            if np is None:
+                self.lhsBatchingDimensions = []
+                for i in range(stablehloDotGeneralOptions.LhsBatchingDimensionsLength()):
+                    self.lhsBatchingDimensions.append(stablehloDotGeneralOptions.LhsBatchingDimensions(i))
+            else:
+                self.lhsBatchingDimensions = stablehloDotGeneralOptions.LhsBatchingDimensionsAsNumpy()
+        if not stablehloDotGeneralOptions.RhsBatchingDimensionsIsNone():
+            if np is None:
+                self.rhsBatchingDimensions = []
+                for i in range(stablehloDotGeneralOptions.RhsBatchingDimensionsLength()):
+                    self.rhsBatchingDimensions.append(stablehloDotGeneralOptions.RhsBatchingDimensions(i))
+            else:
+                self.rhsBatchingDimensions = stablehloDotGeneralOptions.RhsBatchingDimensionsAsNumpy()
+        if not stablehloDotGeneralOptions.LhsContractingDimensionsIsNone():
+            if np is None:
+                self.lhsContractingDimensions = []
+                for i in range(stablehloDotGeneralOptions.LhsContractingDimensionsLength()):
+                    self.lhsContractingDimensions.append(stablehloDotGeneralOptions.LhsContractingDimensions(i))
+            else:
+                self.lhsContractingDimensions = stablehloDotGeneralOptions.LhsContractingDimensionsAsNumpy()
+        if not stablehloDotGeneralOptions.RhsContractingDimensionsIsNone():
+            if np is None:
+                self.rhsContractingDimensions = []
+                for i in range(stablehloDotGeneralOptions.RhsContractingDimensionsLength()):
+                    self.rhsContractingDimensions.append(stablehloDotGeneralOptions.RhsContractingDimensions(i))
+            else:
+                self.rhsContractingDimensions = stablehloDotGeneralOptions.RhsContractingDimensionsAsNumpy()
+        if not stablehloDotGeneralOptions.PrecisionConfigIsNone():
+            if np is None:
+                self.precisionConfig = []
+                for i in range(stablehloDotGeneralOptions.PrecisionConfigLength()):
+                    self.precisionConfig.append(stablehloDotGeneralOptions.PrecisionConfig(i))
+            else:
+                self.precisionConfig = stablehloDotGeneralOptions.PrecisionConfigAsNumpy()
+
+    # StablehloDotGeneralOptionsT
+    def Pack(self, builder):
+        if self.lhsBatchingDimensions is not None:
+            if np is not None and type(self.lhsBatchingDimensions) is np.ndarray:
+                lhsBatchingDimensions = builder.CreateNumpyVector(self.lhsBatchingDimensions)
+            else:
+                StablehloDotGeneralOptionsStartLhsBatchingDimensionsVector(builder, len(self.lhsBatchingDimensions))
+                for i in reversed(range(len(self.lhsBatchingDimensions))):
+                    builder.PrependInt64(self.lhsBatchingDimensions[i])
+                lhsBatchingDimensions = builder.EndVector()
+        if self.rhsBatchingDimensions is not None:
+            if np is not None and type(self.rhsBatchingDimensions) is np.ndarray:
+                rhsBatchingDimensions = builder.CreateNumpyVector(self.rhsBatchingDimensions)
+            else:
+                StablehloDotGeneralOptionsStartRhsBatchingDimensionsVector(builder, len(self.rhsBatchingDimensions))
+                for i in reversed(range(len(self.rhsBatchingDimensions))):
+                    builder.PrependInt64(self.rhsBatchingDimensions[i])
+                rhsBatchingDimensions = builder.EndVector()
+        if self.lhsContractingDimensions is not None:
+            if np is not None and type(self.lhsContractingDimensions) is np.ndarray:
+                lhsContractingDimensions = builder.CreateNumpyVector(self.lhsContractingDimensions)
+            else:
+                StablehloDotGeneralOptionsStartLhsContractingDimensionsVector(builder, len(self.lhsContractingDimensions))
+                for i in reversed(range(len(self.lhsContractingDimensions))):
+                    builder.PrependInt64(self.lhsContractingDimensions[i])
+                lhsContractingDimensions = builder.EndVector()
+        if self.rhsContractingDimensions is not None:
+            if np is not None and type(self.rhsContractingDimensions) is np.ndarray:
+                rhsContractingDimensions = builder.CreateNumpyVector(self.rhsContractingDimensions)
+            else:
+                StablehloDotGeneralOptionsStartRhsContractingDimensionsVector(builder, len(self.rhsContractingDimensions))
+                for i in reversed(range(len(self.rhsContractingDimensions))):
+                    builder.PrependInt64(self.rhsContractingDimensions[i])
+                rhsContractingDimensions = builder.EndVector()
+        if self.precisionConfig is not None:
+            if np is not None and type(self.precisionConfig) is np.ndarray:
+                precisionConfig = builder.CreateNumpyVector(self.precisionConfig)
+            else:
+                StablehloDotGeneralOptionsStartPrecisionConfigVector(builder, len(self.precisionConfig))
+                for i in reversed(range(len(self.precisionConfig))):
+                    builder.PrependUint32(self.precisionConfig[i])
+                precisionConfig = builder.EndVector()
+        StablehloDotGeneralOptionsStart(builder)
+        if self.lhsBatchingDimensions is not None:
+            StablehloDotGeneralOptionsAddLhsBatchingDimensions(builder, lhsBatchingDimensions)
+        if self.rhsBatchingDimensions is not None:
+            StablehloDotGeneralOptionsAddRhsBatchingDimensions(builder, rhsBatchingDimensions)
+        if self.lhsContractingDimensions is not None:
+            StablehloDotGeneralOptionsAddLhsContractingDimensions(builder, lhsContractingDimensions)
+        if self.rhsContractingDimensions is not None:
+            StablehloDotGeneralOptionsAddRhsContractingDimensions(builder, rhsContractingDimensions)
+        if self.precisionConfig is not None:
+            StablehloDotGeneralOptionsAddPrecisionConfig(builder, precisionConfig)
+        stablehloDotGeneralOptions = StablehloDotGeneralOptionsEnd(builder)
+        return stablehloDotGeneralOptions
+
+
+class StablehloReduceWindowOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloReduceWindowOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloReduceWindowOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloReduceWindowOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloReduceWindowOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloReduceWindowOptions
+    def WindowDimensions(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowDimensionsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowDimensionsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowDimensionsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+    # StablehloReduceWindowOptions
+    def WindowStrides(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowStridesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowStridesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowStridesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        return o == 0
+
+    # StablehloReduceWindowOptions
+    def BaseDilations(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloReduceWindowOptions
+    def BaseDilationsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def BaseDilationsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def BaseDilationsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        return o == 0
+
+    # StablehloReduceWindowOptions
+    def WindowDilations(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowDilationsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowDilationsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def WindowDilationsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        return o == 0
+
+    # StablehloReduceWindowOptions
+    def Padding(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloReduceWindowOptions
+    def PaddingAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def PaddingLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloReduceWindowOptions
+    def PaddingIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        return o == 0
+
+    # StablehloReduceWindowOptions
+    def BodySubgraphIndex(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloReduceWindowOptionsStart(builder):
+    builder.StartObject(6)
+
+def StablehloReduceWindowOptionsAddWindowDimensions(builder, windowDimensions):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(windowDimensions), 0)
+
+def StablehloReduceWindowOptionsStartWindowDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloReduceWindowOptionsAddWindowStrides(builder, windowStrides):
+    builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(windowStrides), 0)
+
+def StablehloReduceWindowOptionsStartWindowStridesVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloReduceWindowOptionsAddBaseDilations(builder, baseDilations):
+    builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(baseDilations), 0)
+
+def StablehloReduceWindowOptionsStartBaseDilationsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloReduceWindowOptionsAddWindowDilations(builder, windowDilations):
+    builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(windowDilations), 0)
+
+def StablehloReduceWindowOptionsStartWindowDilationsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloReduceWindowOptionsAddPadding(builder, padding):
+    builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(padding), 0)
+
+def StablehloReduceWindowOptionsStartPaddingVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloReduceWindowOptionsAddBodySubgraphIndex(builder, bodySubgraphIndex):
+    builder.PrependInt32Slot(5, bodySubgraphIndex, 0)
+
+def StablehloReduceWindowOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloReduceWindowOptionsT(object):
+
+    # StablehloReduceWindowOptionsT
+    def __init__(self):
+        self.windowDimensions = None  # type: List[int]
+        self.windowStrides = None  # type: List[int]
+        self.baseDilations = None  # type: List[int]
+        self.windowDilations = None  # type: List[int]
+        self.padding = None  # type: List[int]
+        self.bodySubgraphIndex = 0  # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloReduceWindowOptions = StablehloReduceWindowOptions()
+        stablehloReduceWindowOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloReduceWindowOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloReduceWindowOptions):
+        x = StablehloReduceWindowOptionsT()
+        x._UnPack(stablehloReduceWindowOptions)
+        return x
+
+    # StablehloReduceWindowOptionsT
+    def _UnPack(self, stablehloReduceWindowOptions):
+        if stablehloReduceWindowOptions is None:
+            return
+        if not stablehloReduceWindowOptions.WindowDimensionsIsNone():
+            if np is None:
+                self.windowDimensions = []
+                for i in range(stablehloReduceWindowOptions.WindowDimensionsLength()):
+                    self.windowDimensions.append(stablehloReduceWindowOptions.WindowDimensions(i))
+            else:
+                self.windowDimensions = stablehloReduceWindowOptions.WindowDimensionsAsNumpy()
+        if not stablehloReduceWindowOptions.WindowStridesIsNone():
+            if np is None:
+                self.windowStrides = []
+                for i in range(stablehloReduceWindowOptions.WindowStridesLength()):
+                    self.windowStrides.append(stablehloReduceWindowOptions.WindowStrides(i))
+            else:
+                self.windowStrides = stablehloReduceWindowOptions.WindowStridesAsNumpy()
+        if not stablehloReduceWindowOptions.BaseDilationsIsNone():
+            if np is None:
+                self.baseDilations = []
+                for i in range(stablehloReduceWindowOptions.BaseDilationsLength()):
+                    self.baseDilations.append(stablehloReduceWindowOptions.BaseDilations(i))
+            else:
+                self.baseDilations = stablehloReduceWindowOptions.BaseDilationsAsNumpy()
+        if not stablehloReduceWindowOptions.WindowDilationsIsNone():
+            if np is None:
+                self.windowDilations = []
+                for i in range(stablehloReduceWindowOptions.WindowDilationsLength()):
+                    self.windowDilations.append(stablehloReduceWindowOptions.WindowDilations(i))
+            else:
+                self.windowDilations = stablehloReduceWindowOptions.WindowDilationsAsNumpy()
+        if not stablehloReduceWindowOptions.PaddingIsNone():
+            if np is None:
+                self.padding = []
+                for i in range(stablehloReduceWindowOptions.PaddingLength()):
+                    self.padding.append(stablehloReduceWindowOptions.Padding(i))
+            else:
+                self.padding = stablehloReduceWindowOptions.PaddingAsNumpy()
+        self.bodySubgraphIndex = stablehloReduceWindowOptions.BodySubgraphIndex()
+
+    # StablehloReduceWindowOptionsT
+    def Pack(self, builder):
+        if self.windowDimensions is not None:
+            if np is not None and type(self.windowDimensions) is np.ndarray:
+                windowDimensions = builder.CreateNumpyVector(self.windowDimensions)
+            else:
+                StablehloReduceWindowOptionsStartWindowDimensionsVector(builder, len(self.windowDimensions))
+                for i in reversed(range(len(self.windowDimensions))):
+                    builder.PrependInt64(self.windowDimensions[i])
+                windowDimensions = builder.EndVector()
+        if self.windowStrides is not None:
+            if np is not None and type(self.windowStrides) is np.ndarray:
+                windowStrides = builder.CreateNumpyVector(self.windowStrides)
+            else:
+                StablehloReduceWindowOptionsStartWindowStridesVector(builder, len(self.windowStrides))
+                for i in reversed(range(len(self.windowStrides))):
+                    builder.PrependInt64(self.windowStrides[i])
+                windowStrides = builder.EndVector()
+        if self.baseDilations is not None:
+            if np is not None and type(self.baseDilations) is np.ndarray:
+                baseDilations = builder.CreateNumpyVector(self.baseDilations)
+            else:
+                StablehloReduceWindowOptionsStartBaseDilationsVector(builder, len(self.baseDilations))
+                for i in reversed(range(len(self.baseDilations))):
+                    builder.PrependInt64(self.baseDilations[i])
+                baseDilations = builder.EndVector()
+        if self.windowDilations is not None:
+            if np is not None and type(self.windowDilations) is np.ndarray:
+                windowDilations = builder.CreateNumpyVector(self.windowDilations)
+            else:
+                StablehloReduceWindowOptionsStartWindowDilationsVector(builder, len(self.windowDilations))
+                for i in reversed(range(len(self.windowDilations))):
+                    builder.PrependInt64(self.windowDilations[i])
+                windowDilations = builder.EndVector()
+        if self.padding is not None:
+            if np is not None and type(self.padding) is np.ndarray:
+                padding = builder.CreateNumpyVector(self.padding)
+            else:
+                StablehloReduceWindowOptionsStartPaddingVector(builder, len(self.padding))
+                for i in reversed(range(len(self.padding))):
+                    builder.PrependInt64(self.padding[i])
+                padding = builder.EndVector()
+        StablehloReduceWindowOptionsStart(builder)
+        if self.windowDimensions is not None:
+            StablehloReduceWindowOptionsAddWindowDimensions(builder, windowDimensions)
+        if self.windowStrides is not None:
+            StablehloReduceWindowOptionsAddWindowStrides(builder, windowStrides)
+        if self.baseDilations is not None:
+            StablehloReduceWindowOptionsAddBaseDilations(builder, baseDilations)
+        if self.windowDilations is not None:
+            StablehloReduceWindowOptionsAddWindowDilations(builder, windowDilations)
+        if self.padding is not None:
+            StablehloReduceWindowOptionsAddPadding(builder, padding)
+        StablehloReduceWindowOptionsAddBodySubgraphIndex(builder, self.bodySubgraphIndex)
+        stablehloReduceWindowOptions = StablehloReduceWindowOptionsEnd(builder)
+        return stablehloReduceWindowOptions
+
+
+class StablehloWhileOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloWhileOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloWhileOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloWhileOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloWhileOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloWhileOptions
+    def CondSubgraphIndex(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloWhileOptions
+    def BodySubgraphIndex(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloWhileOptionsStart(builder):
+    builder.StartObject(2)
+
+def StablehloWhileOptionsAddCondSubgraphIndex(builder, condSubgraphIndex):
+    builder.PrependInt32Slot(0, condSubgraphIndex, 0)
+
+def StablehloWhileOptionsAddBodySubgraphIndex(builder, bodySubgraphIndex):
+    builder.PrependInt32Slot(1, bodySubgraphIndex, 0)
+
+def StablehloWhileOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class StablehloWhileOptionsT(object):
+
+    # StablehloWhileOptionsT
+    def __init__(self):
+        self.condSubgraphIndex = 0  # type: int
+        self.bodySubgraphIndex = 0  # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloWhileOptions = StablehloWhileOptions()
+        stablehloWhileOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloWhileOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloWhileOptions):
+        x = StablehloWhileOptionsT()
+        x._UnPack(stablehloWhileOptions)
+        return x
+
+    # StablehloWhileOptionsT
+    def _UnPack(self, stablehloWhileOptions):
+        if stablehloWhileOptions is None:
+            return
+        self.condSubgraphIndex = stablehloWhileOptions.CondSubgraphIndex()
+        self.bodySubgraphIndex = stablehloWhileOptions.BodySubgraphIndex()
+
+    # StablehloWhileOptionsT
+    def Pack(self, builder):
+        StablehloWhileOptionsStart(builder)
+        StablehloWhileOptionsAddCondSubgraphIndex(builder, self.condSubgraphIndex)
+        StablehloWhileOptionsAddBodySubgraphIndex(builder, self.bodySubgraphIndex)
+        stablehloWhileOptions = StablehloWhileOptionsEnd(builder)
+        return stablehloWhileOptions
+
+
+class StablehloSortOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloSortOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloSortOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloSortOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloSortOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloSortOptions
+    def Dimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloSortOptions
+    def IsStable(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+    # StablehloSortOptions
+    def ComparatorSubgraphIndex(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloSortOptionsStart(builder):
+    builder.StartObject(3)
+
+def StablehloSortOptionsAddDimension(builder, dimension):
+    builder.PrependInt64Slot(0, dimension, 0)
+
+def StablehloSortOptionsAddIsStable(builder, isStable):
+    builder.PrependBoolSlot(1, isStable, 0)
+
+def StablehloSortOptionsAddComparatorSubgraphIndex(builder, comparatorSubgraphIndex):
+    builder.PrependInt32Slot(2, comparatorSubgraphIndex, 0)
+
+def StablehloSortOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class StablehloSortOptionsT(object):
+
+    # StablehloSortOptionsT
+    def __init__(self):
+        self.dimension = 0  # type: int
+        self.isStable = False  # type: bool
+        self.comparatorSubgraphIndex = 0  # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloSortOptions = StablehloSortOptions()
+        stablehloSortOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloSortOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloSortOptions):
+        x = StablehloSortOptionsT()
+        x._UnPack(stablehloSortOptions)
+        return x
+
+    # StablehloSortOptionsT
+    def _UnPack(self, stablehloSortOptions):
+        if stablehloSortOptions is None:
+            return
+        self.dimension = stablehloSortOptions.Dimension()
+        self.isStable = stablehloSortOptions.IsStable()
+        self.comparatorSubgraphIndex = stablehloSortOptions.ComparatorSubgraphIndex()
+
+    # StablehloSortOptionsT
+    def Pack(self, builder):
+        StablehloSortOptionsStart(builder)
+        StablehloSortOptionsAddDimension(builder, self.dimension)
+        StablehloSortOptionsAddIsStable(builder, self.isStable)
+        StablehloSortOptionsAddComparatorSubgraphIndex(builder, self.comparatorSubgraphIndex)
+        stablehloSortOptions = StablehloSortOptionsEnd(builder)
+        return stablehloSortOptions
+
+
+class StablehloConcatenateOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloConcatenateOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloConcatenateOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloConcatenateOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloConcatenateOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloConcatenateOptions
+    def Dimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloConcatenateOptionsStart(builder):
+    builder.StartObject(1)
+
+def StablehloConcatenateOptionsAddDimension(builder, dimension):
+    builder.PrependInt64Slot(0, dimension, 0)
+
+def StablehloConcatenateOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class StablehloConcatenateOptionsT(object):
+
+    # StablehloConcatenateOptionsT
+    def __init__(self):
+        self.dimension = 0  # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloConcatenateOptions = StablehloConcatenateOptions()
+        stablehloConcatenateOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloConcatenateOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloConcatenateOptions):
+        x = StablehloConcatenateOptionsT()
+        x._UnPack(stablehloConcatenateOptions)
+        return x
+
+    # StablehloConcatenateOptionsT
+    def _UnPack(self, stablehloConcatenateOptions):
+        if stablehloConcatenateOptions is None:
+            return
+        self.dimension = stablehloConcatenateOptions.Dimension()
+
+    # StablehloConcatenateOptionsT
+    def Pack(self, builder):
+        StablehloConcatenateOptionsStart(builder)
+        StablehloConcatenateOptionsAddDimension(builder, self.dimension)
+        stablehloConcatenateOptions = StablehloConcatenateOptionsEnd(builder)
+        return stablehloConcatenateOptions
+
+
+class StablehloBroadcastInDimOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloBroadcastInDimOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloBroadcastInDimOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloBroadcastInDimOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloBroadcastInDimOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloBroadcastInDimOptions
+    def BroadcastDimensions(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloBroadcastInDimOptions
+    def BroadcastDimensionsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloBroadcastInDimOptions
+    def BroadcastDimensionsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloBroadcastInDimOptions
+    def BroadcastDimensionsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+def StablehloBroadcastInDimOptionsStart(builder):
+    builder.StartObject(1)
+
+def StablehloBroadcastInDimOptionsAddBroadcastDimensions(builder, broadcastDimensions):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(broadcastDimensions), 0)
+
+def StablehloBroadcastInDimOptionsStartBroadcastDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloBroadcastInDimOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloBroadcastInDimOptionsT(object):
+
+    # StablehloBroadcastInDimOptionsT
+    def __init__(self):
+        self.broadcastDimensions = None  # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloBroadcastInDimOptions = StablehloBroadcastInDimOptions()
+        stablehloBroadcastInDimOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloBroadcastInDimOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloBroadcastInDimOptions):
+        x = StablehloBroadcastInDimOptionsT()
+        x._UnPack(stablehloBroadcastInDimOptions)
+        return x
+
+    # StablehloBroadcastInDimOptionsT
+    def _UnPack(self, stablehloBroadcastInDimOptions):
+        if stablehloBroadcastInDimOptions is None:
+            return
+        if not stablehloBroadcastInDimOptions.BroadcastDimensionsIsNone():
+            if np is None:
+                self.broadcastDimensions = []
+                for i in range(stablehloBroadcastInDimOptions.BroadcastDimensionsLength()):
+                    self.broadcastDimensions.append(stablehloBroadcastInDimOptions.BroadcastDimensions(i))
+            else:
+                self.broadcastDimensions = stablehloBroadcastInDimOptions.BroadcastDimensionsAsNumpy()
+
+    # StablehloBroadcastInDimOptionsT
+    def Pack(self, builder):
+        if self.broadcastDimensions is not None:
+            if np is not None and type(self.broadcastDimensions) is np.ndarray:
+                broadcastDimensions = builder.CreateNumpyVector(self.broadcastDimensions)
+            else:
+                StablehloBroadcastInDimOptionsStartBroadcastDimensionsVector(builder, len(self.broadcastDimensions))
+                for i in reversed(range(len(self.broadcastDimensions))):
+                    builder.PrependInt64(self.broadcastDimensions[i])
+                broadcastDimensions = builder.EndVector()
+        StablehloBroadcastInDimOptionsStart(builder)
+        if self.broadcastDimensions is not None:
+            StablehloBroadcastInDimOptionsAddBroadcastDimensions(builder, broadcastDimensions)
+        stablehloBroadcastInDimOptions = StablehloBroadcastInDimOptionsEnd(builder)
+        return stablehloBroadcastInDimOptions
+
+
+class StablehloCompareOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloCompareOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloCompareOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloCompareOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloCompareOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloCompareOptions
+    def ComparisonDirection(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloCompareOptions
+    def CompareType(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloCompareOptionsStart(builder):
+    builder.StartObject(2)
+
+def StablehloCompareOptionsAddComparisonDirection(builder, comparisonDirection):
+    builder.PrependUint32Slot(0, comparisonDirection, 0)
+
+def StablehloCompareOptionsAddCompareType(builder, compareType):
+    builder.PrependUint32Slot(1, compareType, 0)
+
+def StablehloCompareOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class StablehloCompareOptionsT(object):
+
+    # StablehloCompareOptionsT
+    def __init__(self):
+        self.comparisonDirection = 0  # type: int
+        self.compareType = 0  # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloCompareOptions = StablehloCompareOptions()
+        stablehloCompareOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloCompareOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloCompareOptions):
+        x = StablehloCompareOptionsT()
+        x._UnPack(stablehloCompareOptions)
+        return x
+
+    # StablehloCompareOptionsT
+    def _UnPack(self, stablehloCompareOptions):
+        if stablehloCompareOptions is None:
+            return
+        self.comparisonDirection = stablehloCompareOptions.ComparisonDirection()
+        self.compareType = stablehloCompareOptions.CompareType()
+
+    # StablehloCompareOptionsT
+    def Pack(self, builder):
+        StablehloCompareOptionsStart(builder)
+        StablehloCompareOptionsAddComparisonDirection(builder, self.comparisonDirection)
+        StablehloCompareOptionsAddCompareType(builder, self.compareType)
+        stablehloCompareOptions = StablehloCompareOptionsEnd(builder)
+        return stablehloCompareOptions
+
+
+class StablehloDynamicSliceOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloDynamicSliceOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloDynamicSliceOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloDynamicSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloDynamicSliceOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloDynamicSliceOptions
+    def SliceSizes(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloDynamicSliceOptions
+    def SliceSizesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloDynamicSliceOptions
+    def SliceSizesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloDynamicSliceOptions
+    def SliceSizesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+def StablehloDynamicSliceOptionsStart(builder):
+    builder.StartObject(1)
+
+def StablehloDynamicSliceOptionsAddSliceSizes(builder, sliceSizes):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(sliceSizes), 0)
+
+def StablehloDynamicSliceOptionsStartSliceSizesVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloDynamicSliceOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloDynamicSliceOptionsT(object):
+
+    # StablehloDynamicSliceOptionsT
+    def __init__(self):
+        self.sliceSizes = None  # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloDynamicSliceOptions = StablehloDynamicSliceOptions()
+        stablehloDynamicSliceOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloDynamicSliceOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloDynamicSliceOptions):
+        x = StablehloDynamicSliceOptionsT()
+        x._UnPack(stablehloDynamicSliceOptions)
+        return x
+
+    # StablehloDynamicSliceOptionsT
+    def _UnPack(self, stablehloDynamicSliceOptions):
+        if stablehloDynamicSliceOptions is None:
+            return
+        if not stablehloDynamicSliceOptions.SliceSizesIsNone():
+            if np is None:
+                self.sliceSizes = []
+                for i in range(stablehloDynamicSliceOptions.SliceSizesLength()):
+                    self.sliceSizes.append(stablehloDynamicSliceOptions.SliceSizes(i))
+            else:
+                self.sliceSizes = stablehloDynamicSliceOptions.SliceSizesAsNumpy()
+
+    # StablehloDynamicSliceOptionsT
+    def Pack(self, builder):
+        if self.sliceSizes is not None:
+            if np is not None and type(self.sliceSizes) is np.ndarray:
+                sliceSizes = builder.CreateNumpyVector(self.sliceSizes)
+            else:
+                StablehloDynamicSliceOptionsStartSliceSizesVector(builder, len(self.sliceSizes))
+                for i in reversed(range(len(self.sliceSizes))):
+                    builder.PrependInt64(self.sliceSizes[i])
+                sliceSizes = builder.EndVector()
+        StablehloDynamicSliceOptionsStart(builder)
+        if self.sliceSizes is not None:
+            StablehloDynamicSliceOptionsAddSliceSizes(builder, sliceSizes)
+        stablehloDynamicSliceOptions = StablehloDynamicSliceOptionsEnd(builder)
+        return stablehloDynamicSliceOptions
+
+
+class StablehloPadOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloPadOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloPadOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloPadOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloPadOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloPadOptions
+    def EdgePaddingLow(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloPadOptions
+    def EdgePaddingLowAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloPadOptions
+    def EdgePaddingLowLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloPadOptions
+    def EdgePaddingLowIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+    # StablehloPadOptions
+    def EdgePaddingHigh(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloPadOptions
+    def EdgePaddingHighAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloPadOptions
+    def EdgePaddingHighLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloPadOptions
+    def EdgePaddingHighIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        return o == 0
+
+    # StablehloPadOptions
+    def InteriorPadding(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloPadOptions
+    def InteriorPaddingAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloPadOptions
+    def InteriorPaddingLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloPadOptions
+    def InteriorPaddingIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        return o == 0
+
+def StablehloPadOptionsStart(builder):
+    builder.StartObject(3)
+
+def StablehloPadOptionsAddEdgePaddingLow(builder, edgePaddingLow):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(edgePaddingLow), 0)
+
+def StablehloPadOptionsStartEdgePaddingLowVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloPadOptionsAddEdgePaddingHigh(builder, edgePaddingHigh):
+    builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(edgePaddingHigh), 0)
+
+def StablehloPadOptionsStartEdgePaddingHighVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloPadOptionsAddInteriorPadding(builder, interiorPadding):
+    builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(interiorPadding), 0)
+
+def StablehloPadOptionsStartInteriorPaddingVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloPadOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloPadOptionsT(object):
+
+    # StablehloPadOptionsT
+    def __init__(self):
+        self.edgePaddingLow = None  # type: List[int]
+        self.edgePaddingHigh = None  # type: List[int]
+        self.interiorPadding = None  # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloPadOptions = StablehloPadOptions()
+        stablehloPadOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloPadOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloPadOptions):
+        x = StablehloPadOptionsT()
+        x._UnPack(stablehloPadOptions)
+        return x
+
+    # StablehloPadOptionsT
+    def _UnPack(self, stablehloPadOptions):
+        if stablehloPadOptions is None:
+            return
+        if not stablehloPadOptions.EdgePaddingLowIsNone():
+            if np is None:
+                self.edgePaddingLow = []
+                for i in range(stablehloPadOptions.EdgePaddingLowLength()):
+                    self.edgePaddingLow.append(stablehloPadOptions.EdgePaddingLow(i))
+            else:
+                self.edgePaddingLow = stablehloPadOptions.EdgePaddingLowAsNumpy()
+        if not stablehloPadOptions.EdgePaddingHighIsNone():
+            if np is None:
+                self.edgePaddingHigh = []
+                for i in range(stablehloPadOptions.EdgePaddingHighLength()):
+                    self.edgePaddingHigh.append(stablehloPadOptions.EdgePaddingHigh(i))
+            else:
+                self.edgePaddingHigh = stablehloPadOptions.EdgePaddingHighAsNumpy()
+        if not stablehloPadOptions.InteriorPaddingIsNone():
+            if np is None:
+                self.interiorPadding = []
+                for i in range(stablehloPadOptions.InteriorPaddingLength()):
+                    self.interiorPadding.append(stablehloPadOptions.InteriorPadding(i))
+            else:
+                self.interiorPadding = stablehloPadOptions.InteriorPaddingAsNumpy()
+
+    # StablehloPadOptionsT
+    def Pack(self, builder):
+        if self.edgePaddingLow is not None:
+            if np is not None and type(self.edgePaddingLow) is np.ndarray:
+                edgePaddingLow = builder.CreateNumpyVector(self.edgePaddingLow)
+            else:
+                StablehloPadOptionsStartEdgePaddingLowVector(builder, len(self.edgePaddingLow))
+                for i in reversed(range(len(self.edgePaddingLow))):
+                    builder.PrependInt64(self.edgePaddingLow[i])
+                edgePaddingLow = builder.EndVector()
+        if self.edgePaddingHigh is not None:
+            if np is not None and type(self.edgePaddingHigh) is np.ndarray:
+                edgePaddingHigh = builder.CreateNumpyVector(self.edgePaddingHigh)
+            else:
+                StablehloPadOptionsStartEdgePaddingHighVector(builder, len(self.edgePaddingHigh))
+                for i in reversed(range(len(self.edgePaddingHigh))):
+                    builder.PrependInt64(self.edgePaddingHigh[i])
+                edgePaddingHigh = builder.EndVector()
+        if self.interiorPadding is not None:
+            if np is not None and type(self.interiorPadding) is np.ndarray:
+                interiorPadding = builder.CreateNumpyVector(self.interiorPadding)
+            else:
+                StablehloPadOptionsStartInteriorPaddingVector(builder, len(self.interiorPadding))
+                for i in reversed(range(len(self.interiorPadding))):
+                    builder.PrependInt64(self.interiorPadding[i])
+                interiorPadding = builder.EndVector()
+        StablehloPadOptionsStart(builder)
+        if self.edgePaddingLow is not None:
+            StablehloPadOptionsAddEdgePaddingLow(builder, edgePaddingLow)
+        if self.edgePaddingHigh is not None:
+            StablehloPadOptionsAddEdgePaddingHigh(builder, edgePaddingHigh)
+        if self.interiorPadding is not None:
+            StablehloPadOptionsAddInteriorPadding(builder, interiorPadding)
+        stablehloPadOptions = StablehloPadOptionsEnd(builder)
+        return stablehloPadOptions
+
+
+class StablehloIotaOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloIotaOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloIotaOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloIotaOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloIotaOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloIotaOptions
+    def IotaDimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloIotaOptionsStart(builder):
+    builder.StartObject(1)
+
+def StablehloIotaOptionsAddIotaDimension(builder, iotaDimension):
+    builder.PrependInt64Slot(0, iotaDimension, 0)
+
+def StablehloIotaOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class StablehloIotaOptionsT(object):
+
+    # StablehloIotaOptionsT
+    def __init__(self):
+        self.iotaDimension = 0  # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloIotaOptions = StablehloIotaOptions()
+        stablehloIotaOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloIotaOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloIotaOptions):
+        x = StablehloIotaOptionsT()
+        x._UnPack(stablehloIotaOptions)
+        return x
+
+    # StablehloIotaOptionsT
+    def _UnPack(self, stablehloIotaOptions):
+        if stablehloIotaOptions is None:
+            return
+        self.iotaDimension = stablehloIotaOptions.IotaDimension()
+
+    # StablehloIotaOptionsT
+    def Pack(self, builder):
+        StablehloIotaOptionsStart(builder)
+        StablehloIotaOptionsAddIotaDimension(builder, self.iotaDimension)
+        stablehloIotaOptions = StablehloIotaOptionsEnd(builder)
+        return stablehloIotaOptions
+
+
+class StablehloCustomCallOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloCustomCallOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloCustomCallOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloCustomCallOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloCustomCallOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloCustomCallOptions
+    def CallTargetName(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.String(o + self._tab.Pos)
+        return None
+
+    # StablehloCustomCallOptions
+    def HasSideEffect(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+    # StablehloCustomCallOptions
+    def BackendConfig(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.String(o + self._tab.Pos)
+        return None
+
+    # StablehloCustomCallOptions
+    def ApiVersion(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloCustomCallOptions
+    def CalledComputations(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
+        return 0
+
+    # StablehloCustomCallOptions
+    def CalledComputationsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
+        return 0
+
+    # StablehloCustomCallOptions
+    def CalledComputationsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloCustomCallOptions
+    def CalledComputationsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        return o == 0
+
+    # StablehloCustomCallOptions
+    def CustomAttributes(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
+        return 0
+
+    # StablehloCustomCallOptions
+    def CustomAttributesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
+        return 0
+
+    # StablehloCustomCallOptions
+    def CustomAttributesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloCustomCallOptions
+    def CustomAttributesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        return o == 0
+
+def StablehloCustomCallOptionsStart(builder):
+    builder.StartObject(6)
+
+def StablehloCustomCallOptionsAddCallTargetName(builder, callTargetName):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(callTargetName), 0)
+
+def StablehloCustomCallOptionsAddHasSideEffect(builder, hasSideEffect):
+    builder.PrependBoolSlot(1, hasSideEffect, 0)
+
+def StablehloCustomCallOptionsAddBackendConfig(builder, backendConfig):
+    builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(backendConfig), 0)
+
+def StablehloCustomCallOptionsAddApiVersion(builder, apiVersion):
+    builder.PrependInt32Slot(3, apiVersion, 0)
+
+def StablehloCustomCallOptionsAddCalledComputations(builder, calledComputations):
+    builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(calledComputations), 0)
+
+def StablehloCustomCallOptionsStartCalledComputationsVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
+
+def StablehloCustomCallOptionsAddCustomAttributes(builder, customAttributes):
+    builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(customAttributes), 0)
+
+def StablehloCustomCallOptionsStartCustomAttributesVector(builder, numElems):
+    return builder.StartVector(1, numElems, 1)
+
+def StablehloCustomCallOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloCustomCallOptionsT(object):
+
+    # StablehloCustomCallOptionsT
+    def __init__(self):
+        self.callTargetName = None  # type: str
+        self.hasSideEffect = False  # type: bool
+        self.backendConfig = None  # type: str
+        self.apiVersion = 0  # type: int
+        self.calledComputations = None  # type: List[int]
+        self.customAttributes = None  # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloCustomCallOptions = StablehloCustomCallOptions()
+        stablehloCustomCallOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloCustomCallOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloCustomCallOptions):
+        x = StablehloCustomCallOptionsT()
+        x._UnPack(stablehloCustomCallOptions)
+        return x
+
+    # StablehloCustomCallOptionsT
+    def _UnPack(self, stablehloCustomCallOptions):
+        if stablehloCustomCallOptions is None:
+            return
+        self.callTargetName = stablehloCustomCallOptions.CallTargetName()
+        self.hasSideEffect = stablehloCustomCallOptions.HasSideEffect()
+        self.backendConfig = stablehloCustomCallOptions.BackendConfig()
+        self.apiVersion = stablehloCustomCallOptions.ApiVersion()
+        if not stablehloCustomCallOptions.CalledComputationsIsNone():
+            if np is None:
+                self.calledComputations = []
+                for i in range(stablehloCustomCallOptions.CalledComputationsLength()):
+                    self.calledComputations.append(stablehloCustomCallOptions.CalledComputations(i))
+            else:
+                self.calledComputations = stablehloCustomCallOptions.CalledComputationsAsNumpy()
+        if not stablehloCustomCallOptions.CustomAttributesIsNone():
+            if np is None:
+                self.customAttributes = []
+                for i in range(stablehloCustomCallOptions.CustomAttributesLength()):
+                    self.customAttributes.append(stablehloCustomCallOptions.CustomAttributes(i))
+            else:
+                self.customAttributes = stablehloCustomCallOptions.CustomAttributesAsNumpy()
+
+    # StablehloCustomCallOptionsT
+    def Pack(self, builder):
+        if self.callTargetName is not None:
+            callTargetName = builder.CreateString(self.callTargetName)
+        if self.backendConfig is not None:
+            backendConfig = builder.CreateString(self.backendConfig)
+        if self.calledComputations is not None:
None and type(self.calledComputations) is np.ndarray: + calledComputations = builder.CreateNumpyVector(self.calledComputations) + else: + StablehloCustomCallOptionsStartCalledComputationsVector(builder, len(self.calledComputations)) + for i in reversed(range(len(self.calledComputations))): + builder.PrependInt32(self.calledComputations[i]) + calledComputations = builder.EndVector() + if self.customAttributes is not None: + if np is not None and type(self.customAttributes) is np.ndarray: + customAttributes = builder.CreateNumpyVector(self.customAttributes) + else: + StablehloCustomCallOptionsStartCustomAttributesVector(builder, len(self.customAttributes)) + for i in reversed(range(len(self.customAttributes))): + builder.PrependUint8(self.customAttributes[i]) + customAttributes = builder.EndVector() + StablehloCustomCallOptionsStart(builder) + if self.callTargetName is not None: + StablehloCustomCallOptionsAddCallTargetName(builder, callTargetName) + StablehloCustomCallOptionsAddHasSideEffect(builder, self.hasSideEffect) + if self.backendConfig is not None: + StablehloCustomCallOptionsAddBackendConfig(builder, backendConfig) + StablehloCustomCallOptionsAddApiVersion(builder, self.apiVersion) + if self.calledComputations is not None: + StablehloCustomCallOptionsAddCalledComputations(builder, calledComputations) + if self.customAttributes is not None: + StablehloCustomCallOptionsAddCustomAttributes(builder, customAttributes) + stablehloCustomCallOptions = StablehloCustomCallOptionsEnd(builder) + return stablehloCustomCallOptions + + +class StablehloReduceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = StablehloReduceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsStablehloReduceOptions(cls, buf, offset=0): + """This method is deprecated. 
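The pattern above repeats for every option table in this generated file: a read-only cursor class over the raw buffer, module-level Start/Add/End builder helpers, and a mutable object-API class (the `T` suffix) that unpacks into plain attributes and packs back. A minimal round-trip sketch, assuming this generated module is importable and the `flatbuffers` runtime is installed; the field values are illustrative only:

import flatbuffers

# Build a table from the mutable object-API class.
opts = StablehloCustomCallOptionsT()
opts.callTargetName = 'my_target'   # hypothetical example value
opts.apiVersion = 1
opts.calledComputations = [0, 2]

builder = flatbuffers.Builder(0)
builder.Finish(opts.Pack(builder))  # Pack() returns the root table offset
buf = builder.Output()

# InitFromPackedBuf reads the root uoffset, then _UnPack copies each field
# out of the flatbuffer into a fresh mutable object.
decoded = StablehloCustomCallOptionsT.InitFromPackedBuf(buf, 0)
assert decoded.callTargetName == b'my_target'  # strings decode as bytes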
+
+
+class StablehloReduceOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloReduceOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloReduceOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloReduceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloReduceOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloReduceOptions
+    def Dimensions(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloReduceOptions
+    def DimensionsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloReduceOptions
+    def DimensionsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloReduceOptions
+    def DimensionsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+    # StablehloReduceOptions
+    def BodySubgraphIndex(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloReduceOptionsStart(builder):
+    builder.StartObject(2)
+
+def StablehloReduceOptionsAddDimensions(builder, dimensions):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(dimensions), 0)
+
+def StablehloReduceOptionsStartDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloReduceOptionsAddBodySubgraphIndex(builder, bodySubgraphIndex):
+    builder.PrependInt32Slot(1, bodySubgraphIndex, 0)
+
+def StablehloReduceOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloReduceOptionsT(object):
+
+    # StablehloReduceOptionsT
+    def __init__(self):
+        self.dimensions = None # type: List[int]
+        self.bodySubgraphIndex = 0 # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloReduceOptions = StablehloReduceOptions()
+        stablehloReduceOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloReduceOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloReduceOptions):
+        x = StablehloReduceOptionsT()
+        x._UnPack(stablehloReduceOptions)
+        return x
+
+    # StablehloReduceOptionsT
+    def _UnPack(self, stablehloReduceOptions):
+        if stablehloReduceOptions is None:
+            return
+        if not stablehloReduceOptions.DimensionsIsNone():
+            if np is None:
+                self.dimensions = []
+                for i in range(stablehloReduceOptions.DimensionsLength()):
+                    self.dimensions.append(stablehloReduceOptions.Dimensions(i))
+            else:
+                self.dimensions = stablehloReduceOptions.DimensionsAsNumpy()
+        self.bodySubgraphIndex = stablehloReduceOptions.BodySubgraphIndex()
+
+    # StablehloReduceOptionsT
+    def Pack(self, builder):
+        if self.dimensions is not None:
+            if np is not None and type(self.dimensions) is np.ndarray:
+                dimensions = builder.CreateNumpyVector(self.dimensions)
+            else:
+                StablehloReduceOptionsStartDimensionsVector(builder, len(self.dimensions))
+                for i in reversed(range(len(self.dimensions))):
+                    builder.PrependInt64(self.dimensions[i])
+                dimensions = builder.EndVector()
+        StablehloReduceOptionsStart(builder)
+        if self.dimensions is not None:
+            StablehloReduceOptionsAddDimensions(builder, dimensions)
+        StablehloReduceOptionsAddBodySubgraphIndex(builder, self.bodySubgraphIndex)
+        stablehloReduceOptions = StablehloReduceOptionsEnd(builder)
+        return stablehloReduceOptions
+
+
+class StablehloSliceOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloSliceOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloSliceOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloSliceOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloSliceOptions
+    def StartIndices(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloSliceOptions
+    def StartIndicesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloSliceOptions
+    def StartIndicesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloSliceOptions
+    def StartIndicesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+    # StablehloSliceOptions
+    def LimitIndices(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloSliceOptions
+    def LimitIndicesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloSliceOptions
+    def LimitIndicesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloSliceOptions
+    def LimitIndicesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        return o == 0
+
+    # StablehloSliceOptions
+    def Strides(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloSliceOptions
+    def StridesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloSliceOptions
+    def StridesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloSliceOptions
+    def StridesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        return o == 0
+
+def StablehloSliceOptionsStart(builder):
+    builder.StartObject(3)
+
+def StablehloSliceOptionsAddStartIndices(builder, startIndices):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(startIndices), 0)
+
+def StablehloSliceOptionsStartStartIndicesVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloSliceOptionsAddLimitIndices(builder, limitIndices):
+    builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(limitIndices), 0)
+
+def StablehloSliceOptionsStartLimitIndicesVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloSliceOptionsAddStrides(builder, strides):
+    builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(strides), 0)
+
+def StablehloSliceOptionsStartStridesVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloSliceOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloSliceOptionsT(object):
+
+    # StablehloSliceOptionsT
+    def __init__(self):
+        self.startIndices = None # type: List[int]
+        self.limitIndices = None # type: List[int]
+        self.strides = None # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloSliceOptions = StablehloSliceOptions()
+        stablehloSliceOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloSliceOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloSliceOptions):
+        x = StablehloSliceOptionsT()
+        x._UnPack(stablehloSliceOptions)
+        return x
+
+    # StablehloSliceOptionsT
+    def _UnPack(self, stablehloSliceOptions):
+        if stablehloSliceOptions is None:
+            return
+        if not stablehloSliceOptions.StartIndicesIsNone():
+            if np is None:
+                self.startIndices = []
+                for i in range(stablehloSliceOptions.StartIndicesLength()):
+                    self.startIndices.append(stablehloSliceOptions.StartIndices(i))
+            else:
+                self.startIndices = stablehloSliceOptions.StartIndicesAsNumpy()
+        if not stablehloSliceOptions.LimitIndicesIsNone():
+            if np is None:
+                self.limitIndices = []
+                for i in range(stablehloSliceOptions.LimitIndicesLength()):
+                    self.limitIndices.append(stablehloSliceOptions.LimitIndices(i))
+            else:
+                self.limitIndices = stablehloSliceOptions.LimitIndicesAsNumpy()
+        if not stablehloSliceOptions.StridesIsNone():
+            if np is None:
+                self.strides = []
+                for i in range(stablehloSliceOptions.StridesLength()):
+                    self.strides.append(stablehloSliceOptions.Strides(i))
+            else:
+                self.strides = stablehloSliceOptions.StridesAsNumpy()
+
+    # StablehloSliceOptionsT
+    def Pack(self, builder):
+        if self.startIndices is not None:
+            if np is not None and type(self.startIndices) is np.ndarray:
+                startIndices = builder.CreateNumpyVector(self.startIndices)
+            else:
+                StablehloSliceOptionsStartStartIndicesVector(builder, len(self.startIndices))
+                for i in reversed(range(len(self.startIndices))):
+                    builder.PrependInt64(self.startIndices[i])
+                startIndices = builder.EndVector()
+        if self.limitIndices is not None:
+            if np is not None and type(self.limitIndices) is np.ndarray:
+                limitIndices = builder.CreateNumpyVector(self.limitIndices)
+            else:
+                StablehloSliceOptionsStartLimitIndicesVector(builder, len(self.limitIndices))
+                for i in reversed(range(len(self.limitIndices))):
+                    builder.PrependInt64(self.limitIndices[i])
+                limitIndices = builder.EndVector()
+        if self.strides is not None:
+            if np is not None and type(self.strides) is np.ndarray:
+                strides = builder.CreateNumpyVector(self.strides)
+            else:
+                StablehloSliceOptionsStartStridesVector(builder, len(self.strides))
+                for i in reversed(range(len(self.strides))):
+                    builder.PrependInt64(self.strides[i])
+                strides = builder.EndVector()
+        StablehloSliceOptionsStart(builder)
+        if self.startIndices is not None:
+            StablehloSliceOptionsAddStartIndices(builder, startIndices)
+        if self.limitIndices is not None:
+            StablehloSliceOptionsAddLimitIndices(builder, limitIndices)
+        if self.strides is not None:
+            StablehloSliceOptionsAddStrides(builder, strides)
+        stablehloSliceOptions = StablehloSliceOptionsEnd(builder)
+        return stablehloSliceOptions
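The Start/Add/End helpers can also be driven directly, without the object API. One detail to keep in mind when building vectors by hand: the FlatBuffers builder writes back-to-front, so elements are prepended in reverse, and every vector must be finished before the enclosing table is started. A minimal sketch for StablehloSliceOptions, with illustrative index values:

import flatbuffers

builder = flatbuffers.Builder(0)

# Vectors first: created before StablehloSliceOptionsStart().
start_indices = [0, 0]                 # example values
StablehloSliceOptionsStartStartIndicesVector(builder, len(start_indices))
for v in reversed(start_indices):      # prepended, hence reversed
    builder.PrependInt64(v)
start_indices_offset = builder.EndVector()

StablehloSliceOptionsStart(builder)
StablehloSliceOptionsAddStartIndices(builder, start_indices_offset)
options = StablehloSliceOptionsEnd(builder)
builder.Finish(options)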
+
+
+class StablehloConvolutionOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloConvolutionOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloConvolutionOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloConvolutionOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloConvolutionOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloConvolutionOptions
+    def WindowStrides(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloConvolutionOptions
+    def WindowStridesAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def WindowStridesLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def WindowStridesIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def Padding(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloConvolutionOptions
+    def PaddingAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def PaddingLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def PaddingIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def LhsDilation(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloConvolutionOptions
+    def LhsDilationAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def LhsDilationLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def LhsDilationIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def RhsDilation(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloConvolutionOptions
+    def RhsDilationAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def RhsDilationLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def RhsDilationIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def WindowReversal(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.BoolFlags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
+        return 0
+
+    # StablehloConvolutionOptions
+    def WindowReversalAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.BoolFlags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def WindowReversalLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def WindowReversalIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def InputBatchDimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def InputFeatureDimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def InputSpatialDimensions(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloConvolutionOptions
+    def InputSpatialDimensionsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def InputSpatialDimensionsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def InputSpatialDimensionsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def KernelInputFeatureDimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def KernelOutputFeatureDimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def KernelSpatialDimensions(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloConvolutionOptions
+    def KernelSpatialDimensionsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def KernelSpatialDimensionsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def KernelSpatialDimensionsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def OutputBatchDimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def OutputFeatureDimension(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def OutputSpatialDimensions(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloConvolutionOptions
+    def OutputSpatialDimensionsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def OutputSpatialDimensionsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def OutputSpatialDimensionsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(30))
+        return o == 0
+
+    # StablehloConvolutionOptions
+    def FeatureGroupCount(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(32))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def BatchGroupCount(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(34))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloConvolutionOptions
+    def PrecisionConfig(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Uint32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
+        return 0
+
+    # StablehloConvolutionOptions
+    def PrecisionConfigAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint32Flags, o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def PrecisionConfigLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloConvolutionOptions
+    def PrecisionConfigIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(36))
+        return o == 0
+
+def StablehloConvolutionOptionsStart(builder):
+    builder.StartObject(17)
+
+def StablehloConvolutionOptionsAddWindowStrides(builder, windowStrides):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(windowStrides), 0)
+
+def StablehloConvolutionOptionsStartWindowStridesVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloConvolutionOptionsAddPadding(builder, padding):
+    builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(padding), 0)
+
+def StablehloConvolutionOptionsStartPaddingVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloConvolutionOptionsAddLhsDilation(builder, lhsDilation):
+    builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(lhsDilation), 0)
+
+def StablehloConvolutionOptionsStartLhsDilationVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloConvolutionOptionsAddRhsDilation(builder, rhsDilation):
+    builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(rhsDilation), 0)
+
+def StablehloConvolutionOptionsStartRhsDilationVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloConvolutionOptionsAddWindowReversal(builder, windowReversal):
+    builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(windowReversal), 0)
+
+def StablehloConvolutionOptionsStartWindowReversalVector(builder, numElems):
+    return builder.StartVector(1, numElems, 1)
+
+def StablehloConvolutionOptionsAddInputBatchDimension(builder, inputBatchDimension):
+    builder.PrependInt64Slot(5, inputBatchDimension, 0)
+
+def StablehloConvolutionOptionsAddInputFeatureDimension(builder, inputFeatureDimension):
+    builder.PrependInt64Slot(6, inputFeatureDimension, 0)
+
+def StablehloConvolutionOptionsAddInputSpatialDimensions(builder, inputSpatialDimensions):
+    builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(inputSpatialDimensions), 0)
+
+def StablehloConvolutionOptionsStartInputSpatialDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloConvolutionOptionsAddKernelInputFeatureDimension(builder, kernelInputFeatureDimension):
+    builder.PrependInt64Slot(8, kernelInputFeatureDimension, 0)
+
+def StablehloConvolutionOptionsAddKernelOutputFeatureDimension(builder, kernelOutputFeatureDimension):
+    builder.PrependInt64Slot(9, kernelOutputFeatureDimension, 0)
+
+def StablehloConvolutionOptionsAddKernelSpatialDimensions(builder, kernelSpatialDimensions):
+    builder.PrependUOffsetTRelativeSlot(10, flatbuffers.number_types.UOffsetTFlags.py_type(kernelSpatialDimensions), 0)
+
+def StablehloConvolutionOptionsStartKernelSpatialDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloConvolutionOptionsAddOutputBatchDimension(builder, outputBatchDimension):
+    builder.PrependInt64Slot(11, outputBatchDimension, 0)
+
+def StablehloConvolutionOptionsAddOutputFeatureDimension(builder, outputFeatureDimension):
+    builder.PrependInt64Slot(12, outputFeatureDimension, 0)
+
+def StablehloConvolutionOptionsAddOutputSpatialDimensions(builder, outputSpatialDimensions):
+    builder.PrependUOffsetTRelativeSlot(13, flatbuffers.number_types.UOffsetTFlags.py_type(outputSpatialDimensions), 0)
+
+def StablehloConvolutionOptionsStartOutputSpatialDimensionsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloConvolutionOptionsAddFeatureGroupCount(builder, featureGroupCount):
+    builder.PrependInt64Slot(14, featureGroupCount, 0)
+
+def StablehloConvolutionOptionsAddBatchGroupCount(builder, batchGroupCount):
+    builder.PrependInt64Slot(15, batchGroupCount, 0)
+
+def StablehloConvolutionOptionsAddPrecisionConfig(builder, precisionConfig):
+    builder.PrependUOffsetTRelativeSlot(16, flatbuffers.number_types.UOffsetTFlags.py_type(precisionConfig), 0)
+
+def StablehloConvolutionOptionsStartPrecisionConfigVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
+
+def StablehloConvolutionOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloConvolutionOptionsT(object):
+
+    # StablehloConvolutionOptionsT
+    def __init__(self):
+        self.windowStrides = None # type: List[int]
+        self.padding = None # type: List[int]
+        self.lhsDilation = None # type: List[int]
+        self.rhsDilation = None # type: List[int]
+        self.windowReversal = None # type: List[bool]
+        self.inputBatchDimension = 0 # type: int
+        self.inputFeatureDimension = 0 # type: int
+        self.inputSpatialDimensions = None # type: List[int]
+        self.kernelInputFeatureDimension = 0 # type: int
+        self.kernelOutputFeatureDimension = 0 # type: int
+        self.kernelSpatialDimensions = None # type: List[int]
+        self.outputBatchDimension = 0 # type: int
+        self.outputFeatureDimension = 0 # type: int
+        self.outputSpatialDimensions = None # type: List[int]
+        self.featureGroupCount = 0 # type: int
+        self.batchGroupCount = 0 # type: int
+        self.precisionConfig = None # type: List[int]
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloConvolutionOptions = StablehloConvolutionOptions()
+        stablehloConvolutionOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloConvolutionOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloConvolutionOptions):
+        x = StablehloConvolutionOptionsT()
+        x._UnPack(stablehloConvolutionOptions)
+        return x
+
+    # StablehloConvolutionOptionsT
+    def _UnPack(self, stablehloConvolutionOptions):
+        if stablehloConvolutionOptions is None:
+            return
+        if not stablehloConvolutionOptions.WindowStridesIsNone():
+            if np is None:
+                self.windowStrides = []
+                for i in range(stablehloConvolutionOptions.WindowStridesLength()):
+                    self.windowStrides.append(stablehloConvolutionOptions.WindowStrides(i))
+            else:
+                self.windowStrides = stablehloConvolutionOptions.WindowStridesAsNumpy()
+        if not stablehloConvolutionOptions.PaddingIsNone():
+            if np is None:
+                self.padding = []
+                for i in range(stablehloConvolutionOptions.PaddingLength()):
+                    self.padding.append(stablehloConvolutionOptions.Padding(i))
+            else:
+                self.padding = stablehloConvolutionOptions.PaddingAsNumpy()
+        if not stablehloConvolutionOptions.LhsDilationIsNone():
+            if np is None:
+                self.lhsDilation = []
+                for i in range(stablehloConvolutionOptions.LhsDilationLength()):
+                    self.lhsDilation.append(stablehloConvolutionOptions.LhsDilation(i))
+            else:
+                self.lhsDilation = stablehloConvolutionOptions.LhsDilationAsNumpy()
+        if not stablehloConvolutionOptions.RhsDilationIsNone():
+            if np is None:
+                self.rhsDilation = []
+                for i in range(stablehloConvolutionOptions.RhsDilationLength()):
+                    self.rhsDilation.append(stablehloConvolutionOptions.RhsDilation(i))
+            else:
+                self.rhsDilation = stablehloConvolutionOptions.RhsDilationAsNumpy()
+        if not stablehloConvolutionOptions.WindowReversalIsNone():
+            if np is None:
+                self.windowReversal = []
+                for i in range(stablehloConvolutionOptions.WindowReversalLength()):
+                    self.windowReversal.append(stablehloConvolutionOptions.WindowReversal(i))
+            else:
+                self.windowReversal = stablehloConvolutionOptions.WindowReversalAsNumpy()
+        self.inputBatchDimension = stablehloConvolutionOptions.InputBatchDimension()
+        self.inputFeatureDimension = stablehloConvolutionOptions.InputFeatureDimension()
+        if not stablehloConvolutionOptions.InputSpatialDimensionsIsNone():
+            if np is None:
+                self.inputSpatialDimensions = []
+                for i in range(stablehloConvolutionOptions.InputSpatialDimensionsLength()):
+                    self.inputSpatialDimensions.append(stablehloConvolutionOptions.InputSpatialDimensions(i))
+            else:
+                self.inputSpatialDimensions = stablehloConvolutionOptions.InputSpatialDimensionsAsNumpy()
+        self.kernelInputFeatureDimension = stablehloConvolutionOptions.KernelInputFeatureDimension()
+        self.kernelOutputFeatureDimension = stablehloConvolutionOptions.KernelOutputFeatureDimension()
+        if not stablehloConvolutionOptions.KernelSpatialDimensionsIsNone():
+            if np is None:
+                self.kernelSpatialDimensions = []
+                for i in range(stablehloConvolutionOptions.KernelSpatialDimensionsLength()):
+                    self.kernelSpatialDimensions.append(stablehloConvolutionOptions.KernelSpatialDimensions(i))
+            else:
+                self.kernelSpatialDimensions = stablehloConvolutionOptions.KernelSpatialDimensionsAsNumpy()
+        self.outputBatchDimension = stablehloConvolutionOptions.OutputBatchDimension()
+        self.outputFeatureDimension = stablehloConvolutionOptions.OutputFeatureDimension()
+        if not stablehloConvolutionOptions.OutputSpatialDimensionsIsNone():
+            if np is None:
+                self.outputSpatialDimensions = []
+                for i in range(stablehloConvolutionOptions.OutputSpatialDimensionsLength()):
+                    self.outputSpatialDimensions.append(stablehloConvolutionOptions.OutputSpatialDimensions(i))
+            else:
+                self.outputSpatialDimensions = stablehloConvolutionOptions.OutputSpatialDimensionsAsNumpy()
+        self.featureGroupCount = stablehloConvolutionOptions.FeatureGroupCount()
+        self.batchGroupCount = stablehloConvolutionOptions.BatchGroupCount()
+        if not stablehloConvolutionOptions.PrecisionConfigIsNone():
+            if np is None:
+                self.precisionConfig = []
+                for i in range(stablehloConvolutionOptions.PrecisionConfigLength()):
+                    self.precisionConfig.append(stablehloConvolutionOptions.PrecisionConfig(i))
+            else:
+                self.precisionConfig = stablehloConvolutionOptions.PrecisionConfigAsNumpy()
+
+    # StablehloConvolutionOptionsT
+    def Pack(self, builder):
+        if self.windowStrides is not None:
+            if np is not None and type(self.windowStrides) is np.ndarray:
+                windowStrides = builder.CreateNumpyVector(self.windowStrides)
+            else:
+                StablehloConvolutionOptionsStartWindowStridesVector(builder, len(self.windowStrides))
+                for i in reversed(range(len(self.windowStrides))):
+                    builder.PrependInt64(self.windowStrides[i])
+                windowStrides = builder.EndVector()
+        if self.padding is not None:
+            if np is not None and type(self.padding) is np.ndarray:
+                padding = builder.CreateNumpyVector(self.padding)
+            else:
+                StablehloConvolutionOptionsStartPaddingVector(builder, len(self.padding))
+                for i in reversed(range(len(self.padding))):
+                    builder.PrependInt64(self.padding[i])
+                padding = builder.EndVector()
+        if self.lhsDilation is not None:
+            if np is not None and type(self.lhsDilation) is np.ndarray:
+                lhsDilation = builder.CreateNumpyVector(self.lhsDilation)
+            else:
+                StablehloConvolutionOptionsStartLhsDilationVector(builder, len(self.lhsDilation))
+                for i in reversed(range(len(self.lhsDilation))):
+                    builder.PrependInt64(self.lhsDilation[i])
+                lhsDilation = builder.EndVector()
+        if self.rhsDilation is not None:
+            if np is not None and type(self.rhsDilation) is np.ndarray:
+                rhsDilation = builder.CreateNumpyVector(self.rhsDilation)
+            else:
+                StablehloConvolutionOptionsStartRhsDilationVector(builder, len(self.rhsDilation))
+                for i in reversed(range(len(self.rhsDilation))):
+                    builder.PrependInt64(self.rhsDilation[i])
+                rhsDilation = builder.EndVector()
+        if self.windowReversal is not None:
+            if np is not None and type(self.windowReversal) is np.ndarray:
+                windowReversal = builder.CreateNumpyVector(self.windowReversal)
+            else:
+                StablehloConvolutionOptionsStartWindowReversalVector(builder, len(self.windowReversal))
+                for i in reversed(range(len(self.windowReversal))):
+                    builder.PrependBool(self.windowReversal[i])
+                windowReversal = builder.EndVector()
+        if self.inputSpatialDimensions is not None:
+            if np is not None and type(self.inputSpatialDimensions) is np.ndarray:
+                inputSpatialDimensions = builder.CreateNumpyVector(self.inputSpatialDimensions)
+            else:
+                StablehloConvolutionOptionsStartInputSpatialDimensionsVector(builder, len(self.inputSpatialDimensions))
+                for i in reversed(range(len(self.inputSpatialDimensions))):
+                    builder.PrependInt64(self.inputSpatialDimensions[i])
+                inputSpatialDimensions = builder.EndVector()
+        if self.kernelSpatialDimensions is not None:
+            if np is not None and type(self.kernelSpatialDimensions) is np.ndarray:
+                kernelSpatialDimensions = builder.CreateNumpyVector(self.kernelSpatialDimensions)
+            else:
+                StablehloConvolutionOptionsStartKernelSpatialDimensionsVector(builder, len(self.kernelSpatialDimensions))
+                for i in reversed(range(len(self.kernelSpatialDimensions))):
+                    builder.PrependInt64(self.kernelSpatialDimensions[i])
+                kernelSpatialDimensions = builder.EndVector()
+        if self.outputSpatialDimensions is not None:
+            if np is not None and type(self.outputSpatialDimensions) is np.ndarray:
+                outputSpatialDimensions = builder.CreateNumpyVector(self.outputSpatialDimensions)
+            else:
+                StablehloConvolutionOptionsStartOutputSpatialDimensionsVector(builder, len(self.outputSpatialDimensions))
+                for i in reversed(range(len(self.outputSpatialDimensions))):
+                    builder.PrependInt64(self.outputSpatialDimensions[i])
+                outputSpatialDimensions = builder.EndVector()
+        if self.precisionConfig is not None:
+            if np is not None and type(self.precisionConfig) is np.ndarray:
+                precisionConfig = builder.CreateNumpyVector(self.precisionConfig)
+            else:
+                StablehloConvolutionOptionsStartPrecisionConfigVector(builder, len(self.precisionConfig))
+                for i in reversed(range(len(self.precisionConfig))):
+                    builder.PrependUint32(self.precisionConfig[i])
+                precisionConfig = builder.EndVector()
+        StablehloConvolutionOptionsStart(builder)
+        if self.windowStrides is not None:
+            StablehloConvolutionOptionsAddWindowStrides(builder, windowStrides)
+        if self.padding is not None:
+            StablehloConvolutionOptionsAddPadding(builder, padding)
+        if self.lhsDilation is not None:
+            StablehloConvolutionOptionsAddLhsDilation(builder, lhsDilation)
+        if self.rhsDilation is not None:
+            StablehloConvolutionOptionsAddRhsDilation(builder, rhsDilation)
+        if self.windowReversal is not None:
+            StablehloConvolutionOptionsAddWindowReversal(builder, windowReversal)
+        StablehloConvolutionOptionsAddInputBatchDimension(builder, self.inputBatchDimension)
+        StablehloConvolutionOptionsAddInputFeatureDimension(builder, self.inputFeatureDimension)
+        if self.inputSpatialDimensions is not None:
+            StablehloConvolutionOptionsAddInputSpatialDimensions(builder, inputSpatialDimensions)
+        StablehloConvolutionOptionsAddKernelInputFeatureDimension(builder, self.kernelInputFeatureDimension)
+        StablehloConvolutionOptionsAddKernelOutputFeatureDimension(builder, self.kernelOutputFeatureDimension)
+        if self.kernelSpatialDimensions is not None:
+            StablehloConvolutionOptionsAddKernelSpatialDimensions(builder, kernelSpatialDimensions)
+        StablehloConvolutionOptionsAddOutputBatchDimension(builder, self.outputBatchDimension)
+        StablehloConvolutionOptionsAddOutputFeatureDimension(builder, self.outputFeatureDimension)
+        if self.outputSpatialDimensions is not None:
+            StablehloConvolutionOptionsAddOutputSpatialDimensions(builder, outputSpatialDimensions)
+        StablehloConvolutionOptionsAddFeatureGroupCount(builder, self.featureGroupCount)
+        StablehloConvolutionOptionsAddBatchGroupCount(builder, self.batchGroupCount)
+        if self.precisionConfig is not None:
+            StablehloConvolutionOptionsAddPrecisionConfig(builder, precisionConfig)
+        stablehloConvolutionOptions = StablehloConvolutionOptionsEnd(builder)
+        return stablehloConvolutionOptions
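Note the two code paths in each Pack(): when a vector field already holds a numpy array, the generated code calls builder.CreateNumpyVector rather than prepending element by element, which is the cheaper way to populate wide tables such as StablehloConvolutionOptionsT. The dtype has to match the schema's element type, int64 here; a small sketch with made-up shapes:

import numpy as np
import flatbuffers

conv = StablehloConvolutionOptionsT()
# dtype must match the Int64Flags element type read back by the cursor class.
conv.windowStrides = np.array([1, 1], dtype=np.int64)
conv.padding = np.array([0, 0, 0, 0], dtype=np.int64)
conv.featureGroupCount = 1

builder = flatbuffers.Builder(0)
builder.Finish(conv.Pack(builder))
buf = builder.Output()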
+
+
+class StablehloScatterOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloScatterOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloScatterOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloScatterOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloScatterOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloScatterOptions
+    def IndicesAreSorted(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+    # StablehloScatterOptions
+    def UpdateWindowDims(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloScatterOptions
+    def UpdateWindowDimsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloScatterOptions
+    def UpdateWindowDimsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloScatterOptions
+    def UpdateWindowDimsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        return o == 0
+
+    # StablehloScatterOptions
+    def InsertedWindowDims(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloScatterOptions
+    def InsertedWindowDimsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloScatterOptions
+    def InsertedWindowDimsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloScatterOptions
+    def InsertedWindowDimsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        return o == 0
+
+    # StablehloScatterOptions
+    def ScatterDimsToOperandDims(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
+        return 0
+
+    # StablehloScatterOptions
+    def ScatterDimsToOperandDimsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
+        return 0
+
+    # StablehloScatterOptions
+    def ScatterDimsToOperandDimsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # StablehloScatterOptions
+    def ScatterDimsToOperandDimsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        return o == 0
+
+    # StablehloScatterOptions
+    def IndexVectorDim(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
+        return 0
+
+    # StablehloScatterOptions
+    def UniqueIndices(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+    # StablehloScatterOptions
+    def UpdateComputationSubgraphIndex(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloScatterOptionsStart(builder):
+    builder.StartObject(7)
+
+def StablehloScatterOptionsAddIndicesAreSorted(builder, indicesAreSorted):
+    builder.PrependBoolSlot(0, indicesAreSorted, 0)
+
+def StablehloScatterOptionsAddUpdateWindowDims(builder, updateWindowDims):
+    builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(updateWindowDims), 0)
+
+def StablehloScatterOptionsStartUpdateWindowDimsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloScatterOptionsAddInsertedWindowDims(builder, insertedWindowDims):
+    builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(insertedWindowDims), 0)
+
+def StablehloScatterOptionsStartInsertedWindowDimsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloScatterOptionsAddScatterDimsToOperandDims(builder, scatterDimsToOperandDims):
+    builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(scatterDimsToOperandDims), 0)
+
+def StablehloScatterOptionsStartScatterDimsToOperandDimsVector(builder, numElems):
+    return builder.StartVector(8, numElems, 8)
+
+def StablehloScatterOptionsAddIndexVectorDim(builder, indexVectorDim):
+    builder.PrependInt64Slot(4, indexVectorDim, 0)
+
+def StablehloScatterOptionsAddUniqueIndices(builder, uniqueIndices):
+    builder.PrependBoolSlot(5, uniqueIndices, 0)
+
+def StablehloScatterOptionsAddUpdateComputationSubgraphIndex(builder, updateComputationSubgraphIndex):
+    builder.PrependInt32Slot(6, updateComputationSubgraphIndex, 0)
+
+def StablehloScatterOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class StablehloScatterOptionsT(object):
+
+    # StablehloScatterOptionsT
+    def __init__(self):
+        self.indicesAreSorted = False # type: bool
+        self.updateWindowDims = None # type: List[int]
+        self.insertedWindowDims = None # type: List[int]
+        self.scatterDimsToOperandDims = None # type: List[int]
+        self.indexVectorDim = 0 # type: int
+        self.uniqueIndices = False # type: bool
+        self.updateComputationSubgraphIndex = 0 # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloScatterOptions = StablehloScatterOptions()
+        stablehloScatterOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloScatterOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloScatterOptions):
+        x = StablehloScatterOptionsT()
+        x._UnPack(stablehloScatterOptions)
+        return x
+
+    # StablehloScatterOptionsT
+    def _UnPack(self, stablehloScatterOptions):
+        if stablehloScatterOptions is None:
+            return
+        self.indicesAreSorted = stablehloScatterOptions.IndicesAreSorted()
+        if not stablehloScatterOptions.UpdateWindowDimsIsNone():
+            if np is None:
+                self.updateWindowDims = []
+                for i in range(stablehloScatterOptions.UpdateWindowDimsLength()):
+                    self.updateWindowDims.append(stablehloScatterOptions.UpdateWindowDims(i))
+            else:
+                self.updateWindowDims = stablehloScatterOptions.UpdateWindowDimsAsNumpy()
+        if not stablehloScatterOptions.InsertedWindowDimsIsNone():
+            if np is None:
+                self.insertedWindowDims = []
+                for i in range(stablehloScatterOptions.InsertedWindowDimsLength()):
+                    self.insertedWindowDims.append(stablehloScatterOptions.InsertedWindowDims(i))
+            else:
+                self.insertedWindowDims = stablehloScatterOptions.InsertedWindowDimsAsNumpy()
+        if not stablehloScatterOptions.ScatterDimsToOperandDimsIsNone():
+            if np is None:
+                self.scatterDimsToOperandDims = []
+                for i in range(stablehloScatterOptions.ScatterDimsToOperandDimsLength()):
+                    self.scatterDimsToOperandDims.append(stablehloScatterOptions.ScatterDimsToOperandDims(i))
+            else:
+                self.scatterDimsToOperandDims = stablehloScatterOptions.ScatterDimsToOperandDimsAsNumpy()
+        self.indexVectorDim = stablehloScatterOptions.IndexVectorDim()
+        self.uniqueIndices = stablehloScatterOptions.UniqueIndices()
+        self.updateComputationSubgraphIndex = stablehloScatterOptions.UpdateComputationSubgraphIndex()
+
+    # StablehloScatterOptionsT
+    def Pack(self, builder):
+        if self.updateWindowDims is not None:
+            if np is not None and type(self.updateWindowDims) is np.ndarray:
+                updateWindowDims = builder.CreateNumpyVector(self.updateWindowDims)
+            else:
+                StablehloScatterOptionsStartUpdateWindowDimsVector(builder, len(self.updateWindowDims))
+                for i in reversed(range(len(self.updateWindowDims))):
+                    builder.PrependInt64(self.updateWindowDims[i])
+                updateWindowDims = builder.EndVector()
+        if self.insertedWindowDims is not None:
+            if np is not None and type(self.insertedWindowDims) is np.ndarray:
+                insertedWindowDims = builder.CreateNumpyVector(self.insertedWindowDims)
+            else:
+                StablehloScatterOptionsStartInsertedWindowDimsVector(builder, len(self.insertedWindowDims))
+                for i in reversed(range(len(self.insertedWindowDims))):
+                    builder.PrependInt64(self.insertedWindowDims[i])
+                insertedWindowDims = builder.EndVector()
+        if self.scatterDimsToOperandDims is not None:
+            if np is not None and type(self.scatterDimsToOperandDims) is np.ndarray:
+                scatterDimsToOperandDims = builder.CreateNumpyVector(self.scatterDimsToOperandDims)
+            else:
+                StablehloScatterOptionsStartScatterDimsToOperandDimsVector(builder, len(self.scatterDimsToOperandDims))
+                for i in reversed(range(len(self.scatterDimsToOperandDims))):
+                    builder.PrependInt64(self.scatterDimsToOperandDims[i])
+                scatterDimsToOperandDims = builder.EndVector()
+        StablehloScatterOptionsStart(builder)
+        StablehloScatterOptionsAddIndicesAreSorted(builder, self.indicesAreSorted)
+        if self.updateWindowDims is not None:
+            StablehloScatterOptionsAddUpdateWindowDims(builder, updateWindowDims)
+        if self.insertedWindowDims is not None:
+            StablehloScatterOptionsAddInsertedWindowDims(builder, insertedWindowDims)
+        if self.scatterDimsToOperandDims is not None:
+            StablehloScatterOptionsAddScatterDimsToOperandDims(builder, scatterDimsToOperandDims)
+        StablehloScatterOptionsAddIndexVectorDim(builder, self.indexVectorDim)
+        StablehloScatterOptionsAddUniqueIndices(builder, self.uniqueIndices)
+        StablehloScatterOptionsAddUpdateComputationSubgraphIndex(builder, self.updateComputationSubgraphIndex)
+        stablehloScatterOptions = StablehloScatterOptionsEnd(builder)
+        return stablehloScatterOptions
+
+
+class StablehloRngBitGeneratorOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = StablehloRngBitGeneratorOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsStablehloRngBitGeneratorOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def StablehloRngBitGeneratorOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # StablehloRngBitGeneratorOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # StablehloRngBitGeneratorOptions
+    def Algorithm(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+def StablehloRngBitGeneratorOptionsStart(builder):
+    builder.StartObject(1)
+
+def StablehloRngBitGeneratorOptionsAddAlgorithm(builder, algorithm):
+    builder.PrependInt8Slot(0, algorithm, 0)
+
+def StablehloRngBitGeneratorOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class StablehloRngBitGeneratorOptionsT(object):
+
+    # StablehloRngBitGeneratorOptionsT
+    def __init__(self):
+        self.algorithm = 0 # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        stablehloRngBitGeneratorOptions = StablehloRngBitGeneratorOptions()
+        stablehloRngBitGeneratorOptions.Init(buf, pos)
+        return cls.InitFromObj(stablehloRngBitGeneratorOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, stablehloRngBitGeneratorOptions):
+        x = StablehloRngBitGeneratorOptionsT()
+        x._UnPack(stablehloRngBitGeneratorOptions)
+        return x
+
+    # StablehloRngBitGeneratorOptionsT
+    def _UnPack(self, stablehloRngBitGeneratorOptions):
+        if stablehloRngBitGeneratorOptions is None:
+            return
+        self.algorithm = stablehloRngBitGeneratorOptions.Algorithm()
+
+    # StablehloRngBitGeneratorOptionsT
+    def Pack(self, builder):
+        StablehloRngBitGeneratorOptionsStart(builder)
+        StablehloRngBitGeneratorOptionsAddAlgorithm(builder, self.algorithm)
+        stablehloRngBitGeneratorOptions = StablehloRngBitGeneratorOptionsEnd(builder)
+        return stablehloRngBitGeneratorOptions
+
+
+class Conv2DOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = Conv2DOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsConv2DOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def Conv2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # Conv2DOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # Conv2DOptions
+    def Padding(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv2DOptions
+    def StrideW(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv2DOptions
+    def StrideH(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv2DOptions
+    def FusedActivationFunction(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv2DOptions
+    def DilationWFactor(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 1
+
+    # Conv2DOptions
+    def DilationHFactor(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 1
+
+    # Conv2DOptions
+    def QuantizedBiasType(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+def Conv2DOptionsStart(builder):
+    builder.StartObject(7)
+
+def Conv2DOptionsAddPadding(builder, padding):
+    builder.PrependInt8Slot(0, padding, 0)
+
+def Conv2DOptionsAddStrideW(builder, strideW):
+    builder.PrependInt32Slot(1, strideW, 0)
+
+def Conv2DOptionsAddStrideH(builder, strideH):
+    builder.PrependInt32Slot(2, strideH, 0)
+
+def Conv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(3, fusedActivationFunction, 0)
+
+def Conv2DOptionsAddDilationWFactor(builder, dilationWFactor):
+    builder.PrependInt32Slot(4, dilationWFactor, 1)
+
+def Conv2DOptionsAddDilationHFactor(builder, dilationHFactor):
+    builder.PrependInt32Slot(5, dilationHFactor, 1)
+
+def Conv2DOptionsAddQuantizedBiasType(builder, quantizedBiasType):
+    builder.PrependInt8Slot(6, quantizedBiasType, 0)
+
+def Conv2DOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class Conv2DOptionsT(object):
+
+    # Conv2DOptionsT
+    def __init__(self):
+        self.padding = 0 # type: int
+        self.strideW = 0 # type: int
+        self.strideH = 0 # type: int
+        self.fusedActivationFunction = 0 # type: int
+        self.dilationWFactor = 1 # type: int
+        self.dilationHFactor = 1 # type: int
+        self.quantizedBiasType = 0 # type: int
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        conv2Doptions = Conv2DOptions()
+        conv2Doptions.Init(buf, pos)
+        return cls.InitFromObj(conv2Doptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, conv2Doptions):
+        x = Conv2DOptionsT()
+        x._UnPack(conv2Doptions)
+        return x
+
+    # Conv2DOptionsT
+    def _UnPack(self, conv2Doptions):
+        if conv2Doptions is None:
+            return
+        self.padding = conv2Doptions.Padding()
+        self.strideW = conv2Doptions.StrideW()
+        self.strideH = conv2Doptions.StrideH()
+        self.fusedActivationFunction = conv2Doptions.FusedActivationFunction()
+        self.dilationWFactor = conv2Doptions.DilationWFactor()
+        self.dilationHFactor = conv2Doptions.DilationHFactor()
+        self.quantizedBiasType = conv2Doptions.QuantizedBiasType()
+
+    # Conv2DOptionsT
+    def Pack(self, builder):
+        Conv2DOptionsStart(builder)
+        Conv2DOptionsAddPadding(builder, self.padding)
+        Conv2DOptionsAddStrideW(builder, self.strideW)
+        Conv2DOptionsAddStrideH(builder, self.strideH)
+        Conv2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        Conv2DOptionsAddDilationWFactor(builder, self.dilationWFactor)
+        Conv2DOptionsAddDilationHFactor(builder, self.dilationHFactor)
+        Conv2DOptionsAddQuantizedBiasType(builder, self.quantizedBiasType)
+        conv2Doptions = Conv2DOptionsEnd(builder)
+        return conv2Doptions
+
+
+class Conv3DOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = Conv3DOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsConv3DOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def Conv3DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # Conv3DOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+    # Conv3DOptions
+    def Padding(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv3DOptions
+    def StrideD(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv3DOptions
+    def StrideW(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv3DOptions
+    def StrideH(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv3DOptions
+    def FusedActivationFunction(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+    # Conv3DOptions
+    def DilationDFactor(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 1
+
+    # Conv3DOptions
+    def DilationWFactor(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 1
+
+    # Conv3DOptions
+    def DilationHFactor(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags,
o + self._tab.Pos) + return 1 + +def Conv3DOptionsStart(builder): + builder.StartObject(8) + +def Conv3DOptionsAddPadding(builder, padding): + builder.PrependInt8Slot(0, padding, 0) + +def Conv3DOptionsAddStrideD(builder, strideD): + builder.PrependInt32Slot(1, strideD, 0) + +def Conv3DOptionsAddStrideW(builder, strideW): + builder.PrependInt32Slot(2, strideW, 0) + +def Conv3DOptionsAddStrideH(builder, strideH): + builder.PrependInt32Slot(3, strideH, 0) + +def Conv3DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(4, fusedActivationFunction, 0) + +def Conv3DOptionsAddDilationDFactor(builder, dilationDFactor): + builder.PrependInt32Slot(5, dilationDFactor, 1) + +def Conv3DOptionsAddDilationWFactor(builder, dilationWFactor): + builder.PrependInt32Slot(6, dilationWFactor, 1) + +def Conv3DOptionsAddDilationHFactor(builder, dilationHFactor): + builder.PrependInt32Slot(7, dilationHFactor, 1) + +def Conv3DOptionsEnd(builder): + return builder.EndObject() + + + +class Conv3DOptionsT(object): + + # Conv3DOptionsT + def __init__(self): + self.padding = 0 # type: int + self.strideD = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.dilationDFactor = 1 # type: int + self.dilationWFactor = 1 # type: int + self.dilationHFactor = 1 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + conv3Doptions = Conv3DOptions() + conv3Doptions.Init(buf, pos) + return cls.InitFromObj(conv3Doptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, conv3Doptions): + x = Conv3DOptionsT() + x._UnPack(conv3Doptions) + return x + + # Conv3DOptionsT + def _UnPack(self, conv3Doptions): + if conv3Doptions is None: + return + self.padding = conv3Doptions.Padding() + self.strideD = conv3Doptions.StrideD() + self.strideW = conv3Doptions.StrideW() + self.strideH = conv3Doptions.StrideH() + self.fusedActivationFunction = conv3Doptions.FusedActivationFunction() + self.dilationDFactor = conv3Doptions.DilationDFactor() + self.dilationWFactor = conv3Doptions.DilationWFactor() + self.dilationHFactor = conv3Doptions.DilationHFactor() + + # Conv3DOptionsT + def Pack(self, builder): + Conv3DOptionsStart(builder) + Conv3DOptionsAddPadding(builder, self.padding) + Conv3DOptionsAddStrideD(builder, self.strideD) + Conv3DOptionsAddStrideW(builder, self.strideW) + Conv3DOptionsAddStrideH(builder, self.strideH) + Conv3DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + Conv3DOptionsAddDilationDFactor(builder, self.dilationDFactor) + Conv3DOptionsAddDilationWFactor(builder, self.dilationWFactor) + Conv3DOptionsAddDilationHFactor(builder, self.dilationHFactor) + conv3Doptions = Conv3DOptionsEnd(builder) + return conv3Doptions + + +class Pool2DOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = Pool2DOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsPool2DOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def Pool2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # Pool2DOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # Pool2DOptions + def Padding(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def StrideW(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def StrideH(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def FilterWidth(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def FilterHeight(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # Pool2DOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def Pool2DOptionsStart(builder): + builder.StartObject(6) + +def Pool2DOptionsAddPadding(builder, padding): + builder.PrependInt8Slot(0, padding, 0) + +def Pool2DOptionsAddStrideW(builder, strideW): + builder.PrependInt32Slot(1, strideW, 0) + +def Pool2DOptionsAddStrideH(builder, strideH): + builder.PrependInt32Slot(2, strideH, 0) + +def Pool2DOptionsAddFilterWidth(builder, filterWidth): + builder.PrependInt32Slot(3, filterWidth, 0) + +def Pool2DOptionsAddFilterHeight(builder, filterHeight): + builder.PrependInt32Slot(4, filterHeight, 0) + +def Pool2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(5, fusedActivationFunction, 0) + +def Pool2DOptionsEnd(builder): + return builder.EndObject() + + + +class Pool2DOptionsT(object): + + # Pool2DOptionsT + def __init__(self): + self.padding = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.filterWidth = 0 # type: int + self.filterHeight = 0 # type: int + self.fusedActivationFunction = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + pool2Doptions = Pool2DOptions() + pool2Doptions.Init(buf, pos) + return cls.InitFromObj(pool2Doptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, pool2Doptions): + x = Pool2DOptionsT() + x._UnPack(pool2Doptions) + return x + + # Pool2DOptionsT + def _UnPack(self, pool2Doptions): + if pool2Doptions is None: + return + self.padding = pool2Doptions.Padding() + self.strideW = pool2Doptions.StrideW() + self.strideH = pool2Doptions.StrideH() + self.filterWidth = pool2Doptions.FilterWidth() + self.filterHeight = pool2Doptions.FilterHeight() + self.fusedActivationFunction = 
pool2Doptions.FusedActivationFunction() + + # Pool2DOptionsT + def Pack(self, builder): + Pool2DOptionsStart(builder) + Pool2DOptionsAddPadding(builder, self.padding) + Pool2DOptionsAddStrideW(builder, self.strideW) + Pool2DOptionsAddStrideH(builder, self.strideH) + Pool2DOptionsAddFilterWidth(builder, self.filterWidth) + Pool2DOptionsAddFilterHeight(builder, self.filterHeight) + Pool2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + pool2Doptions = Pool2DOptionsEnd(builder) + return pool2Doptions + + +class DepthwiseConv2DOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = DepthwiseConv2DOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsDepthwiseConv2DOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def DepthwiseConv2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # DepthwiseConv2DOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # DepthwiseConv2DOptions + def Padding(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def StrideW(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def StrideH(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def DepthMultiplier(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # DepthwiseConv2DOptions + def DilationWFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + + # DepthwiseConv2DOptions + def DilationHFactor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + +def DepthwiseConv2DOptionsStart(builder): + builder.StartObject(7) + +def DepthwiseConv2DOptionsAddPadding(builder, padding): + builder.PrependInt8Slot(0, padding, 0) + +def DepthwiseConv2DOptionsAddStrideW(builder, strideW): + builder.PrependInt32Slot(1, strideW, 0) + +def DepthwiseConv2DOptionsAddStrideH(builder, strideH): + builder.PrependInt32Slot(2, strideH, 0) + +def DepthwiseConv2DOptionsAddDepthMultiplier(builder, depthMultiplier): + builder.PrependInt32Slot(3, depthMultiplier, 0) + +def DepthwiseConv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(4, fusedActivationFunction, 0) 
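Every options table generated in this file follows the same flat builder pattern: call the table's Start helper, the Add* slot setters for any non-default fields, then End to obtain the table offset. A minimal usage sketch for the Conv2DOptions helpers defined above — illustrative only, assuming the flatbuffers pip package is installed and this generated module's names are in scope:

import flatbuffers

builder = flatbuffers.Builder(0)
Conv2DOptionsStart(builder)
Conv2DOptionsAddPadding(builder, 0)  # 0 = Padding.SAME in the TFLite schema
Conv2DOptionsAddStrideW(builder, 2)
Conv2DOptionsAddStrideH(builder, 2)
# Dilation factors default to 1; PrependInt32Slot skips values that equal
# the declared default, so unset fields cost nothing in the buffer.
opts = Conv2DOptionsEnd(builder)
builder.Finish(opts)

flat = Conv2DOptions.GetRootAs(builder.Output(), 0)
assert flat.StrideW() == 2 and flat.DilationWFactor() == 1

Note that the reader methods return the schema default (here DilationWFactor() == 1) when a slot is absent from the buffer, which is why the generated *T object-API classes initialize their attributes to those same defaults.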
+ +def DepthwiseConv2DOptionsAddDilationWFactor(builder, dilationWFactor): + builder.PrependInt32Slot(5, dilationWFactor, 1) + +def DepthwiseConv2DOptionsAddDilationHFactor(builder, dilationHFactor): + builder.PrependInt32Slot(6, dilationHFactor, 1) + +def DepthwiseConv2DOptionsEnd(builder): + return builder.EndObject() + + + +class DepthwiseConv2DOptionsT(object): + + # DepthwiseConv2DOptionsT + def __init__(self): + self.padding = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.depthMultiplier = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.dilationWFactor = 1 # type: int + self.dilationHFactor = 1 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + depthwiseConv2Doptions = DepthwiseConv2DOptions() + depthwiseConv2Doptions.Init(buf, pos) + return cls.InitFromObj(depthwiseConv2Doptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, depthwiseConv2Doptions): + x = DepthwiseConv2DOptionsT() + x._UnPack(depthwiseConv2Doptions) + return x + + # DepthwiseConv2DOptionsT + def _UnPack(self, depthwiseConv2Doptions): + if depthwiseConv2Doptions is None: + return + self.padding = depthwiseConv2Doptions.Padding() + self.strideW = depthwiseConv2Doptions.StrideW() + self.strideH = depthwiseConv2Doptions.StrideH() + self.depthMultiplier = depthwiseConv2Doptions.DepthMultiplier() + self.fusedActivationFunction = depthwiseConv2Doptions.FusedActivationFunction() + self.dilationWFactor = depthwiseConv2Doptions.DilationWFactor() + self.dilationHFactor = depthwiseConv2Doptions.DilationHFactor() + + # DepthwiseConv2DOptionsT + def Pack(self, builder): + DepthwiseConv2DOptionsStart(builder) + DepthwiseConv2DOptionsAddPadding(builder, self.padding) + DepthwiseConv2DOptionsAddStrideW(builder, self.strideW) + DepthwiseConv2DOptionsAddStrideH(builder, self.strideH) + DepthwiseConv2DOptionsAddDepthMultiplier(builder, self.depthMultiplier) + DepthwiseConv2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + DepthwiseConv2DOptionsAddDilationWFactor(builder, self.dilationWFactor) + DepthwiseConv2DOptionsAddDilationHFactor(builder, self.dilationHFactor) + depthwiseConv2Doptions = DepthwiseConv2DOptionsEnd(builder) + return depthwiseConv2Doptions + + +class ConcatEmbeddingsOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ConcatEmbeddingsOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsConcatEmbeddingsOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ConcatEmbeddingsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ConcatEmbeddingsOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # ConcatEmbeddingsOptions + def NumChannels(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannel(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannelAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannelLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ConcatEmbeddingsOptions + def NumColumnsPerChannelIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannel(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannelAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannelLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # ConcatEmbeddingsOptions + def EmbeddingDimPerChannelIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + +def ConcatEmbeddingsOptionsStart(builder): + builder.StartObject(3) + +def ConcatEmbeddingsOptionsAddNumChannels(builder, numChannels): + builder.PrependInt32Slot(0, numChannels, 0) + +def ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, numColumnsPerChannel): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(numColumnsPerChannel), 0) + +def ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(embeddingDimPerChannel), 0) + +def ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def ConcatEmbeddingsOptionsEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class ConcatEmbeddingsOptionsT(object): + + # ConcatEmbeddingsOptionsT + def __init__(self): + self.numChannels = 0 # type: 
int + self.numColumnsPerChannel = None # type: List[int] + self.embeddingDimPerChannel = None # type: List[int] + + @classmethod + def InitFromBuf(cls, buf, pos): + concatEmbeddingsOptions = ConcatEmbeddingsOptions() + concatEmbeddingsOptions.Init(buf, pos) + return cls.InitFromObj(concatEmbeddingsOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, concatEmbeddingsOptions): + x = ConcatEmbeddingsOptionsT() + x._UnPack(concatEmbeddingsOptions) + return x + + # ConcatEmbeddingsOptionsT + def _UnPack(self, concatEmbeddingsOptions): + if concatEmbeddingsOptions is None: + return + self.numChannels = concatEmbeddingsOptions.NumChannels() + if not concatEmbeddingsOptions.NumColumnsPerChannelIsNone(): + if np is None: + self.numColumnsPerChannel = [] + for i in range(concatEmbeddingsOptions.NumColumnsPerChannelLength()): + self.numColumnsPerChannel.append(concatEmbeddingsOptions.NumColumnsPerChannel(i)) + else: + self.numColumnsPerChannel = concatEmbeddingsOptions.NumColumnsPerChannelAsNumpy() + if not concatEmbeddingsOptions.EmbeddingDimPerChannelIsNone(): + if np is None: + self.embeddingDimPerChannel = [] + for i in range(concatEmbeddingsOptions.EmbeddingDimPerChannelLength()): + self.embeddingDimPerChannel.append(concatEmbeddingsOptions.EmbeddingDimPerChannel(i)) + else: + self.embeddingDimPerChannel = concatEmbeddingsOptions.EmbeddingDimPerChannelAsNumpy() + + # ConcatEmbeddingsOptionsT + def Pack(self, builder): + if self.numColumnsPerChannel is not None: + if np is not None and type(self.numColumnsPerChannel) is np.ndarray: + numColumnsPerChannel = builder.CreateNumpyVector(self.numColumnsPerChannel) + else: + ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, len(self.numColumnsPerChannel)) + for i in reversed(range(len(self.numColumnsPerChannel))): + builder.PrependInt32(self.numColumnsPerChannel[i]) + numColumnsPerChannel = builder.EndVector() + if self.embeddingDimPerChannel is not None: + if np is not None and type(self.embeddingDimPerChannel) is np.ndarray: + embeddingDimPerChannel = builder.CreateNumpyVector(self.embeddingDimPerChannel) + else: + ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, len(self.embeddingDimPerChannel)) + for i in reversed(range(len(self.embeddingDimPerChannel))): + builder.PrependInt32(self.embeddingDimPerChannel[i]) + embeddingDimPerChannel = builder.EndVector() + ConcatEmbeddingsOptionsStart(builder) + ConcatEmbeddingsOptionsAddNumChannels(builder, self.numChannels) + if self.numColumnsPerChannel is not None: + ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, numColumnsPerChannel) + if self.embeddingDimPerChannel is not None: + ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel) + concatEmbeddingsOptions = ConcatEmbeddingsOptionsEnd(builder) + return concatEmbeddingsOptions + + +class LSHProjectionOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = LSHProjectionOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsLSHProjectionOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def LSHProjectionOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # LSHProjectionOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # LSHProjectionOptions + def Type(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def LSHProjectionOptionsStart(builder): + builder.StartObject(1) + +def LSHProjectionOptionsAddType(builder, type): + builder.PrependInt8Slot(0, type, 0) + +def LSHProjectionOptionsEnd(builder): + return builder.EndObject() + + + +class LSHProjectionOptionsT(object): + + # LSHProjectionOptionsT + def __init__(self): + self.type = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + lshprojectionOptions = LSHProjectionOptions() + lshprojectionOptions.Init(buf, pos) + return cls.InitFromObj(lshprojectionOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, lshprojectionOptions): + x = LSHProjectionOptionsT() + x._UnPack(lshprojectionOptions) + return x + + # LSHProjectionOptionsT + def _UnPack(self, lshprojectionOptions): + if lshprojectionOptions is None: + return + self.type = lshprojectionOptions.Type() + + # LSHProjectionOptionsT + def Pack(self, builder): + LSHProjectionOptionsStart(builder) + LSHProjectionOptionsAddType(builder, self.type) + lshprojectionOptions = LSHProjectionOptionsEnd(builder) + return lshprojectionOptions + + +class SVDFOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SVDFOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSVDFOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SVDFOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SVDFOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SVDFOptions + def Rank(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # SVDFOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # SVDFOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def SVDFOptionsStart(builder): + builder.StartObject(3) + +def SVDFOptionsAddRank(builder, rank): + builder.PrependInt32Slot(0, rank, 0) -class CallOnceOptions(object): +def SVDFOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(1, fusedActivationFunction, 0) + +def SVDFOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) + +def SVDFOptionsEnd(builder): + return builder.EndObject() + + + +class SVDFOptionsT(object): + + # SVDFOptionsT + def __init__(self): + self.rank = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + svdfoptions = SVDFOptions() + svdfoptions.Init(buf, pos) + return cls.InitFromObj(svdfoptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, svdfoptions): + x = SVDFOptionsT() + x._UnPack(svdfoptions) + return x + + # SVDFOptionsT + def _UnPack(self, svdfoptions): + if svdfoptions is None: + return + self.rank = svdfoptions.Rank() + self.fusedActivationFunction = svdfoptions.FusedActivationFunction() + self.asymmetricQuantizeInputs = svdfoptions.AsymmetricQuantizeInputs() + + # SVDFOptionsT + def Pack(self, builder): + SVDFOptionsStart(builder) + SVDFOptionsAddRank(builder, self.rank) + SVDFOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + SVDFOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + svdfoptions = SVDFOptionsEnd(builder) + return svdfoptions + + +class RNNOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = CallOnceOptions() + x = RNNOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsCallOnceOptions(cls, buf, offset=0): + def GetRootAsRNNOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def CallOnceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def RNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # CallOnceOptions + # RNNOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # CallOnceOptions - def InitSubgraphIndex(self): + # RNNOptions + def FusedActivationFunction(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 -def CallOnceOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return CallOnceOptionsStart(builder) -def CallOnceOptionsAddInitSubgraphIndex(builder, initSubgraphIndex): builder.PrependInt32Slot(0, initSubgraphIndex, 0) -def AddInitSubgraphIndex(builder, initSubgraphIndex): - return CallOnceOptionsAddInitSubgraphIndex(builder, initSubgraphIndex) -def CallOnceOptionsEnd(builder): return builder.EndObject() -def End(builder): - return CallOnceOptionsEnd(builder) + # RNNOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def RNNOptionsStart(builder): + builder.StartObject(2) -class CallOnceOptionsT(object): +def RNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(0, fusedActivationFunction, 0) - # CallOnceOptionsT +def RNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + builder.PrependBoolSlot(1, asymmetricQuantizeInputs, 0) + +def RNNOptionsEnd(builder): + return builder.EndObject() + + + +class RNNOptionsT(object): + + # RNNOptionsT def __init__(self): - self.initSubgraphIndex = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.asymmetricQuantizeInputs = False # type: bool @classmethod def InitFromBuf(cls, buf, pos): - callOnceOptions = CallOnceOptions() - callOnceOptions.Init(buf, pos) - return cls.InitFromObj(callOnceOptions) + rnnoptions = RNNOptions() + rnnoptions.Init(buf, pos) + return cls.InitFromObj(rnnoptions) @classmethod - def InitFromObj(cls, callOnceOptions): - x = CallOnceOptionsT() - x._UnPack(callOnceOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, rnnoptions): + x = RNNOptionsT() + x._UnPack(rnnoptions) return x - # CallOnceOptionsT - def _UnPack(self, callOnceOptions): - if callOnceOptions is None: + # RNNOptionsT + def _UnPack(self, rnnoptions): + if rnnoptions is None: return - self.initSubgraphIndex = callOnceOptions.InitSubgraphIndex() + self.fusedActivationFunction = rnnoptions.FusedActivationFunction() + self.asymmetricQuantizeInputs = rnnoptions.AsymmetricQuantizeInputs() - # CallOnceOptionsT + # RNNOptionsT def Pack(self, builder): - CallOnceOptionsStart(builder) - CallOnceOptionsAddInitSubgraphIndex(builder, self.initSubgraphIndex) - callOnceOptions = CallOnceOptionsEnd(builder) - return callOnceOptions -# automatically generated by the FlatBuffers compiler, do not modify + RNNOptionsStart(builder) + RNNOptionsAddFusedActivationFunction(builder, 
self.fusedActivationFunction) + RNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + rnnoptions = RNNOptionsEnd(builder) + return rnnoptions -# namespace: tflite -from flatbuffers.compat import import_numpy -np = import_numpy() +class SequenceRNNOptions(object): + __slots__ = ['_tab'] -class CallOptions(object): + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SequenceRNNOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSequenceRNNOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SequenceRNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SequenceRNNOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # SequenceRNNOptions + def TimeMajor(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # SequenceRNNOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # SequenceRNNOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def SequenceRNNOptionsStart(builder): + builder.StartObject(3) + +def SequenceRNNOptionsAddTimeMajor(builder, timeMajor): + builder.PrependBoolSlot(0, timeMajor, 0) + +def SequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(1, fusedActivationFunction, 0) + +def SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) + +def SequenceRNNOptionsEnd(builder): + return builder.EndObject() + + + +class SequenceRNNOptionsT(object): + + # SequenceRNNOptionsT + def __init__(self): + self.timeMajor = False # type: bool + self.fusedActivationFunction = 0 # type: int + self.asymmetricQuantizeInputs = False # type: bool + + @classmethod + def InitFromBuf(cls, buf, pos): + sequenceRnnoptions = SequenceRNNOptions() + sequenceRnnoptions.Init(buf, pos) + return cls.InitFromObj(sequenceRnnoptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, sequenceRnnoptions): + x = SequenceRNNOptionsT() + x._UnPack(sequenceRnnoptions) + return x + + # SequenceRNNOptionsT + def _UnPack(self, sequenceRnnoptions): + if sequenceRnnoptions is None: + return + self.timeMajor = sequenceRnnoptions.TimeMajor() + self.fusedActivationFunction = sequenceRnnoptions.FusedActivationFunction() + self.asymmetricQuantizeInputs = sequenceRnnoptions.AsymmetricQuantizeInputs() + + # SequenceRNNOptionsT + def Pack(self, builder): + SequenceRNNOptionsStart(builder) + SequenceRNNOptionsAddTimeMajor(builder, self.timeMajor) + SequenceRNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + 
SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + sequenceRnnoptions = SequenceRNNOptionsEnd(builder) + return sequenceRnnoptions + + +class BidirectionalSequenceRNNOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = CallOptions() + x = BidirectionalSequenceRNNOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsCallOptions(cls, buf, offset=0): + def GetRootAsBidirectionalSequenceRNNOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def CallOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def BidirectionalSequenceRNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # CallOptions + # BidirectionalSequenceRNNOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # CallOptions - def Subgraph(self): + # BidirectionalSequenceRNNOptions + def TimeMajor(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # BidirectionalSequenceRNNOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 -def CallOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return CallOptionsStart(builder) -def CallOptionsAddSubgraph(builder, subgraph): builder.PrependUint32Slot(0, subgraph, 0) -def AddSubgraph(builder, subgraph): - return CallOptionsAddSubgraph(builder, subgraph) -def CallOptionsEnd(builder): return builder.EndObject() -def End(builder): - return CallOptionsEnd(builder) + # BidirectionalSequenceRNNOptions + def MergeOutputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # BidirectionalSequenceRNNOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + +def BidirectionalSequenceRNNOptionsStart(builder): + builder.StartObject(4) + +def BidirectionalSequenceRNNOptionsAddTimeMajor(builder, timeMajor): + builder.PrependBoolSlot(0, timeMajor, 0) + +def BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(1, fusedActivationFunction, 0) + +def BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, mergeOutputs): + builder.PrependBoolSlot(2, mergeOutputs, 0) + +def BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + builder.PrependBoolSlot(3, asymmetricQuantizeInputs, 0) + +def BidirectionalSequenceRNNOptionsEnd(builder): + return builder.EndObject() -class CallOptionsT(object): - # CallOptionsT + +class BidirectionalSequenceRNNOptionsT(object): + + # BidirectionalSequenceRNNOptionsT def __init__(self): - self.subgraph = 0 # type: int + self.timeMajor = False # type: bool + 
self.fusedActivationFunction = 0 # type: int + self.mergeOutputs = False # type: bool + self.asymmetricQuantizeInputs = False # type: bool @classmethod def InitFromBuf(cls, buf, pos): - callOptions = CallOptions() - callOptions.Init(buf, pos) - return cls.InitFromObj(callOptions) + bidirectionalSequenceRnnoptions = BidirectionalSequenceRNNOptions() + bidirectionalSequenceRnnoptions.Init(buf, pos) + return cls.InitFromObj(bidirectionalSequenceRnnoptions) @classmethod - def InitFromObj(cls, callOptions): - x = CallOptionsT() - x._UnPack(callOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, bidirectionalSequenceRnnoptions): + x = BidirectionalSequenceRNNOptionsT() + x._UnPack(bidirectionalSequenceRnnoptions) return x - # CallOptionsT - def _UnPack(self, callOptions): - if callOptions is None: + # BidirectionalSequenceRNNOptionsT + def _UnPack(self, bidirectionalSequenceRnnoptions): + if bidirectionalSequenceRnnoptions is None: return - self.subgraph = callOptions.Subgraph() + self.timeMajor = bidirectionalSequenceRnnoptions.TimeMajor() + self.fusedActivationFunction = bidirectionalSequenceRnnoptions.FusedActivationFunction() + self.mergeOutputs = bidirectionalSequenceRnnoptions.MergeOutputs() + self.asymmetricQuantizeInputs = bidirectionalSequenceRnnoptions.AsymmetricQuantizeInputs() - # CallOptionsT + # BidirectionalSequenceRNNOptionsT def Pack(self, builder): - CallOptionsStart(builder) - CallOptionsAddSubgraph(builder, self.subgraph) - callOptions = CallOptionsEnd(builder) - return callOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + BidirectionalSequenceRNNOptionsStart(builder) + BidirectionalSequenceRNNOptionsAddTimeMajor(builder, self.timeMajor) + BidirectionalSequenceRNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + BidirectionalSequenceRNNOptionsAddMergeOutputs(builder, self.mergeOutputs) + BidirectionalSequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + bidirectionalSequenceRnnoptions = BidirectionalSequenceRNNOptionsEnd(builder) + return bidirectionalSequenceRnnoptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class CastOptions(object): +class FullyConnectedOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = CastOptions() + x = FullyConnectedOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsCastOptions(cls, buf, offset=0): + def GetRootAsFullyConnectedOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def CastOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def FullyConnectedOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # CastOptions + # FullyConnectedOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # CastOptions - def InDataType(self): + # FullyConnectedOptions + def FusedActivationFunction(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - # CastOptions - def OutDataType(self): + # FullyConnectedOptions + def WeightsFormat(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 -def CastOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return CastOptionsStart(builder) -def CastOptionsAddInDataType(builder, inDataType): builder.PrependInt8Slot(0, inDataType, 0) -def AddInDataType(builder, inDataType): - return CastOptionsAddInDataType(builder, inDataType) -def CastOptionsAddOutDataType(builder, outDataType): builder.PrependInt8Slot(1, outDataType, 0) -def AddOutDataType(builder, outDataType): - return CastOptionsAddOutDataType(builder, outDataType) -def CastOptionsEnd(builder): return builder.EndObject() -def End(builder): - return CastOptionsEnd(builder) + # FullyConnectedOptions + def KeepNumDims(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False -class CastOptionsT(object): + # FullyConnectedOptions + def AsymmetricQuantizeInputs(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False - # CastOptionsT + # FullyConnectedOptions + def QuantizedBiasType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def FullyConnectedOptionsStart(builder): + builder.StartObject(5) + +def FullyConnectedOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(0, fusedActivationFunction, 0) + +def FullyConnectedOptionsAddWeightsFormat(builder, weightsFormat): + builder.PrependInt8Slot(1, weightsFormat, 0) + +def FullyConnectedOptionsAddKeepNumDims(builder, keepNumDims): + builder.PrependBoolSlot(2, keepNumDims, 0) + +def FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + builder.PrependBoolSlot(3, asymmetricQuantizeInputs, 0) + +def FullyConnectedOptionsAddQuantizedBiasType(builder, quantizedBiasType): + builder.PrependInt8Slot(4, quantizedBiasType, 0) + +def FullyConnectedOptionsEnd(builder): + return builder.EndObject() + + + +class FullyConnectedOptionsT(object): + + # FullyConnectedOptionsT def __init__(self): - self.inDataType = 0 # type: int - self.outDataType = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.weightsFormat = 0 # type: int + self.keepNumDims = False # type: bool + self.asymmetricQuantizeInputs = False # type: bool + self.quantizedBiasType = 0 # type: int 
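The *T object-API classes such as this one mirror the zero-copy accessor classes with plain mutable attributes: InitFromBuf/InitFromObj (defined just below) unpack a read-only table into such an object, and Pack serializes it back through the flat builder helpers. A rough round-trip sketch under the same assumptions as above (generated names in scope, flatbuffers installed):

import flatbuffers

opts = FullyConnectedOptionsT()        # starts from the schema defaults above
opts.keepNumDims = True

builder = flatbuffers.Builder(0)
builder.Finish(opts.Pack(builder))     # Pack writes the table; Finish roots it

# Reparse through the zero-copy reader, then back into a mutable object:
flat = FullyConnectedOptions.GetRootAs(builder.Output(), 0)
assert flat.KeepNumDims() is True
round_trip = FullyConnectedOptionsT.InitFromObj(flat)
assert round_trip.keepNumDims is True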
@classmethod def InitFromBuf(cls, buf, pos): - castOptions = CastOptions() - castOptions.Init(buf, pos) - return cls.InitFromObj(castOptions) + fullyConnectedOptions = FullyConnectedOptions() + fullyConnectedOptions.Init(buf, pos) + return cls.InitFromObj(fullyConnectedOptions) @classmethod - def InitFromObj(cls, castOptions): - x = CastOptionsT() - x._UnPack(castOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, fullyConnectedOptions): + x = FullyConnectedOptionsT() + x._UnPack(fullyConnectedOptions) return x - # CastOptionsT - def _UnPack(self, castOptions): - if castOptions is None: + # FullyConnectedOptionsT + def _UnPack(self, fullyConnectedOptions): + if fullyConnectedOptions is None: return - self.inDataType = castOptions.InDataType() - self.outDataType = castOptions.OutDataType() + self.fusedActivationFunction = fullyConnectedOptions.FusedActivationFunction() + self.weightsFormat = fullyConnectedOptions.WeightsFormat() + self.keepNumDims = fullyConnectedOptions.KeepNumDims() + self.asymmetricQuantizeInputs = fullyConnectedOptions.AsymmetricQuantizeInputs() + self.quantizedBiasType = fullyConnectedOptions.QuantizedBiasType() - # CastOptionsT + # FullyConnectedOptionsT def Pack(self, builder): - CastOptionsStart(builder) - CastOptionsAddInDataType(builder, self.inDataType) - CastOptionsAddOutDataType(builder, self.outDataType) - castOptions = CastOptionsEnd(builder) - return castOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class CombinerType(object): - SUM = 0 - MEAN = 1 - SQRTN = 2 -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + FullyConnectedOptionsStart(builder) + FullyConnectedOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + FullyConnectedOptionsAddWeightsFormat(builder, self.weightsFormat) + FullyConnectedOptionsAddKeepNumDims(builder, self.keepNumDims) + FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + FullyConnectedOptionsAddQuantizedBiasType(builder, self.quantizedBiasType) + fullyConnectedOptions = FullyConnectedOptionsEnd(builder) + return fullyConnectedOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ConcatEmbeddingsOptions(object): +class SoftmaxOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ConcatEmbeddingsOptions() + x = SoftmaxOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsConcatEmbeddingsOptions(cls, buf, offset=0): + def GetRootAsSoftmaxOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ConcatEmbeddingsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SoftmaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ConcatEmbeddingsOptions + # SoftmaxOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # ConcatEmbeddingsOptions - def NumChannels(self): + # SoftmaxOptions + def Beta(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # ConcatEmbeddingsOptions - def NumColumnsPerChannel(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # ConcatEmbeddingsOptions - def NumColumnsPerChannelAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # ConcatEmbeddingsOptions - def NumColumnsPerChannelLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # ConcatEmbeddingsOptions - def NumColumnsPerChannelIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - return o == 0 + return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) + return 0.0 - # ConcatEmbeddingsOptions - def EmbeddingDimPerChannel(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 +def SoftmaxOptionsStart(builder): + builder.StartObject(1) - # ConcatEmbeddingsOptions - def EmbeddingDimPerChannelAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 +def SoftmaxOptionsAddBeta(builder, beta): + builder.PrependFloat32Slot(0, beta, 0.0) - # ConcatEmbeddingsOptions - def EmbeddingDimPerChannelLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 +def SoftmaxOptionsEnd(builder): + return builder.EndObject() - # ConcatEmbeddingsOptions - def EmbeddingDimPerChannelIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - return o == 0 -def ConcatEmbeddingsOptionsStart(builder): builder.StartObject(3) -def Start(builder): - return ConcatEmbeddingsOptionsStart(builder) -def ConcatEmbeddingsOptionsAddNumChannels(builder, numChannels): builder.PrependInt32Slot(0, numChannels, 0) -def AddNumChannels(builder, numChannels): - return ConcatEmbeddingsOptionsAddNumChannels(builder, numChannels) -def ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, numColumnsPerChannel): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(numColumnsPerChannel), 0) -def AddNumColumnsPerChannel(builder, numColumnsPerChannel): - return ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, 
numColumnsPerChannel) -def ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartNumColumnsPerChannelVector(builder, numElems): - return ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, numElems) -def ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(embeddingDimPerChannel), 0) -def AddEmbeddingDimPerChannel(builder, embeddingDimPerChannel): - return ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel) -def ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartEmbeddingDimPerChannelVector(builder, numElems): - return ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, numElems) -def ConcatEmbeddingsOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ConcatEmbeddingsOptionsEnd(builder) -try: - from typing import List -except: - pass -class ConcatEmbeddingsOptionsT(object): +class SoftmaxOptionsT(object): - # ConcatEmbeddingsOptionsT + # SoftmaxOptionsT def __init__(self): - self.numChannels = 0 # type: int - self.numColumnsPerChannel = None # type: List[int] - self.embeddingDimPerChannel = None # type: List[int] + self.beta = 0.0 # type: float @classmethod def InitFromBuf(cls, buf, pos): - concatEmbeddingsOptions = ConcatEmbeddingsOptions() - concatEmbeddingsOptions.Init(buf, pos) - return cls.InitFromObj(concatEmbeddingsOptions) + softmaxOptions = SoftmaxOptions() + softmaxOptions.Init(buf, pos) + return cls.InitFromObj(softmaxOptions) @classmethod - def InitFromObj(cls, concatEmbeddingsOptions): - x = ConcatEmbeddingsOptionsT() - x._UnPack(concatEmbeddingsOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, softmaxOptions): + x = SoftmaxOptionsT() + x._UnPack(softmaxOptions) return x - # ConcatEmbeddingsOptionsT - def _UnPack(self, concatEmbeddingsOptions): - if concatEmbeddingsOptions is None: + # SoftmaxOptionsT + def _UnPack(self, softmaxOptions): + if softmaxOptions is None: return - self.numChannels = concatEmbeddingsOptions.NumChannels() - if not concatEmbeddingsOptions.NumColumnsPerChannelIsNone(): - if np is None: - self.numColumnsPerChannel = [] - for i in range(concatEmbeddingsOptions.NumColumnsPerChannelLength()): - self.numColumnsPerChannel.append(concatEmbeddingsOptions.NumColumnsPerChannel(i)) - else: - self.numColumnsPerChannel = concatEmbeddingsOptions.NumColumnsPerChannelAsNumpy() - if not concatEmbeddingsOptions.EmbeddingDimPerChannelIsNone(): - if np is None: - self.embeddingDimPerChannel = [] - for i in range(concatEmbeddingsOptions.EmbeddingDimPerChannelLength()): - self.embeddingDimPerChannel.append(concatEmbeddingsOptions.EmbeddingDimPerChannel(i)) - else: - self.embeddingDimPerChannel = concatEmbeddingsOptions.EmbeddingDimPerChannelAsNumpy() + self.beta = softmaxOptions.Beta() - # ConcatEmbeddingsOptionsT + # SoftmaxOptionsT def Pack(self, builder): - if self.numColumnsPerChannel is not None: - if np is not None and type(self.numColumnsPerChannel) is np.ndarray: - numColumnsPerChannel = builder.CreateNumpyVector(self.numColumnsPerChannel) - else: - ConcatEmbeddingsOptionsStartNumColumnsPerChannelVector(builder, len(self.numColumnsPerChannel)) - for i in 
reversed(range(len(self.numColumnsPerChannel))): - builder.PrependInt32(self.numColumnsPerChannel[i]) - numColumnsPerChannel = builder.EndVector() - if self.embeddingDimPerChannel is not None: - if np is not None and type(self.embeddingDimPerChannel) is np.ndarray: - embeddingDimPerChannel = builder.CreateNumpyVector(self.embeddingDimPerChannel) - else: - ConcatEmbeddingsOptionsStartEmbeddingDimPerChannelVector(builder, len(self.embeddingDimPerChannel)) - for i in reversed(range(len(self.embeddingDimPerChannel))): - builder.PrependInt32(self.embeddingDimPerChannel[i]) - embeddingDimPerChannel = builder.EndVector() - ConcatEmbeddingsOptionsStart(builder) - ConcatEmbeddingsOptionsAddNumChannels(builder, self.numChannels) - if self.numColumnsPerChannel is not None: - ConcatEmbeddingsOptionsAddNumColumnsPerChannel(builder, numColumnsPerChannel) - if self.embeddingDimPerChannel is not None: - ConcatEmbeddingsOptionsAddEmbeddingDimPerChannel(builder, embeddingDimPerChannel) - concatEmbeddingsOptions = ConcatEmbeddingsOptionsEnd(builder) - return concatEmbeddingsOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + SoftmaxOptionsStart(builder) + SoftmaxOptionsAddBeta(builder, self.beta) + softmaxOptions = SoftmaxOptionsEnd(builder) + return softmaxOptions -from flatbuffers.compat import import_numpy -np = import_numpy() class ConcatenationOptions(object): __slots__ = ['_tab'] @@ -2415,18 +7531,19 @@ def FusedActivationFunction(self): return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 -def ConcatenationOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return ConcatenationOptionsStart(builder) -def ConcatenationOptionsAddAxis(builder, axis): builder.PrependInt32Slot(0, axis, 0) -def AddAxis(builder, axis): - return ConcatenationOptionsAddAxis(builder, axis) -def ConcatenationOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return ConcatenationOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def ConcatenationOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ConcatenationOptionsEnd(builder) +def ConcatenationOptionsStart(builder): + builder.StartObject(2) + +def ConcatenationOptionsAddAxis(builder, axis): + builder.PrependInt32Slot(0, axis, 0) + +def ConcatenationOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(1, fusedActivationFunction, 0) + +def ConcatenationOptionsEnd(builder): + return builder.EndObject() + + class ConcatenationOptionsT(object): @@ -2441,6 +7558,11 @@ def InitFromBuf(cls, buf, pos): concatenationOptions.Init(buf, pos) return cls.InitFromObj(concatenationOptions) + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + @classmethod def InitFromObj(cls, concatenationOptions): x = ConcatenationOptionsT() @@ -2461,2020 +7583,1982 @@ def Pack(self, builder): ConcatenationOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) concatenationOptions = ConcatenationOptionsEnd(builder) return concatenationOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite -from flatbuffers.compat import import_numpy -np = import_numpy() -class Conv2DOptions(object): +class AddOptions(object): __slots__ = 
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class Conv2DOptions(object):
+class AddOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = Conv2DOptions()
+        x = AddOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsConv2DOptions(cls, buf, offset=0):
+    def GetRootAsAddOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def Conv2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def AddOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # Conv2DOptions
+    # AddOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-    # Conv2DOptions
-    def Padding(self):
+    # AddOptions
+    def FusedActivationFunction(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
         if o != 0:
             return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
         return 0
 
-    # Conv2DOptions
-    def StrideW(self):
+    # AddOptions
+    def PotScaleInt16(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
         if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return True
 
-    # Conv2DOptions
-    def StrideH(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+def AddOptionsStart(builder):
+    builder.StartObject(2)
 
-    # Conv2DOptions
-    def FusedActivationFunction(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
+def AddOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(0, fusedActivationFunction, 0)
 
-    # Conv2DOptions
-    def DilationWFactor(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
+def AddOptionsAddPotScaleInt16(builder, potScaleInt16):
+    builder.PrependBoolSlot(1, potScaleInt16, 1)
 
-    # Conv2DOptions
-    def DilationHFactor(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
+def AddOptionsEnd(builder):
+    return builder.EndObject()
 
-def Conv2DOptionsStart(builder): builder.StartObject(6)
-def Start(builder):
-    return Conv2DOptionsStart(builder)
-def Conv2DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0)
-def AddPadding(builder, padding):
-    return Conv2DOptionsAddPadding(builder, padding)
-def Conv2DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0)
-def AddStrideW(builder, strideW):
-    return Conv2DOptionsAddStrideW(builder, strideW)
-def Conv2DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0)
-def AddStrideH(builder, strideH):
-    return Conv2DOptionsAddStrideH(builder, strideH)
-def Conv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(3, fusedActivationFunction, 0)
-def AddFusedActivationFunction(builder, fusedActivationFunction):
-    return Conv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
-def Conv2DOptionsAddDilationWFactor(builder, dilationWFactor): builder.PrependInt32Slot(4, dilationWFactor, 1)
-def AddDilationWFactor(builder, dilationWFactor):
-    return Conv2DOptionsAddDilationWFactor(builder, dilationWFactor)
-def Conv2DOptionsAddDilationHFactor(builder, dilationHFactor): builder.PrependInt32Slot(5, dilationHFactor, 1)
-def AddDilationHFactor(builder, dilationHFactor):
-    return Conv2DOptionsAddDilationHFactor(builder, dilationHFactor)
-def Conv2DOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return Conv2DOptionsEnd(builder)
 
-class Conv2DOptionsT(object):
-
-    # Conv2DOptionsT
+class AddOptionsT(object):
+
+    # AddOptionsT
     def __init__(self):
-        self.padding = 0 # type: int
-        self.strideW = 0 # type: int
-        self.strideH = 0 # type: int
         self.fusedActivationFunction = 0 # type: int
-        self.dilationWFactor = 1 # type: int
-        self.dilationHFactor = 1 # type: int
+        self.potScaleInt16 = True # type: bool
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        conv2doptions = Conv2DOptions()
-        conv2doptions.Init(buf, pos)
-        return cls.InitFromObj(conv2doptions)
+        addOptions = AddOptions()
+        addOptions.Init(buf, pos)
+        return cls.InitFromObj(addOptions)
 
     @classmethod
-    def InitFromObj(cls, conv2doptions):
-        x = Conv2DOptionsT()
-        x._UnPack(conv2doptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, addOptions):
+        x = AddOptionsT()
+        x._UnPack(addOptions)
         return x
 
-    # Conv2DOptionsT
-    def _UnPack(self, conv2doptions):
-        if conv2doptions is None:
+    # AddOptionsT
+    def _UnPack(self, addOptions):
+        if addOptions is None:
             return
-        self.padding = conv2doptions.Padding()
-        self.strideW = conv2doptions.StrideW()
-        self.strideH = conv2doptions.StrideH()
-        self.fusedActivationFunction = conv2doptions.FusedActivationFunction()
-        self.dilationWFactor = conv2doptions.DilationWFactor()
-        self.dilationHFactor = conv2doptions.DilationHFactor()
+        self.fusedActivationFunction = addOptions.FusedActivationFunction()
+        self.potScaleInt16 = addOptions.PotScaleInt16()
 
-    # Conv2DOptionsT
+    # AddOptionsT
     def Pack(self, builder):
-        Conv2DOptionsStart(builder)
-        Conv2DOptionsAddPadding(builder, self.padding)
-        Conv2DOptionsAddStrideW(builder, self.strideW)
-        Conv2DOptionsAddStrideH(builder, self.strideH)
-        Conv2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
-        Conv2DOptionsAddDilationWFactor(builder, self.dilationWFactor)
-        Conv2DOptionsAddDilationHFactor(builder, self.dilationHFactor)
-        conv2doptions = Conv2DOptionsEnd(builder)
-        return conv2doptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        AddOptionsStart(builder)
+        AddOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        AddOptionsAddPotScaleInt16(builder, self.potScaleInt16)
+        addOptions = AddOptionsEnd(builder)
+        return addOptions
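[Illustrative sketch, not part of the generated diff.] AddOptions shows FlatBuffers default elision: PrependBoolSlot(1, potScaleInt16, 1) writes nothing when the value equals the schema default, and PotScaleInt16() returns True when the field is absent from the buffer. Under the same assumed module name:

import flatbuffers
import schema_py_generated as schema  # assumed module name

opts = schema.AddOptionsT()  # potScaleInt16 defaults to True
builder = flatbuffers.Builder(0)
builder.Finish(opts.Pack(builder))
decoded = schema.AddOptionsT.InitFromPackedBuf(builder.Output())
assert decoded.potScaleInt16  # absent field reads back as its default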
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class Conv3DOptions(object):
+class MulOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = Conv3DOptions()
+        x = MulOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsConv3DOptions(cls, buf, offset=0):
+    def GetRootAsMulOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def Conv3DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def MulOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # Conv3DOptions
+    # MulOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-    # Conv3DOptions
-    def Padding(self):
+    # MulOptions
+    def FusedActivationFunction(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
         if o != 0:
             return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
         return 0
 
-    # Conv3DOptions
-    def StrideD(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
-
-    # Conv3DOptions
-    def StrideW(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
-
-    # Conv3DOptions
-    def StrideH(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
-
-    # Conv3DOptions
-    def FusedActivationFunction(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
+def MulOptionsStart(builder):
+    builder.StartObject(1)
 
-    # Conv3DOptions
-    def DilationDFactor(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
+def MulOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(0, fusedActivationFunction, 0)
 
-    # Conv3DOptions
-    def DilationWFactor(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
+def MulOptionsEnd(builder):
+    return builder.EndObject()
 
-    # Conv3DOptions
-    def DilationHFactor(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
 
-def Conv3DOptionsStart(builder): builder.StartObject(8)
-def Start(builder):
-    return Conv3DOptionsStart(builder)
-def Conv3DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0)
-def AddPadding(builder, padding):
-    return Conv3DOptionsAddPadding(builder, padding)
-def Conv3DOptionsAddStrideD(builder, strideD): builder.PrependInt32Slot(1, strideD, 0)
-def AddStrideD(builder, strideD):
-    return Conv3DOptionsAddStrideD(builder, strideD)
-def Conv3DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(2, strideW, 0)
-def AddStrideW(builder, strideW):
-    return Conv3DOptionsAddStrideW(builder, strideW)
-def Conv3DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(3, strideH, 0)
-def AddStrideH(builder, strideH):
-    return Conv3DOptionsAddStrideH(builder, strideH)
-def Conv3DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(4, fusedActivationFunction, 0)
-def AddFusedActivationFunction(builder, fusedActivationFunction):
-    return Conv3DOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
-def Conv3DOptionsAddDilationDFactor(builder, dilationDFactor): builder.PrependInt32Slot(5, dilationDFactor, 1)
-def AddDilationDFactor(builder, dilationDFactor):
-    return Conv3DOptionsAddDilationDFactor(builder, dilationDFactor)
-def Conv3DOptionsAddDilationWFactor(builder, dilationWFactor): builder.PrependInt32Slot(6, dilationWFactor, 1)
-def AddDilationWFactor(builder, dilationWFactor):
-    return Conv3DOptionsAddDilationWFactor(builder, dilationWFactor)
-def Conv3DOptionsAddDilationHFactor(builder, dilationHFactor): builder.PrependInt32Slot(7, dilationHFactor, 1)
-def AddDilationHFactor(builder, dilationHFactor):
-    return Conv3DOptionsAddDilationHFactor(builder, dilationHFactor)
-def Conv3DOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return Conv3DOptionsEnd(builder)
 
-class Conv3DOptionsT(object):
+class MulOptionsT(object):
 
-    # Conv3DOptionsT
+    # MulOptionsT
     def __init__(self):
-        self.padding = 0 # type: int
-        self.strideD = 0 # type: int
-        self.strideW = 0 # type: int
-        self.strideH = 0 # type: int
         self.fusedActivationFunction = 0 # type: int
-        self.dilationDFactor = 1 # type: int
-        self.dilationWFactor = 1 # type: int
-        self.dilationHFactor = 1 # type: int
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        conv3doptions = Conv3DOptions()
-        conv3doptions.Init(buf, pos)
-        return cls.InitFromObj(conv3doptions)
+        mulOptions = MulOptions()
+        mulOptions.Init(buf, pos)
+        return cls.InitFromObj(mulOptions)
 
     @classmethod
-    def InitFromObj(cls, conv3doptions):
-        x = Conv3DOptionsT()
-        x._UnPack(conv3doptions)
-        return x
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
 
-    # Conv3DOptionsT
-    def _UnPack(self, conv3doptions):
-        if conv3doptions is None:
-            return
-        self.padding = conv3doptions.Padding()
-        self.strideD = conv3doptions.StrideD()
-        self.strideW = conv3doptions.StrideW()
-        self.strideH = conv3doptions.StrideH()
-        self.fusedActivationFunction = conv3doptions.FusedActivationFunction()
-        self.dilationDFactor = conv3doptions.DilationDFactor()
-        self.dilationWFactor = conv3doptions.DilationWFactor()
-        self.dilationHFactor = conv3doptions.DilationHFactor()
+    @classmethod
+    def InitFromObj(cls, mulOptions):
+        x = MulOptionsT()
+        x._UnPack(mulOptions)
+        return x
 
-    # Conv3DOptionsT
-    def Pack(self, builder):
-        Conv3DOptionsStart(builder)
-        Conv3DOptionsAddPadding(builder, self.padding)
-        Conv3DOptionsAddStrideD(builder, self.strideD)
-        Conv3DOptionsAddStrideW(builder, self.strideW)
-        Conv3DOptionsAddStrideH(builder, self.strideH)
-        Conv3DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
-        Conv3DOptionsAddDilationDFactor(builder, self.dilationDFactor)
-        Conv3DOptionsAddDilationWFactor(builder, self.dilationWFactor)
-        Conv3DOptionsAddDilationHFactor(builder, self.dilationHFactor)
-        conv3doptions = Conv3DOptionsEnd(builder)
-        return conv3doptions
-# automatically generated by the FlatBuffers compiler, do not modify
+    # MulOptionsT
+    def _UnPack(self, mulOptions):
+        if mulOptions is None:
+            return
+        self.fusedActivationFunction = mulOptions.FusedActivationFunction()
 
-# namespace: tflite
+    # MulOptionsT
+    def Pack(self, builder):
+        MulOptionsStart(builder)
+        MulOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        mulOptions = MulOptionsEnd(builder)
+        return mulOptions
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class CosOptions(object):
+class L2NormOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = CosOptions()
+        x = L2NormOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsCosOptions(cls, buf, offset=0):
+    def GetRootAsL2NormOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def CosOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def L2NormOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # CosOptions
+    # L2NormOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-def CosOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return CosOptionsStart(builder)
-def CosOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return CosOptionsEnd(builder)
+    # L2NormOptions
+    def FusedActivationFunction(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+def L2NormOptionsStart(builder):
+    builder.StartObject(1)
 
-class CosOptionsT(object):
+def L2NormOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(0, fusedActivationFunction, 0)
 
-    # CosOptionsT
+def L2NormOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class L2NormOptionsT(object):
+
+    # L2NormOptionsT
     def __init__(self):
-        pass
+        self.fusedActivationFunction = 0 # type: int
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        cosOptions = CosOptions()
-        cosOptions.Init(buf, pos)
-        return cls.InitFromObj(cosOptions)
+        l2NormOptions = L2NormOptions()
+        l2NormOptions.Init(buf, pos)
+        return cls.InitFromObj(l2NormOptions)
 
     @classmethod
-    def InitFromObj(cls, cosOptions):
-        x = CosOptionsT()
-        x._UnPack(cosOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, l2NormOptions):
+        x = L2NormOptionsT()
+        x._UnPack(l2NormOptions)
         return x
 
-    # CosOptionsT
-    def _UnPack(self, cosOptions):
-        if cosOptions is None:
+    # L2NormOptionsT
+    def _UnPack(self, l2NormOptions):
+        if l2NormOptions is None:
             return
+        self.fusedActivationFunction = l2NormOptions.FusedActivationFunction()
 
-    # CosOptionsT
+    # L2NormOptionsT
     def Pack(self, builder):
-        CosOptionsStart(builder)
-        cosOptions = CosOptionsEnd(builder)
-        return cosOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        L2NormOptionsStart(builder)
+        L2NormOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        l2NormOptions = L2NormOptionsEnd(builder)
+        return l2NormOptions
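[Illustrative sketch, not part of the generated diff.] Each table comes in two layers: a zero-copy reader (e.g. L2NormOptions) that resolves fields through the vtable on demand, and the object API (L2NormOptionsT) that copies everything into plain attributes. Crossing between them, same assumed module name:

import schema_py_generated as schema  # assumed module name

def activation_of(buf):
    table = schema.L2NormOptions.GetRootAs(buf, 0)  # zero-copy view
    obj = schema.L2NormOptionsT.InitFromObj(table)  # eager unpack
    return obj.fusedActivationFunction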
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class CumsumOptions(object):
+class LocalResponseNormalizationOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = CumsumOptions()
+        x = LocalResponseNormalizationOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsCumsumOptions(cls, buf, offset=0):
+    def GetRootAsLocalResponseNormalizationOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def CumsumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def LocalResponseNormalizationOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # CumsumOptions
+    # LocalResponseNormalizationOptions
    def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-    # CumsumOptions
-    def Exclusive(self):
+    # LocalResponseNormalizationOptions
+    def Radius(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
         if o != 0:
-            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
-        return False
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
 
-    # CumsumOptions
-    def Reverse(self):
+    # LocalResponseNormalizationOptions
+    def Bias(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
         if o != 0:
-            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
-        return False
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
+
+    # LocalResponseNormalizationOptions
+    def Alpha(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
 
-def CumsumOptionsStart(builder): builder.StartObject(2)
-def Start(builder):
-    return CumsumOptionsStart(builder)
-def CumsumOptionsAddExclusive(builder, exclusive): builder.PrependBoolSlot(0, exclusive, 0)
-def AddExclusive(builder, exclusive):
-    return CumsumOptionsAddExclusive(builder, exclusive)
-def CumsumOptionsAddReverse(builder, reverse): builder.PrependBoolSlot(1, reverse, 0)
-def AddReverse(builder, reverse):
-    return CumsumOptionsAddReverse(builder, reverse)
-def CumsumOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return CumsumOptionsEnd(builder)
+    # LocalResponseNormalizationOptions
+    def Beta(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
 
-class CumsumOptionsT(object):
+def LocalResponseNormalizationOptionsStart(builder):
+    builder.StartObject(4)
 
-    # CumsumOptionsT
+def LocalResponseNormalizationOptionsAddRadius(builder, radius):
+    builder.PrependInt32Slot(0, radius, 0)
+
+def LocalResponseNormalizationOptionsAddBias(builder, bias):
+    builder.PrependFloat32Slot(1, bias, 0.0)
+
+def LocalResponseNormalizationOptionsAddAlpha(builder, alpha):
+    builder.PrependFloat32Slot(2, alpha, 0.0)
+
+def LocalResponseNormalizationOptionsAddBeta(builder, beta):
+    builder.PrependFloat32Slot(3, beta, 0.0)
+
+def LocalResponseNormalizationOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class LocalResponseNormalizationOptionsT(object):
+
+    # LocalResponseNormalizationOptionsT
     def __init__(self):
-        self.exclusive = False # type: bool
-        self.reverse = False # type: bool
+        self.radius = 0 # type: int
+        self.bias = 0.0 # type: float
+        self.alpha = 0.0 # type: float
+        self.beta = 0.0 # type: float
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        cumsumOptions = CumsumOptions()
-        cumsumOptions.Init(buf, pos)
-        return cls.InitFromObj(cumsumOptions)
+        localResponseNormalizationOptions = LocalResponseNormalizationOptions()
+        localResponseNormalizationOptions.Init(buf, pos)
+        return cls.InitFromObj(localResponseNormalizationOptions)
 
     @classmethod
-    def InitFromObj(cls, cumsumOptions):
-        x = CumsumOptionsT()
-        x._UnPack(cumsumOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, localResponseNormalizationOptions):
+        x = LocalResponseNormalizationOptionsT()
+        x._UnPack(localResponseNormalizationOptions)
         return x
 
-    # CumsumOptionsT
-    def _UnPack(self, cumsumOptions):
-        if cumsumOptions is None:
+    # LocalResponseNormalizationOptionsT
+    def _UnPack(self, localResponseNormalizationOptions):
+        if localResponseNormalizationOptions is None:
             return
-        self.exclusive = cumsumOptions.Exclusive()
-        self.reverse = cumsumOptions.Reverse()
+        self.radius = localResponseNormalizationOptions.Radius()
+        self.bias = localResponseNormalizationOptions.Bias()
+        self.alpha = localResponseNormalizationOptions.Alpha()
+        self.beta = localResponseNormalizationOptions.Beta()
 
-    # CumsumOptionsT
+    # LocalResponseNormalizationOptionsT
     def Pack(self, builder):
-        CumsumOptionsStart(builder)
-        CumsumOptionsAddExclusive(builder, self.exclusive)
-        CumsumOptionsAddReverse(builder, self.reverse)
-        cumsumOptions = CumsumOptionsEnd(builder)
-        return cumsumOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
-
-class CustomOptionsFormat(object):
-    FLEXBUFFERS = 0
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        LocalResponseNormalizationOptionsStart(builder)
+        LocalResponseNormalizationOptionsAddRadius(builder, self.radius)
+        LocalResponseNormalizationOptionsAddBias(builder, self.bias)
+        LocalResponseNormalizationOptionsAddAlpha(builder, self.alpha)
+        LocalResponseNormalizationOptionsAddBeta(builder, self.beta)
+        localResponseNormalizationOptions = LocalResponseNormalizationOptionsEnd(builder)
+        return localResponseNormalizationOptions
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class CustomQuantization(object):
+class LSTMOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = CustomQuantization()
+        x = LSTMOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsCustomQuantization(cls, buf, offset=0):
+    def GetRootAsLSTMOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def CustomQuantizationBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def LSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # CustomQuantization
+    # LSTMOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-    # CustomQuantization
-    def Custom(self, j):
+    # LSTMOptions
+    def FusedActivationFunction(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
         if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
         return 0
 
-    # CustomQuantization
-    def CustomAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+    # LSTMOptions
+    def CellClip(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
         if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
-        return 0
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
 
-    # CustomQuantization
-    def CustomLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+    # LSTMOptions
+    def ProjClip(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
         if o != 0:
-            return self._tab.VectorLen(o)
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
+
+    # LSTMOptions
+    def KernelType(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
         return 0
 
-    # CustomQuantization
-    def CustomIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        return o == 0
+    # LSTMOptions
+    def AsymmetricQuantizeInputs(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
 
-def CustomQuantizationStart(builder): builder.StartObject(1)
-def Start(builder):
-    return CustomQuantizationStart(builder)
-def CustomQuantizationAddCustom(builder, custom): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(custom), 0)
-def AddCustom(builder, custom):
-    return CustomQuantizationAddCustom(builder, custom)
-def CustomQuantizationStartCustomVector(builder, numElems): return builder.StartVector(1, numElems, 1)
-def StartCustomVector(builder, numElems):
-    return CustomQuantizationStartCustomVector(builder, numElems)
-def CustomQuantizationEnd(builder): return builder.EndObject()
-def End(builder):
-    return CustomQuantizationEnd(builder)
-try:
-    from typing import List
-except:
-    pass
+def LSTMOptionsStart(builder):
+    builder.StartObject(5)
 
-class CustomQuantizationT(object):
+def LSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(0, fusedActivationFunction, 0)
 
-    # CustomQuantizationT
+def LSTMOptionsAddCellClip(builder, cellClip):
+    builder.PrependFloat32Slot(1, cellClip, 0.0)
+
+def LSTMOptionsAddProjClip(builder, projClip):
+    builder.PrependFloat32Slot(2, projClip, 0.0)
+
+def LSTMOptionsAddKernelType(builder, kernelType):
+    builder.PrependInt8Slot(3, kernelType, 0)
+
+def LSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs):
+    builder.PrependBoolSlot(4, asymmetricQuantizeInputs, 0)
+
+def LSTMOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class LSTMOptionsT(object):
+
+    # LSTMOptionsT
     def __init__(self):
-        self.custom = None # type: List[int]
+        self.fusedActivationFunction = 0 # type: int
+        self.cellClip = 0.0 # type: float
+        self.projClip = 0.0 # type: float
+        self.kernelType = 0 # type: int
+        self.asymmetricQuantizeInputs = False # type: bool
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        customQuantization = CustomQuantization()
-        customQuantization.Init(buf, pos)
-        return cls.InitFromObj(customQuantization)
+        lstmoptions = LSTMOptions()
+        lstmoptions.Init(buf, pos)
+        return cls.InitFromObj(lstmoptions)
 
     @classmethod
-    def InitFromObj(cls, customQuantization):
-        x = CustomQuantizationT()
-        x._UnPack(customQuantization)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, lstmoptions):
+        x = LSTMOptionsT()
+        x._UnPack(lstmoptions)
         return x
 
-    # CustomQuantizationT
-    def _UnPack(self, customQuantization):
-        if customQuantization is None:
+    # LSTMOptionsT
+    def _UnPack(self, lstmoptions):
+        if lstmoptions is None:
             return
-        if not customQuantization.CustomIsNone():
-            if np is None:
-                self.custom = []
-                for i in range(customQuantization.CustomLength()):
-                    self.custom.append(customQuantization.Custom(i))
-            else:
-                self.custom = customQuantization.CustomAsNumpy()
+        self.fusedActivationFunction = lstmoptions.FusedActivationFunction()
+        self.cellClip = lstmoptions.CellClip()
+        self.projClip = lstmoptions.ProjClip()
+        self.kernelType = lstmoptions.KernelType()
+        self.asymmetricQuantizeInputs = lstmoptions.AsymmetricQuantizeInputs()
 
-    # CustomQuantizationT
+    # LSTMOptionsT
     def Pack(self, builder):
-        if self.custom is not None:
-            if np is not None and type(self.custom) is np.ndarray:
-                custom = builder.CreateNumpyVector(self.custom)
-            else:
-                CustomQuantizationStartCustomVector(builder, len(self.custom))
-                for i in reversed(range(len(self.custom))):
-                    builder.PrependUint8(self.custom[i])
-                custom = builder.EndVector()
-        CustomQuantizationStart(builder)
-        if self.custom is not None:
-            CustomQuantizationAddCustom(builder, custom)
-        customQuantization = CustomQuantizationEnd(builder)
-        return customQuantization
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        LSTMOptionsStart(builder)
+        LSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        LSTMOptionsAddCellClip(builder, self.cellClip)
+        LSTMOptionsAddProjClip(builder, self.projClip)
+        LSTMOptionsAddKernelType(builder, self.kernelType)
+        LSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs)
+        lstmoptions = LSTMOptionsEnd(builder)
+        return lstmoptions
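[Illustrative sketch, not part of the generated diff.] Every reader above repeats one pattern: Offset(slot) consults the table's vtable and yields 0 when the field was never serialized, in which case the accessor returns the schema default instead of touching the buffer. The same logic written standalone (vtable slot 6 corresponds to LSTMOptions.cellClip):

import flatbuffers

def read_cell_clip(tab):
    # tab is a flatbuffers.table.Table positioned at an LSTMOptions table.
    o = flatbuffers.number_types.UOffsetTFlags.py_type(tab.Offset(6))
    if o != 0:
        return tab.Get(flatbuffers.number_types.Float32Flags, o + tab.Pos)
    return 0.0  # schema default when the field is absent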
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class DensifyOptions(object):
+class UnidirectionalSequenceLSTMOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = DensifyOptions()
+        x = UnidirectionalSequenceLSTMOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsDensifyOptions(cls, buf, offset=0):
+    def GetRootAsUnidirectionalSequenceLSTMOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def DensifyOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def UnidirectionalSequenceLSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # DensifyOptions
+    # UnidirectionalSequenceLSTMOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-def DensifyOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return DensifyOptionsStart(builder)
-def DensifyOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return DensifyOptionsEnd(builder)
+    # UnidirectionalSequenceLSTMOptions
+    def FusedActivationFunction(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+    # UnidirectionalSequenceLSTMOptions
+    def CellClip(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
 
-class DensifyOptionsT(object):
+    # UnidirectionalSequenceLSTMOptions
+    def ProjClip(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
 
-    # DensifyOptionsT
+    # UnidirectionalSequenceLSTMOptions
+    def TimeMajor(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+    # UnidirectionalSequenceLSTMOptions
+    def AsymmetricQuantizeInputs(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+    # UnidirectionalSequenceLSTMOptions
+    def DiagonalRecurrentTensors(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+def UnidirectionalSequenceLSTMOptionsStart(builder):
+    builder.StartObject(6)
+
+def UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(0, fusedActivationFunction, 0)
+
+def UnidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip):
+    builder.PrependFloat32Slot(1, cellClip, 0.0)
+
+def UnidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip):
+    builder.PrependFloat32Slot(2, projClip, 0.0)
+
+def UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor):
+    builder.PrependBoolSlot(3, timeMajor, 0)
+
+def UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs):
+    builder.PrependBoolSlot(4, asymmetricQuantizeInputs, 0)
+
+def UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, diagonalRecurrentTensors):
+    builder.PrependBoolSlot(5, diagonalRecurrentTensors, 0)
+
+def UnidirectionalSequenceLSTMOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class UnidirectionalSequenceLSTMOptionsT(object):
+
+    # UnidirectionalSequenceLSTMOptionsT
     def __init__(self):
-        pass
+        self.fusedActivationFunction = 0 # type: int
+        self.cellClip = 0.0 # type: float
+        self.projClip = 0.0 # type: float
+        self.timeMajor = False # type: bool
+        self.asymmetricQuantizeInputs = False # type: bool
+        self.diagonalRecurrentTensors = False # type: bool
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        densifyOptions = DensifyOptions()
-        densifyOptions.Init(buf, pos)
-        return cls.InitFromObj(densifyOptions)
+        unidirectionalSequenceLstmoptions = UnidirectionalSequenceLSTMOptions()
+        unidirectionalSequenceLstmoptions.Init(buf, pos)
+        return cls.InitFromObj(unidirectionalSequenceLstmoptions)
 
     @classmethod
-    def InitFromObj(cls, densifyOptions):
-        x = DensifyOptionsT()
-        x._UnPack(densifyOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, unidirectionalSequenceLstmoptions):
+        x = UnidirectionalSequenceLSTMOptionsT()
+        x._UnPack(unidirectionalSequenceLstmoptions)
        return x
 
-    # DensifyOptionsT
-    def _UnPack(self, densifyOptions):
-        if densifyOptions is None:
+    # UnidirectionalSequenceLSTMOptionsT
+    def _UnPack(self, unidirectionalSequenceLstmoptions):
+        if unidirectionalSequenceLstmoptions is None:
             return
+        self.fusedActivationFunction = unidirectionalSequenceLstmoptions.FusedActivationFunction()
+        self.cellClip = unidirectionalSequenceLstmoptions.CellClip()
+        self.projClip = unidirectionalSequenceLstmoptions.ProjClip()
+        self.timeMajor = unidirectionalSequenceLstmoptions.TimeMajor()
+        self.asymmetricQuantizeInputs = unidirectionalSequenceLstmoptions.AsymmetricQuantizeInputs()
+        self.diagonalRecurrentTensors = unidirectionalSequenceLstmoptions.DiagonalRecurrentTensors()
 
-    # DensifyOptionsT
+    # UnidirectionalSequenceLSTMOptionsT
     def Pack(self, builder):
-        DensifyOptionsStart(builder)
-        densifyOptions = DensifyOptionsEnd(builder)
-        return densifyOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        UnidirectionalSequenceLSTMOptionsStart(builder)
+        UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        UnidirectionalSequenceLSTMOptionsAddCellClip(builder, self.cellClip)
+        UnidirectionalSequenceLSTMOptionsAddProjClip(builder, self.projClip)
+        UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, self.timeMajor)
+        UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs)
+        UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, self.diagonalRecurrentTensors)
+        unidirectionalSequenceLstmoptions = UnidirectionalSequenceLSTMOptionsEnd(builder)
+        return unidirectionalSequenceLstmoptions
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class DepthToSpaceOptions(object):
+class BidirectionalSequenceLSTMOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = DepthToSpaceOptions()
+        x = BidirectionalSequenceLSTMOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsDepthToSpaceOptions(cls, buf, offset=0):
+    def GetRootAsBidirectionalSequenceLSTMOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def DepthToSpaceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def BidirectionalSequenceLSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # DepthToSpaceOptions
+    # BidirectionalSequenceLSTMOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-    # DepthToSpaceOptions
-    def BlockSize(self):
+    # BidirectionalSequenceLSTMOptions
+    def FusedActivationFunction(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
         if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
         return 0
 
-def DepthToSpaceOptionsStart(builder): builder.StartObject(1)
-def Start(builder):
-    return DepthToSpaceOptionsStart(builder)
-def DepthToSpaceOptionsAddBlockSize(builder, blockSize): builder.PrependInt32Slot(0, blockSize, 0)
-def AddBlockSize(builder, blockSize):
-    return DepthToSpaceOptionsAddBlockSize(builder, blockSize)
-def DepthToSpaceOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return DepthToSpaceOptionsEnd(builder)
+    # BidirectionalSequenceLSTMOptions
+    def CellClip(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
 
-class DepthToSpaceOptionsT(object):
+    # BidirectionalSequenceLSTMOptions
+    def ProjClip(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0
 
-    # DepthToSpaceOptionsT
+    # BidirectionalSequenceLSTMOptions
+    def MergeOutputs(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+    # BidirectionalSequenceLSTMOptions
+    def TimeMajor(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return True
+
+    # BidirectionalSequenceLSTMOptions
+    def AsymmetricQuantizeInputs(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+def BidirectionalSequenceLSTMOptionsStart(builder):
+    builder.StartObject(6)
+
+def BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(0, fusedActivationFunction, 0)
+
+def BidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip):
+    builder.PrependFloat32Slot(1, cellClip, 0.0)
+
+def BidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip):
+    builder.PrependFloat32Slot(2, projClip, 0.0)
+
+def BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, mergeOutputs):
+    builder.PrependBoolSlot(3, mergeOutputs, 0)
+
+def BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor):
+    builder.PrependBoolSlot(4, timeMajor, 1)
+
+def BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs):
+    builder.PrependBoolSlot(5, asymmetricQuantizeInputs, 0)
+
+def BidirectionalSequenceLSTMOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class BidirectionalSequenceLSTMOptionsT(object):
+
+    # BidirectionalSequenceLSTMOptionsT
     def __init__(self):
-        self.blockSize = 0 # type: int
+        self.fusedActivationFunction = 0 # type: int
+        self.cellClip = 0.0 # type: float
+        self.projClip = 0.0 # type: float
+        self.mergeOutputs = False # type: bool
+        self.timeMajor = True # type: bool
+        self.asymmetricQuantizeInputs = False # type: bool
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        depthToSpaceOptions = DepthToSpaceOptions()
-        depthToSpaceOptions.Init(buf, pos)
-        return cls.InitFromObj(depthToSpaceOptions)
+        bidirectionalSequenceLstmoptions = BidirectionalSequenceLSTMOptions()
+        bidirectionalSequenceLstmoptions.Init(buf, pos)
+        return cls.InitFromObj(bidirectionalSequenceLstmoptions)
 
     @classmethod
-    def InitFromObj(cls, depthToSpaceOptions):
-        x = DepthToSpaceOptionsT()
-        x._UnPack(depthToSpaceOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, bidirectionalSequenceLstmoptions):
+        x = BidirectionalSequenceLSTMOptionsT()
+        x._UnPack(bidirectionalSequenceLstmoptions)
         return x
 
-    # DepthToSpaceOptionsT
-    def _UnPack(self, depthToSpaceOptions):
-        if depthToSpaceOptions is None:
+    # BidirectionalSequenceLSTMOptionsT
+    def _UnPack(self, bidirectionalSequenceLstmoptions):
+        if bidirectionalSequenceLstmoptions is None:
             return
-        self.blockSize = depthToSpaceOptions.BlockSize()
+        self.fusedActivationFunction = bidirectionalSequenceLstmoptions.FusedActivationFunction()
+        self.cellClip = bidirectionalSequenceLstmoptions.CellClip()
+        self.projClip = bidirectionalSequenceLstmoptions.ProjClip()
+        self.mergeOutputs = bidirectionalSequenceLstmoptions.MergeOutputs()
+        self.timeMajor = bidirectionalSequenceLstmoptions.TimeMajor()
+        self.asymmetricQuantizeInputs = bidirectionalSequenceLstmoptions.AsymmetricQuantizeInputs()
 
-    # DepthToSpaceOptionsT
+    # BidirectionalSequenceLSTMOptionsT
     def Pack(self, builder):
-        DepthToSpaceOptionsStart(builder)
-        DepthToSpaceOptionsAddBlockSize(builder, self.blockSize)
-        depthToSpaceOptions = DepthToSpaceOptionsEnd(builder)
-        return depthToSpaceOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        BidirectionalSequenceLSTMOptionsStart(builder)
+        BidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        BidirectionalSequenceLSTMOptionsAddCellClip(builder, self.cellClip)
+        BidirectionalSequenceLSTMOptionsAddProjClip(builder, self.projClip)
+        BidirectionalSequenceLSTMOptionsAddMergeOutputs(builder, self.mergeOutputs)
+        BidirectionalSequenceLSTMOptionsAddTimeMajor(builder, self.timeMajor)
+        BidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs)
+        bidirectionalSequenceLstmoptions = BidirectionalSequenceLSTMOptionsEnd(builder)
+        return bidirectionalSequenceLstmoptions
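[Illustrative sketch, not part of the generated diff.] Note the asymmetric defaults: UnidirectionalSequenceLSTMOptions.TimeMajor() falls back to False, while the bidirectional variant falls back to True (PrependBoolSlot(4, timeMajor, 1)), so batch-major layout must be serialized explicitly there. Under the same assumed module name:

import flatbuffers
import schema_py_generated as schema  # assumed module name

opts = schema.BidirectionalSequenceLSTMOptionsT()
opts.timeMajor = False  # non-default, so this field is actually written
builder = flatbuffers.Builder(0)
builder.Finish(opts.Pack(builder))
decoded = schema.BidirectionalSequenceLSTMOptionsT.InitFromPackedBuf(builder.Output())
assert decoded.timeMajor is False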
-from flatbuffers.compat import import_numpy
-np = import_numpy()
-class DepthwiseConv2DOptions(object):
+class ResizeBilinearOptions(object):
     __slots__ = ['_tab']
 
     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = DepthwiseConv2DOptions()
+        x = ResizeBilinearOptions()
         x.Init(buf, n + offset)
         return x
 
     @classmethod
-    def GetRootAsDepthwiseConv2DOptions(cls, buf, offset=0):
+    def GetRootAsResizeBilinearOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def DepthwiseConv2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def ResizeBilinearOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
 
-    # DepthwiseConv2DOptions
+    # ResizeBilinearOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)
 
-    # DepthwiseConv2DOptions
-    def Padding(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
-
-    # DepthwiseConv2DOptions
-    def StrideW(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
-
-    # DepthwiseConv2DOptions
-    def StrideH(self):
+    # ResizeBilinearOptions
+    def AlignCorners(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
         if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
 
-    # DepthwiseConv2DOptions
-    def DepthMultiplier(self):
+    # ResizeBilinearOptions
+    def HalfPixelCenters(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
         if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
 
-    # DepthwiseConv2DOptions
-    def FusedActivationFunction(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
+def ResizeBilinearOptionsStart(builder):
+    builder.StartObject(4)
 
-    # DepthwiseConv2DOptions
-    def DilationWFactor(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
+def ResizeBilinearOptionsAddAlignCorners(builder, alignCorners):
+    builder.PrependBoolSlot(2, alignCorners, 0)
 
-    # DepthwiseConv2DOptions
-    def DilationHFactor(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
+def ResizeBilinearOptionsAddHalfPixelCenters(builder, halfPixelCenters):
+    builder.PrependBoolSlot(3, halfPixelCenters, 0)
 
-def DepthwiseConv2DOptionsStart(builder): builder.StartObject(7)
-def Start(builder):
-    return DepthwiseConv2DOptionsStart(builder)
-def DepthwiseConv2DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0)
-def AddPadding(builder, padding):
-    return DepthwiseConv2DOptionsAddPadding(builder, padding)
-def DepthwiseConv2DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0)
-def AddStrideW(builder, strideW):
-    return DepthwiseConv2DOptionsAddStrideW(builder, strideW)
-def DepthwiseConv2DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0)
-def AddStrideH(builder, strideH):
-    return DepthwiseConv2DOptionsAddStrideH(builder, strideH)
-def DepthwiseConv2DOptionsAddDepthMultiplier(builder, depthMultiplier): builder.PrependInt32Slot(3, depthMultiplier, 0)
-def AddDepthMultiplier(builder, depthMultiplier):
-    return DepthwiseConv2DOptionsAddDepthMultiplier(builder, depthMultiplier)
-def DepthwiseConv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(4, fusedActivationFunction, 0)
-def AddFusedActivationFunction(builder, fusedActivationFunction):
-    return DepthwiseConv2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
-def DepthwiseConv2DOptionsAddDilationWFactor(builder, dilationWFactor): builder.PrependInt32Slot(5, dilationWFactor, 1)
-def AddDilationWFactor(builder, dilationWFactor):
-    return DepthwiseConv2DOptionsAddDilationWFactor(builder, dilationWFactor)
-def DepthwiseConv2DOptionsAddDilationHFactor(builder, dilationHFactor): builder.PrependInt32Slot(6, dilationHFactor, 1)
-def AddDilationHFactor(builder, dilationHFactor):
-    return DepthwiseConv2DOptionsAddDilationHFactor(builder, dilationHFactor)
-def DepthwiseConv2DOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return DepthwiseConv2DOptionsEnd(builder)
+def ResizeBilinearOptionsEnd(builder):
+    return builder.EndObject()
 
-class DepthwiseConv2DOptionsT(object):
-
-    # DepthwiseConv2DOptionsT
+
+class ResizeBilinearOptionsT(object):
+
+    # ResizeBilinearOptionsT
     def __init__(self):
-        self.padding = 0 # type: int
-        self.strideW = 0 # type: int
-        self.strideH = 0 # type: int
-        self.depthMultiplier = 0 # type: int
-        self.fusedActivationFunction = 0 # type: int
-        self.dilationWFactor = 1 # type: int
-        self.dilationHFactor = 1 # type: int
+        self.alignCorners = False # type: bool
+        self.halfPixelCenters = False # type: bool
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        depthwiseConv2doptions = DepthwiseConv2DOptions()
-        depthwiseConv2doptions.Init(buf, pos)
-        return cls.InitFromObj(depthwiseConv2doptions)
+        resizeBilinearOptions = ResizeBilinearOptions()
+        resizeBilinearOptions.Init(buf, pos)
+        return cls.InitFromObj(resizeBilinearOptions)
 
     @classmethod
-    def InitFromObj(cls, depthwiseConv2doptions):
-        x = DepthwiseConv2DOptionsT()
-        x._UnPack(depthwiseConv2doptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, resizeBilinearOptions):
+        x = ResizeBilinearOptionsT()
+        x._UnPack(resizeBilinearOptions)
         return x
 
-    # DepthwiseConv2DOptionsT
-    def _UnPack(self, depthwiseConv2doptions):
-        if depthwiseConv2doptions is None:
+    # ResizeBilinearOptionsT
+    def _UnPack(self, resizeBilinearOptions):
+        if resizeBilinearOptions is None:
             return
-        self.padding = depthwiseConv2doptions.Padding()
-        self.strideW = depthwiseConv2doptions.StrideW()
-        self.strideH = depthwiseConv2doptions.StrideH()
-        self.depthMultiplier = depthwiseConv2doptions.DepthMultiplier()
-        self.fusedActivationFunction = depthwiseConv2doptions.FusedActivationFunction()
-        self.dilationWFactor = depthwiseConv2doptions.DilationWFactor()
-        self.dilationHFactor = depthwiseConv2doptions.DilationHFactor()
+        self.alignCorners = resizeBilinearOptions.AlignCorners()
+        self.halfPixelCenters = resizeBilinearOptions.HalfPixelCenters()
 
-    # DepthwiseConv2DOptionsT
+    # ResizeBilinearOptionsT
     def Pack(self, builder):
-        DepthwiseConv2DOptionsStart(builder)
-        DepthwiseConv2DOptionsAddPadding(builder, self.padding)
-        DepthwiseConv2DOptionsAddStrideW(builder, self.strideW)
-        DepthwiseConv2DOptionsAddStrideH(builder, self.strideH)
-        DepthwiseConv2DOptionsAddDepthMultiplier(builder, self.depthMultiplier)
-        DepthwiseConv2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
-        DepthwiseConv2DOptionsAddDilationWFactor(builder, self.dilationWFactor)
-        DepthwiseConv2DOptionsAddDilationHFactor(builder, self.dilationHFactor)
-        depthwiseConv2doptions = DepthwiseConv2DOptionsEnd(builder)
-        return depthwiseConv2doptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        ResizeBilinearOptionsStart(builder)
+        ResizeBilinearOptionsAddAlignCorners(builder, self.alignCorners)
+        ResizeBilinearOptionsAddHalfPixelCenters(builder, self.halfPixelCenters)
+        resizeBilinearOptions = ResizeBilinearOptionsEnd(builder)
+        return resizeBilinearOptions
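[Illustrative sketch, not part of the generated diff.] ResizeBilinearOptions starts a 4-slot table but only ever writes slots 2 and 3 (alignCorners at vtable offset 8, halfPixelCenters at 10); the first two slots appear to be retired schema fields kept reserved so field ids stay binary-compatible. Same assumed module name:

import flatbuffers
import schema_py_generated as schema  # assumed module name

builder = flatbuffers.Builder(0)
schema.ResizeBilinearOptionsStart(builder)                      # reserves 4 slots
schema.ResizeBilinearOptionsAddAlignCorners(builder, True)      # slot 2
schema.ResizeBilinearOptionsAddHalfPixelCenters(builder, True)  # slot 3
builder.Finish(schema.ResizeBilinearOptionsEnd(builder))
opts = schema.ResizeBilinearOptions.GetRootAs(builder.Output(), 0)
assert opts.AlignCorners() and opts.HalfPixelCenters()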
x._UnPack(dequantizeOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, resizeNearestNeighborOptions):
+        x = ResizeNearestNeighborOptionsT()
+        x._UnPack(resizeNearestNeighborOptions)
         return x

-    # DequantizeOptionsT
-    def _UnPack(self, dequantizeOptions):
-        if dequantizeOptions is None:
+    # ResizeNearestNeighborOptionsT
+    def _UnPack(self, resizeNearestNeighborOptions):
+        if resizeNearestNeighborOptions is None:
             return
+        self.alignCorners = resizeNearestNeighborOptions.AlignCorners()
+        self.halfPixelCenters = resizeNearestNeighborOptions.HalfPixelCenters()

-    # DequantizeOptionsT
+    # ResizeNearestNeighborOptionsT
     def Pack(self, builder):
-        DequantizeOptionsStart(builder)
-        dequantizeOptions = DequantizeOptionsEnd(builder)
-        return dequantizeOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        ResizeNearestNeighborOptionsStart(builder)
+        ResizeNearestNeighborOptionsAddAlignCorners(builder, self.alignCorners)
+        ResizeNearestNeighborOptionsAddHalfPixelCenters(builder, self.halfPixelCenters)
+        resizeNearestNeighborOptions = ResizeNearestNeighborOptionsEnd(builder)
+        return resizeNearestNeighborOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class DimensionMetadata(object):
+class CallOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = DimensionMetadata()
+        x = CallOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsDimensionMetadata(cls, buf, offset=0):
+    def GetRootAsCallOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def DimensionMetadataBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def CallOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # DimensionMetadata
+    # CallOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-    # DimensionMetadata
-    def Format(self):
+    # CallOptions
+    def Subgraph(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
         if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
-
-    # DimensionMetadata
-    def DenseSize(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
-
-    # DimensionMetadata
-    def ArraySegmentsType(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
-        return 0
-
-    # DimensionMetadata
-    def ArraySegments(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            from flatbuffers.table import Table
-            obj = Table(bytearray(), 0)
-            self._tab.Union(obj, o)
-            return obj
-        return None
-
-    # DimensionMetadata
-    def ArrayIndicesType(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
+            return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
         return 0

-    # DimensionMetadata
-    def ArrayIndices(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            from flatbuffers.table import Table
-            obj = Table(bytearray(), 0)
-            self._tab.Union(obj, o)
-            return obj
-        return None
+def CallOptionsStart(builder):
+    builder.StartObject(1)

-def DimensionMetadataStart(builder): builder.StartObject(6)
-def Start(builder):
-    return DimensionMetadataStart(builder)
-def DimensionMetadataAddFormat(builder, format): builder.PrependInt8Slot(0, format, 0)
-def AddFormat(builder, format):
-    return DimensionMetadataAddFormat(builder, format)
-def DimensionMetadataAddDenseSize(builder, denseSize): builder.PrependInt32Slot(1, denseSize, 0)
-def AddDenseSize(builder, denseSize):
-    return DimensionMetadataAddDenseSize(builder, denseSize)
-def DimensionMetadataAddArraySegmentsType(builder, arraySegmentsType): builder.PrependUint8Slot(2, arraySegmentsType, 0)
-def AddArraySegmentsType(builder, arraySegmentsType):
-    return DimensionMetadataAddArraySegmentsType(builder, arraySegmentsType)
-def DimensionMetadataAddArraySegments(builder, arraySegments): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(arraySegments), 0)
-def AddArraySegments(builder, arraySegments):
-    return DimensionMetadataAddArraySegments(builder, arraySegments)
-def DimensionMetadataAddArrayIndicesType(builder, arrayIndicesType): builder.PrependUint8Slot(4, arrayIndicesType, 0)
-def AddArrayIndicesType(builder, arrayIndicesType):
-    return DimensionMetadataAddArrayIndicesType(builder, arrayIndicesType)
-def DimensionMetadataAddArrayIndices(builder, arrayIndices): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(arrayIndices), 0)
-def AddArrayIndices(builder, arrayIndices):
-    return DimensionMetadataAddArrayIndices(builder, arrayIndices)
-def DimensionMetadataEnd(builder): return builder.EndObject()
-def End(builder):
-    return DimensionMetadataEnd(builder)
-try:
-    from typing import Union
-except:
-    pass
+def CallOptionsAddSubgraph(builder, subgraph):
+    builder.PrependUint32Slot(0, subgraph, 0)

-class DimensionMetadataT(object):
+def CallOptionsEnd(builder):
+    return builder.EndObject()

-    # DimensionMetadataT
+
+
+class CallOptionsT(object):
+
+    # CallOptionsT
     def __init__(self):
-        self.format = 0  # type: int
-        self.denseSize = 0  # type: int
-        self.arraySegmentsType = 0  # type: int
-        self.arraySegments = None  # type: Union[None, Int32VectorT, Uint16VectorT, Uint8VectorT]
-        self.arrayIndicesType = 0  # type: int
-        self.arrayIndices = None  # type: Union[None, Int32VectorT, Uint16VectorT, Uint8VectorT]
+        self.subgraph = 0  # type: int

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        dimensionMetadata = DimensionMetadata()
-        dimensionMetadata.Init(buf, pos)
-        return cls.InitFromObj(dimensionMetadata)
+        callOptions = CallOptions()
+        callOptions.Init(buf, pos)
+        return cls.InitFromObj(callOptions)

     @classmethod
-    def InitFromObj(cls, dimensionMetadata):
-        x = DimensionMetadataT()
-        x._UnPack(dimensionMetadata)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, callOptions):
+        x = CallOptionsT()
+        x._UnPack(callOptions)
         return x

-    # DimensionMetadataT
-    def _UnPack(self, dimensionMetadata):
-        if dimensionMetadata is None:
+    # CallOptionsT
+    def _UnPack(self, callOptions):
+        if callOptions is None:
             return
-        self.format = dimensionMetadata.Format()
-        self.denseSize = dimensionMetadata.DenseSize()
-        self.arraySegmentsType = dimensionMetadata.ArraySegmentsType()
-        self.arraySegments = SparseIndexVectorCreator(self.arraySegmentsType, dimensionMetadata.ArraySegments())
-        self.arrayIndicesType = dimensionMetadata.ArrayIndicesType()
-        self.arrayIndices = SparseIndexVectorCreator(self.arrayIndicesType, dimensionMetadata.ArrayIndices())
+        self.subgraph = callOptions.Subgraph()

-    # DimensionMetadataT
+    # CallOptionsT
     def Pack(self, builder):
-        if self.arraySegments is not None:
-            arraySegments = self.arraySegments.Pack(builder)
-        if self.arrayIndices is not None:
-            arrayIndices = self.arrayIndices.Pack(builder)
-        DimensionMetadataStart(builder)
-        DimensionMetadataAddFormat(builder, self.format)
-        DimensionMetadataAddDenseSize(builder, self.denseSize)
-        DimensionMetadataAddArraySegmentsType(builder, self.arraySegmentsType)
-        if self.arraySegments is not None:
-            DimensionMetadataAddArraySegments(builder, arraySegments)
-        DimensionMetadataAddArrayIndicesType(builder, self.arrayIndicesType)
-        if self.arrayIndices is not None:
-            DimensionMetadataAddArrayIndices(builder, arrayIndices)
-        dimensionMetadata = DimensionMetadataEnd(builder)
-        return dimensionMetadata
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
-
-class DimensionType(object):
-    DENSE = 0
-    SPARSE_CSR = 1
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        CallOptionsStart(builder)
+        CallOptionsAddSubgraph(builder, self.subgraph)
+        callOptions = CallOptionsEnd(builder)
+        return callOptions
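The regenerated object API adds an InitFromPackedBuf classmethod to every *T class, so a finished buffer can be unpacked without reading the root uoffset by hand. A minimal round trip with the CallOptions table might look like the sketch below; the import path is illustrative, not the module's confirmed layout.

import flatbuffers

from tflite import CallOptionsT  # illustrative import path

# Pack an object-API instance into a finished buffer.
opts = CallOptionsT()
opts.subgraph = 3
builder = flatbuffers.Builder(0)
builder.Finish(opts.Pack(builder))

# InitFromPackedBuf reads the root uoffset itself, so it takes the raw
# buffer rather than an already-resolved table position.
decoded = CallOptionsT.InitFromPackedBuf(builder.Output(), 0)
assert decoded.subgraph == 3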
-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class DivOptions(object):
+class PadOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = DivOptions()
+        x = PadOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsDivOptions(cls, buf, offset=0):
+    def GetRootAsPadOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def DivOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def PadOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # DivOptions
+    # PadOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-    # DivOptions
-    def FusedActivationFunction(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
+def PadOptionsStart(builder):
+    builder.StartObject(0)

-def DivOptionsStart(builder): builder.StartObject(1)
-def Start(builder):
-    return DivOptionsStart(builder)
-def DivOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0)
-def AddFusedActivationFunction(builder, fusedActivationFunction):
-    return DivOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
-def DivOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return DivOptionsEnd(builder)
+def PadOptionsEnd(builder):
+    return builder.EndObject()

-class DivOptionsT(object):

-    # DivOptionsT
+
+
+class PadOptionsT(object):
+
+    # PadOptionsT
     def __init__(self):
-        self.fusedActivationFunction = 0  # type: int
+        pass

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        divOptions = DivOptions()
-        divOptions.Init(buf, pos)
-        return cls.InitFromObj(divOptions)
+        padOptions = PadOptions()
+        padOptions.Init(buf, pos)
+        return cls.InitFromObj(padOptions)

     @classmethod
-    def InitFromObj(cls, divOptions):
-        x = DivOptionsT()
-        x._UnPack(divOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, padOptions):
+        x = PadOptionsT()
+        x._UnPack(padOptions)
         return x

-    # DivOptionsT
-    def _UnPack(self, divOptions):
-        if divOptions is None:
+    # PadOptionsT
+    def _UnPack(self, padOptions):
+        if padOptions is None:
             return
-        self.fusedActivationFunction = divOptions.FusedActivationFunction()

-    # DivOptionsT
+    # PadOptionsT
     def Pack(self, builder):
-        DivOptionsStart(builder)
-        DivOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
-        divOptions = DivOptionsEnd(builder)
-        return divOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        PadOptionsStart(builder)
+        padOptions = PadOptionsEnd(builder)
+        return padOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class DynamicUpdateSliceOptions(object):
+class PadV2Options(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = DynamicUpdateSliceOptions()
+        x = PadV2Options()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsDynamicUpdateSliceOptions(cls, buf, offset=0):
+    def GetRootAsPadV2Options(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def DynamicUpdateSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def PadV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # DynamicUpdateSliceOptions
+    # PadV2Options
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def DynamicUpdateSliceOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return DynamicUpdateSliceOptionsStart(builder)
-def DynamicUpdateSliceOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return DynamicUpdateSliceOptionsEnd(builder)
+def PadV2OptionsStart(builder):
+    builder.StartObject(0)

-class DynamicUpdateSliceOptionsT(object):
+def PadV2OptionsEnd(builder):
+    return builder.EndObject()

-    # DynamicUpdateSliceOptionsT
+
+
+class PadV2OptionsT(object):
+
+    # PadV2OptionsT
     def __init__(self):
         pass

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        dynamicUpdateSliceOptions = DynamicUpdateSliceOptions()
-        dynamicUpdateSliceOptions.Init(buf, pos)
-        return cls.InitFromObj(dynamicUpdateSliceOptions)
+        padV2Options = PadV2Options()
+        padV2Options.Init(buf, pos)
+        return cls.InitFromObj(padV2Options)

     @classmethod
-    def InitFromObj(cls, dynamicUpdateSliceOptions):
-        x = DynamicUpdateSliceOptionsT()
-        x._UnPack(dynamicUpdateSliceOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, padV2Options):
+        x = PadV2OptionsT()
+        x._UnPack(padV2Options)
         return x

-    # DynamicUpdateSliceOptionsT
-    def _UnPack(self, dynamicUpdateSliceOptions):
-        if dynamicUpdateSliceOptions is None:
+    # PadV2OptionsT
+    def _UnPack(self, padV2Options):
+        if padV2Options is None:
             return

-    # DynamicUpdateSliceOptionsT
+    # PadV2OptionsT
     def Pack(self, builder):
-        DynamicUpdateSliceOptionsStart(builder)
-        dynamicUpdateSliceOptions = DynamicUpdateSliceOptionsEnd(builder)
-        return dynamicUpdateSliceOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        PadV2OptionsStart(builder)
+        padV2Options = PadV2OptionsEnd(builder)
+        return padV2Options

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class EmbeddingLookupSparseOptions(object):
+class ReshapeOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = EmbeddingLookupSparseOptions()
+        x = ReshapeOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsEmbeddingLookupSparseOptions(cls, buf, offset=0):
+    def GetRootAsReshapeOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def EmbeddingLookupSparseOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def ReshapeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # EmbeddingLookupSparseOptions
+    # ReshapeOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-    # EmbeddingLookupSparseOptions
-    def Combiner(self):
+    # ReshapeOptions
+    def NewShape(self, j):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
         if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
+        return 0
+
+    # ReshapeOptions
+    def NewShapeAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
         return 0

-def EmbeddingLookupSparseOptionsStart(builder): builder.StartObject(1)
-def Start(builder):
-    return EmbeddingLookupSparseOptionsStart(builder)
-def EmbeddingLookupSparseOptionsAddCombiner(builder, combiner): builder.PrependInt8Slot(0, combiner, 0)
-def AddCombiner(builder, combiner):
-    return EmbeddingLookupSparseOptionsAddCombiner(builder, combiner)
-def EmbeddingLookupSparseOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return EmbeddingLookupSparseOptionsEnd(builder)
+    # ReshapeOptions
+    def NewShapeLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0

-class EmbeddingLookupSparseOptionsT(object):
+    # ReshapeOptions
+    def NewShapeIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0

-    # EmbeddingLookupSparseOptionsT
+def ReshapeOptionsStart(builder):
+    builder.StartObject(1)
+
+def ReshapeOptionsAddNewShape(builder, newShape):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(newShape), 0)
+
+def ReshapeOptionsStartNewShapeVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
+
+def ReshapeOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class ReshapeOptionsT(object):
+
+    # ReshapeOptionsT
     def __init__(self):
-        self.combiner = 0  # type: int
+        self.newShape = None  # type: List[int]

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        embeddingLookupSparseOptions = EmbeddingLookupSparseOptions()
-        embeddingLookupSparseOptions.Init(buf, pos)
-        return cls.InitFromObj(embeddingLookupSparseOptions)
+        reshapeOptions = ReshapeOptions()
+        reshapeOptions.Init(buf, pos)
+        return cls.InitFromObj(reshapeOptions)

     @classmethod
-    def InitFromObj(cls, embeddingLookupSparseOptions):
-        x = EmbeddingLookupSparseOptionsT()
-        x._UnPack(embeddingLookupSparseOptions)
-        return x
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)

-    # EmbeddingLookupSparseOptionsT
-    def _UnPack(self, embeddingLookupSparseOptions):
-        if embeddingLookupSparseOptions is None:
+    @classmethod
+    def InitFromObj(cls, reshapeOptions):
+        x = ReshapeOptionsT()
+        x._UnPack(reshapeOptions)
+        return x
+
+    # ReshapeOptionsT
+    def _UnPack(self, reshapeOptions):
+        if reshapeOptions is None:
             return
-        self.combiner = embeddingLookupSparseOptions.Combiner()
+        if not reshapeOptions.NewShapeIsNone():
+            if np is None:
+                self.newShape = []
+                for i in range(reshapeOptions.NewShapeLength()):
+                    self.newShape.append(reshapeOptions.NewShape(i))
+            else:
+                self.newShape = reshapeOptions.NewShapeAsNumpy()

-    # EmbeddingLookupSparseOptionsT
+    # ReshapeOptionsT
     def Pack(self, builder):
-        EmbeddingLookupSparseOptionsStart(builder)
-        EmbeddingLookupSparseOptionsAddCombiner(builder, self.combiner)
-        embeddingLookupSparseOptions = EmbeddingLookupSparseOptionsEnd(builder)
-        return embeddingLookupSparseOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        if self.newShape is not None:
+            if np is not None and type(self.newShape) is np.ndarray:
+                newShape = builder.CreateNumpyVector(self.newShape)
+            else:
+                ReshapeOptionsStartNewShapeVector(builder, len(self.newShape))
+                for i in reversed(range(len(self.newShape))):
+                    builder.PrependInt32(self.newShape[i])
+                newShape = builder.EndVector()
+        ReshapeOptionsStart(builder)
+        if self.newShape is not None:
+            ReshapeOptionsAddNewShape(builder, newShape)
+        reshapeOptions = ReshapeOptionsEnd(builder)
+        return reshapeOptions
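For vector-valued fields such as ReshapeOptions.newShape, the generated _UnPack stores a numpy array when numpy is available and a plain list otherwise, and Pack accepts either, routing ndarrays through CreateNumpyVector and lists through an element-wise PrependInt32 loop. A sketch of the round trip, again with an illustrative import path:

import flatbuffers
import numpy as np

from tflite import ReshapeOptionsT  # illustrative import path

opts = ReshapeOptionsT()
# Either a plain list or an int32 ndarray is accepted by Pack().
opts.newShape = np.array([1, 224, 224, 3], dtype=np.int32)

builder = flatbuffers.Builder(0)
builder.Finish(opts.Pack(builder))

decoded = ReshapeOptionsT.InitFromPackedBuf(builder.Output())
assert list(decoded.newShape) == [1, 224, 224, 3]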
-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class EqualOptions(object):
+class SpaceToBatchNDOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = EqualOptions()
+        x = SpaceToBatchNDOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsEqualOptions(cls, buf, offset=0):
+    def GetRootAsSpaceToBatchNDOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def EqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def SpaceToBatchNDOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # EqualOptions
+    # SpaceToBatchNDOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def EqualOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return EqualOptionsStart(builder)
-def EqualOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return EqualOptionsEnd(builder)
+def SpaceToBatchNDOptionsStart(builder):
+    builder.StartObject(0)

-class EqualOptionsT(object):
+def SpaceToBatchNDOptionsEnd(builder):
+    return builder.EndObject()

-    # EqualOptionsT
+
+
+class SpaceToBatchNDOptionsT(object):
+
+    # SpaceToBatchNDOptionsT
     def __init__(self):
         pass

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        equalOptions = EqualOptions()
-        equalOptions.Init(buf, pos)
-        return cls.InitFromObj(equalOptions)
+        spaceToBatchNdoptions = SpaceToBatchNDOptions()
+        spaceToBatchNdoptions.Init(buf, pos)
+        return cls.InitFromObj(spaceToBatchNdoptions)

     @classmethod
-    def InitFromObj(cls, equalOptions):
-        x = EqualOptionsT()
-        x._UnPack(equalOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, spaceToBatchNdoptions):
+        x = SpaceToBatchNDOptionsT()
+        x._UnPack(spaceToBatchNdoptions)
         return x

-    # EqualOptionsT
-    def _UnPack(self, equalOptions):
-        if equalOptions is None:
+    # SpaceToBatchNDOptionsT
+    def _UnPack(self, spaceToBatchNdoptions):
+        if spaceToBatchNdoptions is None:
             return

-    # EqualOptionsT
+    # SpaceToBatchNDOptionsT
     def Pack(self, builder):
-        EqualOptionsStart(builder)
-        equalOptions = EqualOptionsEnd(builder)
-        return equalOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        SpaceToBatchNDOptionsStart(builder)
+        spaceToBatchNdoptions = SpaceToBatchNDOptionsEnd(builder)
+        return spaceToBatchNdoptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class ExpOptions(object):
+class BatchToSpaceNDOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = ExpOptions()
+        x = BatchToSpaceNDOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsExpOptions(cls, buf, offset=0):
+    def GetRootAsBatchToSpaceNDOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def ExpOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def BatchToSpaceNDOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # ExpOptions
+    # BatchToSpaceNDOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def ExpOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return ExpOptionsStart(builder)
-def ExpOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return ExpOptionsEnd(builder)
+def BatchToSpaceNDOptionsStart(builder):
+    builder.StartObject(0)

-class ExpOptionsT(object):
+def BatchToSpaceNDOptionsEnd(builder):
+    return builder.EndObject()

-    # ExpOptionsT
+
+
+class BatchToSpaceNDOptionsT(object):
+
+    # BatchToSpaceNDOptionsT
     def __init__(self):
         pass

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        expOptions = ExpOptions()
-        expOptions.Init(buf, pos)
-        return cls.InitFromObj(expOptions)
+        batchToSpaceNdoptions = BatchToSpaceNDOptions()
+        batchToSpaceNdoptions.Init(buf, pos)
+        return cls.InitFromObj(batchToSpaceNdoptions)

     @classmethod
-    def InitFromObj(cls, expOptions):
-        x = ExpOptionsT()
-        x._UnPack(expOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, batchToSpaceNdoptions):
+        x = BatchToSpaceNDOptionsT()
+        x._UnPack(batchToSpaceNdoptions)
         return x

-    # ExpOptionsT
-    def _UnPack(self, expOptions):
-        if expOptions is None:
+    # BatchToSpaceNDOptionsT
+    def _UnPack(self, batchToSpaceNdoptions):
+        if batchToSpaceNdoptions is None:
             return

-    # ExpOptionsT
+    # BatchToSpaceNDOptionsT
     def Pack(self, builder):
-        ExpOptionsStart(builder)
-        expOptions = ExpOptionsEnd(builder)
-        return expOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        BatchToSpaceNDOptionsStart(builder)
+        batchToSpaceNdoptions = BatchToSpaceNDOptionsEnd(builder)
+        return batchToSpaceNdoptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class ExpandDimsOptions(object):
+class SkipGramOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = ExpandDimsOptions()
+        x = SkipGramOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsExpandDimsOptions(cls, buf, offset=0):
+    def GetRootAsSkipGramOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def ExpandDimsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def SkipGramOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # ExpandDimsOptions
+    # SkipGramOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def ExpandDimsOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return ExpandDimsOptionsStart(builder)
-def ExpandDimsOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return ExpandDimsOptionsEnd(builder)
+    # SkipGramOptions
+    def NgramSize(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0

-class ExpandDimsOptionsT(object):
+    # SkipGramOptions
+    def MaxSkipSize(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0

-    # ExpandDimsOptionsT
+    # SkipGramOptions
+    def IncludeAllNgrams(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+def SkipGramOptionsStart(builder):
+    builder.StartObject(3)
+
+def SkipGramOptionsAddNgramSize(builder, ngramSize):
+    builder.PrependInt32Slot(0, ngramSize, 0)
+
+def SkipGramOptionsAddMaxSkipSize(builder, maxSkipSize):
+    builder.PrependInt32Slot(1, maxSkipSize, 0)
+
+def SkipGramOptionsAddIncludeAllNgrams(builder, includeAllNgrams):
+    builder.PrependBoolSlot(2, includeAllNgrams, 0)
+
+def SkipGramOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class SkipGramOptionsT(object):
+
+    # SkipGramOptionsT
     def __init__(self):
-        pass
+        self.ngramSize = 0  # type: int
+        self.maxSkipSize = 0  # type: int
+        self.includeAllNgrams = False  # type: bool

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        expandDimsOptions = ExpandDimsOptions()
-        expandDimsOptions.Init(buf, pos)
-        return cls.InitFromObj(expandDimsOptions)
+        skipGramOptions = SkipGramOptions()
+        skipGramOptions.Init(buf, pos)
+        return cls.InitFromObj(skipGramOptions)

     @classmethod
-    def InitFromObj(cls, expandDimsOptions):
-        x = ExpandDimsOptionsT()
-        x._UnPack(expandDimsOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, skipGramOptions):
+        x = SkipGramOptionsT()
+        x._UnPack(skipGramOptions)
         return x

-    # ExpandDimsOptionsT
-    def _UnPack(self, expandDimsOptions):
-        if expandDimsOptions is None:
+    # SkipGramOptionsT
+    def _UnPack(self, skipGramOptions):
+        if skipGramOptions is None:
             return
+        self.ngramSize = skipGramOptions.NgramSize()
+        self.maxSkipSize = skipGramOptions.MaxSkipSize()
+        self.includeAllNgrams = skipGramOptions.IncludeAllNgrams()

-    # ExpandDimsOptionsT
+    # SkipGramOptionsT
     def Pack(self, builder):
-        ExpandDimsOptionsStart(builder)
-        expandDimsOptions = ExpandDimsOptionsEnd(builder)
-        return expandDimsOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        SkipGramOptionsStart(builder)
+        SkipGramOptionsAddNgramSize(builder, self.ngramSize)
+        SkipGramOptionsAddMaxSkipSize(builder, self.maxSkipSize)
+        SkipGramOptionsAddIncludeAllNgrams(builder, self.includeAllNgrams)
+        skipGramOptions = SkipGramOptionsEnd(builder)
+        return skipGramOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class FakeQuantOptions(object):
+class SpaceToDepthOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = FakeQuantOptions()
+        x = SpaceToDepthOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsFakeQuantOptions(cls, buf, offset=0):
+    def GetRootAsSpaceToDepthOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def FakeQuantOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def SpaceToDepthOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # FakeQuantOptions
+    # SpaceToDepthOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-    # FakeQuantOptions
-    def Min(self):
+    # SpaceToDepthOptions
+    def BlockSize(self):
         o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
-        return 0.0
-
-    # FakeQuantOptions
-    def Max(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
-        return 0.0
-
-    # FakeQuantOptions
-    def NumBits(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
         if o != 0:
             return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
         return 0

-    # FakeQuantOptions
-    def NarrowRange(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
-        return False
+def SpaceToDepthOptionsStart(builder):
+    builder.StartObject(1)
+
+def SpaceToDepthOptionsAddBlockSize(builder, blockSize):
+    builder.PrependInt32Slot(0, blockSize, 0)

-def FakeQuantOptionsStart(builder): builder.StartObject(4)
-def Start(builder):
-    return FakeQuantOptionsStart(builder)
-def FakeQuantOptionsAddMin(builder, min): builder.PrependFloat32Slot(0, min, 0.0)
-def AddMin(builder, min):
-    return FakeQuantOptionsAddMin(builder, min)
-def FakeQuantOptionsAddMax(builder, max): builder.PrependFloat32Slot(1, max, 0.0)
-def AddMax(builder, max):
-    return FakeQuantOptionsAddMax(builder, max)
-def FakeQuantOptionsAddNumBits(builder, numBits): builder.PrependInt32Slot(2, numBits, 0)
-def AddNumBits(builder, numBits):
-    return FakeQuantOptionsAddNumBits(builder, numBits)
-def FakeQuantOptionsAddNarrowRange(builder, narrowRange): builder.PrependBoolSlot(3, narrowRange, 0)
-def AddNarrowRange(builder, narrowRange):
-    return FakeQuantOptionsAddNarrowRange(builder, narrowRange)
-def FakeQuantOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return FakeQuantOptionsEnd(builder)
+def SpaceToDepthOptionsEnd(builder):
+    return builder.EndObject()

-class FakeQuantOptionsT(object):

-    # FakeQuantOptionsT
+
+class SpaceToDepthOptionsT(object):
+
+    # SpaceToDepthOptionsT
     def __init__(self):
-        self.min = 0.0  # type: float
-        self.max = 0.0  # type: float
-        self.numBits = 0  # type: int
-        self.narrowRange = False  # type: bool
+        self.blockSize = 0  # type: int

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        fakeQuantOptions = FakeQuantOptions()
-        fakeQuantOptions.Init(buf, pos)
-        return cls.InitFromObj(fakeQuantOptions)
+        spaceToDepthOptions = SpaceToDepthOptions()
+        spaceToDepthOptions.Init(buf, pos)
+        return cls.InitFromObj(spaceToDepthOptions)

     @classmethod
-    def InitFromObj(cls, fakeQuantOptions):
-        x = FakeQuantOptionsT()
-        x._UnPack(fakeQuantOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, spaceToDepthOptions):
+        x = SpaceToDepthOptionsT()
+        x._UnPack(spaceToDepthOptions)
         return x

-    # FakeQuantOptionsT
-    def _UnPack(self, fakeQuantOptions):
-        if fakeQuantOptions is None:
+    # SpaceToDepthOptionsT
+    def _UnPack(self, spaceToDepthOptions):
+        if spaceToDepthOptions is None:
             return
-        self.min = fakeQuantOptions.Min()
-        self.max = fakeQuantOptions.Max()
-        self.numBits = fakeQuantOptions.NumBits()
-        self.narrowRange = fakeQuantOptions.NarrowRange()
+        self.blockSize = spaceToDepthOptions.BlockSize()

-    # FakeQuantOptionsT
+    # SpaceToDepthOptionsT
     def Pack(self, builder):
-        FakeQuantOptionsStart(builder)
-        FakeQuantOptionsAddMin(builder, self.min)
-        FakeQuantOptionsAddMax(builder, self.max)
-        FakeQuantOptionsAddNumBits(builder, self.numBits)
-        FakeQuantOptionsAddNarrowRange(builder, self.narrowRange)
-        fakeQuantOptions = FakeQuantOptionsEnd(builder)
-        return fakeQuantOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        SpaceToDepthOptionsStart(builder)
+        SpaceToDepthOptionsAddBlockSize(builder, self.blockSize)
+        spaceToDepthOptions = SpaceToDepthOptionsEnd(builder)
+        return spaceToDepthOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class FillOptions(object):
+class DepthToSpaceOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = FillOptions()
+        x = DepthToSpaceOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsFillOptions(cls, buf, offset=0):
+    def GetRootAsDepthToSpaceOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def FillOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def DepthToSpaceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # FillOptions
+    # DepthToSpaceOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def FillOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return FillOptionsStart(builder)
-def FillOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return FillOptionsEnd(builder)
+    # DepthToSpaceOptions
+    def BlockSize(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0

-class FillOptionsT(object):
+def DepthToSpaceOptionsStart(builder):
+    builder.StartObject(1)

-    # FillOptionsT
+def DepthToSpaceOptionsAddBlockSize(builder, blockSize):
+    builder.PrependInt32Slot(0, blockSize, 0)
+
+def DepthToSpaceOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class DepthToSpaceOptionsT(object):
+
+    # DepthToSpaceOptionsT
     def __init__(self):
-        pass
+        self.blockSize = 0  # type: int

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        fillOptions = FillOptions()
-        fillOptions.Init(buf, pos)
-        return cls.InitFromObj(fillOptions)
+        depthToSpaceOptions = DepthToSpaceOptions()
+        depthToSpaceOptions.Init(buf, pos)
+        return cls.InitFromObj(depthToSpaceOptions)

     @classmethod
-    def InitFromObj(cls, fillOptions):
-        x = FillOptionsT()
-        x._UnPack(fillOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, depthToSpaceOptions):
+        x = DepthToSpaceOptionsT()
+        x._UnPack(depthToSpaceOptions)
         return x

-    # FillOptionsT
-    def _UnPack(self, fillOptions):
-        if fillOptions is None:
+    # DepthToSpaceOptionsT
+    def _UnPack(self, depthToSpaceOptions):
+        if depthToSpaceOptions is None:
             return
+        self.blockSize = depthToSpaceOptions.BlockSize()

-    # FillOptionsT
+    # DepthToSpaceOptionsT
     def Pack(self, builder):
-        FillOptionsStart(builder)
-        fillOptions = FillOptionsEnd(builder)
-        return fillOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        DepthToSpaceOptionsStart(builder)
+        DepthToSpaceOptionsAddBlockSize(builder, self.blockSize)
+        depthToSpaceOptions = DepthToSpaceOptionsEnd(builder)
+        return depthToSpaceOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class FloorDivOptions(object):
+class SubOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = FloorDivOptions()
+        x = SubOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsFloorDivOptions(cls, buf, offset=0):
+    def GetRootAsSubOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def FloorDivOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def SubOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # FloorDivOptions
+    # SubOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def FloorDivOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return FloorDivOptionsStart(builder)
-def FloorDivOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return FloorDivOptionsEnd(builder)
+    # SubOptions
+    def FusedActivationFunction(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+        return 0
+
+    # SubOptions
+    def PotScaleInt16(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return True
+
+def SubOptionsStart(builder):
+    builder.StartObject(2)

-class FloorDivOptionsT(object):
+def SubOptionsAddFusedActivationFunction(builder, fusedActivationFunction):
+    builder.PrependInt8Slot(0, fusedActivationFunction, 0)

-    # FloorDivOptionsT
+def SubOptionsAddPotScaleInt16(builder, potScaleInt16):
+    builder.PrependBoolSlot(1, potScaleInt16, 1)
+
+def SubOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class SubOptionsT(object):
+
+    # SubOptionsT
     def __init__(self):
-        pass
+        self.fusedActivationFunction = 0  # type: int
+        self.potScaleInt16 = True  # type: bool

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        floorDivOptions = FloorDivOptions()
-        floorDivOptions.Init(buf, pos)
-        return cls.InitFromObj(floorDivOptions)
+        subOptions = SubOptions()
+        subOptions.Init(buf, pos)
+        return cls.InitFromObj(subOptions)

     @classmethod
-    def InitFromObj(cls, floorDivOptions):
-        x = FloorDivOptionsT()
-        x._UnPack(floorDivOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, subOptions):
+        x = SubOptionsT()
+        x._UnPack(subOptions)
         return x

-    # FloorDivOptionsT
-    def _UnPack(self, floorDivOptions):
-        if floorDivOptions is None:
+    # SubOptionsT
+    def _UnPack(self, subOptions):
+        if subOptions is None:
             return
+        self.fusedActivationFunction = subOptions.FusedActivationFunction()
+        self.potScaleInt16 = subOptions.PotScaleInt16()

-    # FloorDivOptionsT
+    # SubOptionsT
     def Pack(self, builder):
-        FloorDivOptionsStart(builder)
-        floorDivOptions = FloorDivOptionsEnd(builder)
-        return floorDivOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        SubOptionsStart(builder)
+        SubOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
+        SubOptionsAddPotScaleInt16(builder, self.potScaleInt16)
+        subOptions = SubOptionsEnd(builder)
+        return subOptions
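SubOptions is one of the few tables here with a non-zero schema default: the generated writer calls PrependBoolSlot(1, potScaleInt16, 1), which skips the slot when the value equals the default True, and the reader's PotScaleInt16() falls back to True when the slot is absent. An illustrative check (import path is an assumption):

import flatbuffers

from tflite import SubOptions, SubOptionsT  # illustrative import path

opts = SubOptionsT()  # potScaleInt16 starts out True, the schema default
builder = flatbuffers.Builder(0)
builder.Finish(opts.Pack(builder))

# The slot was never written, so the accessor returns its default of True.
root = SubOptions.GetRootAs(builder.Output())
assert root.PotScaleInt16()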
"""This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def FloorModOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def DivOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # FloorModOptions + # DivOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def FloorModOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return FloorModOptionsStart(builder) -def FloorModOptionsEnd(builder): return builder.EndObject() -def End(builder): - return FloorModOptionsEnd(builder) + # DivOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def DivOptionsStart(builder): + builder.StartObject(1) -class FloorModOptionsT(object): +def DivOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(0, fusedActivationFunction, 0) - # FloorModOptionsT +def DivOptionsEnd(builder): + return builder.EndObject() + + + +class DivOptionsT(object): + + # DivOptionsT def __init__(self): - pass + self.fusedActivationFunction = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - floorModOptions = FloorModOptions() - floorModOptions.Init(buf, pos) - return cls.InitFromObj(floorModOptions) + divOptions = DivOptions() + divOptions.Init(buf, pos) + return cls.InitFromObj(divOptions) @classmethod - def InitFromObj(cls, floorModOptions): - x = FloorModOptionsT() - x._UnPack(floorModOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, divOptions): + x = DivOptionsT() + x._UnPack(divOptions) return x - # FloorModOptionsT - def _UnPack(self, floorModOptions): - if floorModOptions is None: + # DivOptionsT + def _UnPack(self, divOptions): + if divOptions is None: return + self.fusedActivationFunction = divOptions.FusedActivationFunction() - # FloorModOptionsT + # DivOptionsT def Pack(self, builder): - FloorModOptionsStart(builder) - floorModOptions = FloorModOptionsEnd(builder) - return floorModOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + DivOptionsStart(builder) + DivOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + divOptions = DivOptionsEnd(builder) + return divOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class FullyConnectedOptions(object): +class TopKV2Options(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = FullyConnectedOptions() + x = TopKV2Options() x.Init(buf, n + offset) return x @classmethod - def GetRootAsFullyConnectedOptions(cls, buf, offset=0): + def GetRootAsTopKV2Options(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def FullyConnectedOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def TopKV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # FullyConnectedOptions + # TopKV2Options def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # FullyConnectedOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # FullyConnectedOptions - def WeightsFormat(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def TopKV2OptionsStart(builder): + builder.StartObject(0) - # FullyConnectedOptions - def KeepNumDims(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def TopKV2OptionsEnd(builder): + return builder.EndObject() - # FullyConnectedOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False -def FullyConnectedOptionsStart(builder): builder.StartObject(4) -def Start(builder): - return FullyConnectedOptionsStart(builder) -def FullyConnectedOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return FullyConnectedOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def FullyConnectedOptionsAddWeightsFormat(builder, weightsFormat): builder.PrependInt8Slot(1, weightsFormat, 0) -def AddWeightsFormat(builder, weightsFormat): - return FullyConnectedOptionsAddWeightsFormat(builder, weightsFormat) -def FullyConnectedOptionsAddKeepNumDims(builder, keepNumDims): builder.PrependBoolSlot(2, keepNumDims, 0) -def AddKeepNumDims(builder, keepNumDims): - return FullyConnectedOptionsAddKeepNumDims(builder, keepNumDims) -def FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(3, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def FullyConnectedOptionsEnd(builder): return builder.EndObject() -def End(builder): - return FullyConnectedOptionsEnd(builder) -class FullyConnectedOptionsT(object): +class TopKV2OptionsT(object): - # FullyConnectedOptionsT + # TopKV2OptionsT def __init__(self): - self.fusedActivationFunction = 0 # type: int - self.weightsFormat = 0 # type: int - self.keepNumDims = False # type: bool - self.asymmetricQuantizeInputs = False # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - fullyConnectedOptions = FullyConnectedOptions() - fullyConnectedOptions.Init(buf, pos) - return cls.InitFromObj(fullyConnectedOptions) + topKv2Options = TopKV2Options() + topKv2Options.Init(buf, pos) + return cls.InitFromObj(topKv2Options) @classmethod - def InitFromObj(cls, fullyConnectedOptions): - x = 
FullyConnectedOptionsT() - x._UnPack(fullyConnectedOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, topKv2Options): + x = TopKV2OptionsT() + x._UnPack(topKv2Options) return x - # FullyConnectedOptionsT - def _UnPack(self, fullyConnectedOptions): - if fullyConnectedOptions is None: + # TopKV2OptionsT + def _UnPack(self, topKv2Options): + if topKv2Options is None: return - self.fusedActivationFunction = fullyConnectedOptions.FusedActivationFunction() - self.weightsFormat = fullyConnectedOptions.WeightsFormat() - self.keepNumDims = fullyConnectedOptions.KeepNumDims() - self.asymmetricQuantizeInputs = fullyConnectedOptions.AsymmetricQuantizeInputs() - # FullyConnectedOptionsT + # TopKV2OptionsT def Pack(self, builder): - FullyConnectedOptionsStart(builder) - FullyConnectedOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - FullyConnectedOptionsAddWeightsFormat(builder, self.weightsFormat) - FullyConnectedOptionsAddKeepNumDims(builder, self.keepNumDims) - FullyConnectedOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - fullyConnectedOptions = FullyConnectedOptionsEnd(builder) - return fullyConnectedOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class FullyConnectedOptionsWeightsFormat(object): - DEFAULT = 0 - SHUFFLED4x16INT8 = 1 -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + TopKV2OptionsStart(builder) + topKv2Options = TopKV2OptionsEnd(builder) + return topKv2Options -from flatbuffers.compat import import_numpy -np = import_numpy() -class GatherNdOptions(object): +class EmbeddingLookupSparseOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = GatherNdOptions() + x = EmbeddingLookupSparseOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsGatherNdOptions(cls, buf, offset=0): + def GetRootAsEmbeddingLookupSparseOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def GatherNdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def EmbeddingLookupSparseOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # GatherNdOptions + # EmbeddingLookupSparseOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def GatherNdOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return GatherNdOptionsStart(builder) -def GatherNdOptionsEnd(builder): return builder.EndObject() -def End(builder): - return GatherNdOptionsEnd(builder) + # EmbeddingLookupSparseOptions + def Combiner(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 -class GatherNdOptionsT(object): +def EmbeddingLookupSparseOptionsStart(builder): + builder.StartObject(1) - # GatherNdOptionsT +def EmbeddingLookupSparseOptionsAddCombiner(builder, combiner): + builder.PrependInt8Slot(0, combiner, 0) + +def EmbeddingLookupSparseOptionsEnd(builder): + return builder.EndObject() + + + +class EmbeddingLookupSparseOptionsT(object): + + # EmbeddingLookupSparseOptionsT def __init__(self): - pass + self.combiner = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - gatherNdOptions = GatherNdOptions() - gatherNdOptions.Init(buf, pos) - return cls.InitFromObj(gatherNdOptions) + embeddingLookupSparseOptions = EmbeddingLookupSparseOptions() + embeddingLookupSparseOptions.Init(buf, pos) + return cls.InitFromObj(embeddingLookupSparseOptions) @classmethod - def InitFromObj(cls, gatherNdOptions): - x = GatherNdOptionsT() - x._UnPack(gatherNdOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, embeddingLookupSparseOptions): + x = EmbeddingLookupSparseOptionsT() + x._UnPack(embeddingLookupSparseOptions) return x - # GatherNdOptionsT - def _UnPack(self, gatherNdOptions): - if gatherNdOptions is None: + # EmbeddingLookupSparseOptionsT + def _UnPack(self, embeddingLookupSparseOptions): + if embeddingLookupSparseOptions is None: return + self.combiner = embeddingLookupSparseOptions.Combiner() - # GatherNdOptionsT + # EmbeddingLookupSparseOptionsT def Pack(self, builder): - GatherNdOptionsStart(builder) - gatherNdOptions = GatherNdOptionsEnd(builder) - return gatherNdOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + EmbeddingLookupSparseOptionsStart(builder) + EmbeddingLookupSparseOptionsAddCombiner(builder, self.combiner) + embeddingLookupSparseOptions = EmbeddingLookupSparseOptionsEnd(builder) + return embeddingLookupSparseOptions -from flatbuffers.compat import import_numpy -np = import_numpy() class GatherOptions(object): __slots__ = ['_tab'] @@ -4512,18 +9596,19 @@ def BatchDims(self): return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 -def GatherOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return GatherOptionsStart(builder) -def GatherOptionsAddAxis(builder, axis): builder.PrependInt32Slot(0, axis, 0) -def AddAxis(builder, axis): - return GatherOptionsAddAxis(builder, axis) -def GatherOptionsAddBatchDims(builder, batchDims): builder.PrependInt32Slot(1, batchDims, 0) -def 
AddBatchDims(builder, batchDims): - return GatherOptionsAddBatchDims(builder, batchDims) -def GatherOptionsEnd(builder): return builder.EndObject() -def End(builder): - return GatherOptionsEnd(builder) +def GatherOptionsStart(builder): + builder.StartObject(2) + +def GatherOptionsAddAxis(builder, axis): + builder.PrependInt32Slot(0, axis, 0) + +def GatherOptionsAddBatchDims(builder, batchDims): + builder.PrependInt32Slot(1, batchDims, 0) + +def GatherOptionsEnd(builder): + return builder.EndObject() + + class GatherOptionsT(object): @@ -4538,6 +9623,11 @@ def InitFromBuf(cls, buf, pos): gatherOptions.Init(buf, pos) return cls.InitFromObj(gatherOptions) + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + @classmethod def InitFromObj(cls, gatherOptions): x = GatherOptionsT() @@ -4558,2742 +9648,2532 @@ def Pack(self, builder): GatherOptionsAddBatchDims(builder, self.batchDims) gatherOptions = GatherOptionsEnd(builder) return gatherOptions -# automatically generated by the FlatBuffers compiler, do not modify -# namespace: tflite - -from flatbuffers.compat import import_numpy -np = import_numpy() -class GeluOptions(object): +class TransposeOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = GeluOptions() + x = TransposeOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsGeluOptions(cls, buf, offset=0): + def GetRootAsTransposeOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def GeluOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def TransposeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # GeluOptions + # TransposeOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # GeluOptions - def Approximate(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def TransposeOptionsStart(builder): + builder.StartObject(0) -def GeluOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return GeluOptionsStart(builder) -def GeluOptionsAddApproximate(builder, approximate): builder.PrependBoolSlot(0, approximate, 0) -def AddApproximate(builder, approximate): - return GeluOptionsAddApproximate(builder, approximate) -def GeluOptionsEnd(builder): return builder.EndObject() -def End(builder): - return GeluOptionsEnd(builder) +def TransposeOptionsEnd(builder): + return builder.EndObject() -class GeluOptionsT(object): - # GeluOptionsT + +class TransposeOptionsT(object): + + # TransposeOptionsT def __init__(self): - self.approximate = False # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - geluOptions = GeluOptions() - geluOptions.Init(buf, pos) - return cls.InitFromObj(geluOptions) + transposeOptions = TransposeOptions() + transposeOptions.Init(buf, pos) + return cls.InitFromObj(transposeOptions) @classmethod - def InitFromObj(cls, geluOptions): - x = GeluOptionsT() - x._UnPack(geluOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod 
+ def InitFromObj(cls, transposeOptions): + x = TransposeOptionsT() + x._UnPack(transposeOptions) return x - # GeluOptionsT - def _UnPack(self, geluOptions): - if geluOptions is None: + # TransposeOptionsT + def _UnPack(self, transposeOptions): + if transposeOptions is None: return - self.approximate = geluOptions.Approximate() - # GeluOptionsT + # TransposeOptionsT def Pack(self, builder): - GeluOptionsStart(builder) - GeluOptionsAddApproximate(builder, self.approximate) - geluOptions = GeluOptionsEnd(builder) - return geluOptions -# automatically generated by the FlatBuffers compiler, do not modify + TransposeOptionsStart(builder) + transposeOptions = TransposeOptionsEnd(builder) + return transposeOptions + + +class ExpOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ExpOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsExpOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ExpOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # ExpOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def ExpOptionsStart(builder): + builder.StartObject(0) + +def ExpOptionsEnd(builder): + return builder.EndObject() + + + +class ExpOptionsT(object): + + # ExpOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + expOptions = ExpOptions() + expOptions.Init(buf, pos) + return cls.InitFromObj(expOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, expOptions): + x = ExpOptionsT() + x._UnPack(expOptions) + return x + + # ExpOptionsT + def _UnPack(self, expOptions): + if expOptions is None: + return -# namespace: tflite + # ExpOptionsT + def Pack(self, builder): + ExpOptionsStart(builder) + expOptions = ExpOptionsEnd(builder) + return expOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class GreaterEqualOptions(object): +class CosOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = GreaterEqualOptions() + x = CosOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsGreaterEqualOptions(cls, buf, offset=0): + def GetRootAsCosOptions(cls, buf, offset=0): """This method is deprecated. 
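The regeneration also drops the deprecated module-level Start()/Add*()/End() aliases, so low-level builder code must call the table-prefixed functions. For an empty table such as TransposeOptions that reduces to the sketch below (import path is an assumption):

import flatbuffers

from tflite import TransposeOptionsStart, TransposeOptionsEnd  # illustrative

builder = flatbuffers.Builder(0)
TransposeOptionsStart(builder)   # the bare Start() alias no longer exists
options = TransposeOptionsEnd(builder)
builder.Finish(options)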
-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class GreaterEqualOptions(object):
+class CosOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = GreaterEqualOptions()
+        x = CosOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsGreaterEqualOptions(cls, buf, offset=0):
+    def GetRootAsCosOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def GreaterEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def CosOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # GreaterEqualOptions
+    # CosOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def GreaterEqualOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return GreaterEqualOptionsStart(builder)
-def GreaterEqualOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return GreaterEqualOptionsEnd(builder)
+def CosOptionsStart(builder):
+    builder.StartObject(0)

-class GreaterEqualOptionsT(object):
+def CosOptionsEnd(builder):
+    return builder.EndObject()

-    # GreaterEqualOptionsT
+
+
+class CosOptionsT(object):
+
+    # CosOptionsT
     def __init__(self):
         pass

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        greaterEqualOptions = GreaterEqualOptions()
-        greaterEqualOptions.Init(buf, pos)
-        return cls.InitFromObj(greaterEqualOptions)
+        cosOptions = CosOptions()
+        cosOptions.Init(buf, pos)
+        return cls.InitFromObj(cosOptions)

     @classmethod
-    def InitFromObj(cls, greaterEqualOptions):
-        x = GreaterEqualOptionsT()
-        x._UnPack(greaterEqualOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, cosOptions):
+        x = CosOptionsT()
+        x._UnPack(cosOptions)
         return x

-    # GreaterEqualOptionsT
-    def _UnPack(self, greaterEqualOptions):
-        if greaterEqualOptions is None:
+    # CosOptionsT
+    def _UnPack(self, cosOptions):
+        if cosOptions is None:
             return

-    # GreaterEqualOptionsT
+    # CosOptionsT
     def Pack(self, builder):
-        GreaterEqualOptionsStart(builder)
-        greaterEqualOptions = GreaterEqualOptionsEnd(builder)
-        return greaterEqualOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        CosOptionsStart(builder)
+        cosOptions = CosOptionsEnd(builder)
+        return cosOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class GreaterOptions(object):
+class ReducerOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = GreaterOptions()
+        x = ReducerOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsGreaterOptions(cls, buf, offset=0):
+    def GetRootAsReducerOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def GreaterOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def ReducerOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # GreaterOptions
+    # ReducerOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def GreaterOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return GreaterOptionsStart(builder)
-def GreaterOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return GreaterOptionsEnd(builder)
+    # ReducerOptions
+    def KeepDims(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+def ReducerOptionsStart(builder):
+    builder.StartObject(1)

-class GreaterOptionsT(object):
+def ReducerOptionsAddKeepDims(builder, keepDims):
+    builder.PrependBoolSlot(0, keepDims, 0)

-    # GreaterOptionsT
+def ReducerOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class ReducerOptionsT(object):
+
+    # ReducerOptionsT
     def __init__(self):
-        pass
+        self.keepDims = False  # type: bool

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        greaterOptions = GreaterOptions()
-        greaterOptions.Init(buf, pos)
-        return cls.InitFromObj(greaterOptions)
+        reducerOptions = ReducerOptions()
+        reducerOptions.Init(buf, pos)
+        return cls.InitFromObj(reducerOptions)

     @classmethod
-    def InitFromObj(cls, greaterOptions):
-        x = GreaterOptionsT()
-        x._UnPack(greaterOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, reducerOptions):
+        x = ReducerOptionsT()
+        x._UnPack(reducerOptions)
         return x

-    # GreaterOptionsT
-    def _UnPack(self, greaterOptions):
-        if greaterOptions is None:
+    # ReducerOptionsT
+    def _UnPack(self, reducerOptions):
+        if reducerOptions is None:
             return
+        self.keepDims = reducerOptions.KeepDims()

-    # GreaterOptionsT
+    # ReducerOptionsT
     def Pack(self, builder):
-        GreaterOptionsStart(builder)
-        greaterOptions = GreaterOptionsEnd(builder)
-        return greaterOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        ReducerOptionsStart(builder)
+        ReducerOptionsAddKeepDims(builder, self.keepDims)
+        reducerOptions = ReducerOptionsEnd(builder)
+        return reducerOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class HardSwishOptions(object):
+class SqueezeOptions(object):
     __slots__ = ['_tab']

     @classmethod
     def GetRootAs(cls, buf, offset=0):
         n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = HardSwishOptions()
+        x = SqueezeOptions()
         x.Init(buf, n + offset)
         return x

     @classmethod
-    def GetRootAsHardSwishOptions(cls, buf, offset=0):
+    def GetRootAsSqueezeOptions(cls, buf, offset=0):
         """This method is deprecated. Please switch to GetRootAs."""
         return cls.GetRootAs(buf, offset)
     @classmethod
-    def HardSwishOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def SqueezeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
         return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # HardSwishOptions
+    # SqueezeOptions
     def Init(self, buf, pos):
         self._tab = flatbuffers.table.Table(buf, pos)

-def HardSwishOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return HardSwishOptionsStart(builder)
-def HardSwishOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return HardSwishOptionsEnd(builder)
+    # SqueezeOptions
+    def SqueezeDims(self, j):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            a = self._tab.Vector(o)
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
+        return 0

-class HardSwishOptionsT(object):
+    # SqueezeOptions
+    def SqueezeDimsAsNumpy(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
+        return 0

-    # HardSwishOptionsT
+    # SqueezeOptions
+    def SqueezeDimsLength(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.VectorLen(o)
+        return 0
+
+    # SqueezeOptions
+    def SqueezeDimsIsNone(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        return o == 0
+
+def SqueezeOptionsStart(builder):
+    builder.StartObject(1)
+
+def SqueezeOptionsAddSqueezeDims(builder, squeezeDims):
+    builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(squeezeDims), 0)
+
+def SqueezeOptionsStartSqueezeDimsVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
+
+def SqueezeOptionsEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class SqueezeOptionsT(object):
+
+    # SqueezeOptionsT
     def __init__(self):
-        pass
+        self.squeezeDims = None  # type: List[int]

     @classmethod
     def InitFromBuf(cls, buf, pos):
-        hardSwishOptions = HardSwishOptions()
-        hardSwishOptions.Init(buf, pos)
-        return cls.InitFromObj(hardSwishOptions)
+        squeezeOptions = SqueezeOptions()
+        squeezeOptions.Init(buf, pos)
+        return cls.InitFromObj(squeezeOptions)

     @classmethod
-    def InitFromObj(cls, hardSwishOptions):
-        x = HardSwishOptionsT()
-        x._UnPack(hardSwishOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, squeezeOptions):
+        x = SqueezeOptionsT()
+        x._UnPack(squeezeOptions)
         return x

-    # HardSwishOptionsT
-    def _UnPack(self, hardSwishOptions):
-        if hardSwishOptions is None:
+    # SqueezeOptionsT
+    def _UnPack(self, squeezeOptions):
+        if squeezeOptions is None:
             return
+        if not squeezeOptions.SqueezeDimsIsNone():
+            if np is None:
+                self.squeezeDims = []
+                for i in range(squeezeOptions.SqueezeDimsLength()):
+                    self.squeezeDims.append(squeezeOptions.SqueezeDims(i))
+            else:
+                self.squeezeDims = squeezeOptions.SqueezeDimsAsNumpy()

-    # HardSwishOptionsT
+    # SqueezeOptionsT
     def Pack(self, builder):
-        HardSwishOptionsStart(builder)
-        hardSwishOptions = HardSwishOptionsEnd(builder)
-        return hardSwishOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        if self.squeezeDims
is not None: + if np is not None and type(self.squeezeDims) is np.ndarray: + squeezeDims = builder.CreateNumpyVector(self.squeezeDims) + else: + SqueezeOptionsStartSqueezeDimsVector(builder, len(self.squeezeDims)) + for i in reversed(range(len(self.squeezeDims))): + builder.PrependInt32(self.squeezeDims[i]) + squeezeDims = builder.EndVector() + SqueezeOptionsStart(builder) + if self.squeezeDims is not None: + SqueezeOptionsAddSqueezeDims(builder, squeezeDims) + squeezeOptions = SqueezeOptionsEnd(builder) + return squeezeOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class HashtableFindOptions(object): +class SplitOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = HashtableFindOptions() + x = SplitOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsHashtableFindOptions(cls, buf, offset=0): + def GetRootAsSplitOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def HashtableFindOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SplitOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # HashtableFindOptions + # SplitOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def HashtableFindOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return HashtableFindOptionsStart(builder) -def HashtableFindOptionsEnd(builder): return builder.EndObject() -def End(builder): - return HashtableFindOptionsEnd(builder) + # SplitOptions + def NumSplits(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def SplitOptionsStart(builder): + builder.StartObject(1) -class HashtableFindOptionsT(object): +def SplitOptionsAddNumSplits(builder, numSplits): + builder.PrependInt32Slot(0, numSplits, 0) - # HashtableFindOptionsT +def SplitOptionsEnd(builder): + return builder.EndObject() + + + +class SplitOptionsT(object): + + # SplitOptionsT def __init__(self): - pass + self.numSplits = 0 # type: int + + @classmethod + def InitFromBuf(cls, buf, pos): + splitOptions = SplitOptions() + splitOptions.Init(buf, pos) + return cls.InitFromObj(splitOptions) @classmethod - def InitFromBuf(cls, buf, pos): - hashtableFindOptions = HashtableFindOptions() - hashtableFindOptions.Init(buf, pos) - return cls.InitFromObj(hashtableFindOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) @classmethod - def InitFromObj(cls, hashtableFindOptions): - x = HashtableFindOptionsT() - x._UnPack(hashtableFindOptions) + def InitFromObj(cls, splitOptions): + x = SplitOptionsT() + x._UnPack(splitOptions) return x - # HashtableFindOptionsT - def _UnPack(self, hashtableFindOptions): - if hashtableFindOptions is None: + # SplitOptionsT + def _UnPack(self, splitOptions): + if splitOptions is None: return + self.numSplits = splitOptions.NumSplits() - # HashtableFindOptionsT + # SplitOptionsT def Pack(self, builder): - HashtableFindOptionsStart(builder) - hashtableFindOptions = HashtableFindOptionsEnd(builder) - return hashtableFindOptions -# automatically generated by the FlatBuffers compiler, do not 
modify - -# namespace: tflite + SplitOptionsStart(builder) + SplitOptionsAddNumSplits(builder, self.numSplits) + splitOptions = SplitOptionsEnd(builder) + return splitOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class HashtableImportOptions(object): +class SplitVOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = HashtableImportOptions() + x = SplitVOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsHashtableImportOptions(cls, buf, offset=0): + def GetRootAsSplitVOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def HashtableImportOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SplitVOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # HashtableImportOptions + # SplitVOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def HashtableImportOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return HashtableImportOptionsStart(builder) -def HashtableImportOptionsEnd(builder): return builder.EndObject() -def End(builder): - return HashtableImportOptionsEnd(builder) + # SplitVOptions + def NumSplits(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def SplitVOptionsStart(builder): + builder.StartObject(1) -class HashtableImportOptionsT(object): +def SplitVOptionsAddNumSplits(builder, numSplits): + builder.PrependInt32Slot(0, numSplits, 0) - # HashtableImportOptionsT +def SplitVOptionsEnd(builder): + return builder.EndObject() + + + +class SplitVOptionsT(object): + + # SplitVOptionsT def __init__(self): - pass + self.numSplits = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - hashtableImportOptions = HashtableImportOptions() - hashtableImportOptions.Init(buf, pos) - return cls.InitFromObj(hashtableImportOptions) + splitVoptions = SplitVOptions() + splitVoptions.Init(buf, pos) + return cls.InitFromObj(splitVoptions) @classmethod - def InitFromObj(cls, hashtableImportOptions): - x = HashtableImportOptionsT() - x._UnPack(hashtableImportOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, splitVoptions): + x = SplitVOptionsT() + x._UnPack(splitVoptions) return x - # HashtableImportOptionsT - def _UnPack(self, hashtableImportOptions): - if hashtableImportOptions is None: + # SplitVOptionsT + def _UnPack(self, splitVoptions): + if splitVoptions is None: return + self.numSplits = splitVoptions.NumSplits() - # HashtableImportOptionsT + # SplitVOptionsT def Pack(self, builder): - HashtableImportOptionsStart(builder) - hashtableImportOptions = HashtableImportOptionsEnd(builder) - return hashtableImportOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + SplitVOptionsStart(builder) + SplitVOptionsAddNumSplits(builder, self.numSplits) + splitVoptions = SplitVOptionsEnd(builder) + return splitVoptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class HashtableOptions(object): +class StridedSliceOptions(object): 
__slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = HashtableOptions() + x = StridedSliceOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsHashtableOptions(cls, buf, offset=0): + def GetRootAsStridedSliceOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def HashtableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def StridedSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # HashtableOptions + # StridedSliceOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # HashtableOptions - def TableId(self): + # StridedSliceOptions + def BeginMask(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 - # HashtableOptions - def KeyDtype(self): + # StridedSliceOptions + def EndMask(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 - # HashtableOptions - def ValueDtype(self): + # StridedSliceOptions + def EllipsisMask(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 -def HashtableOptionsStart(builder): builder.StartObject(3) -def Start(builder): - return HashtableOptionsStart(builder) -def HashtableOptionsAddTableId(builder, tableId): builder.PrependInt32Slot(0, tableId, 0) -def AddTableId(builder, tableId): - return HashtableOptionsAddTableId(builder, tableId) -def HashtableOptionsAddKeyDtype(builder, keyDtype): builder.PrependInt8Slot(1, keyDtype, 0) -def AddKeyDtype(builder, keyDtype): - return HashtableOptionsAddKeyDtype(builder, keyDtype) -def HashtableOptionsAddValueDtype(builder, valueDtype): builder.PrependInt8Slot(2, valueDtype, 0) -def AddValueDtype(builder, valueDtype): - return HashtableOptionsAddValueDtype(builder, valueDtype) -def HashtableOptionsEnd(builder): return builder.EndObject() -def End(builder): - return HashtableOptionsEnd(builder) - -class HashtableOptionsT(object): + # StridedSliceOptions + def NewAxisMask(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 - # HashtableOptionsT - def __init__(self): - self.tableId = 0 # type: int - self.keyDtype = 0 # type: int - self.valueDtype = 0 # type: int + # StridedSliceOptions + def ShrinkAxisMask(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 - @classmethod - def InitFromBuf(cls, buf, pos): - hashtableOptions = HashtableOptions() - hashtableOptions.Init(buf, pos) - return cls.InitFromObj(hashtableOptions) + # StridedSliceOptions + def Offset(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return 
bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False - @classmethod - def InitFromObj(cls, hashtableOptions): - x = HashtableOptionsT() - x._UnPack(hashtableOptions) - return x +def StridedSliceOptionsStart(builder): + builder.StartObject(6) - # HashtableOptionsT - def _UnPack(self, hashtableOptions): - if hashtableOptions is None: - return - self.tableId = hashtableOptions.TableId() - self.keyDtype = hashtableOptions.KeyDtype() - self.valueDtype = hashtableOptions.ValueDtype() +def StridedSliceOptionsAddBeginMask(builder, beginMask): + builder.PrependInt32Slot(0, beginMask, 0) - # HashtableOptionsT - def Pack(self, builder): - HashtableOptionsStart(builder) - HashtableOptionsAddTableId(builder, self.tableId) - HashtableOptionsAddKeyDtype(builder, self.keyDtype) - HashtableOptionsAddValueDtype(builder, self.valueDtype) - hashtableOptions = HashtableOptionsEnd(builder) - return hashtableOptions -# automatically generated by the FlatBuffers compiler, do not modify +def StridedSliceOptionsAddEndMask(builder, endMask): + builder.PrependInt32Slot(1, endMask, 0) -# namespace: tflite +def StridedSliceOptionsAddEllipsisMask(builder, ellipsisMask): + builder.PrependInt32Slot(2, ellipsisMask, 0) -from flatbuffers.compat import import_numpy -np = import_numpy() +def StridedSliceOptionsAddNewAxisMask(builder, newAxisMask): + builder.PrependInt32Slot(3, newAxisMask, 0) -class HashtableSizeOptions(object): - __slots__ = ['_tab'] +def StridedSliceOptionsAddShrinkAxisMask(builder, shrinkAxisMask): + builder.PrependInt32Slot(4, shrinkAxisMask, 0) - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = HashtableSizeOptions() - x.Init(buf, n + offset) - return x +def StridedSliceOptionsAddOffset(builder, offset): + builder.PrependBoolSlot(5, offset, 0) - @classmethod - def GetRootAsHashtableSizeOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def HashtableSizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) +def StridedSliceOptionsEnd(builder): + return builder.EndObject() - # HashtableSizeOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) -def HashtableSizeOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return HashtableSizeOptionsStart(builder) -def HashtableSizeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return HashtableSizeOptionsEnd(builder) -class HashtableSizeOptionsT(object): +class StridedSliceOptionsT(object): - # HashtableSizeOptionsT + # StridedSliceOptionsT def __init__(self): - pass + self.beginMask = 0 # type: int + self.endMask = 0 # type: int + self.ellipsisMask = 0 # type: int + self.newAxisMask = 0 # type: int + self.shrinkAxisMask = 0 # type: int + self.offset = False # type: bool @classmethod def InitFromBuf(cls, buf, pos): - hashtableSizeOptions = HashtableSizeOptions() - hashtableSizeOptions.Init(buf, pos) - return cls.InitFromObj(hashtableSizeOptions) + stridedSliceOptions = StridedSliceOptions() + stridedSliceOptions.Init(buf, pos) + return cls.InitFromObj(stridedSliceOptions) @classmethod - def InitFromObj(cls, hashtableSizeOptions): - x = HashtableSizeOptionsT() - x._UnPack(hashtableSizeOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, stridedSliceOptions): + x = StridedSliceOptionsT() + x._UnPack(stridedSliceOptions) return x - # HashtableSizeOptionsT - def _UnPack(self, hashtableSizeOptions): - if hashtableSizeOptions is None: + # StridedSliceOptionsT + def _UnPack(self, stridedSliceOptions): + if stridedSliceOptions is None: return + self.beginMask = stridedSliceOptions.BeginMask() + self.endMask = stridedSliceOptions.EndMask() + self.ellipsisMask = stridedSliceOptions.EllipsisMask() + self.newAxisMask = stridedSliceOptions.NewAxisMask() + self.shrinkAxisMask = stridedSliceOptions.ShrinkAxisMask() + self.offset = stridedSliceOptions.Offset() - # HashtableSizeOptionsT + # StridedSliceOptionsT def Pack(self, builder): - HashtableSizeOptionsStart(builder) - hashtableSizeOptions = HashtableSizeOptionsEnd(builder) - return hashtableSizeOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + StridedSliceOptionsStart(builder) + StridedSliceOptionsAddBeginMask(builder, self.beginMask) + StridedSliceOptionsAddEndMask(builder, self.endMask) + StridedSliceOptionsAddEllipsisMask(builder, self.ellipsisMask) + StridedSliceOptionsAddNewAxisMask(builder, self.newAxisMask) + StridedSliceOptionsAddShrinkAxisMask(builder, self.shrinkAxisMask) + StridedSliceOptionsAddOffset(builder, self.offset) + stridedSliceOptions = StridedSliceOptionsEnd(builder) + return stridedSliceOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class IfOptions(object): +class LogSoftmaxOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = IfOptions() + x = LogSoftmaxOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsIfOptions(cls, buf, offset=0): + def GetRootAsLogSoftmaxOptions(cls, buf, offset=0): """This method is 
deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def IfOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def LogSoftmaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # IfOptions + # LogSoftmaxOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # IfOptions - def ThenSubgraphIndex(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 +def LogSoftmaxOptionsStart(builder): + builder.StartObject(0) - # IfOptions - def ElseSubgraphIndex(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 +def LogSoftmaxOptionsEnd(builder): + return builder.EndObject() -def IfOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return IfOptionsStart(builder) -def IfOptionsAddThenSubgraphIndex(builder, thenSubgraphIndex): builder.PrependInt32Slot(0, thenSubgraphIndex, 0) -def AddThenSubgraphIndex(builder, thenSubgraphIndex): - return IfOptionsAddThenSubgraphIndex(builder, thenSubgraphIndex) -def IfOptionsAddElseSubgraphIndex(builder, elseSubgraphIndex): builder.PrependInt32Slot(1, elseSubgraphIndex, 0) -def AddElseSubgraphIndex(builder, elseSubgraphIndex): - return IfOptionsAddElseSubgraphIndex(builder, elseSubgraphIndex) -def IfOptionsEnd(builder): return builder.EndObject() -def End(builder): - return IfOptionsEnd(builder) -class IfOptionsT(object): - # IfOptionsT +class LogSoftmaxOptionsT(object): + + # LogSoftmaxOptionsT def __init__(self): - self.thenSubgraphIndex = 0 # type: int - self.elseSubgraphIndex = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - ifOptions = IfOptions() - ifOptions.Init(buf, pos) - return cls.InitFromObj(ifOptions) + logSoftmaxOptions = LogSoftmaxOptions() + logSoftmaxOptions.Init(buf, pos) + return cls.InitFromObj(logSoftmaxOptions) @classmethod - def InitFromObj(cls, ifOptions): - x = IfOptionsT() - x._UnPack(ifOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, logSoftmaxOptions): + x = LogSoftmaxOptionsT() + x._UnPack(logSoftmaxOptions) return x - # IfOptionsT - def _UnPack(self, ifOptions): - if ifOptions is None: + # LogSoftmaxOptionsT + def _UnPack(self, logSoftmaxOptions): + if logSoftmaxOptions is None: return - self.thenSubgraphIndex = ifOptions.ThenSubgraphIndex() - self.elseSubgraphIndex = ifOptions.ElseSubgraphIndex() - # IfOptionsT + # LogSoftmaxOptionsT def Pack(self, builder): - IfOptionsStart(builder) - IfOptionsAddThenSubgraphIndex(builder, self.thenSubgraphIndex) - IfOptionsAddElseSubgraphIndex(builder, self.elseSubgraphIndex) - ifOptions = IfOptionsEnd(builder) - return ifOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + LogSoftmaxOptionsStart(builder) + logSoftmaxOptions = LogSoftmaxOptionsEnd(builder) + return logSoftmaxOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class Int32Vector(object): +class CastOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = 
flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Int32Vector() + x = CastOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsInt32Vector(cls, buf, offset=0): + def GetRootAsCastOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def Int32VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def CastOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Int32Vector + # CastOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # Int32Vector - def Values(self, j): + # CastOptions + def InDataType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - # Int32Vector - def ValuesAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + # CastOptions + def OutDataType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - # Int32Vector - def ValuesLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 +def CastOptionsStart(builder): + builder.StartObject(2) - # Int32Vector - def ValuesIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 +def CastOptionsAddInDataType(builder, inDataType): + builder.PrependInt8Slot(0, inDataType, 0) -def Int32VectorStart(builder): builder.StartObject(1) -def Start(builder): - return Int32VectorStart(builder) -def Int32VectorAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) -def AddValues(builder, values): - return Int32VectorAddValues(builder, values) -def Int32VectorStartValuesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartValuesVector(builder, numElems): - return Int32VectorStartValuesVector(builder, numElems) -def Int32VectorEnd(builder): return builder.EndObject() -def End(builder): - return Int32VectorEnd(builder) -try: - from typing import List -except: - pass +def CastOptionsAddOutDataType(builder, outDataType): + builder.PrependInt8Slot(1, outDataType, 0) -class Int32VectorT(object): +def CastOptionsEnd(builder): + return builder.EndObject() - # Int32VectorT + + +class CastOptionsT(object): + + # CastOptionsT def __init__(self): - self.values = None # type: List[int] + self.inDataType = 0 # type: int + self.outDataType = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - int32vector = Int32Vector() - int32vector.Init(buf, pos) - return cls.InitFromObj(int32vector) + castOptions = CastOptions() + castOptions.Init(buf, pos) + return cls.InitFromObj(castOptions) @classmethod - def InitFromObj(cls, int32vector): - x = Int32VectorT() - x._UnPack(int32vector) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def 
InitFromObj(cls, castOptions): + x = CastOptionsT() + x._UnPack(castOptions) return x - # Int32VectorT - def _UnPack(self, int32vector): - if int32vector is None: + # CastOptionsT + def _UnPack(self, castOptions): + if castOptions is None: return - if not int32vector.ValuesIsNone(): - if np is None: - self.values = [] - for i in range(int32vector.ValuesLength()): - self.values.append(int32vector.Values(i)) - else: - self.values = int32vector.ValuesAsNumpy() + self.inDataType = castOptions.InDataType() + self.outDataType = castOptions.OutDataType() - # Int32VectorT + # CastOptionsT def Pack(self, builder): - if self.values is not None: - if np is not None and type(self.values) is np.ndarray: - values = builder.CreateNumpyVector(self.values) - else: - Int32VectorStartValuesVector(builder, len(self.values)) - for i in reversed(range(len(self.values))): - builder.PrependInt32(self.values[i]) - values = builder.EndVector() - Int32VectorStart(builder) - if self.values is not None: - Int32VectorAddValues(builder, values) - int32vector = Int32VectorEnd(builder) - return int32vector -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + CastOptionsStart(builder) + CastOptionsAddInDataType(builder, self.inDataType) + CastOptionsAddOutDataType(builder, self.outDataType) + castOptions = CastOptionsEnd(builder) + return castOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class L2NormOptions(object): +class DequantizeOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = L2NormOptions() + x = DequantizeOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsL2NormOptions(cls, buf, offset=0): + def GetRootAsDequantizeOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def L2NormOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def DequantizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # L2NormOptions + # DequantizeOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # L2NormOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def DequantizeOptionsStart(builder): + builder.StartObject(0) -def L2NormOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return L2NormOptionsStart(builder) -def L2NormOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return L2NormOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def L2NormOptionsEnd(builder): return builder.EndObject() -def End(builder): - return L2NormOptionsEnd(builder) +def DequantizeOptionsEnd(builder): + return builder.EndObject() -class L2NormOptionsT(object): - # L2NormOptionsT + +class DequantizeOptionsT(object): + + # DequantizeOptionsT def __init__(self): - self.fusedActivationFunction = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - l2normOptions = L2NormOptions() - l2normOptions.Init(buf, pos) - return cls.InitFromObj(l2normOptions) + dequantizeOptions = DequantizeOptions() + dequantizeOptions.Init(buf, pos) + return cls.InitFromObj(dequantizeOptions) @classmethod - def InitFromObj(cls, l2normOptions): - x = L2NormOptionsT() - x._UnPack(l2normOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, dequantizeOptions): + x = DequantizeOptionsT() + x._UnPack(dequantizeOptions) return x - # L2NormOptionsT - def _UnPack(self, l2normOptions): - if l2normOptions is None: + # DequantizeOptionsT + def _UnPack(self, dequantizeOptions): + if dequantizeOptions is None: return - self.fusedActivationFunction = l2normOptions.FusedActivationFunction() - # L2NormOptionsT + # DequantizeOptionsT def Pack(self, builder): - L2NormOptionsStart(builder) - L2NormOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - l2normOptions = L2NormOptionsEnd(builder) - return l2normOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + DequantizeOptionsStart(builder) + dequantizeOptions = DequantizeOptionsEnd(builder) + return dequantizeOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LSHProjectionOptions(object): +class MaximumMinimumOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LSHProjectionOptions() + x = MaximumMinimumOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLSHProjectionOptions(cls, buf, offset=0): + def GetRootAsMaximumMinimumOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LSHProjectionOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def MaximumMinimumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LSHProjectionOptions + # MaximumMinimumOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # LSHProjectionOptions - def Type(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def MaximumMinimumOptionsStart(builder): + builder.StartObject(0) -def LSHProjectionOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return LSHProjectionOptionsStart(builder) -def LSHProjectionOptionsAddType(builder, type): builder.PrependInt8Slot(0, type, 0) -def AddType(builder, type): - return LSHProjectionOptionsAddType(builder, type) -def LSHProjectionOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LSHProjectionOptionsEnd(builder) +def MaximumMinimumOptionsEnd(builder): + return builder.EndObject() -class LSHProjectionOptionsT(object): - # LSHProjectionOptionsT + +class MaximumMinimumOptionsT(object): + + # MaximumMinimumOptionsT def __init__(self): - self.type = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - lshprojectionOptions = LSHProjectionOptions() - lshprojectionOptions.Init(buf, pos) - return cls.InitFromObj(lshprojectionOptions) - - @classmethod - def InitFromObj(cls, lshprojectionOptions): - x = LSHProjectionOptionsT() - x._UnPack(lshprojectionOptions) - return x - - # LSHProjectionOptionsT - def _UnPack(self, lshprojectionOptions): - if lshprojectionOptions is None: - return - self.type = lshprojectionOptions.Type() - - # LSHProjectionOptionsT - def Pack(self, builder): - LSHProjectionOptionsStart(builder) - LSHProjectionOptionsAddType(builder, self.type) - lshprojectionOptions = LSHProjectionOptionsEnd(builder) - return lshprojectionOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class LSHProjectionType(object): - UNKNOWN = 0 - SPARSE = 1 - DENSE = 2 -# automatically generated by the FlatBuffers compiler, do not modify + maximumMinimumOptions = MaximumMinimumOptions() + maximumMinimumOptions.Init(buf, pos) + return cls.InitFromObj(maximumMinimumOptions) -# namespace: tflite + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) -class LSTMKernelType(object): - FULL = 0 - BASIC = 1 -# automatically generated by the FlatBuffers compiler, do not modify + @classmethod + def InitFromObj(cls, maximumMinimumOptions): + x = MaximumMinimumOptionsT() + x._UnPack(maximumMinimumOptions) + return x -# namespace: tflite + # MaximumMinimumOptionsT + def _UnPack(self, maximumMinimumOptions): + if maximumMinimumOptions is None: + return -from flatbuffers.compat import import_numpy -np = import_numpy() + # MaximumMinimumOptionsT + def Pack(self, builder): + MaximumMinimumOptionsStart(builder) + maximumMinimumOptions = MaximumMinimumOptionsEnd(builder) + return maximumMinimumOptions -class LSTMOptions(object): + +class TileOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x 
= LSTMOptions() + x = TileOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLSTMOptions(cls, buf, offset=0): + def GetRootAsTileOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def TileOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LSTMOptions + # TileOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # LSTMOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # LSTMOptions - def CellClip(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 - - # LSTMOptions - def ProjClip(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 +def TileOptionsStart(builder): + builder.StartObject(0) - # LSTMOptions - def KernelType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def TileOptionsEnd(builder): + return builder.EndObject() - # LSTMOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False -def LSTMOptionsStart(builder): builder.StartObject(5) -def Start(builder): - return LSTMOptionsStart(builder) -def LSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return LSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def LSTMOptionsAddCellClip(builder, cellClip): builder.PrependFloat32Slot(1, cellClip, 0.0) -def AddCellClip(builder, cellClip): - return LSTMOptionsAddCellClip(builder, cellClip) -def LSTMOptionsAddProjClip(builder, projClip): builder.PrependFloat32Slot(2, projClip, 0.0) -def AddProjClip(builder, projClip): - return LSTMOptionsAddProjClip(builder, projClip) -def LSTMOptionsAddKernelType(builder, kernelType): builder.PrependInt8Slot(3, kernelType, 0) -def AddKernelType(builder, kernelType): - return LSTMOptionsAddKernelType(builder, kernelType) -def LSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(4, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return LSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def LSTMOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LSTMOptionsEnd(builder) -class LSTMOptionsT(object): +class TileOptionsT(object): - # LSTMOptionsT + # TileOptionsT def __init__(self): - self.fusedActivationFunction = 0 # type: int - self.cellClip = 0.0 # type: float - self.projClip = 0.0 # type: float - self.kernelType = 0 # type: int - self.asymmetricQuantizeInputs = False # type: bool 
+ pass @classmethod def InitFromBuf(cls, buf, pos): - lstmoptions = LSTMOptions() - lstmoptions.Init(buf, pos) - return cls.InitFromObj(lstmoptions) + tileOptions = TileOptions() + tileOptions.Init(buf, pos) + return cls.InitFromObj(tileOptions) @classmethod - def InitFromObj(cls, lstmoptions): - x = LSTMOptionsT() - x._UnPack(lstmoptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, tileOptions): + x = TileOptionsT() + x._UnPack(tileOptions) return x - # LSTMOptionsT - def _UnPack(self, lstmoptions): - if lstmoptions is None: + # TileOptionsT + def _UnPack(self, tileOptions): + if tileOptions is None: return - self.fusedActivationFunction = lstmoptions.FusedActivationFunction() - self.cellClip = lstmoptions.CellClip() - self.projClip = lstmoptions.ProjClip() - self.kernelType = lstmoptions.KernelType() - self.asymmetricQuantizeInputs = lstmoptions.AsymmetricQuantizeInputs() - # LSTMOptionsT + # TileOptionsT def Pack(self, builder): - LSTMOptionsStart(builder) - LSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - LSTMOptionsAddCellClip(builder, self.cellClip) - LSTMOptionsAddProjClip(builder, self.projClip) - LSTMOptionsAddKernelType(builder, self.kernelType) - LSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - lstmoptions = LSTMOptionsEnd(builder) - return lstmoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + TileOptionsStart(builder) + tileOptions = TileOptionsEnd(builder) + return tileOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LeakyReluOptions(object): +class ArgMaxOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LeakyReluOptions() + x = ArgMaxOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLeakyReluOptions(cls, buf, offset=0): + def GetRootAsArgMaxOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LeakyReluOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ArgMaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LeakyReluOptions + # ArgMaxOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # LeakyReluOptions - def Alpha(self): + # ArgMaxOptions + def OutputType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 -def LeakyReluOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return LeakyReluOptionsStart(builder) -def LeakyReluOptionsAddAlpha(builder, alpha): builder.PrependFloat32Slot(0, alpha, 0.0) -def AddAlpha(builder, alpha): - return LeakyReluOptionsAddAlpha(builder, alpha) -def LeakyReluOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LeakyReluOptionsEnd(builder) +def ArgMaxOptionsStart(builder): + builder.StartObject(1) -class LeakyReluOptionsT(object): +def ArgMaxOptionsAddOutputType(builder, outputType): + builder.PrependInt8Slot(0, outputType, 0) - # LeakyReluOptionsT +def ArgMaxOptionsEnd(builder): + return builder.EndObject() + + + +class ArgMaxOptionsT(object): + + # ArgMaxOptionsT def __init__(self): - self.alpha = 0.0 # type: float + self.outputType = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - leakyReluOptions = LeakyReluOptions() - leakyReluOptions.Init(buf, pos) - return cls.InitFromObj(leakyReluOptions) + argMaxOptions = ArgMaxOptions() + argMaxOptions.Init(buf, pos) + return cls.InitFromObj(argMaxOptions) @classmethod - def InitFromObj(cls, leakyReluOptions): - x = LeakyReluOptionsT() - x._UnPack(leakyReluOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, argMaxOptions): + x = ArgMaxOptionsT() + x._UnPack(argMaxOptions) return x - # LeakyReluOptionsT - def _UnPack(self, leakyReluOptions): - if leakyReluOptions is None: + # ArgMaxOptionsT + def _UnPack(self, argMaxOptions): + if argMaxOptions is None: return - self.alpha = leakyReluOptions.Alpha() + self.outputType = argMaxOptions.OutputType() - # LeakyReluOptionsT + # ArgMaxOptionsT def Pack(self, builder): - LeakyReluOptionsStart(builder) - LeakyReluOptionsAddAlpha(builder, self.alpha) - leakyReluOptions = LeakyReluOptionsEnd(builder) - return leakyReluOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ArgMaxOptionsStart(builder) + ArgMaxOptionsAddOutputType(builder, self.outputType) + argMaxOptions = ArgMaxOptionsEnd(builder) + return argMaxOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LessEqualOptions(object): +class ArgMinOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LessEqualOptions() + x = ArgMinOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLessEqualOptions(cls, buf, offset=0): + def GetRootAsArgMinOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LessEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ArgMinOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LessEqualOptions + # ArgMinOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def LessEqualOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return LessEqualOptionsStart(builder) -def LessEqualOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LessEqualOptionsEnd(builder) + # ArgMinOptions + def OutputType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def ArgMinOptionsStart(builder): + builder.StartObject(1) -class LessEqualOptionsT(object): +def ArgMinOptionsAddOutputType(builder, outputType): + builder.PrependInt8Slot(0, outputType, 0) - # LessEqualOptionsT +def ArgMinOptionsEnd(builder): + return builder.EndObject() + + + +class ArgMinOptionsT(object): + + # ArgMinOptionsT def __init__(self): - pass + self.outputType = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - lessEqualOptions = LessEqualOptions() - lessEqualOptions.Init(buf, pos) - return cls.InitFromObj(lessEqualOptions) + argMinOptions = ArgMinOptions() + argMinOptions.Init(buf, pos) + return cls.InitFromObj(argMinOptions) @classmethod - def InitFromObj(cls, lessEqualOptions): - x = LessEqualOptionsT() - x._UnPack(lessEqualOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, argMinOptions): + x = ArgMinOptionsT() + x._UnPack(argMinOptions) return x - # LessEqualOptionsT - def _UnPack(self, lessEqualOptions): - if lessEqualOptions is None: + # ArgMinOptionsT + def _UnPack(self, argMinOptions): + if argMinOptions is None: return + self.outputType = argMinOptions.OutputType() - # LessEqualOptionsT + # ArgMinOptionsT def Pack(self, builder): - LessEqualOptionsStart(builder) - lessEqualOptions = LessEqualOptionsEnd(builder) - return lessEqualOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ArgMinOptionsStart(builder) + ArgMinOptionsAddOutputType(builder, self.outputType) + argMinOptions = ArgMinOptionsEnd(builder) + return argMinOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LessOptions(object): +class GreaterOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LessOptions() + x = GreaterOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLessOptions(cls, buf, offset=0): + def GetRootAsGreaterOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LessOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def GreaterOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LessOptions + # GreaterOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def LessOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return LessOptionsStart(builder) -def LessOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LessOptionsEnd(builder) +def GreaterOptionsStart(builder): + builder.StartObject(0) -class LessOptionsT(object): +def GreaterOptionsEnd(builder): + return builder.EndObject() - # LessOptionsT + + +class GreaterOptionsT(object): + + # GreaterOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - lessOptions = LessOptions() - lessOptions.Init(buf, pos) - return cls.InitFromObj(lessOptions) + greaterOptions = GreaterOptions() + greaterOptions.Init(buf, pos) + return cls.InitFromObj(greaterOptions) @classmethod - def InitFromObj(cls, lessOptions): - x = LessOptionsT() - x._UnPack(lessOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, greaterOptions): + x = GreaterOptionsT() + x._UnPack(greaterOptions) return x - # LessOptionsT - def _UnPack(self, lessOptions): - if lessOptions is None: + # GreaterOptionsT + def _UnPack(self, greaterOptions): + if greaterOptions is None: return - # LessOptionsT + # GreaterOptionsT def Pack(self, builder): - LessOptionsStart(builder) - lessOptions = LessOptionsEnd(builder) - return lessOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + GreaterOptionsStart(builder) + greaterOptions = GreaterOptionsEnd(builder) + return greaterOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LocalResponseNormalizationOptions(object): +class GreaterEqualOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LocalResponseNormalizationOptions() + x = GreaterEqualOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLocalResponseNormalizationOptions(cls, buf, offset=0): + def GetRootAsGreaterEqualOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LocalResponseNormalizationOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def GreaterEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LocalResponseNormalizationOptions + # GreaterEqualOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # LocalResponseNormalizationOptions - def Radius(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # LocalResponseNormalizationOptions - def Bias(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 +def GreaterEqualOptionsStart(builder): + builder.StartObject(0) - # LocalResponseNormalizationOptions - def Alpha(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 +def GreaterEqualOptionsEnd(builder): + return builder.EndObject() - # LocalResponseNormalizationOptions - def Beta(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 -def LocalResponseNormalizationOptionsStart(builder): builder.StartObject(4) -def Start(builder): - return LocalResponseNormalizationOptionsStart(builder) -def LocalResponseNormalizationOptionsAddRadius(builder, radius): builder.PrependInt32Slot(0, radius, 0) -def AddRadius(builder, radius): - return LocalResponseNormalizationOptionsAddRadius(builder, radius) -def LocalResponseNormalizationOptionsAddBias(builder, bias): builder.PrependFloat32Slot(1, bias, 0.0) -def AddBias(builder, bias): - return LocalResponseNormalizationOptionsAddBias(builder, bias) -def LocalResponseNormalizationOptionsAddAlpha(builder, alpha): builder.PrependFloat32Slot(2, alpha, 0.0) -def AddAlpha(builder, alpha): - return LocalResponseNormalizationOptionsAddAlpha(builder, alpha) -def LocalResponseNormalizationOptionsAddBeta(builder, beta): builder.PrependFloat32Slot(3, beta, 0.0) -def AddBeta(builder, beta): - return LocalResponseNormalizationOptionsAddBeta(builder, beta) -def LocalResponseNormalizationOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LocalResponseNormalizationOptionsEnd(builder) -class LocalResponseNormalizationOptionsT(object): +class GreaterEqualOptionsT(object): - # LocalResponseNormalizationOptionsT + # GreaterEqualOptionsT def __init__(self): - self.radius = 0 # type: int - self.bias = 0.0 # type: float - self.alpha = 0.0 # type: float - self.beta = 0.0 # type: float + pass @classmethod def InitFromBuf(cls, buf, pos): - localResponseNormalizationOptions = LocalResponseNormalizationOptions() - localResponseNormalizationOptions.Init(buf, pos) - return cls.InitFromObj(localResponseNormalizationOptions) + greaterEqualOptions = GreaterEqualOptions() + greaterEqualOptions.Init(buf, pos) + return cls.InitFromObj(greaterEqualOptions) @classmethod - def InitFromObj(cls, localResponseNormalizationOptions): - x = LocalResponseNormalizationOptionsT() - x._UnPack(localResponseNormalizationOptions) + def InitFromPackedBuf(cls, 
buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, greaterEqualOptions): + x = GreaterEqualOptionsT() + x._UnPack(greaterEqualOptions) return x - # LocalResponseNormalizationOptionsT - def _UnPack(self, localResponseNormalizationOptions): - if localResponseNormalizationOptions is None: + # GreaterEqualOptionsT + def _UnPack(self, greaterEqualOptions): + if greaterEqualOptions is None: return - self.radius = localResponseNormalizationOptions.Radius() - self.bias = localResponseNormalizationOptions.Bias() - self.alpha = localResponseNormalizationOptions.Alpha() - self.beta = localResponseNormalizationOptions.Beta() - # LocalResponseNormalizationOptionsT + # GreaterEqualOptionsT def Pack(self, builder): - LocalResponseNormalizationOptionsStart(builder) - LocalResponseNormalizationOptionsAddRadius(builder, self.radius) - LocalResponseNormalizationOptionsAddBias(builder, self.bias) - LocalResponseNormalizationOptionsAddAlpha(builder, self.alpha) - LocalResponseNormalizationOptionsAddBeta(builder, self.beta) - localResponseNormalizationOptions = LocalResponseNormalizationOptionsEnd(builder) - return localResponseNormalizationOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + GreaterEqualOptionsStart(builder) + greaterEqualOptions = GreaterEqualOptionsEnd(builder) + return greaterEqualOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LogSoftmaxOptions(object): +class LessOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LogSoftmaxOptions() + x = LessOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLogSoftmaxOptions(cls, buf, offset=0): + def GetRootAsLessOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LogSoftmaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def LessOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LogSoftmaxOptions + # LessOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def LogSoftmaxOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return LogSoftmaxOptionsStart(builder) -def LogSoftmaxOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LogSoftmaxOptionsEnd(builder) +def LessOptionsStart(builder): + builder.StartObject(0) -class LogSoftmaxOptionsT(object): +def LessOptionsEnd(builder): + return builder.EndObject() - # LogSoftmaxOptionsT + + +class LessOptionsT(object): + + # LessOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - logSoftmaxOptions = LogSoftmaxOptions() - logSoftmaxOptions.Init(buf, pos) - return cls.InitFromObj(logSoftmaxOptions) + lessOptions = LessOptions() + lessOptions.Init(buf, pos) + return cls.InitFromObj(lessOptions) @classmethod - def InitFromObj(cls, logSoftmaxOptions): - x = LogSoftmaxOptionsT() - x._UnPack(logSoftmaxOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, lessOptions): + x = LessOptionsT() + x._UnPack(lessOptions) return x - # LogSoftmaxOptionsT - def _UnPack(self, logSoftmaxOptions): - if logSoftmaxOptions is None: + # LessOptionsT + def _UnPack(self, lessOptions): + if lessOptions is None: return - # LogSoftmaxOptionsT + # LessOptionsT def Pack(self, builder): - LogSoftmaxOptionsStart(builder) - logSoftmaxOptions = LogSoftmaxOptionsEnd(builder) - return logSoftmaxOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + LessOptionsStart(builder) + lessOptions = LessOptionsEnd(builder) + return lessOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LogicalAndOptions(object): +class LessEqualOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LogicalAndOptions() + x = LessEqualOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLogicalAndOptions(cls, buf, offset=0): + def GetRootAsLessEqualOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LogicalAndOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def LessEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LogicalAndOptions + # LessEqualOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def LogicalAndOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return LogicalAndOptionsStart(builder) -def LogicalAndOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LogicalAndOptionsEnd(builder) +def LessEqualOptionsStart(builder): + builder.StartObject(0) -class LogicalAndOptionsT(object): +def LessEqualOptionsEnd(builder): + return builder.EndObject() - # LogicalAndOptionsT + + +class LessEqualOptionsT(object): + + # LessEqualOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - logicalAndOptions = LogicalAndOptions() - logicalAndOptions.Init(buf, pos) - return cls.InitFromObj(logicalAndOptions) + lessEqualOptions = LessEqualOptions() + lessEqualOptions.Init(buf, pos) + return cls.InitFromObj(lessEqualOptions) @classmethod - def InitFromObj(cls, logicalAndOptions): - x = LogicalAndOptionsT() - x._UnPack(logicalAndOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, lessEqualOptions): + x = LessEqualOptionsT() + x._UnPack(lessEqualOptions) return x - # LogicalAndOptionsT - def _UnPack(self, logicalAndOptions): - if logicalAndOptions is None: + # LessEqualOptionsT + def _UnPack(self, lessEqualOptions): + if lessEqualOptions is None: return - # LogicalAndOptionsT + # LessEqualOptionsT def Pack(self, builder): - LogicalAndOptionsStart(builder) - logicalAndOptions = LogicalAndOptionsEnd(builder) - return logicalAndOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + LessEqualOptionsStart(builder) + lessEqualOptions = LessEqualOptionsEnd(builder) + return lessEqualOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LogicalNotOptions(object): +class NegOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LogicalNotOptions() + x = NegOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLogicalNotOptions(cls, buf, offset=0): + def GetRootAsNegOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LogicalNotOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def NegOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LogicalNotOptions + # NegOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def LogicalNotOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return LogicalNotOptionsStart(builder) -def LogicalNotOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LogicalNotOptionsEnd(builder) +def NegOptionsStart(builder): + builder.StartObject(0) -class LogicalNotOptionsT(object): +def NegOptionsEnd(builder): + return builder.EndObject() - # LogicalNotOptionsT + + +class NegOptionsT(object): + + # NegOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - logicalNotOptions = LogicalNotOptions() - logicalNotOptions.Init(buf, pos) - return cls.InitFromObj(logicalNotOptions) + negOptions = NegOptions() + negOptions.Init(buf, pos) + return cls.InitFromObj(negOptions) @classmethod - def InitFromObj(cls, logicalNotOptions): - x = LogicalNotOptionsT() - x._UnPack(logicalNotOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, negOptions): + x = NegOptionsT() + x._UnPack(negOptions) return x - # LogicalNotOptionsT - def _UnPack(self, logicalNotOptions): - if logicalNotOptions is None: + # NegOptionsT + def _UnPack(self, negOptions): + if negOptions is None: return - # LogicalNotOptionsT + # NegOptionsT def Pack(self, builder): - LogicalNotOptionsStart(builder) - logicalNotOptions = LogicalNotOptionsEnd(builder) - return logicalNotOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + NegOptionsStart(builder) + negOptions = NegOptionsEnd(builder) + return negOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class LogicalOrOptions(object): +class SelectOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = LogicalOrOptions() + x = SelectOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsLogicalOrOptions(cls, buf, offset=0): + def GetRootAsSelectOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def LogicalOrOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SelectOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # LogicalOrOptions + # SelectOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def LogicalOrOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return LogicalOrOptionsStart(builder) -def LogicalOrOptionsEnd(builder): return builder.EndObject() -def End(builder): - return LogicalOrOptionsEnd(builder) +def SelectOptionsStart(builder): + builder.StartObject(0) -class LogicalOrOptionsT(object): +def SelectOptionsEnd(builder): + return builder.EndObject() - # LogicalOrOptionsT + + +class SelectOptionsT(object): + + # SelectOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - logicalOrOptions = LogicalOrOptions() - logicalOrOptions.Init(buf, pos) - return cls.InitFromObj(logicalOrOptions) + selectOptions = SelectOptions() + selectOptions.Init(buf, pos) + return cls.InitFromObj(selectOptions) @classmethod - def InitFromObj(cls, logicalOrOptions): - x = LogicalOrOptionsT() - x._UnPack(logicalOrOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, selectOptions): + x = SelectOptionsT() + x._UnPack(selectOptions) return x - # LogicalOrOptionsT - def _UnPack(self, logicalOrOptions): - if logicalOrOptions is None: + # SelectOptionsT + def _UnPack(self, selectOptions): + if selectOptions is None: return - # LogicalOrOptionsT + # SelectOptionsT def Pack(self, builder): - LogicalOrOptionsStart(builder) - logicalOrOptions = LogicalOrOptionsEnd(builder) - return logicalOrOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + SelectOptionsStart(builder) + selectOptions = SelectOptionsEnd(builder) + return selectOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class MatrixDiagOptions(object): +class SliceOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = MatrixDiagOptions() + x = SliceOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsMatrixDiagOptions(cls, buf, offset=0): + def GetRootAsSliceOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def MatrixDiagOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # MatrixDiagOptions + # SliceOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def MatrixDiagOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return MatrixDiagOptionsStart(builder) -def MatrixDiagOptionsEnd(builder): return builder.EndObject() -def End(builder): - return MatrixDiagOptionsEnd(builder) +def SliceOptionsStart(builder): + builder.StartObject(0) -class MatrixDiagOptionsT(object): +def SliceOptionsEnd(builder): + return builder.EndObject() - # MatrixDiagOptionsT + + +class SliceOptionsT(object): + + # SliceOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - matrixDiagOptions = MatrixDiagOptions() - matrixDiagOptions.Init(buf, pos) - return cls.InitFromObj(matrixDiagOptions) + sliceOptions = SliceOptions() + sliceOptions.Init(buf, pos) + return cls.InitFromObj(sliceOptions) @classmethod - def InitFromObj(cls, matrixDiagOptions): - x = MatrixDiagOptionsT() - x._UnPack(matrixDiagOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, sliceOptions): + x = SliceOptionsT() + x._UnPack(sliceOptions) return x - # MatrixDiagOptionsT - def _UnPack(self, matrixDiagOptions): - if matrixDiagOptions is None: + # SliceOptionsT + def _UnPack(self, sliceOptions): + if sliceOptions is None: return - # MatrixDiagOptionsT + # SliceOptionsT def Pack(self, builder): - MatrixDiagOptionsStart(builder) - matrixDiagOptions = MatrixDiagOptionsEnd(builder) - return matrixDiagOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + SliceOptionsStart(builder) + sliceOptions = SliceOptionsEnd(builder) + return sliceOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class MatrixSetDiagOptions(object): +class TransposeConvOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = MatrixSetDiagOptions() + x = TransposeConvOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsMatrixSetDiagOptions(cls, buf, offset=0): + def GetRootAsTransposeConvOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def MatrixSetDiagOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def TransposeConvOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # MatrixSetDiagOptions + # TransposeConvOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def MatrixSetDiagOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return MatrixSetDiagOptionsStart(builder) -def MatrixSetDiagOptionsEnd(builder): return builder.EndObject() -def End(builder): - return MatrixSetDiagOptionsEnd(builder) + # TransposeConvOptions + def Padding(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 -class MatrixSetDiagOptionsT(object): + # TransposeConvOptions + def StrideW(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 - # MatrixSetDiagOptionsT + # TransposeConvOptions + def StrideH(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # TransposeConvOptions + def FusedActivationFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # TransposeConvOptions + def QuantizedBiasType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def TransposeConvOptionsStart(builder): + builder.StartObject(5) + +def TransposeConvOptionsAddPadding(builder, padding): + builder.PrependInt8Slot(0, padding, 0) + +def TransposeConvOptionsAddStrideW(builder, strideW): + builder.PrependInt32Slot(1, strideW, 0) + +def TransposeConvOptionsAddStrideH(builder, strideH): + builder.PrependInt32Slot(2, strideH, 0) + +def TransposeConvOptionsAddFusedActivationFunction(builder, fusedActivationFunction): + builder.PrependInt8Slot(3, fusedActivationFunction, 0) + +def TransposeConvOptionsAddQuantizedBiasType(builder, quantizedBiasType): + builder.PrependInt8Slot(4, quantizedBiasType, 0) + +def TransposeConvOptionsEnd(builder): + return builder.EndObject() + + + +class TransposeConvOptionsT(object): + + # TransposeConvOptionsT def __init__(self): - pass + self.padding = 0 # type: int + self.strideW = 0 # type: int + self.strideH = 0 # type: int + self.fusedActivationFunction = 0 # type: int + self.quantizedBiasType = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - matrixSetDiagOptions = MatrixSetDiagOptions() - matrixSetDiagOptions.Init(buf, pos) - return cls.InitFromObj(matrixSetDiagOptions) + transposeConvOptions = TransposeConvOptions() + transposeConvOptions.Init(buf, pos) + return cls.InitFromObj(transposeConvOptions) @classmethod - def InitFromObj(cls, matrixSetDiagOptions): - x = MatrixSetDiagOptionsT() - x._UnPack(matrixSetDiagOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, 
transposeConvOptions): + x = TransposeConvOptionsT() + x._UnPack(transposeConvOptions) return x - # MatrixSetDiagOptionsT - def _UnPack(self, matrixSetDiagOptions): - if matrixSetDiagOptions is None: + # TransposeConvOptionsT + def _UnPack(self, transposeConvOptions): + if transposeConvOptions is None: return + self.padding = transposeConvOptions.Padding() + self.strideW = transposeConvOptions.StrideW() + self.strideH = transposeConvOptions.StrideH() + self.fusedActivationFunction = transposeConvOptions.FusedActivationFunction() + self.quantizedBiasType = transposeConvOptions.QuantizedBiasType() - # MatrixSetDiagOptionsT + # TransposeConvOptionsT def Pack(self, builder): - MatrixSetDiagOptionsStart(builder) - matrixSetDiagOptions = MatrixSetDiagOptionsEnd(builder) - return matrixSetDiagOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + TransposeConvOptionsStart(builder) + TransposeConvOptionsAddPadding(builder, self.padding) + TransposeConvOptionsAddStrideW(builder, self.strideW) + TransposeConvOptionsAddStrideH(builder, self.strideH) + TransposeConvOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) + TransposeConvOptionsAddQuantizedBiasType(builder, self.quantizedBiasType) + transposeConvOptions = TransposeConvOptionsEnd(builder) + return transposeConvOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class MaximumMinimumOptions(object): +class ExpandDimsOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = MaximumMinimumOptions() + x = ExpandDimsOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsMaximumMinimumOptions(cls, buf, offset=0): + def GetRootAsExpandDimsOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def MaximumMinimumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ExpandDimsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # MaximumMinimumOptions + # ExpandDimsOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def MaximumMinimumOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return MaximumMinimumOptionsStart(builder) -def MaximumMinimumOptionsEnd(builder): return builder.EndObject() -def End(builder): - return MaximumMinimumOptionsEnd(builder) +def ExpandDimsOptionsStart(builder): + builder.StartObject(0) -class MaximumMinimumOptionsT(object): +def ExpandDimsOptionsEnd(builder): + return builder.EndObject() - # MaximumMinimumOptionsT + + +class ExpandDimsOptionsT(object): + + # ExpandDimsOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - maximumMinimumOptions = MaximumMinimumOptions() - maximumMinimumOptions.Init(buf, pos) - return cls.InitFromObj(maximumMinimumOptions) + expandDimsOptions = ExpandDimsOptions() + expandDimsOptions.Init(buf, pos) + return cls.InitFromObj(expandDimsOptions) @classmethod - def InitFromObj(cls, maximumMinimumOptions): - x = MaximumMinimumOptionsT() - x._UnPack(maximumMinimumOptions) - return x + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) - # MaximumMinimumOptionsT - def _UnPack(self, maximumMinimumOptions): - if maximumMinimumOptions is None: + @classmethod + def InitFromObj(cls, expandDimsOptions): + x = ExpandDimsOptionsT() + x._UnPack(expandDimsOptions) + return x + + # ExpandDimsOptionsT + def _UnPack(self, expandDimsOptions): + if expandDimsOptions is None: return - # MaximumMinimumOptionsT + # ExpandDimsOptionsT def Pack(self, builder): - MaximumMinimumOptionsStart(builder) - maximumMinimumOptions = MaximumMinimumOptionsEnd(builder) - return maximumMinimumOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ExpandDimsOptionsStart(builder) + expandDimsOptions = ExpandDimsOptionsEnd(builder) + return expandDimsOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class Metadata(object): +class SparseToDenseOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Metadata() + x = SparseToDenseOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsMetadata(cls, buf, offset=0): + def GetRootAsSparseToDenseOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def MetadataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SparseToDenseOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Metadata + # SparseToDenseOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # Metadata - def Name(self): + # SparseToDenseOptions + def ValidateIndices(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.String(o + self._tab.Pos) - return None + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False - # Metadata - def Buffer(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) - return 0 +def SparseToDenseOptionsStart(builder): + builder.StartObject(1) -def MetadataStart(builder): builder.StartObject(2) -def Start(builder): - return MetadataStart(builder) -def MetadataAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def AddName(builder, name): - return MetadataAddName(builder, name) -def MetadataAddBuffer(builder, buffer): builder.PrependUint32Slot(1, buffer, 0) -def AddBuffer(builder, buffer): - return MetadataAddBuffer(builder, buffer) -def MetadataEnd(builder): return builder.EndObject() -def End(builder): - return MetadataEnd(builder) +def SparseToDenseOptionsAddValidateIndices(builder, validateIndices): + builder.PrependBoolSlot(0, validateIndices, 0) -class MetadataT(object): +def SparseToDenseOptionsEnd(builder): + return builder.EndObject() - # MetadataT + + +class SparseToDenseOptionsT(object): + + # SparseToDenseOptionsT def __init__(self): - self.name = None # type: str - self.buffer = 0 # type: int + self.validateIndices = False # type: bool @classmethod def InitFromBuf(cls, buf, pos): - metadata = Metadata() - metadata.Init(buf, pos) - return cls.InitFromObj(metadata) + sparseToDenseOptions = SparseToDenseOptions() + sparseToDenseOptions.Init(buf, pos) + return cls.InitFromObj(sparseToDenseOptions) @classmethod - def InitFromObj(cls, metadata): - x = MetadataT() - x._UnPack(metadata) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, sparseToDenseOptions): + x = SparseToDenseOptionsT() + x._UnPack(sparseToDenseOptions) return x - # MetadataT - def _UnPack(self, metadata): - if metadata is None: + # SparseToDenseOptionsT + def _UnPack(self, sparseToDenseOptions): + if sparseToDenseOptions is None: return - self.name = metadata.Name() - self.buffer = metadata.Buffer() + self.validateIndices = sparseToDenseOptions.ValidateIndices() - # MetadataT + # SparseToDenseOptionsT def Pack(self, builder): - if self.name is not None: - name = builder.CreateString(self.name) - MetadataStart(builder) - if self.name is not None: - MetadataAddName(builder, name) - MetadataAddBuffer(builder, self.buffer) - metadata = MetadataEnd(builder) - return metadata -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class MirrorPadMode(object): - REFLECT = 0 - SYMMETRIC = 1 -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + 
SparseToDenseOptionsStart(builder) + SparseToDenseOptionsAddValidateIndices(builder, self.validateIndices) + sparseToDenseOptions = SparseToDenseOptionsEnd(builder) + return sparseToDenseOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class MirrorPadOptions(object): +class EqualOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = MirrorPadOptions() + x = EqualOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsMirrorPadOptions(cls, buf, offset=0): + def GetRootAsEqualOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def MirrorPadOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def EqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # MirrorPadOptions + # EqualOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # MirrorPadOptions - def Mode(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def EqualOptionsStart(builder): + builder.StartObject(0) -def MirrorPadOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return MirrorPadOptionsStart(builder) -def MirrorPadOptionsAddMode(builder, mode): builder.PrependInt8Slot(0, mode, 0) -def AddMode(builder, mode): - return MirrorPadOptionsAddMode(builder, mode) -def MirrorPadOptionsEnd(builder): return builder.EndObject() -def End(builder): - return MirrorPadOptionsEnd(builder) +def EqualOptionsEnd(builder): + return builder.EndObject() -class MirrorPadOptionsT(object): - # MirrorPadOptionsT + +class EqualOptionsT(object): + + # EqualOptionsT def __init__(self): - self.mode = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - mirrorPadOptions = MirrorPadOptions() - mirrorPadOptions.Init(buf, pos) - return cls.InitFromObj(mirrorPadOptions) + equalOptions = EqualOptions() + equalOptions.Init(buf, pos) + return cls.InitFromObj(equalOptions) @classmethod - def InitFromObj(cls, mirrorPadOptions): - x = MirrorPadOptionsT() - x._UnPack(mirrorPadOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, equalOptions): + x = EqualOptionsT() + x._UnPack(equalOptions) return x - # MirrorPadOptionsT - def _UnPack(self, mirrorPadOptions): - if mirrorPadOptions is None: + # EqualOptionsT + def _UnPack(self, equalOptions): + if equalOptions is None: return - self.mode = mirrorPadOptions.Mode() - # MirrorPadOptionsT + # EqualOptionsT def Pack(self, builder): - MirrorPadOptionsStart(builder) - MirrorPadOptionsAddMode(builder, self.mode) - mirrorPadOptions = MirrorPadOptionsEnd(builder) - return mirrorPadOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + EqualOptionsStart(builder) + equalOptions = EqualOptionsEnd(builder) + return equalOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class Model(object): +class NotEqualOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = 
Model() + x = NotEqualOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsModel(cls, buf, offset=0): + def GetRootAsNotEqualOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ModelBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def NotEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Model + # NotEqualOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # Model - def Version(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) - return 0 +def NotEqualOptionsStart(builder): + builder.StartObject(0) - # Model - def OperatorCodes(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = OperatorCode() - obj.Init(self._tab.Bytes, x) - return obj - return None +def NotEqualOptionsEnd(builder): + return builder.EndObject() - # Model - def OperatorCodesLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - # Model - def OperatorCodesIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - return o == 0 - # Model - def Subgraphs(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = SubGraph() - obj.Init(self._tab.Bytes, x) - return obj - return None +class NotEqualOptionsT(object): - # Model - def SubgraphsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 + # NotEqualOptionsT + def __init__(self): + pass - # Model - def SubgraphsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - return o == 0 + @classmethod + def InitFromBuf(cls, buf, pos): + notEqualOptions = NotEqualOptions() + notEqualOptions.Init(buf, pos) + return cls.InitFromObj(notEqualOptions) - # Model - def Description(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) - # Model - def Buffers(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = Buffer() - obj.Init(self._tab.Bytes, x) - return obj - return None + @classmethod + def InitFromObj(cls, notEqualOptions): + x = NotEqualOptionsT() + x._UnPack(notEqualOptions) + return x - # Model - def BuffersLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.VectorLen(o) - return 0 + # NotEqualOptionsT + def _UnPack(self, notEqualOptions): + if notEqualOptions is None: + return - # Model - def BuffersIsNone(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - return o == 0 + # NotEqualOptionsT + def Pack(self, builder): + NotEqualOptionsStart(builder) + notEqualOptions = NotEqualOptionsEnd(builder) + return notEqualOptions - # Model - def MetadataBuffer(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - # Model - def MetadataBufferAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 +class ShapeOptions(object): + __slots__ = ['_tab'] - # Model - def MetadataBufferLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return self._tab.VectorLen(o) - return 0 + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = ShapeOptions() + x.Init(buf, n + offset) + return x - # Model - def MetadataBufferIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - return o == 0 + @classmethod + def GetRootAsShapeOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def ShapeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Model - def Metadata(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = Metadata() - obj.Init(self._tab.Bytes, x) - return obj - return None + # ShapeOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) - # Model - def MetadataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + # ShapeOptions + def OutType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.VectorLen(o) + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - # Model - def MetadataIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - return o == 0 +def ShapeOptionsStart(builder): + builder.StartObject(1) - # Model - def SignatureDefs(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = SignatureDef() - obj.Init(self._tab.Bytes, x) - return obj - return None +def ShapeOptionsAddOutType(builder, outType): + builder.PrependInt8Slot(0, outType, 0) - # Model - def SignatureDefsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - return self._tab.VectorLen(o) - return 0 +def ShapeOptionsEnd(builder): + return builder.EndObject() - # Model - def SignatureDefsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - return o == 0 -def ModelStart(builder): builder.StartObject(8) -def Start(builder): - return ModelStart(builder) -def ModelAddVersion(builder, version): builder.PrependUint32Slot(0, 
version, 0) -def AddVersion(builder, version): - return ModelAddVersion(builder, version) -def ModelAddOperatorCodes(builder, operatorCodes): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(operatorCodes), 0) -def AddOperatorCodes(builder, operatorCodes): - return ModelAddOperatorCodes(builder, operatorCodes) -def ModelStartOperatorCodesVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartOperatorCodesVector(builder, numElems): - return ModelStartOperatorCodesVector(builder, numElems) -def ModelAddSubgraphs(builder, subgraphs): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(subgraphs), 0) -def AddSubgraphs(builder, subgraphs): - return ModelAddSubgraphs(builder, subgraphs) -def ModelStartSubgraphsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartSubgraphsVector(builder, numElems): - return ModelStartSubgraphsVector(builder, numElems) -def ModelAddDescription(builder, description): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0) -def AddDescription(builder, description): - return ModelAddDescription(builder, description) -def ModelAddBuffers(builder, buffers): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(buffers), 0) -def AddBuffers(builder, buffers): - return ModelAddBuffers(builder, buffers) -def ModelStartBuffersVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartBuffersVector(builder, numElems): - return ModelStartBuffersVector(builder, numElems) -def ModelAddMetadataBuffer(builder, metadataBuffer): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(metadataBuffer), 0) -def AddMetadataBuffer(builder, metadataBuffer): - return ModelAddMetadataBuffer(builder, metadataBuffer) -def ModelStartMetadataBufferVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartMetadataBufferVector(builder, numElems): - return ModelStartMetadataBufferVector(builder, numElems) -def ModelAddMetadata(builder, metadata): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0) -def AddMetadata(builder, metadata): - return ModelAddMetadata(builder, metadata) -def ModelStartMetadataVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartMetadataVector(builder, numElems): - return ModelStartMetadataVector(builder, numElems) -def ModelAddSignatureDefs(builder, signatureDefs): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(signatureDefs), 0) -def AddSignatureDefs(builder, signatureDefs): - return ModelAddSignatureDefs(builder, signatureDefs) -def ModelStartSignatureDefsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartSignatureDefsVector(builder, numElems): - return ModelStartSignatureDefsVector(builder, numElems) -def ModelEnd(builder): return builder.EndObject() -def End(builder): - return ModelEnd(builder) -try: - from typing import List -except: - pass -class ModelT(object): +class ShapeOptionsT(object): - # ModelT + # ShapeOptionsT def __init__(self): - self.version = 0 # type: int - self.operatorCodes = None # type: List[OperatorCodeT] - self.subgraphs = None # type: List[SubGraphT] - self.description = None # type: str - self.buffers = None # type: List[BufferT] - self.metadataBuffer = None # type: List[int] - self.metadata = None # type: List[MetadataT] - 
self.signatureDefs = None # type: List[SignatureDefT] + self.outType = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - model = Model() - model.Init(buf, pos) - return cls.InitFromObj(model) + shapeOptions = ShapeOptions() + shapeOptions.Init(buf, pos) + return cls.InitFromObj(shapeOptions) @classmethod - def InitFromObj(cls, model): - x = ModelT() - x._UnPack(model) - return x + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) - # ModelT - def _UnPack(self, model): - if model is None: - return - self.version = model.Version() - if not model.OperatorCodesIsNone(): - self.operatorCodes = [] - for i in range(model.OperatorCodesLength()): - if model.OperatorCodes(i) is None: - self.operatorCodes.append(None) - else: - operatorCode_ = OperatorCodeT.InitFromObj(model.OperatorCodes(i)) - self.operatorCodes.append(operatorCode_) - if not model.SubgraphsIsNone(): - self.subgraphs = [] - for i in range(model.SubgraphsLength()): - if model.Subgraphs(i) is None: - self.subgraphs.append(None) - else: - subGraph_ = SubGraphT.InitFromObj(model.Subgraphs(i)) - self.subgraphs.append(subGraph_) - self.description = model.Description() - if not model.BuffersIsNone(): - self.buffers = [] - for i in range(model.BuffersLength()): - if model.Buffers(i) is None: - self.buffers.append(None) - else: - buffer_ = BufferT.InitFromObj(model.Buffers(i)) - self.buffers.append(buffer_) - if not model.MetadataBufferIsNone(): - if np is None: - self.metadataBuffer = [] - for i in range(model.MetadataBufferLength()): - self.metadataBuffer.append(model.MetadataBuffer(i)) - else: - self.metadataBuffer = model.MetadataBufferAsNumpy() - if not model.MetadataIsNone(): - self.metadata = [] - for i in range(model.MetadataLength()): - if model.Metadata(i) is None: - self.metadata.append(None) - else: - metadata_ = MetadataT.InitFromObj(model.Metadata(i)) - self.metadata.append(metadata_) - if not model.SignatureDefsIsNone(): - self.signatureDefs = [] - for i in range(model.SignatureDefsLength()): - if model.SignatureDefs(i) is None: - self.signatureDefs.append(None) - else: - signatureDef_ = SignatureDefT.InitFromObj(model.SignatureDefs(i)) - self.signatureDefs.append(signatureDef_) + @classmethod + def InitFromObj(cls, shapeOptions): + x = ShapeOptionsT() + x._UnPack(shapeOptions) + return x - # ModelT - def Pack(self, builder): - if self.operatorCodes is not None: - operatorCodeslist = [] - for i in range(len(self.operatorCodes)): - operatorCodeslist.append(self.operatorCodes[i].Pack(builder)) - ModelStartOperatorCodesVector(builder, len(self.operatorCodes)) - for i in reversed(range(len(self.operatorCodes))): - builder.PrependUOffsetTRelative(operatorCodeslist[i]) - operatorCodes = builder.EndVector() - if self.subgraphs is not None: - subgraphslist = [] - for i in range(len(self.subgraphs)): - subgraphslist.append(self.subgraphs[i].Pack(builder)) - ModelStartSubgraphsVector(builder, len(self.subgraphs)) - for i in reversed(range(len(self.subgraphs))): - builder.PrependUOffsetTRelative(subgraphslist[i]) - subgraphs = builder.EndVector() - if self.description is not None: - description = builder.CreateString(self.description) - if self.buffers is not None: - bufferslist = [] - for i in range(len(self.buffers)): - bufferslist.append(self.buffers[i].Pack(builder)) - ModelStartBuffersVector(builder, len(self.buffers)) - for i in reversed(range(len(self.buffers))): - builder.PrependUOffsetTRelative(bufferslist[i]) - buffers = 
builder.EndVector() - if self.metadataBuffer is not None: - if np is not None and type(self.metadataBuffer) is np.ndarray: - metadataBuffer = builder.CreateNumpyVector(self.metadataBuffer) - else: - ModelStartMetadataBufferVector(builder, len(self.metadataBuffer)) - for i in reversed(range(len(self.metadataBuffer))): - builder.PrependInt32(self.metadataBuffer[i]) - metadataBuffer = builder.EndVector() - if self.metadata is not None: - metadatalist = [] - for i in range(len(self.metadata)): - metadatalist.append(self.metadata[i].Pack(builder)) - ModelStartMetadataVector(builder, len(self.metadata)) - for i in reversed(range(len(self.metadata))): - builder.PrependUOffsetTRelative(metadatalist[i]) - metadata = builder.EndVector() - if self.signatureDefs is not None: - signatureDefslist = [] - for i in range(len(self.signatureDefs)): - signatureDefslist.append(self.signatureDefs[i].Pack(builder)) - ModelStartSignatureDefsVector(builder, len(self.signatureDefs)) - for i in reversed(range(len(self.signatureDefs))): - builder.PrependUOffsetTRelative(signatureDefslist[i]) - signatureDefs = builder.EndVector() - ModelStart(builder) - ModelAddVersion(builder, self.version) - if self.operatorCodes is not None: - ModelAddOperatorCodes(builder, operatorCodes) - if self.subgraphs is not None: - ModelAddSubgraphs(builder, subgraphs) - if self.description is not None: - ModelAddDescription(builder, description) - if self.buffers is not None: - ModelAddBuffers(builder, buffers) - if self.metadataBuffer is not None: - ModelAddMetadataBuffer(builder, metadataBuffer) - if self.metadata is not None: - ModelAddMetadata(builder, metadata) - if self.signatureDefs is not None: - ModelAddSignatureDefs(builder, signatureDefs) - model = ModelEnd(builder) - return model -# automatically generated by the FlatBuffers compiler, do not modify + # ShapeOptionsT + def _UnPack(self, shapeOptions): + if shapeOptions is None: + return + self.outType = shapeOptions.OutType() -# namespace: tflite + # ShapeOptionsT + def Pack(self, builder): + ShapeOptionsStart(builder) + ShapeOptionsAddOutType(builder, self.outType) + shapeOptions = ShapeOptionsEnd(builder) + return shapeOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class MulOptions(object): +class RankOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = MulOptions() + x = RankOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsMulOptions(cls, buf, offset=0): + def GetRootAsRankOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def MulOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def RankOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # MulOptions + # RankOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # MulOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def RankOptionsStart(builder): + builder.StartObject(0) -def MulOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return MulOptionsStart(builder) -def MulOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return MulOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def MulOptionsEnd(builder): return builder.EndObject() -def End(builder): - return MulOptionsEnd(builder) +def RankOptionsEnd(builder): + return builder.EndObject() -class MulOptionsT(object): - # MulOptionsT + +class RankOptionsT(object): + + # RankOptionsT def __init__(self): - self.fusedActivationFunction = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - mulOptions = MulOptions() - mulOptions.Init(buf, pos) - return cls.InitFromObj(mulOptions) + rankOptions = RankOptions() + rankOptions.Init(buf, pos) + return cls.InitFromObj(rankOptions) @classmethod - def InitFromObj(cls, mulOptions): - x = MulOptionsT() - x._UnPack(mulOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, rankOptions): + x = RankOptionsT() + x._UnPack(rankOptions) return x - # MulOptionsT - def _UnPack(self, mulOptions): - if mulOptions is None: + # RankOptionsT + def _UnPack(self, rankOptions): + if rankOptions is None: return - self.fusedActivationFunction = mulOptions.FusedActivationFunction() - # MulOptionsT + # RankOptionsT def Pack(self, builder): - MulOptionsStart(builder) - MulOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - mulOptions = MulOptionsEnd(builder) - return mulOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + RankOptionsStart(builder) + rankOptions = RankOptionsEnd(builder) + return rankOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class NegOptions(object): +class PowOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = NegOptions() + x = PowOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsNegOptions(cls, buf, offset=0): + def GetRootAsPowOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def NegOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def PowOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # NegOptions + # PowOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def NegOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return NegOptionsStart(builder) -def NegOptionsEnd(builder): return builder.EndObject() -def End(builder): - return NegOptionsEnd(builder) +def PowOptionsStart(builder): + builder.StartObject(0) -class NegOptionsT(object): +def PowOptionsEnd(builder): + return builder.EndObject() - # NegOptionsT + + +class PowOptionsT(object): + + # PowOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - negOptions = NegOptions() - negOptions.Init(buf, pos) - return cls.InitFromObj(negOptions) + powOptions = PowOptions() + powOptions.Init(buf, pos) + return cls.InitFromObj(powOptions) @classmethod - def InitFromObj(cls, negOptions): - x = NegOptionsT() - x._UnPack(negOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, powOptions): + x = PowOptionsT() + x._UnPack(powOptions) return x - # NegOptionsT - def _UnPack(self, negOptions): - if negOptions is None: + # PowOptionsT + def _UnPack(self, powOptions): + if powOptions is None: return - # NegOptionsT + # PowOptionsT def Pack(self, builder): - NegOptionsStart(builder) - negOptions = NegOptionsEnd(builder) - return negOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + PowOptionsStart(builder) + powOptions = PowOptionsEnd(builder) + return powOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class NonMaxSuppressionV4Options(object): +class FakeQuantOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = NonMaxSuppressionV4Options() + x = FakeQuantOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsNonMaxSuppressionV4Options(cls, buf, offset=0): + def GetRootAsFakeQuantOptions(cls, buf, offset=0): """This method is deprecated. 
        Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def NonMaxSuppressionV4OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def FakeQuantOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # NonMaxSuppressionV4Options
+    # FakeQuantOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-def NonMaxSuppressionV4OptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return NonMaxSuppressionV4OptionsStart(builder)
-def NonMaxSuppressionV4OptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return NonMaxSuppressionV4OptionsEnd(builder)
+    # FakeQuantOptions
+    def Min(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0

-class NonMaxSuppressionV4OptionsT(object):
+    # FakeQuantOptions
+    def Max(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0

-    # NonMaxSuppressionV4OptionsT
+    # FakeQuantOptions
+    def NumBits(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0
+
+    # FakeQuantOptions
+    def NarrowRange(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+        if o != 0:
+            return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos))
+        return False
+
+def FakeQuantOptionsStart(builder):
+    builder.StartObject(4)
+
+def FakeQuantOptionsAddMin(builder, min):
+    builder.PrependFloat32Slot(0, min, 0.0)
+
+def FakeQuantOptionsAddMax(builder, max):
+    builder.PrependFloat32Slot(1, max, 0.0)
+
+def FakeQuantOptionsAddNumBits(builder, numBits):
+    builder.PrependInt32Slot(2, numBits, 0)
+
+def FakeQuantOptionsAddNarrowRange(builder, narrowRange):
+    builder.PrependBoolSlot(3, narrowRange, 0)
+
+def FakeQuantOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class FakeQuantOptionsT(object):
+
+    # FakeQuantOptionsT
    def __init__(self):
-        pass
+        self.min = 0.0  # type: float
+        self.max = 0.0  # type: float
+        self.numBits = 0  # type: int
+        self.narrowRange = False  # type: bool

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        nonMaxSuppressionV4options = NonMaxSuppressionV4Options()
-        nonMaxSuppressionV4options.Init(buf, pos)
-        return cls.InitFromObj(nonMaxSuppressionV4options)
+        fakeQuantOptions = FakeQuantOptions()
+        fakeQuantOptions.Init(buf, pos)
+        return cls.InitFromObj(fakeQuantOptions)

    @classmethod
-    def InitFromObj(cls, nonMaxSuppressionV4options):
-        x = NonMaxSuppressionV4OptionsT()
-        x._UnPack(nonMaxSuppressionV4options)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, fakeQuantOptions):
+        x = FakeQuantOptionsT()
+        x._UnPack(fakeQuantOptions)
        return x

-    # NonMaxSuppressionV4OptionsT
-    def _UnPack(self, nonMaxSuppressionV4options):
-        if nonMaxSuppressionV4options is None:
+    # FakeQuantOptionsT
+    def _UnPack(self, fakeQuantOptions):
+        if fakeQuantOptions is None:
            return
+        self.min = fakeQuantOptions.Min()
+        self.max = fakeQuantOptions.Max()
+        self.numBits = fakeQuantOptions.NumBits()
+        self.narrowRange = fakeQuantOptions.NarrowRange()

-    # NonMaxSuppressionV4OptionsT
+    # FakeQuantOptionsT
    def Pack(self, builder):
-        NonMaxSuppressionV4OptionsStart(builder)
-        nonMaxSuppressionV4options = NonMaxSuppressionV4OptionsEnd(builder)
-        return nonMaxSuppressionV4options
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        FakeQuantOptionsStart(builder)
+        FakeQuantOptionsAddMin(builder, self.min)
+        FakeQuantOptionsAddMax(builder, self.max)
+        FakeQuantOptionsAddNumBits(builder, self.numBits)
+        FakeQuantOptionsAddNarrowRange(builder, self.narrowRange)
+        fakeQuantOptions = FakeQuantOptionsEnd(builder)
+        return fakeQuantOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class NonMaxSuppressionV5Options(object):
+class PackOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = NonMaxSuppressionV5Options()
+        x = PackOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsNonMaxSuppressionV5Options(cls, buf, offset=0):
+    def GetRootAsPackOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def NonMaxSuppressionV5OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def PackOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # NonMaxSuppressionV5Options
+    # PackOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-def NonMaxSuppressionV5OptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return NonMaxSuppressionV5OptionsStart(builder)
-def NonMaxSuppressionV5OptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return NonMaxSuppressionV5OptionsEnd(builder)
+    # PackOptions
+    def ValuesCount(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0

-class NonMaxSuppressionV5OptionsT(object):
+    # PackOptions
+    def Axis(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
+        if o != 0:
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
+        return 0

-    # NonMaxSuppressionV5OptionsT
+def PackOptionsStart(builder):
+    builder.StartObject(2)
+
+def PackOptionsAddValuesCount(builder, valuesCount):
+    builder.PrependInt32Slot(0, valuesCount, 0)
+
+def PackOptionsAddAxis(builder, axis):
+    builder.PrependInt32Slot(1, axis, 0)
+
+def PackOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class PackOptionsT(object):
+
+    # PackOptionsT
    def __init__(self):
-        pass
+        self.valuesCount = 0  # type: int
+        self.axis = 0  # type: int

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        nonMaxSuppressionV5options = NonMaxSuppressionV5Options()
-        nonMaxSuppressionV5options.Init(buf, pos)
-        return cls.InitFromObj(nonMaxSuppressionV5options)
+        packOptions = PackOptions()
+        packOptions.Init(buf, pos)
+        return cls.InitFromObj(packOptions)

    @classmethod
-    def InitFromObj(cls, nonMaxSuppressionV5options):
-        x = NonMaxSuppressionV5OptionsT()
-        x._UnPack(nonMaxSuppressionV5options)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, packOptions):
+        x = PackOptionsT()
+        x._UnPack(packOptions)
        return x

-    # NonMaxSuppressionV5OptionsT
-    def _UnPack(self, nonMaxSuppressionV5options):
-        if nonMaxSuppressionV5options is None:
+    # PackOptionsT
+    def _UnPack(self, packOptions):
+        if packOptions is None:
            return
+        self.valuesCount = packOptions.ValuesCount()
+        self.axis = packOptions.Axis()

-    # NonMaxSuppressionV5OptionsT
+    # PackOptionsT
    def Pack(self, builder):
-        NonMaxSuppressionV5OptionsStart(builder)
-        nonMaxSuppressionV5options = NonMaxSuppressionV5OptionsEnd(builder)
-        return nonMaxSuppressionV5options
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        PackOptionsStart(builder)
+        PackOptionsAddValuesCount(builder, self.valuesCount)
+        PackOptionsAddAxis(builder, self.axis)
+        packOptions = PackOptionsEnd(builder)
+        return packOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class NotEqualOptions(object):
+class LogicalOrOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = NotEqualOptions()
+        x = LogicalOrOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsNotEqualOptions(cls, buf, offset=0):
+    def GetRootAsLogicalOrOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def NotEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def LogicalOrOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # NotEqualOptions
+    # LogicalOrOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-def NotEqualOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return NotEqualOptionsStart(builder)
-def NotEqualOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return NotEqualOptionsEnd(builder)
+def LogicalOrOptionsStart(builder):
+    builder.StartObject(0)

-class NotEqualOptionsT(object):
+def LogicalOrOptionsEnd(builder):
+    return builder.EndObject()

-    # NotEqualOptionsT
+
+
+class LogicalOrOptionsT(object):
+
+    # LogicalOrOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        notEqualOptions = NotEqualOptions()
-        notEqualOptions.Init(buf, pos)
-        return cls.InitFromObj(notEqualOptions)
+        logicalOrOptions = LogicalOrOptions()
+        logicalOrOptions.Init(buf, pos)
+        return cls.InitFromObj(logicalOrOptions)

    @classmethod
-    def InitFromObj(cls, notEqualOptions):
-        x = NotEqualOptionsT()
-        x._UnPack(notEqualOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, logicalOrOptions):
+        x = LogicalOrOptionsT()
+        x._UnPack(logicalOrOptions)
        return x

-    # NotEqualOptionsT
-    def _UnPack(self, notEqualOptions):
-        if notEqualOptions is None:
+    # LogicalOrOptionsT
+    def _UnPack(self, logicalOrOptions):
+        if logicalOrOptions is None:
            return

-    # NotEqualOptionsT
+    # LogicalOrOptionsT
    def Pack(self, builder):
-        NotEqualOptionsStart(builder)
-        notEqualOptions = NotEqualOptionsEnd(builder)
-        return notEqualOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        LogicalOrOptionsStart(builder)
+        logicalOrOptions = LogicalOrOptionsEnd(builder)
+        return logicalOrOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

 class OneHotOptions(object):
    __slots__ = ['_tab']
@@ -7324,15 +12204,16 @@ def Axis(self):
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

-def OneHotOptionsStart(builder): builder.StartObject(1)
-def Start(builder):
-    return OneHotOptionsStart(builder)
-def OneHotOptionsAddAxis(builder, axis): builder.PrependInt32Slot(0, axis, 0)
-def AddAxis(builder, axis):
-    return OneHotOptionsAddAxis(builder, axis)
-def OneHotOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return OneHotOptionsEnd(builder)
+def OneHotOptionsStart(builder):
+    builder.StartObject(1)
+
+def OneHotOptionsAddAxis(builder, axis):
+    builder.PrependInt32Slot(0, axis, 0)
+
+def OneHotOptionsEnd(builder):
+    return builder.EndObject()
+
+

 class OneHotOptionsT(object):
@@ -7346,6 +12227,11 @@ def InitFromBuf(cls, buf, pos):
        oneHotOptions.Init(buf, pos)
        return cls.InitFromObj(oneHotOptions)

+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
    @classmethod
    def InitFromObj(cls, oneHotOptions):
        x = OneHotOptionsT()
@@ -7364,2485 +12250,1975 @@ def Pack(self, builder):
        OneHotOptionsAddAxis(builder, self.axis)
        oneHotOptions = OneHotOptionsEnd(builder)
        return oneHotOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
-
-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class Operator(object):
+class AbsOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = Operator()
+        x = AbsOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsOperator(cls, buf, offset=0):
+    def GetRootAsAbsOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def OperatorBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def AbsOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # Operator
+    # AbsOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-    # Operator
-    def OpcodeIndex(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
-        return 0
+def AbsOptionsStart(builder):
+    builder.StartObject(0)

-    # Operator
-    def Inputs(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
-        return 0
+def AbsOptionsEnd(builder):
+    return builder.EndObject()

-    # Operator
-    def InputsAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
-        return 0

-    # Operator
-    def InputsLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0

-    # Operator
-    def InputsIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        return o == 0
+class AbsOptionsT(object):

-    # Operator
-    def Outputs(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
-        return 0
+    # AbsOptionsT
+    def __init__(self):
+        pass

-    # Operator
-    def OutputsAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
-        return 0
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        absOptions = AbsOptions()
+        absOptions.Init(buf, pos)
+        return cls.InitFromObj(absOptions)

-    # Operator
-    def OutputsLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)

-    # Operator
-    def OutputsIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        return o == 0
+    @classmethod
+    def InitFromObj(cls, absOptions):
+        x = AbsOptionsT()
+        x._UnPack(absOptions)
+        return x

-    # Operator
-    def BuiltinOptionsType(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
-        return 0
+    # AbsOptionsT
+    def _UnPack(self, absOptions):
+        if absOptions is None:
+            return

-    # Operator
-    def BuiltinOptions(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
-        if o != 0:
-            from flatbuffers.table import Table
-            obj = Table(bytearray(), 0)
-            self._tab.Union(obj, o)
-            return obj
-        return None
+    # AbsOptionsT
+    def Pack(self, builder):
+        AbsOptionsStart(builder)
+        absOptions = AbsOptionsEnd(builder)
+        return absOptions

-    # Operator
-    def CustomOptions(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
-        return 0

-    # Operator
-    def CustomOptionsAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
-        return 0
+class HardSwishOptions(object):
+    __slots__ = ['_tab']

-    # Operator
-    def CustomOptionsLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = HardSwishOptions()
+        x.Init(buf, n + offset)
+        return x

-    # Operator
-    def CustomOptionsIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        return o == 0
+    @classmethod
+    def GetRootAsHardSwishOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def HardSwishOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # Operator
-    def CustomOptionsFormat(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
+    # HardSwishOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)

-    # Operator
-    def MutatingVariableInputs(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.BoolFlags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1))
-        return 0
+def HardSwishOptionsStart(builder):
+    builder.StartObject(0)
+
+def HardSwishOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class HardSwishOptionsT(object):
+
+    # HardSwishOptionsT
+    def __init__(self):
+        pass
+
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        hardSwishOptions = HardSwishOptions()
+        hardSwishOptions.Init(buf, pos)
+        return cls.InitFromObj(hardSwishOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, hardSwishOptions):
+        x = HardSwishOptionsT()
+        x._UnPack(hardSwishOptions)
+        return x
+
+    # HardSwishOptionsT
+    def _UnPack(self, hardSwishOptions):
+        if hardSwishOptions is None:
+            return

-    # Operator
-    def MutatingVariableInputsAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.BoolFlags, o)
-        return 0
+    # HardSwishOptionsT
+    def Pack(self, builder):
+        HardSwishOptionsStart(builder)
+        hardSwishOptions = HardSwishOptionsEnd(builder)
+        return hardSwishOptions

-    # Operator
-    def MutatingVariableInputsLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0

-    # Operator
-    def MutatingVariableInputsIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18))
-        return o == 0
+class LogicalAndOptions(object):
+    __slots__ = ['_tab']

-    # Operator
-    def Intermediates(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
-        return 0
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = LogicalAndOptions()
+        x.Init(buf, n + offset)
+        return x

-    # Operator
-    def IntermediatesAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
-        return 0
+    @classmethod
+    def GetRootAsLogicalAndOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def LogicalAndOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # Operator
-    def IntermediatesLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0
+    # LogicalAndOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)

-    # Operator
-    def IntermediatesIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20))
-        return o == 0
+def LogicalAndOptionsStart(builder):
+    builder.StartObject(0)

-    # Operator
-    def LargeCustomOptionsOffset(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
-        return 0
+def LogicalAndOptionsEnd(builder):
+    return builder.EndObject()

-    # Operator
-    def LargeCustomOptionsSize(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos)
-        return 0

-def OperatorStart(builder): builder.StartObject(11)
-def Start(builder):
-    return OperatorStart(builder)
-def OperatorAddOpcodeIndex(builder, opcodeIndex): builder.PrependUint32Slot(0, opcodeIndex, 0)
-def AddOpcodeIndex(builder, opcodeIndex):
-    return OperatorAddOpcodeIndex(builder, opcodeIndex)
-def OperatorAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0)
-def AddInputs(builder, inputs):
-    return OperatorAddInputs(builder, inputs)
-def OperatorStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4)
-def StartInputsVector(builder, numElems):
-    return OperatorStartInputsVector(builder, numElems)
-def OperatorAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0)
-def AddOutputs(builder, outputs):
-    return OperatorAddOutputs(builder, outputs)
-def OperatorStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4)
-def StartOutputsVector(builder, numElems):
-    return OperatorStartOutputsVector(builder, numElems)
-def OperatorAddBuiltinOptionsType(builder, builtinOptionsType): builder.PrependUint8Slot(3, builtinOptionsType, 0)
-def AddBuiltinOptionsType(builder, builtinOptionsType):
-    return OperatorAddBuiltinOptionsType(builder, builtinOptionsType)
-def OperatorAddBuiltinOptions(builder, builtinOptions): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(builtinOptions), 0)
-def AddBuiltinOptions(builder, builtinOptions):
-    return OperatorAddBuiltinOptions(builder, builtinOptions)
-def OperatorAddCustomOptions(builder, customOptions): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(customOptions), 0)
-def AddCustomOptions(builder, customOptions):
-    return OperatorAddCustomOptions(builder, customOptions)
-def OperatorStartCustomOptionsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
-def StartCustomOptionsVector(builder, numElems):
-    return OperatorStartCustomOptionsVector(builder, numElems)
-def OperatorAddCustomOptionsFormat(builder, customOptionsFormat): builder.PrependInt8Slot(6, customOptionsFormat, 0)
-def AddCustomOptionsFormat(builder, customOptionsFormat):
-    return OperatorAddCustomOptionsFormat(builder, customOptionsFormat)
-def OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(mutatingVariableInputs), 0)
-def AddMutatingVariableInputs(builder, mutatingVariableInputs):
-    return OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs)
-def OperatorStartMutatingVariableInputsVector(builder, numElems): return builder.StartVector(1, numElems, 1)
-def StartMutatingVariableInputsVector(builder, numElems):
-    return OperatorStartMutatingVariableInputsVector(builder, numElems)
-def OperatorAddIntermediates(builder, intermediates): builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(intermediates), 0)
-def AddIntermediates(builder, intermediates):
-    return OperatorAddIntermediates(builder, intermediates)
-def OperatorStartIntermediatesVector(builder, numElems): return builder.StartVector(4, numElems, 4)
-def StartIntermediatesVector(builder, numElems):
-    return OperatorStartIntermediatesVector(builder, numElems)
-def OperatorAddLargeCustomOptionsOffset(builder, largeCustomOptionsOffset): builder.PrependUint64Slot(9, largeCustomOptionsOffset, 0)
-def AddLargeCustomOptionsOffset(builder, largeCustomOptionsOffset):
-    return OperatorAddLargeCustomOptionsOffset(builder, largeCustomOptionsOffset)
-def OperatorAddLargeCustomOptionsSize(builder, largeCustomOptionsSize): builder.PrependUint64Slot(10, largeCustomOptionsSize, 0)
-def AddLargeCustomOptionsSize(builder, largeCustomOptionsSize):
-    return OperatorAddLargeCustomOptionsSize(builder, largeCustomOptionsSize)
-def OperatorEnd(builder): return builder.EndObject()
-def End(builder):
-    return OperatorEnd(builder)
-try:
-    from typing import List, Union
-except:
-    pass
-class OperatorT(object):
+class LogicalAndOptionsT(object):

-    # OperatorT
+    # LogicalAndOptionsT
    def __init__(self):
-        self.opcodeIndex = 0  # type: int
-        self.inputs = None  # type: List[int]
-        self.outputs = None  # type: List[int]
-        self.builtinOptionsType = 0  # type: int
-        self.builtinOptions = None  # type: Union[None, Conv2DOptionsT, DepthwiseConv2DOptionsT, ConcatEmbeddingsOptionsT, LSHProjectionOptionsT, Pool2DOptionsT, SVDFOptionsT, RNNOptionsT, FullyConnectedOptionsT, SoftmaxOptionsT, ConcatenationOptionsT, AddOptionsT, L2NormOptionsT, LocalResponseNormalizationOptionsT, LSTMOptionsT, ResizeBilinearOptionsT, CallOptionsT, ReshapeOptionsT, SkipGramOptionsT, SpaceToDepthOptionsT, EmbeddingLookupSparseOptionsT, MulOptionsT, PadOptionsT, GatherOptionsT, BatchToSpaceNDOptionsT, SpaceToBatchNDOptionsT, TransposeOptionsT, ReducerOptionsT, SubOptionsT, DivOptionsT, SqueezeOptionsT, SequenceRNNOptionsT, StridedSliceOptionsT, ExpOptionsT, TopKV2OptionsT, SplitOptionsT, LogSoftmaxOptionsT, CastOptionsT, DequantizeOptionsT, MaximumMinimumOptionsT, ArgMaxOptionsT, LessOptionsT, NegOptionsT, PadV2OptionsT, GreaterOptionsT, GreaterEqualOptionsT, LessEqualOptionsT, SelectOptionsT, SliceOptionsT, TransposeConvOptionsT, SparseToDenseOptionsT, TileOptionsT, ExpandDimsOptionsT, EqualOptionsT, NotEqualOptionsT, ShapeOptionsT, PowOptionsT, ArgMinOptionsT, FakeQuantOptionsT, PackOptionsT, LogicalOrOptionsT, OneHotOptionsT, LogicalAndOptionsT, LogicalNotOptionsT, UnpackOptionsT, FloorDivOptionsT, SquareOptionsT, ZerosLikeOptionsT, FillOptionsT, BidirectionalSequenceLSTMOptionsT, BidirectionalSequenceRNNOptionsT, UnidirectionalSequenceLSTMOptionsT, FloorModOptionsT, RangeOptionsT, ResizeNearestNeighborOptionsT, LeakyReluOptionsT, SquaredDifferenceOptionsT, MirrorPadOptionsT, AbsOptionsT, SplitVOptionsT, UniqueOptionsT, ReverseV2OptionsT, AddNOptionsT, GatherNdOptionsT, CosOptionsT, WhereOptionsT, RankOptionsT, ReverseSequenceOptionsT, MatrixDiagOptionsT, QuantizeOptionsT, MatrixSetDiagOptionsT, HardSwishOptionsT, IfOptionsT, WhileOptionsT, DepthToSpaceOptionsT, NonMaxSuppressionV4OptionsT, NonMaxSuppressionV5OptionsT, ScatterNdOptionsT, SelectV2OptionsT, DensifyOptionsT, SegmentSumOptionsT, BatchMatMulOptionsT, CumsumOptionsT, CallOnceOptionsT, BroadcastToOptionsT, Rfft2dOptionsT, Conv3DOptionsT, HashtableOptionsT, HashtableFindOptionsT, HashtableImportOptionsT, HashtableSizeOptionsT, VarHandleOptionsT, ReadVariableOptionsT, AssignVariableOptionsT, RandomOptionsT, BucketizeOptionsT, GeluOptionsT, DynamicUpdateSliceOptionsT, UnsortedSegmentProdOptionsT, UnsortedSegmentMaxOptionsT, UnsortedSegmentMinOptionsT, UnsortedSegmentSumOptionsT, ATan2OptionsT, SignOptionsT, BitcastOptionsT, BitwiseXorOptionsT, RightShiftOptionsT]
-        self.customOptions = None  # type: List[int]
-        self.customOptionsFormat = 0  # type: int
-        self.mutatingVariableInputs = None  # type: List[bool]
-        self.intermediates = None  # type: List[int]
-        self.largeCustomOptionsOffset = 0  # type: int
-        self.largeCustomOptionsSize = 0  # type: int
+        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        operator = Operator()
-        operator.Init(buf, pos)
-        return cls.InitFromObj(operator)
+        logicalAndOptions = LogicalAndOptions()
+        logicalAndOptions.Init(buf, pos)
+        return cls.InitFromObj(logicalAndOptions)

    @classmethod
-    def InitFromObj(cls, operator):
-        x = OperatorT()
-        x._UnPack(operator)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, logicalAndOptions):
+        x = LogicalAndOptionsT()
+        x._UnPack(logicalAndOptions)
        return x

-    # OperatorT
-    def _UnPack(self, operator):
-        if operator is None:
+    # LogicalAndOptionsT
+    def _UnPack(self, logicalAndOptions):
+        if logicalAndOptions is None:
            return
-        self.opcodeIndex = operator.OpcodeIndex()
-        if not operator.InputsIsNone():
-            if np is None:
-                self.inputs = []
-                for i in range(operator.InputsLength()):
-                    self.inputs.append(operator.Inputs(i))
-            else:
-                self.inputs = operator.InputsAsNumpy()
-        if not operator.OutputsIsNone():
-            if np is None:
-                self.outputs = []
-                for i in range(operator.OutputsLength()):
-                    self.outputs.append(operator.Outputs(i))
-            else:
-                self.outputs = operator.OutputsAsNumpy()
-        self.builtinOptionsType = operator.BuiltinOptionsType()
-        self.builtinOptions = BuiltinOptionsCreator(self.builtinOptionsType, operator.BuiltinOptions())
-        if not operator.CustomOptionsIsNone():
-            if np is None:
-                self.customOptions = []
-                for i in range(operator.CustomOptionsLength()):
-                    self.customOptions.append(operator.CustomOptions(i))
-            else:
-                self.customOptions = operator.CustomOptionsAsNumpy()
-        self.customOptionsFormat = operator.CustomOptionsFormat()
-        if not operator.MutatingVariableInputsIsNone():
-            if np is None:
-                self.mutatingVariableInputs = []
-                for i in range(operator.MutatingVariableInputsLength()):
-                    self.mutatingVariableInputs.append(operator.MutatingVariableInputs(i))
-            else:
-                self.mutatingVariableInputs = operator.MutatingVariableInputsAsNumpy()
-        if not operator.IntermediatesIsNone():
-            if np is None:
-                self.intermediates = []
-                for i in range(operator.IntermediatesLength()):
-                    self.intermediates.append(operator.Intermediates(i))
-            else:
-                self.intermediates = operator.IntermediatesAsNumpy()
-        self.largeCustomOptionsOffset = operator.LargeCustomOptionsOffset()
-        self.largeCustomOptionsSize = operator.LargeCustomOptionsSize()

-    # OperatorT
+    # LogicalAndOptionsT
    def Pack(self, builder):
-        if self.inputs is not None:
-            if np is not None and type(self.inputs) is np.ndarray:
-                inputs = builder.CreateNumpyVector(self.inputs)
-            else:
-                OperatorStartInputsVector(builder, len(self.inputs))
-                for i in reversed(range(len(self.inputs))):
-                    builder.PrependInt32(self.inputs[i])
-                inputs = builder.EndVector()
-        if self.outputs is not None:
-            if np is not None and type(self.outputs) is np.ndarray:
-                outputs = builder.CreateNumpyVector(self.outputs)
-            else:
-                OperatorStartOutputsVector(builder, len(self.outputs))
-                for i in reversed(range(len(self.outputs))):
-                    builder.PrependInt32(self.outputs[i])
-                outputs = builder.EndVector()
-        if self.builtinOptions is not None:
-            builtinOptions = self.builtinOptions.Pack(builder)
-        if self.customOptions is not None:
-            if np is not None and type(self.customOptions) is np.ndarray:
-                customOptions = builder.CreateNumpyVector(self.customOptions)
-            else:
-                OperatorStartCustomOptionsVector(builder, len(self.customOptions))
-                for i in reversed(range(len(self.customOptions))):
-                    builder.PrependUint8(self.customOptions[i])
-                customOptions = builder.EndVector()
-        if self.mutatingVariableInputs is not None:
-            if np is not None and type(self.mutatingVariableInputs) is np.ndarray:
-                mutatingVariableInputs = builder.CreateNumpyVector(self.mutatingVariableInputs)
-            else:
-                OperatorStartMutatingVariableInputsVector(builder, len(self.mutatingVariableInputs))
-                for i in reversed(range(len(self.mutatingVariableInputs))):
-                    builder.PrependBool(self.mutatingVariableInputs[i])
-                mutatingVariableInputs = builder.EndVector()
-        if self.intermediates is not None:
-            if np is not None and type(self.intermediates) is np.ndarray:
-                intermediates = builder.CreateNumpyVector(self.intermediates)
-            else:
-                OperatorStartIntermediatesVector(builder, len(self.intermediates))
-                for i in reversed(range(len(self.intermediates))):
-                    builder.PrependInt32(self.intermediates[i])
-                intermediates = builder.EndVector()
-        OperatorStart(builder)
-        OperatorAddOpcodeIndex(builder, self.opcodeIndex)
-        if self.inputs is not None:
-            OperatorAddInputs(builder, inputs)
-        if self.outputs is not None:
-            OperatorAddOutputs(builder, outputs)
-        OperatorAddBuiltinOptionsType(builder, self.builtinOptionsType)
-        if self.builtinOptions is not None:
-            OperatorAddBuiltinOptions(builder, builtinOptions)
-        if self.customOptions is not None:
-            OperatorAddCustomOptions(builder, customOptions)
-        OperatorAddCustomOptionsFormat(builder, self.customOptionsFormat)
-        if self.mutatingVariableInputs is not None:
-            OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs)
-        if self.intermediates is not None:
-            OperatorAddIntermediates(builder, intermediates)
-        OperatorAddLargeCustomOptionsOffset(builder, self.largeCustomOptionsOffset)
-        OperatorAddLargeCustomOptionsSize(builder, self.largeCustomOptionsSize)
-        operator = OperatorEnd(builder)
-        return operator
-# automatically generated by the FlatBuffers compiler, do not modify
+        LogicalAndOptionsStart(builder)
+        logicalAndOptions = LogicalAndOptionsEnd(builder)
+        return logicalAndOptions
+
+
+class LogicalNotOptions(object):
+    __slots__ = ['_tab']
+
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = LogicalNotOptions()
+        x.Init(buf, n + offset)
+        return x
+
+    @classmethod
+    def GetRootAsLogicalNotOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def LogicalNotOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+
+    # LogicalNotOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)
+
+def LogicalNotOptionsStart(builder):
+    builder.StartObject(0)
+
+def LogicalNotOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class LogicalNotOptionsT(object):
+
+    # LogicalNotOptionsT
+    def __init__(self):
+        pass

-# namespace: tflite
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        logicalNotOptions = LogicalNotOptions()
+        logicalNotOptions.Init(buf, pos)
+        return cls.InitFromObj(logicalNotOptions)

-from flatbuffers.compat import import_numpy
-np = import_numpy()
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)

-class OperatorCode(object):
+    @classmethod
+    def InitFromObj(cls, logicalNotOptions):
+        x = LogicalNotOptionsT()
+        x._UnPack(logicalNotOptions)
+        return x
+
+    # LogicalNotOptionsT
+    def _UnPack(self, logicalNotOptions):
+        if logicalNotOptions is None:
+            return
+
+    # LogicalNotOptionsT
+    def Pack(self, builder):
+        LogicalNotOptionsStart(builder)
+        logicalNotOptions = LogicalNotOptionsEnd(builder)
+        return logicalNotOptions
+
+
+class UnpackOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = OperatorCode()
+        x = UnpackOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsOperatorCode(cls, buf, offset=0):
+    def GetRootAsUnpackOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def OperatorCodeBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def UnpackOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # OperatorCode
+    # UnpackOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-    # OperatorCode
-    def DeprecatedBuiltinCode(self):
+    # UnpackOptions
+    def Num(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
+            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

-    # OperatorCode
-    def CustomCode(self):
+    # UnpackOptions
+    def Axis(self):
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.String(o + self._tab.Pos)
-        return None
-
-    # OperatorCode
-    def Version(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 1
-
-    # OperatorCode
-    def BuiltinCode(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0

-def OperatorCodeStart(builder): builder.StartObject(4)
-def Start(builder):
-    return OperatorCodeStart(builder)
-def OperatorCodeAddDeprecatedBuiltinCode(builder, deprecatedBuiltinCode): builder.PrependInt8Slot(0, deprecatedBuiltinCode, 0)
-def AddDeprecatedBuiltinCode(builder, deprecatedBuiltinCode):
-    return OperatorCodeAddDeprecatedBuiltinCode(builder, deprecatedBuiltinCode)
-def OperatorCodeAddCustomCode(builder, customCode): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(customCode), 0)
-def AddCustomCode(builder, customCode):
-    return OperatorCodeAddCustomCode(builder, customCode)
-def OperatorCodeAddVersion(builder, version): builder.PrependInt32Slot(2, version, 1)
-def AddVersion(builder, version):
-    return OperatorCodeAddVersion(builder, version)
-def OperatorCodeAddBuiltinCode(builder, builtinCode): builder.PrependInt32Slot(3, builtinCode, 0)
-def AddBuiltinCode(builder, builtinCode):
-    return OperatorCodeAddBuiltinCode(builder, builtinCode)
-def OperatorCodeEnd(builder): return builder.EndObject()
-def End(builder):
-    return OperatorCodeEnd(builder)
+def UnpackOptionsStart(builder):
+    builder.StartObject(2)

-class OperatorCodeT(object):
+def UnpackOptionsAddNum(builder, num):
+    builder.PrependInt32Slot(0, num, 0)

-    # OperatorCodeT
+def UnpackOptionsAddAxis(builder, axis):
+    builder.PrependInt32Slot(1, axis, 0)
+
+def UnpackOptionsEnd(builder):
+    return builder.EndObject()
+
+
+
+class UnpackOptionsT(object):
+
+    # UnpackOptionsT
    def __init__(self):
-        self.deprecatedBuiltinCode = 0  # type: int
-        self.customCode = None  # type: str
-        self.version = 1  # type: int
-        self.builtinCode = 0  # type: int
+        self.num = 0  # type: int
+        self.axis = 0  # type: int

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        operatorCode = OperatorCode()
-        operatorCode.Init(buf, pos)
-        return cls.InitFromObj(operatorCode)
+        unpackOptions = UnpackOptions()
+        unpackOptions.Init(buf, pos)
+        return cls.InitFromObj(unpackOptions)

    @classmethod
-    def InitFromObj(cls, operatorCode):
-        x = OperatorCodeT()
-        x._UnPack(operatorCode)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, unpackOptions):
+        x = UnpackOptionsT()
+        x._UnPack(unpackOptions)
        return x

-    # OperatorCodeT
-    def _UnPack(self, operatorCode):
-        if operatorCode is None:
+    # UnpackOptionsT
+    def _UnPack(self, unpackOptions):
+        if unpackOptions is None:
            return
-        self.deprecatedBuiltinCode = operatorCode.DeprecatedBuiltinCode()
-        self.customCode = operatorCode.CustomCode()
-        self.version = operatorCode.Version()
-        self.builtinCode = operatorCode.BuiltinCode()
+        self.num = unpackOptions.Num()
+        self.axis = unpackOptions.Axis()

-    # OperatorCodeT
+    # UnpackOptionsT
    def Pack(self, builder):
-        if self.customCode is not None:
-            customCode = builder.CreateString(self.customCode)
-        OperatorCodeStart(builder)
-        OperatorCodeAddDeprecatedBuiltinCode(builder, self.deprecatedBuiltinCode)
-        if self.customCode is not None:
-            OperatorCodeAddCustomCode(builder, customCode)
-        OperatorCodeAddVersion(builder, self.version)
-        OperatorCodeAddBuiltinCode(builder, self.builtinCode)
-        operatorCode = OperatorCodeEnd(builder)
-        return operatorCode
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        UnpackOptionsStart(builder)
+        UnpackOptionsAddNum(builder, self.num)
+        UnpackOptionsAddAxis(builder, self.axis)
+        unpackOptions = UnpackOptionsEnd(builder)
+        return unpackOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class PackOptions(object):
+class FloorDivOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = PackOptions()
+        x = FloorDivOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsPackOptions(cls, buf, offset=0):
+    def GetRootAsFloorDivOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def PackOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def FloorDivOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # PackOptions
+    # FloorDivOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-    # PackOptions
-    def ValuesCount(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+def FloorDivOptionsStart(builder):
+    builder.StartObject(0)

-    # PackOptions
-    def Axis(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+def FloorDivOptionsEnd(builder):
+    return builder.EndObject()

-def PackOptionsStart(builder): builder.StartObject(2)
-def Start(builder):
-    return PackOptionsStart(builder)
-def PackOptionsAddValuesCount(builder, valuesCount): builder.PrependInt32Slot(0, valuesCount, 0)
-def AddValuesCount(builder, valuesCount):
-    return PackOptionsAddValuesCount(builder, valuesCount)
-def PackOptionsAddAxis(builder, axis): builder.PrependInt32Slot(1, axis, 0)
-def AddAxis(builder, axis):
-    return PackOptionsAddAxis(builder, axis)
-def PackOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return PackOptionsEnd(builder)

-class PackOptionsT(object):

-    # PackOptionsT
+class FloorDivOptionsT(object):
+
+    # FloorDivOptionsT
    def __init__(self):
-        self.valuesCount = 0  # type: int
-        self.axis = 0  # type: int
+        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        packOptions = PackOptions()
-        packOptions.Init(buf, pos)
-        return cls.InitFromObj(packOptions)
+        floorDivOptions = FloorDivOptions()
+        floorDivOptions.Init(buf, pos)
+        return cls.InitFromObj(floorDivOptions)

    @classmethod
-    def InitFromObj(cls, packOptions):
-        x = PackOptionsT()
-        x._UnPack(packOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, floorDivOptions):
+        x = FloorDivOptionsT()
+        x._UnPack(floorDivOptions)
        return x

-    # PackOptionsT
-    def _UnPack(self, packOptions):
-        if packOptions is None:
+    # FloorDivOptionsT
+    def _UnPack(self, floorDivOptions):
+        if floorDivOptions is None:
            return
-        self.valuesCount = packOptions.ValuesCount()
-        self.axis = packOptions.Axis()

-    # PackOptionsT
+    # FloorDivOptionsT
    def Pack(self, builder):
-        PackOptionsStart(builder)
-        PackOptionsAddValuesCount(builder, self.valuesCount)
-        PackOptionsAddAxis(builder, self.axis)
-        packOptions = PackOptionsEnd(builder)
-        return packOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        FloorDivOptionsStart(builder)
+        floorDivOptions = FloorDivOptionsEnd(builder)
+        return floorDivOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class PadOptions(object):
+class SquareOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = PadOptions()
+        x = SquareOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsPadOptions(cls, buf, offset=0):
+    def GetRootAsSquareOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def PadOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def SquareOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # PadOptions
+    # SquareOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-def PadOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return PadOptionsStart(builder)
-def PadOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return PadOptionsEnd(builder)
+def SquareOptionsStart(builder):
+    builder.StartObject(0)

-class PadOptionsT(object):
+def SquareOptionsEnd(builder):
+    return builder.EndObject()

-    # PadOptionsT
+
+
+class SquareOptionsT(object):
+
+    # SquareOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        padOptions = PadOptions()
-        padOptions.Init(buf, pos)
-        return cls.InitFromObj(padOptions)
+        squareOptions = SquareOptions()
+        squareOptions.Init(buf, pos)
+        return cls.InitFromObj(squareOptions)

    @classmethod
-    def InitFromObj(cls, padOptions):
-        x = PadOptionsT()
-        x._UnPack(padOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, squareOptions):
+        x = SquareOptionsT()
+        x._UnPack(squareOptions)
        return x

-    # PadOptionsT
-    def _UnPack(self, padOptions):
-        if padOptions is None:
+    # SquareOptionsT
+    def _UnPack(self, squareOptions):
+        if squareOptions is None:
            return

-    # PadOptionsT
+    # SquareOptionsT
    def Pack(self, builder):
-        PadOptionsStart(builder)
-        padOptions = PadOptionsEnd(builder)
-        return padOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        SquareOptionsStart(builder)
+        squareOptions = SquareOptionsEnd(builder)
+        return squareOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class PadV2Options(object):
+class ZerosLikeOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = PadV2Options()
+        x = ZerosLikeOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsPadV2Options(cls, buf, offset=0):
+    def GetRootAsZerosLikeOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def PadV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def ZerosLikeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # PadV2Options
+    # ZerosLikeOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-def PadV2OptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return PadV2OptionsStart(builder)
-def PadV2OptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return PadV2OptionsEnd(builder)
+def ZerosLikeOptionsStart(builder):
+    builder.StartObject(0)
+
+def ZerosLikeOptionsEnd(builder):
+    return builder.EndObject()
+

-class PadV2OptionsT(object):

-    # PadV2OptionsT
+class ZerosLikeOptionsT(object):
+
+    # ZerosLikeOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        padV2options = PadV2Options()
-        padV2options.Init(buf, pos)
-        return cls.InitFromObj(padV2options)
+        zerosLikeOptions = ZerosLikeOptions()
+        zerosLikeOptions.Init(buf, pos)
+        return cls.InitFromObj(zerosLikeOptions)

    @classmethod
-    def InitFromObj(cls, padV2options):
-        x = PadV2OptionsT()
-        x._UnPack(padV2options)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, zerosLikeOptions):
+        x = ZerosLikeOptionsT()
+        x._UnPack(zerosLikeOptions)
        return x

-    # PadV2OptionsT
-    def _UnPack(self, padV2options):
-        if padV2options is None:
+    # ZerosLikeOptionsT
+    def _UnPack(self, zerosLikeOptions):
+        if zerosLikeOptions is None:
            return

-    # PadV2OptionsT
+    # ZerosLikeOptionsT
    def Pack(self, builder):
-        PadV2OptionsStart(builder)
-        padV2options = PadV2OptionsEnd(builder)
-        return padV2options
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
-
-class Padding(object):
-    SAME = 0
-    VALID = 1
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        ZerosLikeOptionsStart(builder)
+        zerosLikeOptions = ZerosLikeOptionsEnd(builder)
+        return zerosLikeOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class Pool2DOptions(object):
+class FillOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = Pool2DOptions()
+        x = FillOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsPool2DOptions(cls, buf, offset=0):
+    def GetRootAsFillOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def Pool2DOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def FillOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # Pool2DOptions
+    # FillOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-    # Pool2DOptions
-    def Padding(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0
-
-    # Pool2DOptions
-    def StrideW(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
-
-    # Pool2DOptions
-    def StrideH(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
-
-    # Pool2DOptions
-    def FilterWidth(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+def FillOptionsStart(builder):
+    builder.StartObject(0)

-    # Pool2DOptions
-    def FilterHeight(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0
+def FillOptionsEnd(builder):
+    return builder.EndObject()

-    # Pool2DOptions
-    def FusedActivationFunction(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
-        return 0

-def Pool2DOptionsStart(builder): builder.StartObject(6)
-def Start(builder):
-    return Pool2DOptionsStart(builder)
-def Pool2DOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0)
-def AddPadding(builder, padding):
-    return Pool2DOptionsAddPadding(builder, padding)
-def Pool2DOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0)
-def AddStrideW(builder, strideW):
-    return Pool2DOptionsAddStrideW(builder, strideW)
-def Pool2DOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0)
-def AddStrideH(builder, strideH):
-    return Pool2DOptionsAddStrideH(builder, strideH)
-def Pool2DOptionsAddFilterWidth(builder, filterWidth): builder.PrependInt32Slot(3, filterWidth, 0)
-def AddFilterWidth(builder, filterWidth):
-    return Pool2DOptionsAddFilterWidth(builder, filterWidth)
-def Pool2DOptionsAddFilterHeight(builder, filterHeight): builder.PrependInt32Slot(4, filterHeight, 0)
-def AddFilterHeight(builder, filterHeight):
-    return Pool2DOptionsAddFilterHeight(builder, filterHeight)
-def Pool2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(5, fusedActivationFunction, 0)
-def AddFusedActivationFunction(builder, fusedActivationFunction):
-    return Pool2DOptionsAddFusedActivationFunction(builder, fusedActivationFunction)
-def Pool2DOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return Pool2DOptionsEnd(builder)

-class Pool2DOptionsT(object):
+class FillOptionsT(object):

-    # Pool2DOptionsT
+    # FillOptionsT
    def __init__(self):
-        self.padding = 0  # type: int
-        self.strideW = 0  # type: int
-        self.strideH = 0  # type: int
-        self.filterWidth = 0  # type: int
-        self.filterHeight = 0  # type: int
-        self.fusedActivationFunction = 0  # type: int
+        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        pool2doptions = Pool2DOptions()
-        pool2doptions.Init(buf, pos)
-        return cls.InitFromObj(pool2doptions)
+        fillOptions = FillOptions()
+        fillOptions.Init(buf, pos)
+        return cls.InitFromObj(fillOptions)

    @classmethod
-    def InitFromObj(cls, pool2doptions):
-        x = Pool2DOptionsT()
-        x._UnPack(pool2doptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, fillOptions):
+        x = FillOptionsT()
+        x._UnPack(fillOptions)
        return x

-    # Pool2DOptionsT
-    def _UnPack(self, pool2doptions):
-        if pool2doptions is None:
+    # FillOptionsT
+    def _UnPack(self, fillOptions):
+        if fillOptions is None:
            return
-        self.padding = pool2doptions.Padding()
-        self.strideW = pool2doptions.StrideW()
-        self.strideH = pool2doptions.StrideH()
-        self.filterWidth = pool2doptions.FilterWidth()
-        self.filterHeight = pool2doptions.FilterHeight()
-        self.fusedActivationFunction = pool2doptions.FusedActivationFunction()

-    # Pool2DOptionsT
+    # FillOptionsT
    def Pack(self, builder):
-        Pool2DOptionsStart(builder)
-        Pool2DOptionsAddPadding(builder, self.padding)
-        Pool2DOptionsAddStrideW(builder, self.strideW)
-        Pool2DOptionsAddStrideH(builder, self.strideH)
-        Pool2DOptionsAddFilterWidth(builder, self.filterWidth)
-        Pool2DOptionsAddFilterHeight(builder, self.filterHeight)
-        Pool2DOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction)
-        pool2doptions = Pool2DOptionsEnd(builder)
-        return pool2doptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        FillOptionsStart(builder)
+        fillOptions = FillOptionsEnd(builder)
+        return fillOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class PowOptions(object):
+class FloorModOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = PowOptions()
+        x = FloorModOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsPowOptions(cls, buf, offset=0):
+    def GetRootAsFloorModOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def PowOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def FloorModOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # PowOptions
+    # FloorModOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-def PowOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return PowOptionsStart(builder)
-def PowOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return PowOptionsEnd(builder)
+def FloorModOptionsStart(builder):
+    builder.StartObject(0)

-class PowOptionsT(object):
+def FloorModOptionsEnd(builder):
+    return builder.EndObject()

-    # PowOptionsT
+
+
+class FloorModOptionsT(object):
+
+    # FloorModOptionsT
    def __init__(self):
        pass

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        powOptions = PowOptions()
-        powOptions.Init(buf, pos)
-        return cls.InitFromObj(powOptions)
+        floorModOptions = FloorModOptions()
+        floorModOptions.Init(buf, pos)
+        return cls.InitFromObj(floorModOptions)

    @classmethod
-    def InitFromObj(cls, powOptions):
-        x = PowOptionsT()
-        x._UnPack(powOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, floorModOptions):
+        x = FloorModOptionsT()
+        x._UnPack(floorModOptions)
        return x

-    # PowOptionsT
-    def _UnPack(self, powOptions):
-        if powOptions is None:
+    # FloorModOptionsT
+    def _UnPack(self, floorModOptions):
+        if floorModOptions is None:
            return

-    # PowOptionsT
+    # FloorModOptionsT
    def Pack(self, builder):
-        PowOptionsStart(builder)
-        powOptions = PowOptionsEnd(builder)
-        return powOptions
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
-
-class QuantizationDetails(object):
-    NONE = 0
-    CustomQuantization = 1
-
-def QuantizationDetailsCreator(unionType, table):
-    from flatbuffers.table import Table
-    if not isinstance(table, Table):
-        return None
-    if unionType == QuantizationDetails().CustomQuantization:
-        return CustomQuantizationT.InitFromBuf(table.Bytes, table.Pos)
-    return None
-# automatically generated by the FlatBuffers compiler, do not modify
-
-# namespace: tflite
+        FloorModOptionsStart(builder)
+        floorModOptions = FloorModOptionsEnd(builder)
+        return floorModOptions

-from flatbuffers.compat import import_numpy
-np = import_numpy()

-class QuantizationParameters(object):
+class RangeOptions(object):
    __slots__ = ['_tab']

    @classmethod
    def GetRootAs(cls, buf, offset=0):
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
-        x = QuantizationParameters()
+        x = RangeOptions()
        x.Init(buf, n + offset)
        return x

    @classmethod
-    def GetRootAsQuantizationParameters(cls, buf, offset=0):
+    def GetRootAsRangeOptions(cls, buf, offset=0):
        """This method is deprecated. Please switch to GetRootAs."""
        return cls.GetRootAs(buf, offset)
    @classmethod
-    def QuantizationParametersBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+    def RangeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # QuantizationParameters
+    # RangeOptions
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)

-    # QuantizationParameters
-    def Min(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
-        return 0
+def RangeOptionsStart(builder):
+    builder.StartObject(0)

-    # QuantizationParameters
-    def MinAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o)
-        return 0
+def RangeOptionsEnd(builder):
+    return builder.EndObject()

-    # QuantizationParameters
-    def MinLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0

-    # QuantizationParameters
-    def MinIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
-        return o == 0

-    # QuantizationParameters
-    def Max(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
-        return 0
+class RangeOptionsT(object):

-    # QuantizationParameters
-    def MaxAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o)
-        return 0
+    # RangeOptionsT
+    def __init__(self):
+        pass

-    # QuantizationParameters
-    def MaxLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0
+    @classmethod
+    def InitFromBuf(cls, buf, pos):
+        rangeOptions = RangeOptions()
+        rangeOptions.Init(buf, pos)
+        return cls.InitFromObj(rangeOptions)
+
+    @classmethod
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, rangeOptions):
+        x = RangeOptionsT()
+        x._UnPack(rangeOptions)
+        return x

-    # QuantizationParameters
-    def MaxIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
-        return o == 0
+    # RangeOptionsT
+    def _UnPack(self, rangeOptions):
+        if rangeOptions is None:
+            return

-    # QuantizationParameters
-    def Scale(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
-        return 0
+    # RangeOptionsT
+    def Pack(self, builder):
+        RangeOptionsStart(builder)
+        rangeOptions = RangeOptionsEnd(builder)
+        return rangeOptions

-    # QuantizationParameters
-    def ScaleAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o)
-        return 0

-    # QuantizationParameters
-    def ScaleLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0
+class LeakyReluOptions(object):
+    __slots__ = ['_tab']

-    # QuantizationParameters
-    def ScaleIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
-        return o == 0
+    @classmethod
+    def GetRootAs(cls, buf, offset=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
+        x = LeakyReluOptions()
+        x.Init(buf, n + offset)
+        return x

-    # QuantizationParameters
-    def ZeroPoint(self, j):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            a = self._tab.Vector(o)
-            return self._tab.Get(flatbuffers.number_types.Int64Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 8))
-        return 0
+    @classmethod
+    def GetRootAsLeakyReluOptions(cls, buf, offset=0):
+        """This method is deprecated. Please switch to GetRootAs."""
+        return cls.GetRootAs(buf, offset)
+    @classmethod
+    def LeakyReluOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
+        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)

-    # QuantizationParameters
-    def ZeroPointAsNumpy(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        if o != 0:
-            return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int64Flags, o)
-        return 0
+    # LeakyReluOptions
+    def Init(self, buf, pos):
+        self._tab = flatbuffers.table.Table(buf, pos)

-    # QuantizationParameters
-    def ZeroPointLength(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
+    # LeakyReluOptions
+    def Alpha(self):
+        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
-            return self._tab.VectorLen(o)
-        return 0
+            return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos)
+        return 0.0

-    # QuantizationParameters
-    def ZeroPointIsNone(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
-        return o == 0
+def LeakyReluOptionsStart(builder):
+    builder.StartObject(1)

-    # QuantizationParameters
-    def DetailsType(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos)
-        return 0
+def LeakyReluOptionsAddAlpha(builder, alpha):
+    builder.PrependFloat32Slot(0, alpha, 0.0)

-    # QuantizationParameters
-    def Details(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14))
-        if o != 0:
-            from flatbuffers.table import Table
-            obj = Table(bytearray(), 0)
-            self._tab.Union(obj, o)
-            return obj
-        return None
+def LeakyReluOptionsEnd(builder):
+    return builder.EndObject()

-    # QuantizationParameters
-    def QuantizedDimension(self):
-        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16))
-        if o != 0:
-            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
-        return 0

-def QuantizationParametersStart(builder): builder.StartObject(7)
-def Start(builder):
-    return QuantizationParametersStart(builder)
-def QuantizationParametersAddMin(builder, min): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(min), 0)
-def AddMin(builder, min):
-    return QuantizationParametersAddMin(builder, min)
-def QuantizationParametersStartMinVector(builder, numElems): return builder.StartVector(4, numElems, 4)
-def StartMinVector(builder, numElems):
-    return QuantizationParametersStartMinVector(builder, numElems)
-def QuantizationParametersAddMax(builder, max): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(max), 0)
-def AddMax(builder, max):
-    return QuantizationParametersAddMax(builder, max)
-def QuantizationParametersStartMaxVector(builder, numElems): return builder.StartVector(4, numElems, 4)
-def StartMaxVector(builder, numElems):
-    return QuantizationParametersStartMaxVector(builder, numElems)
-def QuantizationParametersAddScale(builder, scale): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(scale), 0)
-def AddScale(builder, scale):
-    return QuantizationParametersAddScale(builder, scale)
-def QuantizationParametersStartScaleVector(builder, numElems): return builder.StartVector(4, numElems, 4)
-def StartScaleVector(builder, numElems):
-    return QuantizationParametersStartScaleVector(builder, numElems)
-def QuantizationParametersAddZeroPoint(builder, zeroPoint): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(zeroPoint), 0)
-def AddZeroPoint(builder, zeroPoint):
-    return QuantizationParametersAddZeroPoint(builder, zeroPoint)
-def QuantizationParametersStartZeroPointVector(builder, numElems): return builder.StartVector(8, numElems, 8)
-def StartZeroPointVector(builder, numElems):
-    return QuantizationParametersStartZeroPointVector(builder, numElems)
-def QuantizationParametersAddDetailsType(builder, detailsType): builder.PrependUint8Slot(4, detailsType, 0)
-def AddDetailsType(builder, detailsType):
-    return QuantizationParametersAddDetailsType(builder, detailsType)
-def QuantizationParametersAddDetails(builder, details): builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(details), 0)
-def AddDetails(builder, details):
-    return QuantizationParametersAddDetails(builder, details)
-def QuantizationParametersAddQuantizedDimension(builder, quantizedDimension): builder.PrependInt32Slot(6, quantizedDimension, 0)
-def AddQuantizedDimension(builder, quantizedDimension):
-    return QuantizationParametersAddQuantizedDimension(builder, quantizedDimension)
-def QuantizationParametersEnd(builder): return builder.EndObject()
-def End(builder):
-    return QuantizationParametersEnd(builder)
-try:
-    from typing import List, Union
-except:
-    pass
-class QuantizationParametersT(object):
+class LeakyReluOptionsT(object):

-    # QuantizationParametersT
+    # LeakyReluOptionsT
    def __init__(self):
-        self.min = None  # type: List[float]
-        self.max = None  # type: List[float]
-        self.scale = None  # type: List[float]
-        self.zeroPoint = None  # type: List[int]
-        self.detailsType = 0  # type: int
-        self.details = None  # type: Union[None, CustomQuantizationT]
-        self.quantizedDimension = 0  # type: int
+        self.alpha = 0.0  # type: float

    @classmethod
    def InitFromBuf(cls, buf, pos):
-        quantizationParameters = QuantizationParameters()
-        quantizationParameters.Init(buf, pos)
-        return cls.InitFromObj(quantizationParameters)
+        leakyReluOptions = LeakyReluOptions()
+        leakyReluOptions.Init(buf, pos)
+        return cls.InitFromObj(leakyReluOptions)

    @classmethod
-    def InitFromObj(cls, quantizationParameters):
-        x = QuantizationParametersT()
-        x._UnPack(quantizationParameters)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, leakyReluOptions):
+        x = LeakyReluOptionsT()
+        x._UnPack(leakyReluOptions)
        return x

-    # QuantizationParametersT
-    def _UnPack(self, quantizationParameters):
-        if quantizationParameters is None:
+    # LeakyReluOptionsT
+    def _UnPack(self, leakyReluOptions):
+        if leakyReluOptions is None:
            return
-        if not quantizationParameters.MinIsNone():
-            if np is None:
-                self.min = []
-                for i in range(quantizationParameters.MinLength()):
-                    self.min.append(quantizationParameters.Min(i))
-            else:
-                self.min = quantizationParameters.MinAsNumpy()
-        if not quantizationParameters.MaxIsNone():
-            if np is None:
-                self.max = []
-                for i in range(quantizationParameters.MaxLength()):
-                    self.max.append(quantizationParameters.Max(i))
-            else:
-                self.max = quantizationParameters.MaxAsNumpy()
-        if not quantizationParameters.ScaleIsNone():
-            if np is None:
-                self.scale = []
-                for i in range(quantizationParameters.ScaleLength()):
-                    self.scale.append(quantizationParameters.Scale(i))
-            else:
-                self.scale = quantizationParameters.ScaleAsNumpy()
-        if not quantizationParameters.ZeroPointIsNone():
-            if np is None:
-                self.zeroPoint = []
-                for i in range(quantizationParameters.ZeroPointLength()):
-                    self.zeroPoint.append(quantizationParameters.ZeroPoint(i))
-            else:
-                self.zeroPoint = quantizationParameters.ZeroPointAsNumpy()
-        self.detailsType = quantizationParameters.DetailsType()
-        self.details = QuantizationDetailsCreator(self.detailsType, quantizationParameters.Details())
-        self.quantizedDimension = quantizationParameters.QuantizedDimension()
+        self.alpha = leakyReluOptions.Alpha()

-    # QuantizationParametersT
+    # LeakyReluOptionsT
    def Pack(self, builder):
-        if self.min is not None:
-            if np is not None and type(self.min) is np.ndarray:
-                min = builder.CreateNumpyVector(self.min)
-            else:
-                QuantizationParametersStartMinVector(builder, len(self.min))
-                for i in reversed(range(len(self.min))):
-                    builder.PrependFloat32(self.min[i])
-                min = builder.EndVector()
-        if self.max is not None:
-            if np is not None and type(self.max) is np.ndarray:
-                max = builder.CreateNumpyVector(self.max)
-            else:
-                QuantizationParametersStartMaxVector(builder, len(self.max))
-                for i in reversed(range(len(self.max))):
-                    builder.PrependFloat32(self.max[i])
-                max = builder.EndVector()
-        if self.scale is not None:
-            if np is not None and type(self.scale) is np.ndarray:
-                scale = builder.CreateNumpyVector(self.scale)
-            else:
-                QuantizationParametersStartScaleVector(builder, len(self.scale))
-                for i in reversed(range(len(self.scale))):
-                    builder.PrependFloat32(self.scale[i])
-                scale = builder.EndVector()
-        if self.zeroPoint is not None:
-            if np is not None and type(self.zeroPoint) is np.ndarray:
-                zeroPoint = builder.CreateNumpyVector(self.zeroPoint)
-            else:
-                QuantizationParametersStartZeroPointVector(builder, len(self.zeroPoint))
-                for i in reversed(range(len(self.zeroPoint))):
-                    builder.PrependInt64(self.zeroPoint[i])
-                zeroPoint = builder.EndVector()
-        if self.details is not None:
-            details = self.details.Pack(builder)
-        QuantizationParametersStart(builder)
-        if self.min is not None:
-            QuantizationParametersAddMin(builder, min)
-        if self.max is not None:
-            QuantizationParametersAddMax(builder, max)
-        if self.scale is not None:
-            QuantizationParametersAddScale(builder, scale)
-        if self.zeroPoint is not None:
-            QuantizationParametersAddZeroPoint(builder, zeroPoint)
-        QuantizationParametersAddDetailsType(builder, self.detailsType)
-        if self.details is not None:
-            QuantizationParametersAddDetails(builder, details)
-        QuantizationParametersAddQuantizedDimension(builder, self.quantizedDimension)
-        quantizationParameters = 
QuantizationParametersEnd(builder) - return quantizationParameters -# automatically generated by the FlatBuffers compiler, do not modify + LeakyReluOptionsStart(builder) + LeakyReluOptionsAddAlpha(builder, self.alpha) + leakyReluOptions = LeakyReluOptionsEnd(builder) + return leakyReluOptions + + +class SquaredDifferenceOptions(object): + __slots__ = ['_tab'] + + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SquaredDifferenceOptions() + x.Init(buf, n + offset) + return x + + @classmethod + def GetRootAsSquaredDifferenceOptions(cls, buf, offset=0): + """This method is deprecated. Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SquaredDifferenceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) + + # SquaredDifferenceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SquaredDifferenceOptionsStart(builder): + builder.StartObject(0) + +def SquaredDifferenceOptionsEnd(builder): + return builder.EndObject() + -# namespace: tflite -from flatbuffers.compat import import_numpy -np = import_numpy() +class SquaredDifferenceOptionsT(object): -class QuantizeOptions(object): + # SquaredDifferenceOptionsT + def __init__(self): + pass + + @classmethod + def InitFromBuf(cls, buf, pos): + squaredDifferenceOptions = SquaredDifferenceOptions() + squaredDifferenceOptions.Init(buf, pos) + return cls.InitFromObj(squaredDifferenceOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, squaredDifferenceOptions): + x = SquaredDifferenceOptionsT() + x._UnPack(squaredDifferenceOptions) + return x + + # SquaredDifferenceOptionsT + def _UnPack(self, squaredDifferenceOptions): + if squaredDifferenceOptions is None: + return + + # SquaredDifferenceOptionsT + def Pack(self, builder): + SquaredDifferenceOptionsStart(builder) + squaredDifferenceOptions = SquaredDifferenceOptionsEnd(builder) + return squaredDifferenceOptions + + +class MirrorPadOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = QuantizeOptions() + x = MirrorPadOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsQuantizeOptions(cls, buf, offset=0): + def GetRootAsMirrorPadOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def QuantizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def MirrorPadOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # QuantizeOptions + # MirrorPadOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def QuantizeOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return QuantizeOptionsStart(builder) -def QuantizeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return QuantizeOptionsEnd(builder) + # MirrorPadOptions + def Mode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + +def MirrorPadOptionsStart(builder): + builder.StartObject(1) -class QuantizeOptionsT(object): +def MirrorPadOptionsAddMode(builder, mode): + builder.PrependInt8Slot(0, mode, 0) - # QuantizeOptionsT +def MirrorPadOptionsEnd(builder): + return builder.EndObject() + + + +class MirrorPadOptionsT(object): + + # MirrorPadOptionsT def __init__(self): - pass + self.mode = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - quantizeOptions = QuantizeOptions() - quantizeOptions.Init(buf, pos) - return cls.InitFromObj(quantizeOptions) + mirrorPadOptions = MirrorPadOptions() + mirrorPadOptions.Init(buf, pos) + return cls.InitFromObj(mirrorPadOptions) @classmethod - def InitFromObj(cls, quantizeOptions): - x = QuantizeOptionsT() - x._UnPack(quantizeOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, mirrorPadOptions): + x = MirrorPadOptionsT() + x._UnPack(mirrorPadOptions) return x - # QuantizeOptionsT - def _UnPack(self, quantizeOptions): - if quantizeOptions is None: + # MirrorPadOptionsT + def _UnPack(self, mirrorPadOptions): + if mirrorPadOptions is None: return + self.mode = mirrorPadOptions.Mode() - # QuantizeOptionsT + # MirrorPadOptionsT def Pack(self, builder): - QuantizeOptionsStart(builder) - quantizeOptions = QuantizeOptionsEnd(builder) - return quantizeOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + MirrorPadOptionsStart(builder) + MirrorPadOptionsAddMode(builder, self.mode) + mirrorPadOptions = MirrorPadOptionsEnd(builder) + return mirrorPadOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class RNNOptions(object): +class UniqueOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = RNNOptions() + x = UniqueOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsRNNOptions(cls, buf, offset=0): + def GetRootAsUniqueOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def RNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def UniqueOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # RNNOptions + # UniqueOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # RNNOptions - def FusedActivationFunction(self): + # UniqueOptions + def IdxOutType(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 + return 2 - # RNNOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def UniqueOptionsStart(builder): + builder.StartObject(1) -def RNNOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return RNNOptionsStart(builder) -def RNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return RNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def RNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(1, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return RNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def RNNOptionsEnd(builder): return builder.EndObject() -def End(builder): - return RNNOptionsEnd(builder) +def UniqueOptionsAddIdxOutType(builder, idxOutType): + builder.PrependInt8Slot(0, idxOutType, 2) -class RNNOptionsT(object): +def UniqueOptionsEnd(builder): + return builder.EndObject() - # RNNOptionsT + + +class UniqueOptionsT(object): + + # UniqueOptionsT def __init__(self): - self.fusedActivationFunction = 0 # type: int - self.asymmetricQuantizeInputs = False # type: bool + self.idxOutType = 2 # type: int @classmethod def InitFromBuf(cls, buf, pos): - rnnoptions = RNNOptions() - rnnoptions.Init(buf, pos) - return cls.InitFromObj(rnnoptions) + uniqueOptions = UniqueOptions() + uniqueOptions.Init(buf, pos) + return cls.InitFromObj(uniqueOptions) @classmethod - def InitFromObj(cls, rnnoptions): - x = RNNOptionsT() - x._UnPack(rnnoptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, uniqueOptions): + x = UniqueOptionsT() + x._UnPack(uniqueOptions) return x - # RNNOptionsT - def _UnPack(self, rnnoptions): - if rnnoptions is None: + # UniqueOptionsT + def _UnPack(self, uniqueOptions): + if uniqueOptions is None: return - self.fusedActivationFunction = rnnoptions.FusedActivationFunction() - self.asymmetricQuantizeInputs = rnnoptions.AsymmetricQuantizeInputs() + self.idxOutType = uniqueOptions.IdxOutType() - # RNNOptionsT + # UniqueOptionsT def Pack(self, builder): - RNNOptionsStart(builder) - RNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - RNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - rnnoptions = RNNOptionsEnd(builder) - return rnnoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: 
tflite + UniqueOptionsStart(builder) + UniqueOptionsAddIdxOutType(builder, self.idxOutType) + uniqueOptions = UniqueOptionsEnd(builder) + return uniqueOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class RandomOptions(object): +class ReverseV2Options(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = RandomOptions() + x = ReverseV2Options() x.Init(buf, n + offset) return x @classmethod - def GetRootAsRandomOptions(cls, buf, offset=0): + def GetRootAsReverseV2Options(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def RandomOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ReverseV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # RandomOptions + # ReverseV2Options def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # RandomOptions - def Seed(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 +def ReverseV2OptionsStart(builder): + builder.StartObject(0) - # RandomOptions - def Seed2(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) - return 0 +def ReverseV2OptionsEnd(builder): + return builder.EndObject() -def RandomOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return RandomOptionsStart(builder) -def RandomOptionsAddSeed(builder, seed): builder.PrependInt64Slot(0, seed, 0) -def AddSeed(builder, seed): - return RandomOptionsAddSeed(builder, seed) -def RandomOptionsAddSeed2(builder, seed2): builder.PrependInt64Slot(1, seed2, 0) -def AddSeed2(builder, seed2): - return RandomOptionsAddSeed2(builder, seed2) -def RandomOptionsEnd(builder): return builder.EndObject() -def End(builder): - return RandomOptionsEnd(builder) -class RandomOptionsT(object): - # RandomOptionsT +class ReverseV2OptionsT(object): + + # ReverseV2OptionsT def __init__(self): - self.seed = 0 # type: int - self.seed2 = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - randomOptions = RandomOptions() - randomOptions.Init(buf, pos) - return cls.InitFromObj(randomOptions) + reverseV2Options = ReverseV2Options() + reverseV2Options.Init(buf, pos) + return cls.InitFromObj(reverseV2Options) @classmethod - def InitFromObj(cls, randomOptions): - x = RandomOptionsT() - x._UnPack(randomOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, reverseV2Options): + x = ReverseV2OptionsT() + x._UnPack(reverseV2Options) return x - # RandomOptionsT - def _UnPack(self, randomOptions): - if randomOptions is None: + # ReverseV2OptionsT + def _UnPack(self, reverseV2Options): + if reverseV2Options is None: return - self.seed = randomOptions.Seed() - self.seed2 = randomOptions.Seed2() - # RandomOptionsT + # ReverseV2OptionsT def Pack(self, builder): - RandomOptionsStart(builder) - RandomOptionsAddSeed(builder, self.seed) - RandomOptionsAddSeed2(builder, self.seed2) - randomOptions = RandomOptionsEnd(builder) - return randomOptions -# automatically 
generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ReverseV2OptionsStart(builder) + reverseV2Options = ReverseV2OptionsEnd(builder) + return reverseV2Options -from flatbuffers.compat import import_numpy -np = import_numpy() -class RangeOptions(object): +class AddNOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = RangeOptions() + x = AddNOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsRangeOptions(cls, buf, offset=0): + def GetRootAsAddNOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def RangeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def AddNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # RangeOptions + # AddNOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def RangeOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return RangeOptionsStart(builder) -def RangeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return RangeOptionsEnd(builder) +def AddNOptionsStart(builder): + builder.StartObject(0) -class RangeOptionsT(object): +def AddNOptionsEnd(builder): + return builder.EndObject() - # RangeOptionsT + + +class AddNOptionsT(object): + + # AddNOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - rangeOptions = RangeOptions() - rangeOptions.Init(buf, pos) - return cls.InitFromObj(rangeOptions) + addNoptions = AddNOptions() + addNoptions.Init(buf, pos) + return cls.InitFromObj(addNoptions) @classmethod - def InitFromObj(cls, rangeOptions): - x = RangeOptionsT() - x._UnPack(rangeOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, addNoptions): + x = AddNOptionsT() + x._UnPack(addNoptions) return x - # RangeOptionsT - def _UnPack(self, rangeOptions): - if rangeOptions is None: + # AddNOptionsT + def _UnPack(self, addNoptions): + if addNoptions is None: return - # RangeOptionsT + # AddNOptionsT def Pack(self, builder): - RangeOptionsStart(builder) - rangeOptions = RangeOptionsEnd(builder) - return rangeOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + AddNOptionsStart(builder) + addNoptions = AddNOptionsEnd(builder) + return addNoptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class RankOptions(object): +class GatherNdOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = RankOptions() + x = GatherNdOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsRankOptions(cls, buf, offset=0): + def GetRootAsGatherNdOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def RankOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def GatherNdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # RankOptions + # GatherNdOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def RankOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return RankOptionsStart(builder) -def RankOptionsEnd(builder): return builder.EndObject() -def End(builder): - return RankOptionsEnd(builder) +def GatherNdOptionsStart(builder): + builder.StartObject(0) -class RankOptionsT(object): +def GatherNdOptionsEnd(builder): + return builder.EndObject() - # RankOptionsT + + +class GatherNdOptionsT(object): + + # GatherNdOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - rankOptions = RankOptions() - rankOptions.Init(buf, pos) - return cls.InitFromObj(rankOptions) + gatherNdOptions = GatherNdOptions() + gatherNdOptions.Init(buf, pos) + return cls.InitFromObj(gatherNdOptions) @classmethod - def InitFromObj(cls, rankOptions): - x = RankOptionsT() - x._UnPack(rankOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, gatherNdOptions): + x = GatherNdOptionsT() + x._UnPack(gatherNdOptions) return x - # RankOptionsT - def _UnPack(self, rankOptions): - if rankOptions is None: + # GatherNdOptionsT + def _UnPack(self, gatherNdOptions): + if gatherNdOptions is None: return - # RankOptionsT + # GatherNdOptionsT def Pack(self, builder): - RankOptionsStart(builder) - rankOptions = RankOptionsEnd(builder) - return rankOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + GatherNdOptionsStart(builder) + gatherNdOptions = GatherNdOptionsEnd(builder) + return gatherNdOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ReadVariableOptions(object): +class WhereOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ReadVariableOptions() + x = WhereOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsReadVariableOptions(cls, buf, offset=0): + def GetRootAsWhereOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ReadVariableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def WhereOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ReadVariableOptions + # WhereOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def ReadVariableOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return ReadVariableOptionsStart(builder) -def ReadVariableOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ReadVariableOptionsEnd(builder) +def WhereOptionsStart(builder): + builder.StartObject(0) -class ReadVariableOptionsT(object): +def WhereOptionsEnd(builder): + return builder.EndObject() - # ReadVariableOptionsT + + +class WhereOptionsT(object): + + # WhereOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - readVariableOptions = ReadVariableOptions() - readVariableOptions.Init(buf, pos) - return cls.InitFromObj(readVariableOptions) + whereOptions = WhereOptions() + whereOptions.Init(buf, pos) + return cls.InitFromObj(whereOptions) @classmethod - def InitFromObj(cls, readVariableOptions): - x = ReadVariableOptionsT() - x._UnPack(readVariableOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, whereOptions): + x = WhereOptionsT() + x._UnPack(whereOptions) return x - # ReadVariableOptionsT - def _UnPack(self, readVariableOptions): - if readVariableOptions is None: + # WhereOptionsT + def _UnPack(self, whereOptions): + if whereOptions is None: return - # ReadVariableOptionsT + # WhereOptionsT def Pack(self, builder): - ReadVariableOptionsStart(builder) - readVariableOptions = ReadVariableOptionsEnd(builder) - return readVariableOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + WhereOptionsStart(builder) + whereOptions = WhereOptionsEnd(builder) + return whereOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ReducerOptions(object): +class ReverseSequenceOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ReducerOptions() + x = ReverseSequenceOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsReducerOptions(cls, buf, offset=0): + def GetRootAsReverseSequenceOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ReducerOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ReverseSequenceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ReducerOptions + # ReverseSequenceOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # ReducerOptions - def KeepDims(self): + # ReverseSequenceOptions + def SeqDim(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 -def ReducerOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return ReducerOptionsStart(builder) -def ReducerOptionsAddKeepDims(builder, keepDims): builder.PrependBoolSlot(0, keepDims, 0) -def AddKeepDims(builder, keepDims): - return ReducerOptionsAddKeepDims(builder, keepDims) -def ReducerOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ReducerOptionsEnd(builder) + # ReverseSequenceOptions + def BatchDim(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 -class ReducerOptionsT(object): +def ReverseSequenceOptionsStart(builder): + builder.StartObject(2) - # ReducerOptionsT +def ReverseSequenceOptionsAddSeqDim(builder, seqDim): + builder.PrependInt32Slot(0, seqDim, 0) + +def ReverseSequenceOptionsAddBatchDim(builder, batchDim): + builder.PrependInt32Slot(1, batchDim, 0) + +def ReverseSequenceOptionsEnd(builder): + return builder.EndObject() + + + +class ReverseSequenceOptionsT(object): + + # ReverseSequenceOptionsT def __init__(self): - self.keepDims = False # type: bool + self.seqDim = 0 # type: int + self.batchDim = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - reducerOptions = ReducerOptions() - reducerOptions.Init(buf, pos) - return cls.InitFromObj(reducerOptions) + reverseSequenceOptions = ReverseSequenceOptions() + reverseSequenceOptions.Init(buf, pos) + return cls.InitFromObj(reverseSequenceOptions) @classmethod - def InitFromObj(cls, reducerOptions): - x = ReducerOptionsT() - x._UnPack(reducerOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, reverseSequenceOptions): + x = ReverseSequenceOptionsT() + x._UnPack(reverseSequenceOptions) return x - # ReducerOptionsT - def _UnPack(self, reducerOptions): - if reducerOptions is None: + # ReverseSequenceOptionsT + def _UnPack(self, reverseSequenceOptions): + if reverseSequenceOptions is None: return - self.keepDims = reducerOptions.KeepDims() + self.seqDim = reverseSequenceOptions.SeqDim() + self.batchDim = reverseSequenceOptions.BatchDim() - # ReducerOptionsT + # ReverseSequenceOptionsT def Pack(self, builder): - ReducerOptionsStart(builder) - ReducerOptionsAddKeepDims(builder, self.keepDims) - reducerOptions = ReducerOptionsEnd(builder) - return reducerOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ReverseSequenceOptionsStart(builder) + ReverseSequenceOptionsAddSeqDim(builder, self.seqDim) + ReverseSequenceOptionsAddBatchDim(builder, 
self.batchDim) + reverseSequenceOptions = ReverseSequenceOptionsEnd(builder) + return reverseSequenceOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ReshapeOptions(object): +class MatrixDiagOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ReshapeOptions() + x = MatrixDiagOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsReshapeOptions(cls, buf, offset=0): + def GetRootAsMatrixDiagOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ReshapeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def MatrixDiagOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ReshapeOptions + # MatrixDiagOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # ReshapeOptions - def NewShape(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # ReshapeOptions - def NewShapeAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 +def MatrixDiagOptionsStart(builder): + builder.StartObject(0) - # ReshapeOptions - def NewShapeLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 +def MatrixDiagOptionsEnd(builder): + return builder.EndObject() - # ReshapeOptions - def NewShapeIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 -def ReshapeOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return ReshapeOptionsStart(builder) -def ReshapeOptionsAddNewShape(builder, newShape): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(newShape), 0) -def AddNewShape(builder, newShape): - return ReshapeOptionsAddNewShape(builder, newShape) -def ReshapeOptionsStartNewShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartNewShapeVector(builder, numElems): - return ReshapeOptionsStartNewShapeVector(builder, numElems) -def ReshapeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ReshapeOptionsEnd(builder) -try: - from typing import List -except: - pass -class ReshapeOptionsT(object): +class MatrixDiagOptionsT(object): - # ReshapeOptionsT + # MatrixDiagOptionsT def __init__(self): - self.newShape = None # type: List[int] + pass @classmethod def InitFromBuf(cls, buf, pos): - reshapeOptions = ReshapeOptions() - reshapeOptions.Init(buf, pos) - return cls.InitFromObj(reshapeOptions) + matrixDiagOptions = MatrixDiagOptions() + matrixDiagOptions.Init(buf, pos) + return cls.InitFromObj(matrixDiagOptions) @classmethod - def InitFromObj(cls, reshapeOptions): - x = ReshapeOptionsT() - x._UnPack(reshapeOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, matrixDiagOptions): + x = MatrixDiagOptionsT() + 
x._UnPack(matrixDiagOptions) return x - # ReshapeOptionsT - def _UnPack(self, reshapeOptions): - if reshapeOptions is None: + # MatrixDiagOptionsT + def _UnPack(self, matrixDiagOptions): + if matrixDiagOptions is None: return - if not reshapeOptions.NewShapeIsNone(): - if np is None: - self.newShape = [] - for i in range(reshapeOptions.NewShapeLength()): - self.newShape.append(reshapeOptions.NewShape(i)) - else: - self.newShape = reshapeOptions.NewShapeAsNumpy() - # ReshapeOptionsT + # MatrixDiagOptionsT def Pack(self, builder): - if self.newShape is not None: - if np is not None and type(self.newShape) is np.ndarray: - newShape = builder.CreateNumpyVector(self.newShape) - else: - ReshapeOptionsStartNewShapeVector(builder, len(self.newShape)) - for i in reversed(range(len(self.newShape))): - builder.PrependInt32(self.newShape[i]) - newShape = builder.EndVector() - ReshapeOptionsStart(builder) - if self.newShape is not None: - ReshapeOptionsAddNewShape(builder, newShape) - reshapeOptions = ReshapeOptionsEnd(builder) - return reshapeOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + MatrixDiagOptionsStart(builder) + matrixDiagOptions = MatrixDiagOptionsEnd(builder) + return matrixDiagOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ResizeBilinearOptions(object): +class QuantizeOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ResizeBilinearOptions() + x = QuantizeOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsResizeBilinearOptions(cls, buf, offset=0): + def GetRootAsQuantizeOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ResizeBilinearOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def QuantizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ResizeBilinearOptions + # QuantizeOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # ResizeBilinearOptions - def AlignCorners(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def QuantizeOptionsStart(builder): + builder.StartObject(0) - # ResizeBilinearOptions - def HalfPixelCenters(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def QuantizeOptionsEnd(builder): + return builder.EndObject() -def ResizeBilinearOptionsStart(builder): builder.StartObject(4) -def Start(builder): - return ResizeBilinearOptionsStart(builder) -def ResizeBilinearOptionsAddAlignCorners(builder, alignCorners): builder.PrependBoolSlot(2, alignCorners, 0) -def AddAlignCorners(builder, alignCorners): - return ResizeBilinearOptionsAddAlignCorners(builder, alignCorners) -def ResizeBilinearOptionsAddHalfPixelCenters(builder, halfPixelCenters): builder.PrependBoolSlot(3, halfPixelCenters, 0) -def AddHalfPixelCenters(builder, halfPixelCenters): - return ResizeBilinearOptionsAddHalfPixelCenters(builder, halfPixelCenters) -def ResizeBilinearOptionsEnd(builder): return builder.EndObject() -def End(builder): - return 
ResizeBilinearOptionsEnd(builder) -class ResizeBilinearOptionsT(object): - # ResizeBilinearOptionsT +class QuantizeOptionsT(object): + + # QuantizeOptionsT def __init__(self): - self.alignCorners = False # type: bool - self.halfPixelCenters = False # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - resizeBilinearOptions = ResizeBilinearOptions() - resizeBilinearOptions.Init(buf, pos) - return cls.InitFromObj(resizeBilinearOptions) + quantizeOptions = QuantizeOptions() + quantizeOptions.Init(buf, pos) + return cls.InitFromObj(quantizeOptions) @classmethod - def InitFromObj(cls, resizeBilinearOptions): - x = ResizeBilinearOptionsT() - x._UnPack(resizeBilinearOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, quantizeOptions): + x = QuantizeOptionsT() + x._UnPack(quantizeOptions) return x - # ResizeBilinearOptionsT - def _UnPack(self, resizeBilinearOptions): - if resizeBilinearOptions is None: + # QuantizeOptionsT + def _UnPack(self, quantizeOptions): + if quantizeOptions is None: return - self.alignCorners = resizeBilinearOptions.AlignCorners() - self.halfPixelCenters = resizeBilinearOptions.HalfPixelCenters() - # ResizeBilinearOptionsT + # QuantizeOptionsT def Pack(self, builder): - ResizeBilinearOptionsStart(builder) - ResizeBilinearOptionsAddAlignCorners(builder, self.alignCorners) - ResizeBilinearOptionsAddHalfPixelCenters(builder, self.halfPixelCenters) - resizeBilinearOptions = ResizeBilinearOptionsEnd(builder) - return resizeBilinearOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + QuantizeOptionsStart(builder) + quantizeOptions = QuantizeOptionsEnd(builder) + return quantizeOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ResizeNearestNeighborOptions(object): +class MatrixSetDiagOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ResizeNearestNeighborOptions() + x = MatrixSetDiagOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsResizeNearestNeighborOptions(cls, buf, offset=0): + def GetRootAsMatrixSetDiagOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ResizeNearestNeighborOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def MatrixSetDiagOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ResizeNearestNeighborOptions + # MatrixSetDiagOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # ResizeNearestNeighborOptions - def AlignCorners(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def MatrixSetDiagOptionsStart(builder): + builder.StartObject(0) - # ResizeNearestNeighborOptions - def HalfPixelCenters(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def MatrixSetDiagOptionsEnd(builder): + return builder.EndObject() -def ResizeNearestNeighborOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return ResizeNearestNeighborOptionsStart(builder) -def ResizeNearestNeighborOptionsAddAlignCorners(builder, alignCorners): builder.PrependBoolSlot(0, alignCorners, 0) -def AddAlignCorners(builder, alignCorners): - return ResizeNearestNeighborOptionsAddAlignCorners(builder, alignCorners) -def ResizeNearestNeighborOptionsAddHalfPixelCenters(builder, halfPixelCenters): builder.PrependBoolSlot(1, halfPixelCenters, 0) -def AddHalfPixelCenters(builder, halfPixelCenters): - return ResizeNearestNeighborOptionsAddHalfPixelCenters(builder, halfPixelCenters) -def ResizeNearestNeighborOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ResizeNearestNeighborOptionsEnd(builder) -class ResizeNearestNeighborOptionsT(object): - # ResizeNearestNeighborOptionsT +class MatrixSetDiagOptionsT(object): + + # MatrixSetDiagOptionsT def __init__(self): - self.alignCorners = False # type: bool - self.halfPixelCenters = False # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - resizeNearestNeighborOptions = ResizeNearestNeighborOptions() - resizeNearestNeighborOptions.Init(buf, pos) - return cls.InitFromObj(resizeNearestNeighborOptions) + matrixSetDiagOptions = MatrixSetDiagOptions() + matrixSetDiagOptions.Init(buf, pos) + return cls.InitFromObj(matrixSetDiagOptions) @classmethod - def InitFromObj(cls, resizeNearestNeighborOptions): - x = ResizeNearestNeighborOptionsT() - x._UnPack(resizeNearestNeighborOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, matrixSetDiagOptions): + x = MatrixSetDiagOptionsT() + x._UnPack(matrixSetDiagOptions) return x - # ResizeNearestNeighborOptionsT - def _UnPack(self, resizeNearestNeighborOptions): - if resizeNearestNeighborOptions is None: + # MatrixSetDiagOptionsT + def _UnPack(self, matrixSetDiagOptions): + if matrixSetDiagOptions is None: return - self.alignCorners = resizeNearestNeighborOptions.AlignCorners() - self.halfPixelCenters = resizeNearestNeighborOptions.HalfPixelCenters() - # ResizeNearestNeighborOptionsT + # MatrixSetDiagOptionsT def Pack(self, builder): - ResizeNearestNeighborOptionsStart(builder) - ResizeNearestNeighborOptionsAddAlignCorners(builder, self.alignCorners) - 
ResizeNearestNeighborOptionsAddHalfPixelCenters(builder, self.halfPixelCenters) - resizeNearestNeighborOptions = ResizeNearestNeighborOptionsEnd(builder) - return resizeNearestNeighborOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + MatrixSetDiagOptionsStart(builder) + matrixSetDiagOptions = MatrixSetDiagOptionsEnd(builder) + return matrixSetDiagOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ReverseSequenceOptions(object): +class IfOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ReverseSequenceOptions() + x = IfOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsReverseSequenceOptions(cls, buf, offset=0): + def GetRootAsIfOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ReverseSequenceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def IfOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ReverseSequenceOptions + # IfOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # ReverseSequenceOptions - def SeqDim(self): + # IfOptions + def ThenSubgraphIndex(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 - # ReverseSequenceOptions - def BatchDim(self): + # IfOptions + def ElseSubgraphIndex(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 -def ReverseSequenceOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return ReverseSequenceOptionsStart(builder) -def ReverseSequenceOptionsAddSeqDim(builder, seqDim): builder.PrependInt32Slot(0, seqDim, 0) -def AddSeqDim(builder, seqDim): - return ReverseSequenceOptionsAddSeqDim(builder, seqDim) -def ReverseSequenceOptionsAddBatchDim(builder, batchDim): builder.PrependInt32Slot(1, batchDim, 0) -def AddBatchDim(builder, batchDim): - return ReverseSequenceOptionsAddBatchDim(builder, batchDim) -def ReverseSequenceOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ReverseSequenceOptionsEnd(builder) +def IfOptionsStart(builder): + builder.StartObject(2) -class ReverseSequenceOptionsT(object): +def IfOptionsAddThenSubgraphIndex(builder, thenSubgraphIndex): + builder.PrependInt32Slot(0, thenSubgraphIndex, 0) - # ReverseSequenceOptionsT +def IfOptionsAddElseSubgraphIndex(builder, elseSubgraphIndex): + builder.PrependInt32Slot(1, elseSubgraphIndex, 0) + +def IfOptionsEnd(builder): + return builder.EndObject() + + + +class IfOptionsT(object): + + # IfOptionsT def __init__(self): - self.seqDim = 0 # type: int - self.batchDim = 0 # type: int + self.thenSubgraphIndex = 0 # type: int + self.elseSubgraphIndex = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - reverseSequenceOptions = ReverseSequenceOptions() - reverseSequenceOptions.Init(buf, pos) - return cls.InitFromObj(reverseSequenceOptions) + ifOptions = IfOptions() + ifOptions.Init(buf, pos) + return cls.InitFromObj(ifOptions) @classmethod - def InitFromObj(cls, reverseSequenceOptions): - x = ReverseSequenceOptionsT() - 
x._UnPack(reverseSequenceOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, ifOptions): + x = IfOptionsT() + x._UnPack(ifOptions) return x - # ReverseSequenceOptionsT - def _UnPack(self, reverseSequenceOptions): - if reverseSequenceOptions is None: + # IfOptionsT + def _UnPack(self, ifOptions): + if ifOptions is None: return - self.seqDim = reverseSequenceOptions.SeqDim() - self.batchDim = reverseSequenceOptions.BatchDim() + self.thenSubgraphIndex = ifOptions.ThenSubgraphIndex() + self.elseSubgraphIndex = ifOptions.ElseSubgraphIndex() - # ReverseSequenceOptionsT + # IfOptionsT def Pack(self, builder): - ReverseSequenceOptionsStart(builder) - ReverseSequenceOptionsAddSeqDim(builder, self.seqDim) - ReverseSequenceOptionsAddBatchDim(builder, self.batchDim) - reverseSequenceOptions = ReverseSequenceOptionsEnd(builder) - return reverseSequenceOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + IfOptionsStart(builder) + IfOptionsAddThenSubgraphIndex(builder, self.thenSubgraphIndex) + IfOptionsAddElseSubgraphIndex(builder, self.elseSubgraphIndex) + ifOptions = IfOptionsEnd(builder) + return ifOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ReverseV2Options(object): +class CallOnceOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ReverseV2Options() + x = CallOnceOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsReverseV2Options(cls, buf, offset=0): + def GetRootAsCallOnceOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ReverseV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def CallOnceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ReverseV2Options - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) + # CallOnceOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + + # CallOnceOptions + def InitSubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def CallOnceOptionsStart(builder): + builder.StartObject(1) -def ReverseV2OptionsStart(builder): builder.StartObject(0) -def Start(builder): - return ReverseV2OptionsStart(builder) -def ReverseV2OptionsEnd(builder): return builder.EndObject() -def End(builder): - return ReverseV2OptionsEnd(builder) +def CallOnceOptionsAddInitSubgraphIndex(builder, initSubgraphIndex): + builder.PrependInt32Slot(0, initSubgraphIndex, 0) -class ReverseV2OptionsT(object): +def CallOnceOptionsEnd(builder): + return builder.EndObject() - # ReverseV2OptionsT + + +class CallOnceOptionsT(object): + + # CallOnceOptionsT def __init__(self): - pass + self.initSubgraphIndex = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - reverseV2options = ReverseV2Options() - reverseV2options.Init(buf, pos) - return cls.InitFromObj(reverseV2options) + callOnceOptions = CallOnceOptions() + callOnceOptions.Init(buf, pos) + return cls.InitFromObj(callOnceOptions) @classmethod - def InitFromObj(cls, reverseV2options): - x = ReverseV2OptionsT() - x._UnPack(reverseV2options) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, callOnceOptions): + x = CallOnceOptionsT() + x._UnPack(callOnceOptions) return x - # ReverseV2OptionsT - def _UnPack(self, reverseV2options): - if reverseV2options is None: + # CallOnceOptionsT + def _UnPack(self, callOnceOptions): + if callOnceOptions is None: return + self.initSubgraphIndex = callOnceOptions.InitSubgraphIndex() - # ReverseV2OptionsT + # CallOnceOptionsT def Pack(self, builder): - ReverseV2OptionsStart(builder) - reverseV2options = ReverseV2OptionsEnd(builder) - return reverseV2options -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + CallOnceOptionsStart(builder) + CallOnceOptionsAddInitSubgraphIndex(builder, self.initSubgraphIndex) + callOnceOptions = CallOnceOptionsEnd(builder) + return callOnceOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class Rfft2dOptions(object): +class WhileOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Rfft2dOptions() + x = WhileOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsRfft2dOptions(cls, buf, offset=0): + def GetRootAsWhileOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def Rfft2dOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def WhileOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Rfft2dOptions + # WhileOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def Rfft2dOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return Rfft2dOptionsStart(builder) -def Rfft2dOptionsEnd(builder): return builder.EndObject() -def End(builder): - return Rfft2dOptionsEnd(builder) + # WhileOptions + def CondSubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 -class Rfft2dOptionsT(object): + # WhileOptions + def BodySubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 - # Rfft2dOptionsT +def WhileOptionsStart(builder): + builder.StartObject(2) + +def WhileOptionsAddCondSubgraphIndex(builder, condSubgraphIndex): + builder.PrependInt32Slot(0, condSubgraphIndex, 0) + +def WhileOptionsAddBodySubgraphIndex(builder, bodySubgraphIndex): + builder.PrependInt32Slot(1, bodySubgraphIndex, 0) + +def WhileOptionsEnd(builder): + return builder.EndObject() + + + +class WhileOptionsT(object): + + # WhileOptionsT def __init__(self): - pass + self.condSubgraphIndex = 0 # type: int + self.bodySubgraphIndex = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - rfft2dOptions = Rfft2dOptions() - rfft2dOptions.Init(buf, pos) - return cls.InitFromObj(rfft2dOptions) + whileOptions = WhileOptions() + whileOptions.Init(buf, pos) + return cls.InitFromObj(whileOptions) @classmethod - def InitFromObj(cls, rfft2dOptions): - x = Rfft2dOptionsT() - x._UnPack(rfft2dOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, whileOptions): + x = WhileOptionsT() + x._UnPack(whileOptions) return x - # Rfft2dOptionsT - def _UnPack(self, rfft2dOptions): - if rfft2dOptions is None: + # WhileOptionsT + def _UnPack(self, whileOptions): + if whileOptions is None: return + self.condSubgraphIndex = whileOptions.CondSubgraphIndex() + self.bodySubgraphIndex = whileOptions.BodySubgraphIndex() - # Rfft2dOptionsT + # WhileOptionsT def Pack(self, builder): - Rfft2dOptionsStart(builder) - rfft2dOptions = Rfft2dOptionsEnd(builder) - return rfft2dOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + WhileOptionsStart(builder) + WhileOptionsAddCondSubgraphIndex(builder, self.condSubgraphIndex) + WhileOptionsAddBodySubgraphIndex(builder, self.bodySubgraphIndex) + whileOptions = WhileOptionsEnd(builder) + return whileOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class RightShiftOptions(object): +class NonMaxSuppressionV4Options(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = RightShiftOptions() + x = NonMaxSuppressionV4Options() x.Init(buf, n + offset) return x @classmethod - def GetRootAsRightShiftOptions(cls, buf, offset=0): + def 
GetRootAsNonMaxSuppressionV4Options(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def RightShiftOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def NonMaxSuppressionV4OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # RightShiftOptions + # NonMaxSuppressionV4Options def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def RightShiftOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return RightShiftOptionsStart(builder) -def RightShiftOptionsEnd(builder): return builder.EndObject() -def End(builder): - return RightShiftOptionsEnd(builder) +def NonMaxSuppressionV4OptionsStart(builder): + builder.StartObject(0) -class RightShiftOptionsT(object): +def NonMaxSuppressionV4OptionsEnd(builder): + return builder.EndObject() - # RightShiftOptionsT + + +class NonMaxSuppressionV4OptionsT(object): + + # NonMaxSuppressionV4OptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - rightShiftOptions = RightShiftOptions() - rightShiftOptions.Init(buf, pos) - return cls.InitFromObj(rightShiftOptions) + nonMaxSuppressionV4Options = NonMaxSuppressionV4Options() + nonMaxSuppressionV4Options.Init(buf, pos) + return cls.InitFromObj(nonMaxSuppressionV4Options) @classmethod - def InitFromObj(cls, rightShiftOptions): - x = RightShiftOptionsT() - x._UnPack(rightShiftOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, nonMaxSuppressionV4Options): + x = NonMaxSuppressionV4OptionsT() + x._UnPack(nonMaxSuppressionV4Options) return x - # RightShiftOptionsT - def _UnPack(self, rightShiftOptions): - if rightShiftOptions is None: + # NonMaxSuppressionV4OptionsT + def _UnPack(self, nonMaxSuppressionV4Options): + if nonMaxSuppressionV4Options is None: return - # RightShiftOptionsT + # NonMaxSuppressionV4OptionsT def Pack(self, builder): - RightShiftOptionsStart(builder) - rightShiftOptions = RightShiftOptionsEnd(builder) - return rightShiftOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + NonMaxSuppressionV4OptionsStart(builder) + nonMaxSuppressionV4Options = NonMaxSuppressionV4OptionsEnd(builder) + return nonMaxSuppressionV4Options -from flatbuffers.compat import import_numpy -np = import_numpy() -class SVDFOptions(object): +class NonMaxSuppressionV5Options(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SVDFOptions() + x = NonMaxSuppressionV5Options() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSVDFOptions(cls, buf, offset=0): + def GetRootAsNonMaxSuppressionV5Options(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SVDFOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def NonMaxSuppressionV5OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SVDFOptions + # NonMaxSuppressionV5Options def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SVDFOptions - def Rank(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 +def NonMaxSuppressionV5OptionsStart(builder): + builder.StartObject(0) - # SVDFOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def NonMaxSuppressionV5OptionsEnd(builder): + return builder.EndObject() - # SVDFOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False -def SVDFOptionsStart(builder): builder.StartObject(3) -def Start(builder): - return SVDFOptionsStart(builder) -def SVDFOptionsAddRank(builder, rank): builder.PrependInt32Slot(0, rank, 0) -def AddRank(builder, rank): - return SVDFOptionsAddRank(builder, rank) -def SVDFOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return SVDFOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def SVDFOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return SVDFOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def SVDFOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SVDFOptionsEnd(builder) -class SVDFOptionsT(object): +class NonMaxSuppressionV5OptionsT(object): - # SVDFOptionsT + # NonMaxSuppressionV5OptionsT def __init__(self): - self.rank = 0 # type: int - self.fusedActivationFunction = 0 # type: int - self.asymmetricQuantizeInputs = False # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - svdfoptions = SVDFOptions() - svdfoptions.Init(buf, pos) - return cls.InitFromObj(svdfoptions) + nonMaxSuppressionV5Options = NonMaxSuppressionV5Options() + nonMaxSuppressionV5Options.Init(buf, pos) + return cls.InitFromObj(nonMaxSuppressionV5Options) @classmethod - def InitFromObj(cls, svdfoptions): - x = SVDFOptionsT() - x._UnPack(svdfoptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, nonMaxSuppressionV5Options): + x = NonMaxSuppressionV5OptionsT() + x._UnPack(nonMaxSuppressionV5Options) return x - # SVDFOptionsT - def _UnPack(self, svdfoptions): - if svdfoptions is None: + # NonMaxSuppressionV5OptionsT + def _UnPack(self, nonMaxSuppressionV5Options): + if nonMaxSuppressionV5Options is None: return - self.rank = svdfoptions.Rank() - self.fusedActivationFunction = svdfoptions.FusedActivationFunction() - self.asymmetricQuantizeInputs = 
svdfoptions.AsymmetricQuantizeInputs() - # SVDFOptionsT + # NonMaxSuppressionV5OptionsT def Pack(self, builder): - SVDFOptionsStart(builder) - SVDFOptionsAddRank(builder, self.rank) - SVDFOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - SVDFOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - svdfoptions = SVDFOptionsEnd(builder) - return svdfoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + NonMaxSuppressionV5OptionsStart(builder) + nonMaxSuppressionV5Options = NonMaxSuppressionV5OptionsEnd(builder) + return nonMaxSuppressionV5Options -from flatbuffers.compat import import_numpy -np = import_numpy() class ScatterNdOptions(object): __slots__ = ['_tab'] @@ -9866,12 +14242,13 @@ def ScatterNdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def ScatterNdOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return ScatterNdOptionsStart(builder) -def ScatterNdOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ScatterNdOptionsEnd(builder) +def ScatterNdOptionsStart(builder): + builder.StartObject(0) + +def ScatterNdOptionsEnd(builder): + return builder.EndObject() + + class ScatterNdOptionsT(object): @@ -9885,6 +14262,11 @@ def InitFromBuf(cls, buf, pos): scatterNdOptions.Init(buf, pos) return cls.InitFromObj(scatterNdOptions) + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + @classmethod def InitFromObj(cls, scatterNdOptions): x = ScatterNdOptionsT() @@ -9901,4241 +14283,4060 @@ def Pack(self, builder): ScatterNdOptionsStart(builder) scatterNdOptions = ScatterNdOptionsEnd(builder) return scatterNdOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite -from flatbuffers.compat import import_numpy -np = import_numpy() -class SegmentSumOptions(object): +class SelectV2Options(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SegmentSumOptions() + x = SelectV2Options() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSegmentSumOptions(cls, buf, offset=0): + def GetRootAsSelectV2Options(cls, buf, offset=0): """This method is deprecated. 
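# [example] InitFromPackedBuf is added uniformly to every generated *T class
# in this change; unlike InitFromBuf, it first reads the root uoffset stored
# at `pos` before initializing the table. A minimal sketch, shown here with
# ScatterNdOptionsT (a zero-field options table) from the hunk above:
import flatbuffers

b = flatbuffers.Builder(0)
b.Finish(ScatterNdOptionsT().Pack(b))
opts = ScatterNdOptionsT.InitFromPackedBuf(b.Output(), 0)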
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SegmentSumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SelectV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SegmentSumOptions + # SelectV2Options def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SegmentSumOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SegmentSumOptionsStart(builder) -def SegmentSumOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SegmentSumOptionsEnd(builder) +def SelectV2OptionsStart(builder): + builder.StartObject(0) -class SegmentSumOptionsT(object): +def SelectV2OptionsEnd(builder): + return builder.EndObject() - # SegmentSumOptionsT + + +class SelectV2OptionsT(object): + + # SelectV2OptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - segmentSumOptions = SegmentSumOptions() - segmentSumOptions.Init(buf, pos) - return cls.InitFromObj(segmentSumOptions) + selectV2Options = SelectV2Options() + selectV2Options.Init(buf, pos) + return cls.InitFromObj(selectV2Options) @classmethod - def InitFromObj(cls, segmentSumOptions): - x = SegmentSumOptionsT() - x._UnPack(segmentSumOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, selectV2Options): + x = SelectV2OptionsT() + x._UnPack(selectV2Options) return x - # SegmentSumOptionsT - def _UnPack(self, segmentSumOptions): - if segmentSumOptions is None: + # SelectV2OptionsT + def _UnPack(self, selectV2Options): + if selectV2Options is None: return - # SegmentSumOptionsT + # SelectV2OptionsT def Pack(self, builder): - SegmentSumOptionsStart(builder) - segmentSumOptions = SegmentSumOptionsEnd(builder) - return segmentSumOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + SelectV2OptionsStart(builder) + selectV2Options = SelectV2OptionsEnd(builder) + return selectV2Options -from flatbuffers.compat import import_numpy -np = import_numpy() -class SelectOptions(object): +class DensifyOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SelectOptions() + x = DensifyOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSelectOptions(cls, buf, offset=0): + def GetRootAsDensifyOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SelectOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def DensifyOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SelectOptions + # DensifyOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SelectOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SelectOptionsStart(builder) -def SelectOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SelectOptionsEnd(builder) +def DensifyOptionsStart(builder): + builder.StartObject(0) -class SelectOptionsT(object): +def DensifyOptionsEnd(builder): + return builder.EndObject() - # SelectOptionsT + + +class DensifyOptionsT(object): + + # DensifyOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - selectOptions = SelectOptions() - selectOptions.Init(buf, pos) - return cls.InitFromObj(selectOptions) + densifyOptions = DensifyOptions() + densifyOptions.Init(buf, pos) + return cls.InitFromObj(densifyOptions) @classmethod - def InitFromObj(cls, selectOptions): - x = SelectOptionsT() - x._UnPack(selectOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, densifyOptions): + x = DensifyOptionsT() + x._UnPack(densifyOptions) return x - # SelectOptionsT - def _UnPack(self, selectOptions): - if selectOptions is None: + # DensifyOptionsT + def _UnPack(self, densifyOptions): + if densifyOptions is None: return - # SelectOptionsT + # DensifyOptionsT def Pack(self, builder): - SelectOptionsStart(builder) - selectOptions = SelectOptionsEnd(builder) - return selectOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + DensifyOptionsStart(builder) + densifyOptions = DensifyOptionsEnd(builder) + return densifyOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SelectV2Options(object): +class SegmentSumOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SelectV2Options() + x = SegmentSumOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSelectV2Options(cls, buf, offset=0): + def GetRootAsSegmentSumOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SelectV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SegmentSumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SelectV2Options + # SegmentSumOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SelectV2OptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SelectV2OptionsStart(builder) -def SelectV2OptionsEnd(builder): return builder.EndObject() -def End(builder): - return SelectV2OptionsEnd(builder) +def SegmentSumOptionsStart(builder): + builder.StartObject(0) -class SelectV2OptionsT(object): +def SegmentSumOptionsEnd(builder): + return builder.EndObject() - # SelectV2OptionsT + + +class SegmentSumOptionsT(object): + + # SegmentSumOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - selectV2options = SelectV2Options() - selectV2options.Init(buf, pos) - return cls.InitFromObj(selectV2options) + segmentSumOptions = SegmentSumOptions() + segmentSumOptions.Init(buf, pos) + return cls.InitFromObj(segmentSumOptions) @classmethod - def InitFromObj(cls, selectV2options): - x = SelectV2OptionsT() - x._UnPack(selectV2options) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, segmentSumOptions): + x = SegmentSumOptionsT() + x._UnPack(segmentSumOptions) return x - # SelectV2OptionsT - def _UnPack(self, selectV2options): - if selectV2options is None: + # SegmentSumOptionsT + def _UnPack(self, segmentSumOptions): + if segmentSumOptions is None: return - # SelectV2OptionsT + # SegmentSumOptionsT def Pack(self, builder): - SelectV2OptionsStart(builder) - selectV2options = SelectV2OptionsEnd(builder) - return selectV2options -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + SegmentSumOptionsStart(builder) + segmentSumOptions = SegmentSumOptionsEnd(builder) + return segmentSumOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SequenceRNNOptions(object): +class BatchMatMulOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SequenceRNNOptions() + x = BatchMatMulOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSequenceRNNOptions(cls, buf, offset=0): + def GetRootAsBatchMatMulOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SequenceRNNOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def BatchMatMulOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SequenceRNNOptions + # BatchMatMulOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SequenceRNNOptions - def TimeMajor(self): + # BatchMatMulOptions + def AdjX(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False - # SequenceRNNOptions - def FusedActivationFunction(self): + # BatchMatMulOptions + def AdjY(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False - # SequenceRNNOptions + # BatchMatMulOptions def AsymmetricQuantizeInputs(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) return False -def SequenceRNNOptionsStart(builder): builder.StartObject(3) -def Start(builder): - return SequenceRNNOptionsStart(builder) -def SequenceRNNOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(0, timeMajor, 0) -def AddTimeMajor(builder, timeMajor): - return SequenceRNNOptionsAddTimeMajor(builder, timeMajor) -def SequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(1, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return SequenceRNNOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def SequenceRNNOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SequenceRNNOptionsEnd(builder) +def BatchMatMulOptionsStart(builder): + builder.StartObject(3) -class SequenceRNNOptionsT(object): +def BatchMatMulOptionsAddAdjX(builder, adjX): + builder.PrependBoolSlot(0, adjX, 0) - # SequenceRNNOptionsT +def BatchMatMulOptionsAddAdjY(builder, adjY): + builder.PrependBoolSlot(1, adjY, 0) + +def BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): + builder.PrependBoolSlot(2, asymmetricQuantizeInputs, 0) + +def BatchMatMulOptionsEnd(builder): + return builder.EndObject() + + + +class BatchMatMulOptionsT(object): + + # BatchMatMulOptionsT def __init__(self): - self.timeMajor = False # type: bool - self.fusedActivationFunction = 0 # type: int + self.adjX = False # type: bool + self.adjY = False # type: bool self.asymmetricQuantizeInputs = False # type: bool @classmethod def InitFromBuf(cls, buf, pos): - sequenceRnnoptions = SequenceRNNOptions() - sequenceRnnoptions.Init(buf, pos) - return cls.InitFromObj(sequenceRnnoptions) + batchMatMulOptions = BatchMatMulOptions() + batchMatMulOptions.Init(buf, pos) + return cls.InitFromObj(batchMatMulOptions) @classmethod - def InitFromObj(cls, 
sequenceRnnoptions): - x = SequenceRNNOptionsT() - x._UnPack(sequenceRnnoptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, batchMatMulOptions): + x = BatchMatMulOptionsT() + x._UnPack(batchMatMulOptions) return x - # SequenceRNNOptionsT - def _UnPack(self, sequenceRnnoptions): - if sequenceRnnoptions is None: + # BatchMatMulOptionsT + def _UnPack(self, batchMatMulOptions): + if batchMatMulOptions is None: return - self.timeMajor = sequenceRnnoptions.TimeMajor() - self.fusedActivationFunction = sequenceRnnoptions.FusedActivationFunction() - self.asymmetricQuantizeInputs = sequenceRnnoptions.AsymmetricQuantizeInputs() + self.adjX = batchMatMulOptions.AdjX() + self.adjY = batchMatMulOptions.AdjY() + self.asymmetricQuantizeInputs = batchMatMulOptions.AsymmetricQuantizeInputs() - # SequenceRNNOptionsT + # BatchMatMulOptionsT def Pack(self, builder): - SequenceRNNOptionsStart(builder) - SequenceRNNOptionsAddTimeMajor(builder, self.timeMajor) - SequenceRNNOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - SequenceRNNOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - sequenceRnnoptions = SequenceRNNOptionsEnd(builder) - return sequenceRnnoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + BatchMatMulOptionsStart(builder) + BatchMatMulOptionsAddAdjX(builder, self.adjX) + BatchMatMulOptionsAddAdjY(builder, self.adjY) + BatchMatMulOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) + batchMatMulOptions = BatchMatMulOptionsEnd(builder) + return batchMatMulOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class ShapeOptions(object): +class CumsumOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ShapeOptions() + x = CumsumOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsShapeOptions(cls, buf, offset=0): + def GetRootAsCumsumOptions(cls, buf, offset=0): """This method is deprecated. 
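# [example] The BatchMatMulOptions builder API generated above, with its
# three bool slots (adjoint/transpose flags for the two inputs, plus the
# quantization flag). A sketch; names assumed imported from this module.
import flatbuffers

b = flatbuffers.Builder(0)
BatchMatMulOptionsStart(b)
BatchMatMulOptionsAddAdjX(b, True)
BatchMatMulOptionsAddAdjY(b, False)  # default value; slot elided in buffer
BatchMatMulOptionsAddAsymmetricQuantizeInputs(b, True)
b.Finish(BatchMatMulOptionsEnd(b))

opts = BatchMatMulOptions.GetRootAs(b.Output(), 0)
assert opts.AdjX() and not opts.AdjY()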
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def ShapeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def CumsumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # ShapeOptions + # CumsumOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # ShapeOptions - def OutType(self): + # CumsumOptions + def Exclusive(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False + + # CumsumOptions + def Reverse(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False -def ShapeOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return ShapeOptionsStart(builder) -def ShapeOptionsAddOutType(builder, outType): builder.PrependInt8Slot(0, outType, 0) -def AddOutType(builder, outType): - return ShapeOptionsAddOutType(builder, outType) -def ShapeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return ShapeOptionsEnd(builder) +def CumsumOptionsStart(builder): + builder.StartObject(2) -class ShapeOptionsT(object): +def CumsumOptionsAddExclusive(builder, exclusive): + builder.PrependBoolSlot(0, exclusive, 0) - # ShapeOptionsT +def CumsumOptionsAddReverse(builder, reverse): + builder.PrependBoolSlot(1, reverse, 0) + +def CumsumOptionsEnd(builder): + return builder.EndObject() + + + +class CumsumOptionsT(object): + + # CumsumOptionsT def __init__(self): - self.outType = 0 # type: int + self.exclusive = False # type: bool + self.reverse = False # type: bool @classmethod def InitFromBuf(cls, buf, pos): - shapeOptions = ShapeOptions() - shapeOptions.Init(buf, pos) - return cls.InitFromObj(shapeOptions) + cumsumOptions = CumsumOptions() + cumsumOptions.Init(buf, pos) + return cls.InitFromObj(cumsumOptions) @classmethod - def InitFromObj(cls, shapeOptions): - x = ShapeOptionsT() - x._UnPack(shapeOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, cumsumOptions): + x = CumsumOptionsT() + x._UnPack(cumsumOptions) return x - # ShapeOptionsT - def _UnPack(self, shapeOptions): - if shapeOptions is None: + # CumsumOptionsT + def _UnPack(self, cumsumOptions): + if cumsumOptions is None: return - self.outType = shapeOptions.OutType() - - # ShapeOptionsT - def Pack(self, builder): - ShapeOptionsStart(builder) - ShapeOptionsAddOutType(builder, self.outType) - shapeOptions = ShapeOptionsEnd(builder) - return shapeOptions -# automatically generated by the FlatBuffers compiler, do not modify + self.exclusive = cumsumOptions.Exclusive() + self.reverse = cumsumOptions.Reverse() -# namespace: tflite + # CumsumOptionsT + def Pack(self, builder): + CumsumOptionsStart(builder) + CumsumOptionsAddExclusive(builder, self.exclusive) + CumsumOptionsAddReverse(builder, self.reverse) + cumsumOptions = CumsumOptionsEnd(builder) + return cumsumOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SignOptions(object): +class BroadcastToOptions(object): __slots__ = ['_tab'] @classmethod 
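# [example] CumsumOptions via the object API generated above. A sketch;
# names assumed imported from the regenerated schema module.
import flatbuffers

t = CumsumOptionsT()
t.exclusive = True  # exclude the current element from each prefix sum
t.reverse = False

b = flatbuffers.Builder(0)
b.Finish(t.Pack(b))

opts = CumsumOptions.GetRootAs(b.Output(), 0)
assert opts.Exclusive() and not opts.Reverse()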
def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SignOptions() + x = BroadcastToOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSignOptions(cls, buf, offset=0): + def GetRootAsBroadcastToOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SignOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def BroadcastToOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SignOptions + # BroadcastToOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SignOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SignOptionsStart(builder) -def SignOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SignOptionsEnd(builder) +def BroadcastToOptionsStart(builder): + builder.StartObject(0) -class SignOptionsT(object): +def BroadcastToOptionsEnd(builder): + return builder.EndObject() - # SignOptionsT + + +class BroadcastToOptionsT(object): + + # BroadcastToOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - signOptions = SignOptions() - signOptions.Init(buf, pos) - return cls.InitFromObj(signOptions) + broadcastToOptions = BroadcastToOptions() + broadcastToOptions.Init(buf, pos) + return cls.InitFromObj(broadcastToOptions) @classmethod - def InitFromObj(cls, signOptions): - x = SignOptionsT() - x._UnPack(signOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, broadcastToOptions): + x = BroadcastToOptionsT() + x._UnPack(broadcastToOptions) return x - # SignOptionsT - def _UnPack(self, signOptions): - if signOptions is None: + # BroadcastToOptionsT + def _UnPack(self, broadcastToOptions): + if broadcastToOptions is None: return - # SignOptionsT + # BroadcastToOptionsT def Pack(self, builder): - SignOptionsStart(builder) - signOptions = SignOptionsEnd(builder) - return signOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + BroadcastToOptionsStart(builder) + broadcastToOptions = BroadcastToOptionsEnd(builder) + return broadcastToOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SignatureDef(object): +class Rfft2dOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SignatureDef() + x = Rfft2dOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSignatureDef(cls, buf, offset=0): + def GetRootAsRfft2dOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SignatureDefBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def Rfft2dOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SignatureDef + # Rfft2dOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SignatureDef - def Inputs(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = TensorMap() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # SignatureDef - def InputsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SignatureDef - def InputsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - # SignatureDef - def Outputs(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = TensorMap() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # SignatureDef - def OutputsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SignatureDef - def OutputsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - return o == 0 +def Rfft2dOptionsStart(builder): + builder.StartObject(0) - # SignatureDef - def SignatureKey(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None +def Rfft2dOptionsEnd(builder): + return builder.EndObject() - # SignatureDef - def SubgraphIndex(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) - return 0 -def SignatureDefStart(builder): builder.StartObject(5) -def Start(builder): - return SignatureDefStart(builder) -def SignatureDefAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) -def AddInputs(builder, inputs): - return SignatureDefAddInputs(builder, inputs) -def SignatureDefStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartInputsVector(builder, numElems): - return SignatureDefStartInputsVector(builder, numElems) -def SignatureDefAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) -def AddOutputs(builder, outputs): - return SignatureDefAddOutputs(builder, outputs) -def SignatureDefStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartOutputsVector(builder, numElems): - return SignatureDefStartOutputsVector(builder, numElems) -def SignatureDefAddSignatureKey(builder, signatureKey): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(signatureKey), 0) -def AddSignatureKey(builder, signatureKey): - return SignatureDefAddSignatureKey(builder, signatureKey) -def SignatureDefAddSubgraphIndex(builder, subgraphIndex): 
builder.PrependUint32Slot(4, subgraphIndex, 0) -def AddSubgraphIndex(builder, subgraphIndex): - return SignatureDefAddSubgraphIndex(builder, subgraphIndex) -def SignatureDefEnd(builder): return builder.EndObject() -def End(builder): - return SignatureDefEnd(builder) -try: - from typing import List -except: - pass -class SignatureDefT(object): +class Rfft2dOptionsT(object): - # SignatureDefT + # Rfft2dOptionsT def __init__(self): - self.inputs = None # type: List[TensorMapT] - self.outputs = None # type: List[TensorMapT] - self.signatureKey = None # type: str - self.subgraphIndex = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - signatureDef = SignatureDef() - signatureDef.Init(buf, pos) - return cls.InitFromObj(signatureDef) + rfft2dOptions = Rfft2dOptions() + rfft2dOptions.Init(buf, pos) + return cls.InitFromObj(rfft2dOptions) @classmethod - def InitFromObj(cls, signatureDef): - x = SignatureDefT() - x._UnPack(signatureDef) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, rfft2dOptions): + x = Rfft2dOptionsT() + x._UnPack(rfft2dOptions) return x - # SignatureDefT - def _UnPack(self, signatureDef): - if signatureDef is None: + # Rfft2dOptionsT + def _UnPack(self, rfft2dOptions): + if rfft2dOptions is None: return - if not signatureDef.InputsIsNone(): - self.inputs = [] - for i in range(signatureDef.InputsLength()): - if signatureDef.Inputs(i) is None: - self.inputs.append(None) - else: - tensorMap_ = TensorMapT.InitFromObj(signatureDef.Inputs(i)) - self.inputs.append(tensorMap_) - if not signatureDef.OutputsIsNone(): - self.outputs = [] - for i in range(signatureDef.OutputsLength()): - if signatureDef.Outputs(i) is None: - self.outputs.append(None) - else: - tensorMap_ = TensorMapT.InitFromObj(signatureDef.Outputs(i)) - self.outputs.append(tensorMap_) - self.signatureKey = signatureDef.SignatureKey() - self.subgraphIndex = signatureDef.SubgraphIndex() - # SignatureDefT + # Rfft2dOptionsT def Pack(self, builder): - if self.inputs is not None: - inputslist = [] - for i in range(len(self.inputs)): - inputslist.append(self.inputs[i].Pack(builder)) - SignatureDefStartInputsVector(builder, len(self.inputs)) - for i in reversed(range(len(self.inputs))): - builder.PrependUOffsetTRelative(inputslist[i]) - inputs = builder.EndVector() - if self.outputs is not None: - outputslist = [] - for i in range(len(self.outputs)): - outputslist.append(self.outputs[i].Pack(builder)) - SignatureDefStartOutputsVector(builder, len(self.outputs)) - for i in reversed(range(len(self.outputs))): - builder.PrependUOffsetTRelative(outputslist[i]) - outputs = builder.EndVector() - if self.signatureKey is not None: - signatureKey = builder.CreateString(self.signatureKey) - SignatureDefStart(builder) - if self.inputs is not None: - SignatureDefAddInputs(builder, inputs) - if self.outputs is not None: - SignatureDefAddOutputs(builder, outputs) - if self.signatureKey is not None: - SignatureDefAddSignatureKey(builder, signatureKey) - SignatureDefAddSubgraphIndex(builder, self.subgraphIndex) - signatureDef = SignatureDefEnd(builder) - return signatureDef -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + Rfft2dOptionsStart(builder) + rfft2dOptions = Rfft2dOptionsEnd(builder) + return rfft2dOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SkipGramOptions(object): +class HashtableOptions(object): 
__slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SkipGramOptions() + x = HashtableOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSkipGramOptions(cls, buf, offset=0): + def GetRootAsHashtableOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SkipGramOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def HashtableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SkipGramOptions + # HashtableOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SkipGramOptions - def NgramSize(self): + # HashtableOptions + def TableId(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 - # SkipGramOptions - def MaxSkipSize(self): + # HashtableOptions + def KeyDtype(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - # SkipGramOptions - def IncludeAllNgrams(self): + # HashtableOptions + def ValueDtype(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 -def SkipGramOptionsStart(builder): builder.StartObject(3) -def Start(builder): - return SkipGramOptionsStart(builder) -def SkipGramOptionsAddNgramSize(builder, ngramSize): builder.PrependInt32Slot(0, ngramSize, 0) -def AddNgramSize(builder, ngramSize): - return SkipGramOptionsAddNgramSize(builder, ngramSize) -def SkipGramOptionsAddMaxSkipSize(builder, maxSkipSize): builder.PrependInt32Slot(1, maxSkipSize, 0) -def AddMaxSkipSize(builder, maxSkipSize): - return SkipGramOptionsAddMaxSkipSize(builder, maxSkipSize) -def SkipGramOptionsAddIncludeAllNgrams(builder, includeAllNgrams): builder.PrependBoolSlot(2, includeAllNgrams, 0) -def AddIncludeAllNgrams(builder, includeAllNgrams): - return SkipGramOptionsAddIncludeAllNgrams(builder, includeAllNgrams) -def SkipGramOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SkipGramOptionsEnd(builder) +def HashtableOptionsStart(builder): + builder.StartObject(3) -class SkipGramOptionsT(object): +def HashtableOptionsAddTableId(builder, tableId): + builder.PrependInt32Slot(0, tableId, 0) - # SkipGramOptionsT +def HashtableOptionsAddKeyDtype(builder, keyDtype): + builder.PrependInt8Slot(1, keyDtype, 0) + +def HashtableOptionsAddValueDtype(builder, valueDtype): + builder.PrependInt8Slot(2, valueDtype, 0) + +def HashtableOptionsEnd(builder): + return builder.EndObject() + + + +class HashtableOptionsT(object): + + # HashtableOptionsT def __init__(self): - self.ngramSize = 0 # type: int - self.maxSkipSize = 0 # type: int - self.includeAllNgrams = False # type: bool + self.tableId = 0 # type: int + self.keyDtype = 0 # type: int + self.valueDtype = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - skipGramOptions = SkipGramOptions() - skipGramOptions.Init(buf, pos) - return 
cls.InitFromObj(skipGramOptions) + hashtableOptions = HashtableOptions() + hashtableOptions.Init(buf, pos) + return cls.InitFromObj(hashtableOptions) @classmethod - def InitFromObj(cls, skipGramOptions): - x = SkipGramOptionsT() - x._UnPack(skipGramOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, hashtableOptions): + x = HashtableOptionsT() + x._UnPack(hashtableOptions) return x - # SkipGramOptionsT - def _UnPack(self, skipGramOptions): - if skipGramOptions is None: + # HashtableOptionsT + def _UnPack(self, hashtableOptions): + if hashtableOptions is None: return - self.ngramSize = skipGramOptions.NgramSize() - self.maxSkipSize = skipGramOptions.MaxSkipSize() - self.includeAllNgrams = skipGramOptions.IncludeAllNgrams() + self.tableId = hashtableOptions.TableId() + self.keyDtype = hashtableOptions.KeyDtype() + self.valueDtype = hashtableOptions.ValueDtype() - # SkipGramOptionsT + # HashtableOptionsT def Pack(self, builder): - SkipGramOptionsStart(builder) - SkipGramOptionsAddNgramSize(builder, self.ngramSize) - SkipGramOptionsAddMaxSkipSize(builder, self.maxSkipSize) - SkipGramOptionsAddIncludeAllNgrams(builder, self.includeAllNgrams) - skipGramOptions = SkipGramOptionsEnd(builder) - return skipGramOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + HashtableOptionsStart(builder) + HashtableOptionsAddTableId(builder, self.tableId) + HashtableOptionsAddKeyDtype(builder, self.keyDtype) + HashtableOptionsAddValueDtype(builder, self.valueDtype) + hashtableOptions = HashtableOptionsEnd(builder) + return hashtableOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SliceOptions(object): +class HashtableFindOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SliceOptions() + x = HashtableFindOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSliceOptions(cls, buf, offset=0): + def GetRootAsHashtableFindOptions(cls, buf, offset=0): """This method is deprecated. 
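# [example] HashtableOptions carries a table id plus key/value dtypes stored
# as int8 TensorType enum values. A sketch; TensorType is defined elsewhere
# in this generated module, and the dtype values below are assumptions for
# illustration (2 = TensorType.INT32, 0 = TensorType.FLOAT32).
import flatbuffers

t = HashtableOptionsT()
t.tableId = 42
t.keyDtype = 2    # assumed: TensorType.INT32
t.valueDtype = 0  # assumed: TensorType.FLOAT32

b = flatbuffers.Builder(0)
b.Finish(t.Pack(b))
opts = HashtableOptions.GetRootAs(b.Output(), 0)
assert opts.TableId() == 42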
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def HashtableFindOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SliceOptions + # HashtableFindOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SliceOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SliceOptionsStart(builder) -def SliceOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SliceOptionsEnd(builder) +def HashtableFindOptionsStart(builder): + builder.StartObject(0) -class SliceOptionsT(object): +def HashtableFindOptionsEnd(builder): + return builder.EndObject() - # SliceOptionsT + + +class HashtableFindOptionsT(object): + + # HashtableFindOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - sliceOptions = SliceOptions() - sliceOptions.Init(buf, pos) - return cls.InitFromObj(sliceOptions) + hashtableFindOptions = HashtableFindOptions() + hashtableFindOptions.Init(buf, pos) + return cls.InitFromObj(hashtableFindOptions) @classmethod - def InitFromObj(cls, sliceOptions): - x = SliceOptionsT() - x._UnPack(sliceOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, hashtableFindOptions): + x = HashtableFindOptionsT() + x._UnPack(hashtableFindOptions) return x - # SliceOptionsT - def _UnPack(self, sliceOptions): - if sliceOptions is None: + # HashtableFindOptionsT + def _UnPack(self, hashtableFindOptions): + if hashtableFindOptions is None: return - # SliceOptionsT + # HashtableFindOptionsT def Pack(self, builder): - SliceOptionsStart(builder) - sliceOptions = SliceOptionsEnd(builder) - return sliceOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + HashtableFindOptionsStart(builder) + hashtableFindOptions = HashtableFindOptionsEnd(builder) + return hashtableFindOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SoftmaxOptions(object): +class HashtableImportOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SoftmaxOptions() + x = HashtableImportOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSoftmaxOptions(cls, buf, offset=0): + def GetRootAsHashtableImportOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SoftmaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def HashtableImportOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SoftmaxOptions + # HashtableImportOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SoftmaxOptions - def Beta(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 +def HashtableImportOptionsStart(builder): + builder.StartObject(0) -def SoftmaxOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return SoftmaxOptionsStart(builder) -def SoftmaxOptionsAddBeta(builder, beta): builder.PrependFloat32Slot(0, beta, 0.0) -def AddBeta(builder, beta): - return SoftmaxOptionsAddBeta(builder, beta) -def SoftmaxOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SoftmaxOptionsEnd(builder) +def HashtableImportOptionsEnd(builder): + return builder.EndObject() -class SoftmaxOptionsT(object): - # SoftmaxOptionsT + +class HashtableImportOptionsT(object): + + # HashtableImportOptionsT def __init__(self): - self.beta = 0.0 # type: float + pass @classmethod def InitFromBuf(cls, buf, pos): - softmaxOptions = SoftmaxOptions() - softmaxOptions.Init(buf, pos) - return cls.InitFromObj(softmaxOptions) + hashtableImportOptions = HashtableImportOptions() + hashtableImportOptions.Init(buf, pos) + return cls.InitFromObj(hashtableImportOptions) @classmethod - def InitFromObj(cls, softmaxOptions): - x = SoftmaxOptionsT() - x._UnPack(softmaxOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, hashtableImportOptions): + x = HashtableImportOptionsT() + x._UnPack(hashtableImportOptions) return x - # SoftmaxOptionsT - def _UnPack(self, softmaxOptions): - if softmaxOptions is None: + # HashtableImportOptionsT + def _UnPack(self, hashtableImportOptions): + if hashtableImportOptions is None: return - self.beta = softmaxOptions.Beta() - # SoftmaxOptionsT + # HashtableImportOptionsT def Pack(self, builder): - SoftmaxOptionsStart(builder) - SoftmaxOptionsAddBeta(builder, self.beta) - softmaxOptions = SoftmaxOptionsEnd(builder) - return softmaxOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + HashtableImportOptionsStart(builder) + hashtableImportOptions = HashtableImportOptionsEnd(builder) + return hashtableImportOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SpaceToBatchNDOptions(object): +class HashtableSizeOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SpaceToBatchNDOptions() + x = HashtableSizeOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSpaceToBatchNDOptions(cls, buf, offset=0): + def GetRootAsHashtableSizeOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SpaceToBatchNDOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def HashtableSizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SpaceToBatchNDOptions + # HashtableSizeOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SpaceToBatchNDOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SpaceToBatchNDOptionsStart(builder) -def SpaceToBatchNDOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SpaceToBatchNDOptionsEnd(builder) +def HashtableSizeOptionsStart(builder): + builder.StartObject(0) -class SpaceToBatchNDOptionsT(object): +def HashtableSizeOptionsEnd(builder): + return builder.EndObject() - # SpaceToBatchNDOptionsT + + +class HashtableSizeOptionsT(object): + + # HashtableSizeOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - spaceToBatchNdoptions = SpaceToBatchNDOptions() - spaceToBatchNdoptions.Init(buf, pos) - return cls.InitFromObj(spaceToBatchNdoptions) + hashtableSizeOptions = HashtableSizeOptions() + hashtableSizeOptions.Init(buf, pos) + return cls.InitFromObj(hashtableSizeOptions) @classmethod - def InitFromObj(cls, spaceToBatchNdoptions): - x = SpaceToBatchNDOptionsT() - x._UnPack(spaceToBatchNdoptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, hashtableSizeOptions): + x = HashtableSizeOptionsT() + x._UnPack(hashtableSizeOptions) return x - # SpaceToBatchNDOptionsT - def _UnPack(self, spaceToBatchNdoptions): - if spaceToBatchNdoptions is None: + # HashtableSizeOptionsT + def _UnPack(self, hashtableSizeOptions): + if hashtableSizeOptions is None: return - # SpaceToBatchNDOptionsT + # HashtableSizeOptionsT def Pack(self, builder): - SpaceToBatchNDOptionsStart(builder) - spaceToBatchNdoptions = SpaceToBatchNDOptionsEnd(builder) - return spaceToBatchNdoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + HashtableSizeOptionsStart(builder) + hashtableSizeOptions = HashtableSizeOptionsEnd(builder) + return hashtableSizeOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SpaceToDepthOptions(object): +class VarHandleOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SpaceToDepthOptions() + x = VarHandleOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSpaceToDepthOptions(cls, buf, offset=0): + def GetRootAsVarHandleOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SpaceToDepthOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def VarHandleOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SpaceToDepthOptions + # VarHandleOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SpaceToDepthOptions - def BlockSize(self): + # VarHandleOptions + def Container(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 + return self._tab.String(o + self._tab.Pos) + return None + + # VarHandleOptions + def SharedName(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None -def SpaceToDepthOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return SpaceToDepthOptionsStart(builder) -def SpaceToDepthOptionsAddBlockSize(builder, blockSize): builder.PrependInt32Slot(0, blockSize, 0) -def AddBlockSize(builder, blockSize): - return SpaceToDepthOptionsAddBlockSize(builder, blockSize) -def SpaceToDepthOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SpaceToDepthOptionsEnd(builder) +def VarHandleOptionsStart(builder): + builder.StartObject(2) -class SpaceToDepthOptionsT(object): +def VarHandleOptionsAddContainer(builder, container): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(container), 0) - # SpaceToDepthOptionsT +def VarHandleOptionsAddSharedName(builder, sharedName): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(sharedName), 0) + +def VarHandleOptionsEnd(builder): + return builder.EndObject() + + + +class VarHandleOptionsT(object): + + # VarHandleOptionsT def __init__(self): - self.blockSize = 0 # type: int + self.container = None # type: str + self.sharedName = None # type: str @classmethod def InitFromBuf(cls, buf, pos): - spaceToDepthOptions = SpaceToDepthOptions() - spaceToDepthOptions.Init(buf, pos) - return cls.InitFromObj(spaceToDepthOptions) + varHandleOptions = VarHandleOptions() + varHandleOptions.Init(buf, pos) + return cls.InitFromObj(varHandleOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) @classmethod - def InitFromObj(cls, spaceToDepthOptions): - x = SpaceToDepthOptionsT() - x._UnPack(spaceToDepthOptions) + def InitFromObj(cls, varHandleOptions): + x = VarHandleOptionsT() + x._UnPack(varHandleOptions) return x - # SpaceToDepthOptionsT - def _UnPack(self, spaceToDepthOptions): - if spaceToDepthOptions is None: + # VarHandleOptionsT + def _UnPack(self, varHandleOptions): + if varHandleOptions is None: return - self.blockSize = spaceToDepthOptions.BlockSize() + self.container = varHandleOptions.Container() + self.sharedName = varHandleOptions.SharedName() - # SpaceToDepthOptionsT + # VarHandleOptionsT def Pack(self, builder): - SpaceToDepthOptionsStart(builder) - SpaceToDepthOptionsAddBlockSize(builder, self.blockSize) - spaceToDepthOptions = SpaceToDepthOptionsEnd(builder) - return spaceToDepthOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class SparseIndexVector(object): - NONE 
= 0 - Int32Vector = 1 - Uint16Vector = 2 - Uint8Vector = 3 - -def SparseIndexVectorCreator(unionType, table): - from flatbuffers.table import Table - if not isinstance(table, Table): - return None - if unionType == SparseIndexVector().Int32Vector: - return Int32VectorT.InitFromBuf(table.Bytes, table.Pos) - if unionType == SparseIndexVector().Uint16Vector: - return Uint16VectorT.InitFromBuf(table.Bytes, table.Pos) - if unionType == SparseIndexVector().Uint8Vector: - return Uint8VectorT.InitFromBuf(table.Bytes, table.Pos) - return None -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.container is not None: + container = builder.CreateString(self.container) + if self.sharedName is not None: + sharedName = builder.CreateString(self.sharedName) + VarHandleOptionsStart(builder) + if self.container is not None: + VarHandleOptionsAddContainer(builder, container) + if self.sharedName is not None: + VarHandleOptionsAddSharedName(builder, sharedName) + varHandleOptions = VarHandleOptionsEnd(builder) + return varHandleOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SparseToDenseOptions(object): +class ReadVariableOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SparseToDenseOptions() + x = ReadVariableOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSparseToDenseOptions(cls, buf, offset=0): + def GetRootAsReadVariableOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SparseToDenseOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ReadVariableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SparseToDenseOptions + # ReadVariableOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SparseToDenseOptions - def ValidateIndices(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False +def ReadVariableOptionsStart(builder): + builder.StartObject(0) -def SparseToDenseOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return SparseToDenseOptionsStart(builder) -def SparseToDenseOptionsAddValidateIndices(builder, validateIndices): builder.PrependBoolSlot(0, validateIndices, 0) -def AddValidateIndices(builder, validateIndices): - return SparseToDenseOptionsAddValidateIndices(builder, validateIndices) -def SparseToDenseOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SparseToDenseOptionsEnd(builder) +def ReadVariableOptionsEnd(builder): + return builder.EndObject() -class SparseToDenseOptionsT(object): - # SparseToDenseOptionsT + +class ReadVariableOptionsT(object): + + # ReadVariableOptionsT def __init__(self): - self.validateIndices = False # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - sparseToDenseOptions = SparseToDenseOptions() - sparseToDenseOptions.Init(buf, pos) - return cls.InitFromObj(sparseToDenseOptions) + readVariableOptions = ReadVariableOptions() + readVariableOptions.Init(buf, pos) + return cls.InitFromObj(readVariableOptions) @classmethod - def InitFromObj(cls, sparseToDenseOptions): - x = SparseToDenseOptionsT() - 
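# [example] VarHandleOptions has two optional string fields; the generated
# Pack above creates the strings before VarHandleOptionsStart, as the
# FlatBuffers builder requires, and the reader accessors return bytes.
# A sketch only; names assumed imported from the regenerated schema module.
import flatbuffers

t = VarHandleOptionsT()
t.container = "c0"
t.sharedName = "counter"

b = flatbuffers.Builder(0)
b.Finish(t.Pack(b))

opts = VarHandleOptions.GetRootAs(b.Output(), 0)
assert opts.SharedName() == b"counter"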
x._UnPack(sparseToDenseOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, readVariableOptions): + x = ReadVariableOptionsT() + x._UnPack(readVariableOptions) return x - # SparseToDenseOptionsT - def _UnPack(self, sparseToDenseOptions): - if sparseToDenseOptions is None: + # ReadVariableOptionsT + def _UnPack(self, readVariableOptions): + if readVariableOptions is None: return - self.validateIndices = sparseToDenseOptions.ValidateIndices() - # SparseToDenseOptionsT + # ReadVariableOptionsT def Pack(self, builder): - SparseToDenseOptionsStart(builder) - SparseToDenseOptionsAddValidateIndices(builder, self.validateIndices) - sparseToDenseOptions = SparseToDenseOptionsEnd(builder) - return sparseToDenseOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ReadVariableOptionsStart(builder) + readVariableOptions = ReadVariableOptionsEnd(builder) + return readVariableOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SparsityParameters(object): +class AssignVariableOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SparsityParameters() + x = AssignVariableOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSparsityParameters(cls, buf, offset=0): + def GetRootAsAssignVariableOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SparsityParametersBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def AssignVariableOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SparsityParameters + # AssignVariableOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SparsityParameters - def TraversalOrder(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # SparsityParameters - def TraversalOrderAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # SparsityParameters - def TraversalOrderLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SparsityParameters - def TraversalOrderIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - # SparsityParameters - def BlockMap(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # SparsityParameters - def BlockMapAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # SparsityParameters - def BlockMapLength(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SparsityParameters - def BlockMapIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - return o == 0 - - # SparsityParameters - def DimMetadata(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = DimensionMetadata() - obj.Init(self._tab.Bytes, x) - return obj - return None +def AssignVariableOptionsStart(builder): + builder.StartObject(0) - # SparsityParameters - def DimMetadataLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 +def AssignVariableOptionsEnd(builder): + return builder.EndObject() - # SparsityParameters - def DimMetadataIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - return o == 0 -def SparsityParametersStart(builder): builder.StartObject(3) -def Start(builder): - return SparsityParametersStart(builder) -def SparsityParametersAddTraversalOrder(builder, traversalOrder): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(traversalOrder), 0) -def AddTraversalOrder(builder, traversalOrder): - return SparsityParametersAddTraversalOrder(builder, traversalOrder) -def SparsityParametersStartTraversalOrderVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartTraversalOrderVector(builder, numElems): - return SparsityParametersStartTraversalOrderVector(builder, numElems) -def SparsityParametersAddBlockMap(builder, blockMap): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(blockMap), 0) -def AddBlockMap(builder, blockMap): - return SparsityParametersAddBlockMap(builder, blockMap) -def SparsityParametersStartBlockMapVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartBlockMapVector(builder, numElems): - return SparsityParametersStartBlockMapVector(builder, numElems) -def SparsityParametersAddDimMetadata(builder, dimMetadata): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(dimMetadata), 0) -def AddDimMetadata(builder, dimMetadata): - return SparsityParametersAddDimMetadata(builder, dimMetadata) -def SparsityParametersStartDimMetadataVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartDimMetadataVector(builder, numElems): - return SparsityParametersStartDimMetadataVector(builder, numElems) -def SparsityParametersEnd(builder): return builder.EndObject() -def End(builder): - return SparsityParametersEnd(builder) -try: - from typing import List -except: - pass -class SparsityParametersT(object): +class AssignVariableOptionsT(object): - # SparsityParametersT + # AssignVariableOptionsT def __init__(self): - self.traversalOrder = None # type: List[int] - self.blockMap = None # type: List[int] - self.dimMetadata = None # type: List[DimensionMetadataT] + pass @classmethod def InitFromBuf(cls, buf, pos): - sparsityParameters = SparsityParameters() - sparsityParameters.Init(buf, pos) - return cls.InitFromObj(sparsityParameters) + assignVariableOptions = AssignVariableOptions() + assignVariableOptions.Init(buf, pos) + return cls.InitFromObj(assignVariableOptions) @classmethod - def InitFromObj(cls, sparsityParameters): - x = SparsityParametersT() - 
x._UnPack(sparsityParameters) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, assignVariableOptions): + x = AssignVariableOptionsT() + x._UnPack(assignVariableOptions) return x - # SparsityParametersT - def _UnPack(self, sparsityParameters): - if sparsityParameters is None: - return - if not sparsityParameters.TraversalOrderIsNone(): - if np is None: - self.traversalOrder = [] - for i in range(sparsityParameters.TraversalOrderLength()): - self.traversalOrder.append(sparsityParameters.TraversalOrder(i)) - else: - self.traversalOrder = sparsityParameters.TraversalOrderAsNumpy() - if not sparsityParameters.BlockMapIsNone(): - if np is None: - self.blockMap = [] - for i in range(sparsityParameters.BlockMapLength()): - self.blockMap.append(sparsityParameters.BlockMap(i)) - else: - self.blockMap = sparsityParameters.BlockMapAsNumpy() - if not sparsityParameters.DimMetadataIsNone(): - self.dimMetadata = [] - for i in range(sparsityParameters.DimMetadataLength()): - if sparsityParameters.DimMetadata(i) is None: - self.dimMetadata.append(None) - else: - dimensionMetadata_ = DimensionMetadataT.InitFromObj(sparsityParameters.DimMetadata(i)) - self.dimMetadata.append(dimensionMetadata_) + # AssignVariableOptionsT + def _UnPack(self, assignVariableOptions): + if assignVariableOptions is None: + return - # SparsityParametersT + # AssignVariableOptionsT def Pack(self, builder): - if self.traversalOrder is not None: - if np is not None and type(self.traversalOrder) is np.ndarray: - traversalOrder = builder.CreateNumpyVector(self.traversalOrder) - else: - SparsityParametersStartTraversalOrderVector(builder, len(self.traversalOrder)) - for i in reversed(range(len(self.traversalOrder))): - builder.PrependInt32(self.traversalOrder[i]) - traversalOrder = builder.EndVector() - if self.blockMap is not None: - if np is not None and type(self.blockMap) is np.ndarray: - blockMap = builder.CreateNumpyVector(self.blockMap) - else: - SparsityParametersStartBlockMapVector(builder, len(self.blockMap)) - for i in reversed(range(len(self.blockMap))): - builder.PrependInt32(self.blockMap[i]) - blockMap = builder.EndVector() - if self.dimMetadata is not None: - dimMetadatalist = [] - for i in range(len(self.dimMetadata)): - dimMetadatalist.append(self.dimMetadata[i].Pack(builder)) - SparsityParametersStartDimMetadataVector(builder, len(self.dimMetadata)) - for i in reversed(range(len(self.dimMetadata))): - builder.PrependUOffsetTRelative(dimMetadatalist[i]) - dimMetadata = builder.EndVector() - SparsityParametersStart(builder) - if self.traversalOrder is not None: - SparsityParametersAddTraversalOrder(builder, traversalOrder) - if self.blockMap is not None: - SparsityParametersAddBlockMap(builder, blockMap) - if self.dimMetadata is not None: - SparsityParametersAddDimMetadata(builder, dimMetadata) - sparsityParameters = SparsityParametersEnd(builder) - return sparsityParameters -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + AssignVariableOptionsStart(builder) + assignVariableOptions = AssignVariableOptionsEnd(builder) + return assignVariableOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SplitOptions(object): +class RandomOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SplitOptions() + x = 
RandomOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSplitOptions(cls, buf, offset=0): + def GetRootAsRandomOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SplitOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def RandomOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SplitOptions + # RandomOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SplitOptions - def NumSplits(self): + # RandomOptions + def Seed(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) + return 0 + + # RandomOptions + def Seed2(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos) return 0 -def SplitOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return SplitOptionsStart(builder) -def SplitOptionsAddNumSplits(builder, numSplits): builder.PrependInt32Slot(0, numSplits, 0) -def AddNumSplits(builder, numSplits): - return SplitOptionsAddNumSplits(builder, numSplits) -def SplitOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SplitOptionsEnd(builder) +def RandomOptionsStart(builder): + builder.StartObject(2) -class SplitOptionsT(object): +def RandomOptionsAddSeed(builder, seed): + builder.PrependInt64Slot(0, seed, 0) - # SplitOptionsT +def RandomOptionsAddSeed2(builder, seed2): + builder.PrependInt64Slot(1, seed2, 0) + +def RandomOptionsEnd(builder): + return builder.EndObject() + + + +class RandomOptionsT(object): + + # RandomOptionsT def __init__(self): - self.numSplits = 0 # type: int + self.seed = 0 # type: int + self.seed2 = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - splitOptions = SplitOptions() - splitOptions.Init(buf, pos) - return cls.InitFromObj(splitOptions) + randomOptions = RandomOptions() + randomOptions.Init(buf, pos) + return cls.InitFromObj(randomOptions) @classmethod - def InitFromObj(cls, splitOptions): - x = SplitOptionsT() - x._UnPack(splitOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, randomOptions): + x = RandomOptionsT() + x._UnPack(randomOptions) return x - # SplitOptionsT - def _UnPack(self, splitOptions): - if splitOptions is None: + # RandomOptionsT + def _UnPack(self, randomOptions): + if randomOptions is None: return - self.numSplits = splitOptions.NumSplits() + self.seed = randomOptions.Seed() + self.seed2 = randomOptions.Seed2() - # SplitOptionsT + # RandomOptionsT def Pack(self, builder): - SplitOptionsStart(builder) - SplitOptionsAddNumSplits(builder, self.numSplits) - splitOptions = SplitOptionsEnd(builder) - return splitOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + RandomOptionsStart(builder) + RandomOptionsAddSeed(builder, self.seed) + RandomOptionsAddSeed2(builder, self.seed2) + randomOptions = RandomOptionsEnd(builder) + return randomOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class 
SplitVOptions(object): +class BucketizeOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SplitVOptions() + x = BucketizeOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSplitVOptions(cls, buf, offset=0): + def GetRootAsBucketizeOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SplitVOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def BucketizeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SplitVOptions + # BucketizeOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SplitVOptions - def NumSplits(self): + # BucketizeOptions + def Boundaries(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Float32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 -def SplitVOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return SplitVOptionsStart(builder) -def SplitVOptionsAddNumSplits(builder, numSplits): builder.PrependInt32Slot(0, numSplits, 0) -def AddNumSplits(builder, numSplits): - return SplitVOptionsAddNumSplits(builder, numSplits) -def SplitVOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SplitVOptionsEnd(builder) + # BucketizeOptions + def BoundariesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Float32Flags, o) + return 0 -class SplitVOptionsT(object): + # BucketizeOptions + def BoundariesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 - # SplitVOptionsT + # BucketizeOptions + def BoundariesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + +def BucketizeOptionsStart(builder): + builder.StartObject(1) + +def BucketizeOptionsAddBoundaries(builder, boundaries): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(boundaries), 0) + +def BucketizeOptionsStartBoundariesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def BucketizeOptionsEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class BucketizeOptionsT(object): + + # BucketizeOptionsT def __init__(self): - self.numSplits = 0 # type: int + self.boundaries = None # type: List[float] @classmethod def InitFromBuf(cls, buf, pos): - splitVoptions = SplitVOptions() - splitVoptions.Init(buf, pos) - return cls.InitFromObj(splitVoptions) + bucketizeOptions = BucketizeOptions() + bucketizeOptions.Init(buf, pos) + return cls.InitFromObj(bucketizeOptions) @classmethod - def InitFromObj(cls, splitVoptions): - x = SplitVOptionsT() - x._UnPack(splitVoptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, bucketizeOptions): + x = BucketizeOptionsT() + x._UnPack(bucketizeOptions) return x - # 
SplitVOptionsT - def _UnPack(self, splitVoptions): - if splitVoptions is None: + # BucketizeOptionsT + def _UnPack(self, bucketizeOptions): + if bucketizeOptions is None: return - self.numSplits = splitVoptions.NumSplits() + if not bucketizeOptions.BoundariesIsNone(): + if np is None: + self.boundaries = [] + for i in range(bucketizeOptions.BoundariesLength()): + self.boundaries.append(bucketizeOptions.Boundaries(i)) + else: + self.boundaries = bucketizeOptions.BoundariesAsNumpy() - # SplitVOptionsT + # BucketizeOptionsT def Pack(self, builder): - SplitVOptionsStart(builder) - SplitVOptionsAddNumSplits(builder, self.numSplits) - splitVoptions = SplitVOptionsEnd(builder) - return splitVoptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.boundaries is not None: + if np is not None and type(self.boundaries) is np.ndarray: + boundaries = builder.CreateNumpyVector(self.boundaries) + else: + BucketizeOptionsStartBoundariesVector(builder, len(self.boundaries)) + for i in reversed(range(len(self.boundaries))): + builder.PrependFloat32(self.boundaries[i]) + boundaries = builder.EndVector() + BucketizeOptionsStart(builder) + if self.boundaries is not None: + BucketizeOptionsAddBoundaries(builder, boundaries) + bucketizeOptions = BucketizeOptionsEnd(builder) + return bucketizeOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SquareOptions(object): +class GeluOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SquareOptions() + x = GeluOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSquareOptions(cls, buf, offset=0): + def GetRootAsGeluOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SquareOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def GeluOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SquareOptions + # GeluOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SquareOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SquareOptionsStart(builder) -def SquareOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SquareOptionsEnd(builder) + # GeluOptions + def Approximate(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) + return False -class SquareOptionsT(object): +def GeluOptionsStart(builder): + builder.StartObject(1) - # SquareOptionsT +def GeluOptionsAddApproximate(builder, approximate): + builder.PrependBoolSlot(0, approximate, 0) + +def GeluOptionsEnd(builder): + return builder.EndObject() + + + +class GeluOptionsT(object): + + # GeluOptionsT def __init__(self): - pass + self.approximate = False # type: bool @classmethod def InitFromBuf(cls, buf, pos): - squareOptions = SquareOptions() - squareOptions.Init(buf, pos) - return cls.InitFromObj(squareOptions) + geluOptions = GeluOptions() + geluOptions.Init(buf, pos) + return cls.InitFromObj(geluOptions) + + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) @classmethod - def InitFromObj(cls, squareOptions): - x = SquareOptionsT() - x._UnPack(squareOptions) + def InitFromObj(cls, geluOptions): + x = GeluOptionsT() + x._UnPack(geluOptions) return x - # SquareOptionsT - def _UnPack(self, squareOptions): - if squareOptions is None: + # GeluOptionsT + def _UnPack(self, geluOptions): + if geluOptions is None: return + self.approximate = geluOptions.Approximate() - # SquareOptionsT + # GeluOptionsT def Pack(self, builder): - SquareOptionsStart(builder) - squareOptions = SquareOptionsEnd(builder) - return squareOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + GeluOptionsStart(builder) + GeluOptionsAddApproximate(builder, self.approximate) + geluOptions = GeluOptionsEnd(builder) + return geluOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SquaredDifferenceOptions(object): +class DynamicUpdateSliceOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SquaredDifferenceOptions() + x = DynamicUpdateSliceOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSquaredDifferenceOptions(cls, buf, offset=0): + def GetRootAsDynamicUpdateSliceOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SquaredDifferenceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def DynamicUpdateSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SquaredDifferenceOptions + # DynamicUpdateSliceOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def SquaredDifferenceOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return SquaredDifferenceOptionsStart(builder) -def SquaredDifferenceOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SquaredDifferenceOptionsEnd(builder) +def DynamicUpdateSliceOptionsStart(builder): + builder.StartObject(0) -class SquaredDifferenceOptionsT(object): +def DynamicUpdateSliceOptionsEnd(builder): + return builder.EndObject() - # SquaredDifferenceOptionsT + + +class DynamicUpdateSliceOptionsT(object): + + # DynamicUpdateSliceOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - squaredDifferenceOptions = SquaredDifferenceOptions() - squaredDifferenceOptions.Init(buf, pos) - return cls.InitFromObj(squaredDifferenceOptions) + dynamicUpdateSliceOptions = DynamicUpdateSliceOptions() + dynamicUpdateSliceOptions.Init(buf, pos) + return cls.InitFromObj(dynamicUpdateSliceOptions) @classmethod - def InitFromObj(cls, squaredDifferenceOptions): - x = SquaredDifferenceOptionsT() - x._UnPack(squaredDifferenceOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, dynamicUpdateSliceOptions): + x = DynamicUpdateSliceOptionsT() + x._UnPack(dynamicUpdateSliceOptions) return x - # SquaredDifferenceOptionsT - def _UnPack(self, squaredDifferenceOptions): - if squaredDifferenceOptions is None: + # DynamicUpdateSliceOptionsT + def _UnPack(self, dynamicUpdateSliceOptions): + if dynamicUpdateSliceOptions is None: return - # SquaredDifferenceOptionsT + # DynamicUpdateSliceOptionsT def Pack(self, builder): - SquaredDifferenceOptionsStart(builder) - squaredDifferenceOptions = SquaredDifferenceOptionsEnd(builder) - return squaredDifferenceOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + DynamicUpdateSliceOptionsStart(builder) + dynamicUpdateSliceOptions = DynamicUpdateSliceOptionsEnd(builder) + return dynamicUpdateSliceOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SqueezeOptions(object): +class UnsortedSegmentProdOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SqueezeOptions() + x = UnsortedSegmentProdOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSqueezeOptions(cls, buf, offset=0): + def GetRootAsUnsortedSegmentProdOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SqueezeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def UnsortedSegmentProdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SqueezeOptions + # UnsortedSegmentProdOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SqueezeOptions - def SqueezeDims(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # SqueezeOptions - def SqueezeDimsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 +def UnsortedSegmentProdOptionsStart(builder): + builder.StartObject(0) - # SqueezeOptions - def SqueezeDimsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 +def UnsortedSegmentProdOptionsEnd(builder): + return builder.EndObject() - # SqueezeOptions - def SqueezeDimsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 -def SqueezeOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return SqueezeOptionsStart(builder) -def SqueezeOptionsAddSqueezeDims(builder, squeezeDims): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(squeezeDims), 0) -def AddSqueezeDims(builder, squeezeDims): - return SqueezeOptionsAddSqueezeDims(builder, squeezeDims) -def SqueezeOptionsStartSqueezeDimsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartSqueezeDimsVector(builder, numElems): - return SqueezeOptionsStartSqueezeDimsVector(builder, numElems) -def SqueezeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SqueezeOptionsEnd(builder) -try: - from typing import List -except: - pass -class SqueezeOptionsT(object): +class UnsortedSegmentProdOptionsT(object): - # SqueezeOptionsT + # UnsortedSegmentProdOptionsT def __init__(self): - self.squeezeDims = None # type: List[int] + pass @classmethod def InitFromBuf(cls, buf, pos): - squeezeOptions = SqueezeOptions() - squeezeOptions.Init(buf, pos) - return cls.InitFromObj(squeezeOptions) + unsortedSegmentProdOptions = UnsortedSegmentProdOptions() + unsortedSegmentProdOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentProdOptions) @classmethod - def InitFromObj(cls, squeezeOptions): - x = SqueezeOptionsT() - x._UnPack(squeezeOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, unsortedSegmentProdOptions): + x = UnsortedSegmentProdOptionsT() + x._UnPack(unsortedSegmentProdOptions) return x - # SqueezeOptionsT - def _UnPack(self, squeezeOptions): - if squeezeOptions is None: + # UnsortedSegmentProdOptionsT + def _UnPack(self, unsortedSegmentProdOptions): + if unsortedSegmentProdOptions is None: return - if not squeezeOptions.SqueezeDimsIsNone(): - if np is None: - self.squeezeDims = [] - for i in range(squeezeOptions.SqueezeDimsLength()): - self.squeezeDims.append(squeezeOptions.SqueezeDims(i)) - 
else: - self.squeezeDims = squeezeOptions.SqueezeDimsAsNumpy() - # SqueezeOptionsT + # UnsortedSegmentProdOptionsT def Pack(self, builder): - if self.squeezeDims is not None: - if np is not None and type(self.squeezeDims) is np.ndarray: - squeezeDims = builder.CreateNumpyVector(self.squeezeDims) - else: - SqueezeOptionsStartSqueezeDimsVector(builder, len(self.squeezeDims)) - for i in reversed(range(len(self.squeezeDims))): - builder.PrependInt32(self.squeezeDims[i]) - squeezeDims = builder.EndVector() - SqueezeOptionsStart(builder) - if self.squeezeDims is not None: - SqueezeOptionsAddSqueezeDims(builder, squeezeDims) - squeezeOptions = SqueezeOptionsEnd(builder) - return squeezeOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + UnsortedSegmentProdOptionsStart(builder) + unsortedSegmentProdOptions = UnsortedSegmentProdOptionsEnd(builder) + return unsortedSegmentProdOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class StridedSliceOptions(object): +class UnsortedSegmentMaxOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = StridedSliceOptions() + x = UnsortedSegmentMaxOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsStridedSliceOptions(cls, buf, offset=0): + def GetRootAsUnsortedSegmentMaxOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def StridedSliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def UnsortedSegmentMaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # StridedSliceOptions + # UnsortedSegmentMaxOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # StridedSliceOptions - def BeginMask(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # StridedSliceOptions - def EndMask(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # StridedSliceOptions - def EllipsisMask(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 - - # StridedSliceOptions - def NewAxisMask(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 +def UnsortedSegmentMaxOptionsStart(builder): + builder.StartObject(0) - # StridedSliceOptions - def ShrinkAxisMask(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 +def UnsortedSegmentMaxOptionsEnd(builder): + return builder.EndObject() - # StridedSliceOptions - def Offset(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False -def StridedSliceOptionsStart(builder): builder.StartObject(6) -def 
Start(builder): - return StridedSliceOptionsStart(builder) -def StridedSliceOptionsAddBeginMask(builder, beginMask): builder.PrependInt32Slot(0, beginMask, 0) -def AddBeginMask(builder, beginMask): - return StridedSliceOptionsAddBeginMask(builder, beginMask) -def StridedSliceOptionsAddEndMask(builder, endMask): builder.PrependInt32Slot(1, endMask, 0) -def AddEndMask(builder, endMask): - return StridedSliceOptionsAddEndMask(builder, endMask) -def StridedSliceOptionsAddEllipsisMask(builder, ellipsisMask): builder.PrependInt32Slot(2, ellipsisMask, 0) -def AddEllipsisMask(builder, ellipsisMask): - return StridedSliceOptionsAddEllipsisMask(builder, ellipsisMask) -def StridedSliceOptionsAddNewAxisMask(builder, newAxisMask): builder.PrependInt32Slot(3, newAxisMask, 0) -def AddNewAxisMask(builder, newAxisMask): - return StridedSliceOptionsAddNewAxisMask(builder, newAxisMask) -def StridedSliceOptionsAddShrinkAxisMask(builder, shrinkAxisMask): builder.PrependInt32Slot(4, shrinkAxisMask, 0) -def AddShrinkAxisMask(builder, shrinkAxisMask): - return StridedSliceOptionsAddShrinkAxisMask(builder, shrinkAxisMask) -def StridedSliceOptionsAddOffset(builder, offset): builder.PrependBoolSlot(5, offset, 0) -def AddOffset(builder, offset): - return StridedSliceOptionsAddOffset(builder, offset) -def StridedSliceOptionsEnd(builder): return builder.EndObject() -def End(builder): - return StridedSliceOptionsEnd(builder) -class StridedSliceOptionsT(object): +class UnsortedSegmentMaxOptionsT(object): - # StridedSliceOptionsT + # UnsortedSegmentMaxOptionsT def __init__(self): - self.beginMask = 0 # type: int - self.endMask = 0 # type: int - self.ellipsisMask = 0 # type: int - self.newAxisMask = 0 # type: int - self.shrinkAxisMask = 0 # type: int - self.offset = False # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - stridedSliceOptions = StridedSliceOptions() - stridedSliceOptions.Init(buf, pos) - return cls.InitFromObj(stridedSliceOptions) + unsortedSegmentMaxOptions = UnsortedSegmentMaxOptions() + unsortedSegmentMaxOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentMaxOptions) @classmethod - def InitFromObj(cls, stridedSliceOptions): - x = StridedSliceOptionsT() - x._UnPack(stridedSliceOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, unsortedSegmentMaxOptions): + x = UnsortedSegmentMaxOptionsT() + x._UnPack(unsortedSegmentMaxOptions) return x - # StridedSliceOptionsT - def _UnPack(self, stridedSliceOptions): - if stridedSliceOptions is None: - return - self.beginMask = stridedSliceOptions.BeginMask() - self.endMask = stridedSliceOptions.EndMask() - self.ellipsisMask = stridedSliceOptions.EllipsisMask() - self.newAxisMask = stridedSliceOptions.NewAxisMask() - self.shrinkAxisMask = stridedSliceOptions.ShrinkAxisMask() - self.offset = stridedSliceOptions.Offset() + # UnsortedSegmentMaxOptionsT + def _UnPack(self, unsortedSegmentMaxOptions): + if unsortedSegmentMaxOptions is None: + return - # StridedSliceOptionsT + # UnsortedSegmentMaxOptionsT def Pack(self, builder): - StridedSliceOptionsStart(builder) - StridedSliceOptionsAddBeginMask(builder, self.beginMask) - StridedSliceOptionsAddEndMask(builder, self.endMask) - StridedSliceOptionsAddEllipsisMask(builder, self.ellipsisMask) - StridedSliceOptionsAddNewAxisMask(builder, self.newAxisMask) - StridedSliceOptionsAddShrinkAxisMask(builder, self.shrinkAxisMask) - StridedSliceOptionsAddOffset(builder, 
self.offset) - stridedSliceOptions = StridedSliceOptionsEnd(builder) - return stridedSliceOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + UnsortedSegmentMaxOptionsStart(builder) + unsortedSegmentMaxOptions = UnsortedSegmentMaxOptionsEnd(builder) + return unsortedSegmentMaxOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SubGraph(object): +class UnsortedSegmentSumOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SubGraph() + x = UnsortedSegmentSumOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSubGraph(cls, buf, offset=0): + def GetRootAsUnsortedSegmentSumOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SubGraphBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def UnsortedSegmentSumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SubGraph + # UnsortedSegmentSumOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SubGraph - def Tensors(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = Tensor() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # SubGraph - def TensorsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SubGraph - def TensorsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - # SubGraph - def Inputs(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # SubGraph - def InputsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # SubGraph - def InputsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SubGraph - def InputsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - return o == 0 - - # SubGraph - def Outputs(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # SubGraph - def OutputsAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # SubGraph - def OutputsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # SubGraph - def OutputsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - return o == 0 - - # SubGraph - def 
Operators(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = Operator() - obj.Init(self._tab.Bytes, x) - return obj - return None - - # SubGraph - def OperatorsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.VectorLen(o) - return 0 +def UnsortedSegmentSumOptionsStart(builder): + builder.StartObject(0) - # SubGraph - def OperatorsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - return o == 0 +def UnsortedSegmentSumOptionsEnd(builder): + return builder.EndObject() - # SubGraph - def Name(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None -def SubGraphStart(builder): builder.StartObject(5) -def Start(builder): - return SubGraphStart(builder) -def SubGraphAddTensors(builder, tensors): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(tensors), 0) -def AddTensors(builder, tensors): - return SubGraphAddTensors(builder, tensors) -def SubGraphStartTensorsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartTensorsVector(builder, numElems): - return SubGraphStartTensorsVector(builder, numElems) -def SubGraphAddInputs(builder, inputs): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) -def AddInputs(builder, inputs): - return SubGraphAddInputs(builder, inputs) -def SubGraphStartInputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartInputsVector(builder, numElems): - return SubGraphStartInputsVector(builder, numElems) -def SubGraphAddOutputs(builder, outputs): builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) -def AddOutputs(builder, outputs): - return SubGraphAddOutputs(builder, outputs) -def SubGraphStartOutputsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartOutputsVector(builder, numElems): - return SubGraphStartOutputsVector(builder, numElems) -def SubGraphAddOperators(builder, operators): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(operators), 0) -def AddOperators(builder, operators): - return SubGraphAddOperators(builder, operators) -def SubGraphStartOperatorsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartOperatorsVector(builder, numElems): - return SubGraphStartOperatorsVector(builder, numElems) -def SubGraphAddName(builder, name): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def AddName(builder, name): - return SubGraphAddName(builder, name) -def SubGraphEnd(builder): return builder.EndObject() -def End(builder): - return SubGraphEnd(builder) -try: - from typing import List -except: - pass -class SubGraphT(object): +class UnsortedSegmentSumOptionsT(object): - # SubGraphT + # UnsortedSegmentSumOptionsT def __init__(self): - self.tensors = None # type: List[TensorT] - self.inputs = None # type: List[int] - self.outputs = None # type: List[int] - self.operators = None # type: List[OperatorT] - self.name = None # type: str + pass @classmethod def InitFromBuf(cls, buf, pos): - subGraph = SubGraph() - subGraph.Init(buf, pos) - return cls.InitFromObj(subGraph) + 
unsortedSegmentSumOptions = UnsortedSegmentSumOptions() + unsortedSegmentSumOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentSumOptions) @classmethod - def InitFromObj(cls, subGraph): - x = SubGraphT() - x._UnPack(subGraph) - return x + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) - # SubGraphT - def _UnPack(self, subGraph): - if subGraph is None: - return - if not subGraph.TensorsIsNone(): - self.tensors = [] - for i in range(subGraph.TensorsLength()): - if subGraph.Tensors(i) is None: - self.tensors.append(None) - else: - tensor_ = TensorT.InitFromObj(subGraph.Tensors(i)) - self.tensors.append(tensor_) - if not subGraph.InputsIsNone(): - if np is None: - self.inputs = [] - for i in range(subGraph.InputsLength()): - self.inputs.append(subGraph.Inputs(i)) - else: - self.inputs = subGraph.InputsAsNumpy() - if not subGraph.OutputsIsNone(): - if np is None: - self.outputs = [] - for i in range(subGraph.OutputsLength()): - self.outputs.append(subGraph.Outputs(i)) - else: - self.outputs = subGraph.OutputsAsNumpy() - if not subGraph.OperatorsIsNone(): - self.operators = [] - for i in range(subGraph.OperatorsLength()): - if subGraph.Operators(i) is None: - self.operators.append(None) - else: - operator_ = OperatorT.InitFromObj(subGraph.Operators(i)) - self.operators.append(operator_) - self.name = subGraph.Name() + @classmethod + def InitFromObj(cls, unsortedSegmentSumOptions): + x = UnsortedSegmentSumOptionsT() + x._UnPack(unsortedSegmentSumOptions) + return x - # SubGraphT - def Pack(self, builder): - if self.tensors is not None: - tensorslist = [] - for i in range(len(self.tensors)): - tensorslist.append(self.tensors[i].Pack(builder)) - SubGraphStartTensorsVector(builder, len(self.tensors)) - for i in reversed(range(len(self.tensors))): - builder.PrependUOffsetTRelative(tensorslist[i]) - tensors = builder.EndVector() - if self.inputs is not None: - if np is not None and type(self.inputs) is np.ndarray: - inputs = builder.CreateNumpyVector(self.inputs) - else: - SubGraphStartInputsVector(builder, len(self.inputs)) - for i in reversed(range(len(self.inputs))): - builder.PrependInt32(self.inputs[i]) - inputs = builder.EndVector() - if self.outputs is not None: - if np is not None and type(self.outputs) is np.ndarray: - outputs = builder.CreateNumpyVector(self.outputs) - else: - SubGraphStartOutputsVector(builder, len(self.outputs)) - for i in reversed(range(len(self.outputs))): - builder.PrependInt32(self.outputs[i]) - outputs = builder.EndVector() - if self.operators is not None: - operatorslist = [] - for i in range(len(self.operators)): - operatorslist.append(self.operators[i].Pack(builder)) - SubGraphStartOperatorsVector(builder, len(self.operators)) - for i in reversed(range(len(self.operators))): - builder.PrependUOffsetTRelative(operatorslist[i]) - operators = builder.EndVector() - if self.name is not None: - name = builder.CreateString(self.name) - SubGraphStart(builder) - if self.tensors is not None: - SubGraphAddTensors(builder, tensors) - if self.inputs is not None: - SubGraphAddInputs(builder, inputs) - if self.outputs is not None: - SubGraphAddOutputs(builder, outputs) - if self.operators is not None: - SubGraphAddOperators(builder, operators) - if self.name is not None: - SubGraphAddName(builder, name) - subGraph = SubGraphEnd(builder) - return subGraph -# automatically generated by the FlatBuffers compiler, do not modify + # UnsortedSegmentSumOptionsT + def _UnPack(self, 
unsortedSegmentSumOptions): + if unsortedSegmentSumOptions is None: + return -# namespace: tflite + # UnsortedSegmentSumOptionsT + def Pack(self, builder): + UnsortedSegmentSumOptionsStart(builder) + unsortedSegmentSumOptions = UnsortedSegmentSumOptionsEnd(builder) + return unsortedSegmentSumOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class SubOptions(object): +class ATan2Options(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = SubOptions() + x = ATan2Options() x.Init(buf, n + offset) return x @classmethod - def GetRootAsSubOptions(cls, buf, offset=0): + def GetRootAsATan2Options(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def SubOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ATan2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # SubOptions + # ATan2Options def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # SubOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 +def ATan2OptionsStart(builder): + builder.StartObject(0) - # SubOptions - def PotScaleInt16(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return True +def ATan2OptionsEnd(builder): + return builder.EndObject() -def SubOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return SubOptionsStart(builder) -def SubOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return SubOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def SubOptionsAddPotScaleInt16(builder, potScaleInt16): builder.PrependBoolSlot(1, potScaleInt16, 1) -def AddPotScaleInt16(builder, potScaleInt16): - return SubOptionsAddPotScaleInt16(builder, potScaleInt16) -def SubOptionsEnd(builder): return builder.EndObject() -def End(builder): - return SubOptionsEnd(builder) -class SubOptionsT(object): - # SubOptionsT +class ATan2OptionsT(object): + + # ATan2OptionsT def __init__(self): - self.fusedActivationFunction = 0 # type: int - self.potScaleInt16 = True # type: bool + pass @classmethod def InitFromBuf(cls, buf, pos): - subOptions = SubOptions() - subOptions.Init(buf, pos) - return cls.InitFromObj(subOptions) + atan2Options = ATan2Options() + atan2Options.Init(buf, pos) + return cls.InitFromObj(atan2Options) @classmethod - def InitFromObj(cls, subOptions): - x = SubOptionsT() - x._UnPack(subOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, atan2Options): + x = ATan2OptionsT() + x._UnPack(atan2Options) return x - # SubOptionsT - def _UnPack(self, subOptions): - if subOptions is None: + # ATan2OptionsT + def _UnPack(self, atan2Options): + if atan2Options is None: return - self.fusedActivationFunction = subOptions.FusedActivationFunction() - self.potScaleInt16 = 
subOptions.PotScaleInt16() - # SubOptionsT + # ATan2OptionsT def Pack(self, builder): - SubOptionsStart(builder) - SubOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - SubOptionsAddPotScaleInt16(builder, self.potScaleInt16) - subOptions = SubOptionsEnd(builder) - return subOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ATan2OptionsStart(builder) + atan2Options = ATan2OptionsEnd(builder) + return atan2Options -from flatbuffers.compat import import_numpy -np = import_numpy() -class Tensor(object): +class UnsortedSegmentMinOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Tensor() + x = UnsortedSegmentMinOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsTensor(cls, buf, offset=0): + def GetRootAsUnsortedSegmentMinOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def TensorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def UnsortedSegmentMinOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Tensor + # UnsortedSegmentMinOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # Tensor - def Shape(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # Tensor - def ShapeAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 - - # Tensor - def ShapeLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - - # Tensor - def ShapeIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 - - # Tensor - def Type(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # Tensor - def Buffer(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) - return 0 +def UnsortedSegmentMinOptionsStart(builder): + builder.StartObject(0) - # Tensor - def Name(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None +def UnsortedSegmentMinOptionsEnd(builder): + return builder.EndObject() - # Tensor - def Quantization(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) - if o != 0: - x = self._tab.Indirect(o + self._tab.Pos) - obj = QuantizationParameters() - obj.Init(self._tab.Bytes, x) - return obj - return None - # Tensor - def IsVariable(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - # Tensor - def Sparsity(self): - o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) - if o != 0: - x = self._tab.Indirect(o + self._tab.Pos) - obj = SparsityParameters() - obj.Init(self._tab.Bytes, x) - return obj - return None +class UnsortedSegmentMinOptionsT(object): - # Tensor - def ShapeSignature(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 + # UnsortedSegmentMinOptionsT + def __init__(self): + pass - # Tensor - def ShapeSignatureAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 + @classmethod + def InitFromBuf(cls, buf, pos): + unsortedSegmentMinOptions = UnsortedSegmentMinOptions() + unsortedSegmentMinOptions.Init(buf, pos) + return cls.InitFromObj(unsortedSegmentMinOptions) - # Tensor - def ShapeSignatureLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - if o != 0: - return self._tab.VectorLen(o) - return 0 + @classmethod + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) - # Tensor - def ShapeSignatureIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) - return o == 0 + @classmethod + def InitFromObj(cls, unsortedSegmentMinOptions): + x = UnsortedSegmentMinOptionsT() + x._UnPack(unsortedSegmentMinOptions) + return x - # Tensor - def HasRank(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) - if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False + # UnsortedSegmentMinOptionsT + def _UnPack(self, unsortedSegmentMinOptions): + if unsortedSegmentMinOptions is None: + return - # Tensor - def VariantTensors(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) - if o != 0: - x = self._tab.Vector(o) - x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 - x = self._tab.Indirect(x) - obj = VariantSubType() - obj.Init(self._tab.Bytes, x) - return obj - return None + # UnsortedSegmentMinOptionsT + def Pack(self, builder): + UnsortedSegmentMinOptionsStart(builder) + unsortedSegmentMinOptions = UnsortedSegmentMinOptionsEnd(builder) + return unsortedSegmentMinOptions - # Tensor - def VariantTensorsLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) - if o != 0: - return self._tab.VectorLen(o) - return 0 - # Tensor - def VariantTensorsIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) - return o == 0 +class SignOptions(object): + __slots__ = ['_tab'] -def TensorStart(builder): builder.StartObject(10) -def Start(builder): - return TensorStart(builder) -def TensorAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) -def AddShape(builder, shape): - return TensorAddShape(builder, shape) -def TensorStartShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartShapeVector(builder, numElems): - return TensorStartShapeVector(builder, numElems) -def TensorAddType(builder, type): builder.PrependInt8Slot(1, type, 0) -def AddType(builder, type): - return TensorAddType(builder, type) -def TensorAddBuffer(builder, 
buffer): builder.PrependUint32Slot(2, buffer, 0) -def AddBuffer(builder, buffer): - return TensorAddBuffer(builder, buffer) -def TensorAddName(builder, name): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def AddName(builder, name): - return TensorAddName(builder, name) -def TensorAddQuantization(builder, quantization): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(quantization), 0) -def AddQuantization(builder, quantization): - return TensorAddQuantization(builder, quantization) -def TensorAddIsVariable(builder, isVariable): builder.PrependBoolSlot(5, isVariable, 0) -def AddIsVariable(builder, isVariable): - return TensorAddIsVariable(builder, isVariable) -def TensorAddSparsity(builder, sparsity): builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(sparsity), 0) -def AddSparsity(builder, sparsity): - return TensorAddSparsity(builder, sparsity) -def TensorAddShapeSignature(builder, shapeSignature): builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(shapeSignature), 0) -def AddShapeSignature(builder, shapeSignature): - return TensorAddShapeSignature(builder, shapeSignature) -def TensorStartShapeSignatureVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartShapeSignatureVector(builder, numElems): - return TensorStartShapeSignatureVector(builder, numElems) -def TensorAddHasRank(builder, hasRank): builder.PrependBoolSlot(8, hasRank, 0) -def AddHasRank(builder, hasRank): - return TensorAddHasRank(builder, hasRank) -def TensorAddVariantTensors(builder, variantTensors): builder.PrependUOffsetTRelativeSlot(9, flatbuffers.number_types.UOffsetTFlags.py_type(variantTensors), 0) -def AddVariantTensors(builder, variantTensors): - return TensorAddVariantTensors(builder, variantTensors) -def TensorStartVariantTensorsVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartVariantTensorsVector(builder, numElems): - return TensorStartVariantTensorsVector(builder, numElems) -def TensorEnd(builder): return builder.EndObject() -def End(builder): - return TensorEnd(builder) -try: - from typing import List, Optional -except: - pass + @classmethod + def GetRootAs(cls, buf, offset=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) + x = SignOptions() + x.Init(buf, n + offset) + return x -class TensorT(object): + @classmethod + def GetRootAsSignOptions(cls, buf, offset=0): + """This method is deprecated. 
Please switch to GetRootAs.""" + return cls.GetRootAs(buf, offset) + @classmethod + def SignOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # TensorT - def __init__(self): - self.shape = None # type: List[int] - self.type = 0 # type: int - self.buffer = 0 # type: int - self.name = None # type: str - self.quantization = None # type: Optional[QuantizationParametersT] - self.isVariable = False # type: bool - self.sparsity = None # type: Optional[SparsityParametersT] - self.shapeSignature = None # type: List[int] - self.hasRank = False # type: bool - self.variantTensors = None # type: List[VariantSubTypeT] + # SignOptions + def Init(self, buf, pos): + self._tab = flatbuffers.table.Table(buf, pos) + +def SignOptionsStart(builder): + builder.StartObject(0) + +def SignOptionsEnd(builder): + return builder.EndObject() + + + +class SignOptionsT(object): + + # SignOptionsT + def __init__(self): + pass @classmethod def InitFromBuf(cls, buf, pos): - tensor = Tensor() - tensor.Init(buf, pos) - return cls.InitFromObj(tensor) + signOptions = SignOptions() + signOptions.Init(buf, pos) + return cls.InitFromObj(signOptions) @classmethod - def InitFromObj(cls, tensor): - x = TensorT() - x._UnPack(tensor) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, signOptions): + x = SignOptionsT() + x._UnPack(signOptions) return x - # TensorT - def _UnPack(self, tensor): - if tensor is None: + # SignOptionsT + def _UnPack(self, signOptions): + if signOptions is None: return - if not tensor.ShapeIsNone(): - if np is None: - self.shape = [] - for i in range(tensor.ShapeLength()): - self.shape.append(tensor.Shape(i)) - else: - self.shape = tensor.ShapeAsNumpy() - self.type = tensor.Type() - self.buffer = tensor.Buffer() - self.name = tensor.Name() - if tensor.Quantization() is not None: - self.quantization = QuantizationParametersT.InitFromObj(tensor.Quantization()) - self.isVariable = tensor.IsVariable() - if tensor.Sparsity() is not None: - self.sparsity = SparsityParametersT.InitFromObj(tensor.Sparsity()) - if not tensor.ShapeSignatureIsNone(): - if np is None: - self.shapeSignature = [] - for i in range(tensor.ShapeSignatureLength()): - self.shapeSignature.append(tensor.ShapeSignature(i)) - else: - self.shapeSignature = tensor.ShapeSignatureAsNumpy() - self.hasRank = tensor.HasRank() - if not tensor.VariantTensorsIsNone(): - self.variantTensors = [] - for i in range(tensor.VariantTensorsLength()): - if tensor.VariantTensors(i) is None: - self.variantTensors.append(None) - else: - variantSubType_ = VariantSubTypeT.InitFromObj(tensor.VariantTensors(i)) - self.variantTensors.append(variantSubType_) - # TensorT + # SignOptionsT def Pack(self, builder): - if self.shape is not None: - if np is not None and type(self.shape) is np.ndarray: - shape = builder.CreateNumpyVector(self.shape) - else: - TensorStartShapeVector(builder, len(self.shape)) - for i in reversed(range(len(self.shape))): - builder.PrependInt32(self.shape[i]) - shape = builder.EndVector() - if self.name is not None: - name = builder.CreateString(self.name) - if self.quantization is not None: - quantization = self.quantization.Pack(builder) - if self.sparsity is not None: - sparsity = self.sparsity.Pack(builder) - if self.shapeSignature is not None: - if np is not None and type(self.shapeSignature) 
is np.ndarray: - shapeSignature = builder.CreateNumpyVector(self.shapeSignature) - else: - TensorStartShapeSignatureVector(builder, len(self.shapeSignature)) - for i in reversed(range(len(self.shapeSignature))): - builder.PrependInt32(self.shapeSignature[i]) - shapeSignature = builder.EndVector() - if self.variantTensors is not None: - variantTensorslist = [] - for i in range(len(self.variantTensors)): - variantTensorslist.append(self.variantTensors[i].Pack(builder)) - TensorStartVariantTensorsVector(builder, len(self.variantTensors)) - for i in reversed(range(len(self.variantTensors))): - builder.PrependUOffsetTRelative(variantTensorslist[i]) - variantTensors = builder.EndVector() - TensorStart(builder) - if self.shape is not None: - TensorAddShape(builder, shape) - TensorAddType(builder, self.type) - TensorAddBuffer(builder, self.buffer) - if self.name is not None: - TensorAddName(builder, name) - if self.quantization is not None: - TensorAddQuantization(builder, quantization) - TensorAddIsVariable(builder, self.isVariable) - if self.sparsity is not None: - TensorAddSparsity(builder, sparsity) - if self.shapeSignature is not None: - TensorAddShapeSignature(builder, shapeSignature) - TensorAddHasRank(builder, self.hasRank) - if self.variantTensors is not None: - TensorAddVariantTensors(builder, variantTensors) - tensor = TensorEnd(builder) - return tensor -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + SignOptionsStart(builder) + signOptions = SignOptionsEnd(builder) + return signOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class TensorMap(object): +class BitcastOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = TensorMap() + x = BitcastOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsTensorMap(cls, buf, offset=0): + def GetRootAsBitcastOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def TensorMapBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def BitcastOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # TensorMap + # BitcastOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # TensorMap - def Name(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.String(o + self._tab.Pos) - return None +def BitcastOptionsStart(builder): + builder.StartObject(0) - # TensorMap - def TensorIndex(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) - return 0 +def BitcastOptionsEnd(builder): + return builder.EndObject() -def TensorMapStart(builder): builder.StartObject(2) -def Start(builder): - return TensorMapStart(builder) -def TensorMapAddName(builder, name): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) -def AddName(builder, name): - return TensorMapAddName(builder, name) -def TensorMapAddTensorIndex(builder, tensorIndex): builder.PrependUint32Slot(1, tensorIndex, 0) -def AddTensorIndex(builder, tensorIndex): - return TensorMapAddTensorIndex(builder, tensorIndex) -def TensorMapEnd(builder): return builder.EndObject() -def End(builder): - return TensorMapEnd(builder) -class TensorMapT(object): - # TensorMapT +class BitcastOptionsT(object): + + # BitcastOptionsT def __init__(self): - self.name = None # type: str - self.tensorIndex = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - tensorMap = TensorMap() - tensorMap.Init(buf, pos) - return cls.InitFromObj(tensorMap) + bitcastOptions = BitcastOptions() + bitcastOptions.Init(buf, pos) + return cls.InitFromObj(bitcastOptions) @classmethod - def InitFromObj(cls, tensorMap): - x = TensorMapT() - x._UnPack(tensorMap) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, bitcastOptions): + x = BitcastOptionsT() + x._UnPack(bitcastOptions) return x - # TensorMapT - def _UnPack(self, tensorMap): - if tensorMap is None: + # BitcastOptionsT + def _UnPack(self, bitcastOptions): + if bitcastOptions is None: return - self.name = tensorMap.Name() - self.tensorIndex = tensorMap.TensorIndex() - # TensorMapT + # BitcastOptionsT def Pack(self, builder): - if self.name is not None: - name = builder.CreateString(self.name) - TensorMapStart(builder) - if self.name is not None: - TensorMapAddName(builder, name) - TensorMapAddTensorIndex(builder, self.tensorIndex) - tensorMap = TensorMapEnd(builder) - return tensorMap -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite - -class TensorType(object): - FLOAT32 = 0 - FLOAT16 = 1 - INT32 = 2 - UINT8 = 3 - INT64 = 4 - STRING = 5 - BOOL = 6 - INT16 = 7 - COMPLEX64 = 8 - INT8 = 9 - FLOAT64 = 10 - COMPLEX128 = 11 - UINT64 = 12 - RESOURCE = 13 - VARIANT = 14 - UINT32 = 15 - UINT16 = 16 - INT4 = 17 -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + BitcastOptionsStart(builder) + bitcastOptions = BitcastOptionsEnd(builder) + return bitcastOptions -from flatbuffers.compat import import_numpy -np = import_numpy() 
-class TileOptions(object): +class BitwiseXorOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = TileOptions() + x = BitwiseXorOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsTileOptions(cls, buf, offset=0): + def GetRootAsBitwiseXorOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def TileOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def BitwiseXorOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # TileOptions + # BitwiseXorOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def TileOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return TileOptionsStart(builder) -def TileOptionsEnd(builder): return builder.EndObject() -def End(builder): - return TileOptionsEnd(builder) +def BitwiseXorOptionsStart(builder): + builder.StartObject(0) -class TileOptionsT(object): +def BitwiseXorOptionsEnd(builder): + return builder.EndObject() - # TileOptionsT + + +class BitwiseXorOptionsT(object): + + # BitwiseXorOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - tileOptions = TileOptions() - tileOptions.Init(buf, pos) - return cls.InitFromObj(tileOptions) + bitwiseXorOptions = BitwiseXorOptions() + bitwiseXorOptions.Init(buf, pos) + return cls.InitFromObj(bitwiseXorOptions) @classmethod - def InitFromObj(cls, tileOptions): - x = TileOptionsT() - x._UnPack(tileOptions) - return x + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) - # TileOptionsT - def _UnPack(self, tileOptions): - if tileOptions is None: - return + @classmethod + def InitFromObj(cls, bitwiseXorOptions): + x = BitwiseXorOptionsT() + x._UnPack(bitwiseXorOptions) + return x - # TileOptionsT - def Pack(self, builder): - TileOptionsStart(builder) - tileOptions = TileOptionsEnd(builder) - return tileOptions -# automatically generated by the FlatBuffers compiler, do not modify + # BitwiseXorOptionsT + def _UnPack(self, bitwiseXorOptions): + if bitwiseXorOptions is None: + return -# namespace: tflite + # BitwiseXorOptionsT + def Pack(self, builder): + BitwiseXorOptionsStart(builder) + bitwiseXorOptions = BitwiseXorOptionsEnd(builder) + return bitwiseXorOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class TopKV2Options(object): +class RightShiftOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = TopKV2Options() + x = RightShiftOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsTopKV2Options(cls, buf, offset=0): + def GetRootAsRightShiftOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def TopKV2OptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def RightShiftOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # TopKV2Options + # RightShiftOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def TopKV2OptionsStart(builder): builder.StartObject(0) -def Start(builder): - return TopKV2OptionsStart(builder) -def TopKV2OptionsEnd(builder): return builder.EndObject() -def End(builder): - return TopKV2OptionsEnd(builder) +def RightShiftOptionsStart(builder): + builder.StartObject(0) -class TopKV2OptionsT(object): +def RightShiftOptionsEnd(builder): + return builder.EndObject() - # TopKV2OptionsT + + +class RightShiftOptionsT(object): + + # RightShiftOptionsT def __init__(self): pass @classmethod def InitFromBuf(cls, buf, pos): - topKv2options = TopKV2Options() - topKv2options.Init(buf, pos) - return cls.InitFromObj(topKv2options) + rightShiftOptions = RightShiftOptions() + rightShiftOptions.Init(buf, pos) + return cls.InitFromObj(rightShiftOptions) @classmethod - def InitFromObj(cls, topKv2options): - x = TopKV2OptionsT() - x._UnPack(topKv2options) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, rightShiftOptions): + x = RightShiftOptionsT() + x._UnPack(rightShiftOptions) return x - # TopKV2OptionsT - def _UnPack(self, topKv2options): - if topKv2options is None: + # RightShiftOptionsT + def _UnPack(self, rightShiftOptions): + if rightShiftOptions is None: return - # TopKV2OptionsT + # RightShiftOptionsT def Pack(self, builder): - TopKV2OptionsStart(builder) - topKv2options = TopKV2OptionsEnd(builder) - return topKv2options -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + RightShiftOptionsStart(builder) + rightShiftOptions = RightShiftOptionsEnd(builder) + return rightShiftOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class TransposeConvOptions(object): +class DilateOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = TransposeConvOptions() + x = DilateOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsTransposeConvOptions(cls, buf, offset=0): + def GetRootAsDilateOptions(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def TransposeConvOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def DilateOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # TransposeConvOptions + # DilateOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # TransposeConvOptions - def Padding(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 - - # TransposeConvOptions - def StrideW(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 +def DilateOptionsStart(builder): + builder.StartObject(0) - # TransposeConvOptions - def StrideH(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) - return 0 +def DilateOptionsEnd(builder): + return builder.EndObject() - # TransposeConvOptions - def FusedActivationFunction(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 -def TransposeConvOptionsStart(builder): builder.StartObject(4) -def Start(builder): - return TransposeConvOptionsStart(builder) -def TransposeConvOptionsAddPadding(builder, padding): builder.PrependInt8Slot(0, padding, 0) -def AddPadding(builder, padding): - return TransposeConvOptionsAddPadding(builder, padding) -def TransposeConvOptionsAddStrideW(builder, strideW): builder.PrependInt32Slot(1, strideW, 0) -def AddStrideW(builder, strideW): - return TransposeConvOptionsAddStrideW(builder, strideW) -def TransposeConvOptionsAddStrideH(builder, strideH): builder.PrependInt32Slot(2, strideH, 0) -def AddStrideH(builder, strideH): - return TransposeConvOptionsAddStrideH(builder, strideH) -def TransposeConvOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(3, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return TransposeConvOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def TransposeConvOptionsEnd(builder): return builder.EndObject() -def End(builder): - return TransposeConvOptionsEnd(builder) -class TransposeConvOptionsT(object): +class DilateOptionsT(object): - # TransposeConvOptionsT + # DilateOptionsT def __init__(self): - self.padding = 0 # type: int - self.strideW = 0 # type: int - self.strideH = 0 # type: int - self.fusedActivationFunction = 0 # type: int + pass @classmethod def InitFromBuf(cls, buf, pos): - transposeConvOptions = TransposeConvOptions() - transposeConvOptions.Init(buf, pos) - return cls.InitFromObj(transposeConvOptions) + dilateOptions = DilateOptions() + dilateOptions.Init(buf, pos) + return cls.InitFromObj(dilateOptions) @classmethod - def InitFromObj(cls, transposeConvOptions): - x = TransposeConvOptionsT() - x._UnPack(transposeConvOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, dilateOptions): + x = DilateOptionsT() + x._UnPack(dilateOptions) 
return x - # TransposeConvOptionsT - def _UnPack(self, transposeConvOptions): - if transposeConvOptions is None: + # DilateOptionsT + def _UnPack(self, dilateOptions): + if dilateOptions is None: return - self.padding = transposeConvOptions.Padding() - self.strideW = transposeConvOptions.StrideW() - self.strideH = transposeConvOptions.StrideH() - self.fusedActivationFunction = transposeConvOptions.FusedActivationFunction() - # TransposeConvOptionsT + # DilateOptionsT def Pack(self, builder): - TransposeConvOptionsStart(builder) - TransposeConvOptionsAddPadding(builder, self.padding) - TransposeConvOptionsAddStrideW(builder, self.strideW) - TransposeConvOptionsAddStrideH(builder, self.strideH) - TransposeConvOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - transposeConvOptions = TransposeConvOptionsEnd(builder) - return transposeConvOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + DilateOptionsStart(builder) + dilateOptions = DilateOptionsEnd(builder) + return dilateOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class TransposeOptions(object): +class ReduceWindowOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = TransposeOptions() + x = ReduceWindowOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsTransposeOptions(cls, buf, offset=0): + def GetRootAsReduceWindowOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def TransposeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ReduceWindowOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # TransposeOptions + # ReduceWindowOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def TransposeOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return TransposeOptionsStart(builder) -def TransposeOptionsEnd(builder): return builder.EndObject() -def End(builder): - return TransposeOptionsEnd(builder) + # ReduceWindowOptions + def ReduceFunction(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 -class TransposeOptionsT(object): +def ReduceWindowOptionsStart(builder): + builder.StartObject(1) - # TransposeOptionsT +def ReduceWindowOptionsAddReduceFunction(builder, reduceFunction): + builder.PrependInt32Slot(0, reduceFunction, 0) + +def ReduceWindowOptionsEnd(builder): + return builder.EndObject() + + + +class ReduceWindowOptionsT(object): + + # ReduceWindowOptionsT def __init__(self): - pass + self.reduceFunction = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - transposeOptions = TransposeOptions() - transposeOptions.Init(buf, pos) - return cls.InitFromObj(transposeOptions) + reduceWindowOptions = ReduceWindowOptions() + reduceWindowOptions.Init(buf, pos) + return cls.InitFromObj(reduceWindowOptions) @classmethod - def InitFromObj(cls, transposeOptions): - x = TransposeOptionsT() - x._UnPack(transposeOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, 
reduceWindowOptions): + x = ReduceWindowOptionsT() + x._UnPack(reduceWindowOptions) return x - # TransposeOptionsT - def _UnPack(self, transposeOptions): - if transposeOptions is None: + # ReduceWindowOptionsT + def _UnPack(self, reduceWindowOptions): + if reduceWindowOptions is None: return + self.reduceFunction = reduceWindowOptions.ReduceFunction() - # TransposeOptionsT + # ReduceWindowOptionsT def Pack(self, builder): - TransposeOptionsStart(builder) - transposeOptions = TransposeOptionsEnd(builder) - return transposeOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + ReduceWindowOptionsStart(builder) + ReduceWindowOptionsAddReduceFunction(builder, self.reduceFunction) + reduceWindowOptions = ReduceWindowOptionsEnd(builder) + return reduceWindowOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class Uint16Vector(object): +class OperatorCode(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Uint16Vector() + x = OperatorCode() x.Init(buf, n + offset) return x @classmethod - def GetRootAsUint16Vector(cls, buf, offset=0): + def GetRootAsOperatorCode(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def Uint16VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def OperatorCodeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Uint16Vector + # OperatorCode def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # Uint16Vector - def Values(self, j): + # OperatorCode + def DeprecatedBuiltinCode(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Uint16Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 2)) + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) return 0 - # Uint16Vector - def ValuesAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + # OperatorCode + def CustomCode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint16Flags, o) - return 0 + return self._tab.String(o + self._tab.Pos) + return None - # Uint16Vector - def ValuesLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + # OperatorCode + def Version(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: - return self._tab.VectorLen(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 1 + + # OperatorCode + def BuiltinCode(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) return 0 - # Uint16Vector - def ValuesIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - return o == 0 +def OperatorCodeStart(builder): + builder.StartObject(4) -def Uint16VectorStart(builder): builder.StartObject(1) -def Start(builder): - return Uint16VectorStart(builder) -def Uint16VectorAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(0, 
flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) -def AddValues(builder, values): - return Uint16VectorAddValues(builder, values) -def Uint16VectorStartValuesVector(builder, numElems): return builder.StartVector(2, numElems, 2) -def StartValuesVector(builder, numElems): - return Uint16VectorStartValuesVector(builder, numElems) -def Uint16VectorEnd(builder): return builder.EndObject() -def End(builder): - return Uint16VectorEnd(builder) -try: - from typing import List -except: - pass +def OperatorCodeAddDeprecatedBuiltinCode(builder, deprecatedBuiltinCode): + builder.PrependInt8Slot(0, deprecatedBuiltinCode, 0) -class Uint16VectorT(object): +def OperatorCodeAddCustomCode(builder, customCode): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(customCode), 0) - # Uint16VectorT +def OperatorCodeAddVersion(builder, version): + builder.PrependInt32Slot(2, version, 1) + +def OperatorCodeAddBuiltinCode(builder, builtinCode): + builder.PrependInt32Slot(3, builtinCode, 0) + +def OperatorCodeEnd(builder): + return builder.EndObject() + + + +class OperatorCodeT(object): + + # OperatorCodeT def __init__(self): - self.values = None # type: List[int] + self.deprecatedBuiltinCode = 0 # type: int + self.customCode = None # type: str + self.version = 1 # type: int + self.builtinCode = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - uint16vector = Uint16Vector() - uint16vector.Init(buf, pos) - return cls.InitFromObj(uint16vector) + operatorCode = OperatorCode() + operatorCode.Init(buf, pos) + return cls.InitFromObj(operatorCode) @classmethod - def InitFromObj(cls, uint16vector): - x = Uint16VectorT() - x._UnPack(uint16vector) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, operatorCode): + x = OperatorCodeT() + x._UnPack(operatorCode) return x - # Uint16VectorT - def _UnPack(self, uint16vector): - if uint16vector is None: + # OperatorCodeT + def _UnPack(self, operatorCode): + if operatorCode is None: return - if not uint16vector.ValuesIsNone(): - if np is None: - self.values = [] - for i in range(uint16vector.ValuesLength()): - self.values.append(uint16vector.Values(i)) - else: - self.values = uint16vector.ValuesAsNumpy() + self.deprecatedBuiltinCode = operatorCode.DeprecatedBuiltinCode() + self.customCode = operatorCode.CustomCode() + self.version = operatorCode.Version() + self.builtinCode = operatorCode.BuiltinCode() - # Uint16VectorT + # OperatorCodeT def Pack(self, builder): - if self.values is not None: - if np is not None and type(self.values) is np.ndarray: - values = builder.CreateNumpyVector(self.values) - else: - Uint16VectorStartValuesVector(builder, len(self.values)) - for i in reversed(range(len(self.values))): - builder.PrependUint16(self.values[i]) - values = builder.EndVector() - Uint16VectorStart(builder) - if self.values is not None: - Uint16VectorAddValues(builder, values) - uint16vector = Uint16VectorEnd(builder) - return uint16vector -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.customCode is not None: + customCode = builder.CreateString(self.customCode) + OperatorCodeStart(builder) + OperatorCodeAddDeprecatedBuiltinCode(builder, self.deprecatedBuiltinCode) + if self.customCode is not None: + OperatorCodeAddCustomCode(builder, customCode) + OperatorCodeAddVersion(builder, self.version) + OperatorCodeAddBuiltinCode(builder, 
self.builtinCode) + operatorCode = OperatorCodeEnd(builder) + return operatorCode -from flatbuffers.compat import import_numpy -np = import_numpy() -class Uint8Vector(object): +class StableHLOCompositeOptions(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = Uint8Vector() + x = StableHLOCompositeOptions() x.Init(buf, n + offset) return x @classmethod - def GetRootAsUint8Vector(cls, buf, offset=0): + def GetRootAsStableHLOCompositeOptions(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def Uint8VectorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def StableHLOCompositeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # Uint8Vector + # StableHLOCompositeOptions def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # Uint8Vector - def Values(self, j): + # StableHLOCompositeOptions + def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # StableHLOCompositeOptions + def DecompositionSubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + + # StableHLOCompositeOptions + def CompositeAttributes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: a = self._tab.Vector(o) return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) return 0 - # Uint8Vector - def ValuesAsNumpy(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + # StableHLOCompositeOptions + def CompositeAttributesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) return 0 - # Uint8Vector - def ValuesLength(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + # StableHLOCompositeOptions + def CompositeAttributesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) if o != 0: return self._tab.VectorLen(o) return 0 - # Uint8Vector - def ValuesIsNone(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + # StableHLOCompositeOptions + def CompositeAttributesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) return o == 0 -def Uint8VectorStart(builder): builder.StartObject(1) -def Start(builder): - return Uint8VectorStart(builder) -def Uint8VectorAddValues(builder, values): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(values), 0) -def AddValues(builder, values): - return Uint8VectorAddValues(builder, values) -def Uint8VectorStartValuesVector(builder, numElems): return builder.StartVector(1, numElems, 1) -def StartValuesVector(builder, numElems): - return Uint8VectorStartValuesVector(builder, numElems) -def Uint8VectorEnd(builder): return builder.EndObject() -def End(builder): - return Uint8VectorEnd(builder) + # StableHLOCompositeOptions + def CompositeAttributesFormat(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # StableHLOCompositeOptions + def Version(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return 0 + +def StableHLOCompositeOptionsStart(builder): + builder.StartObject(5) + +def StableHLOCompositeOptionsAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def StableHLOCompositeOptionsAddDecompositionSubgraphIndex(builder, decompositionSubgraphIndex): + builder.PrependInt32Slot(1, decompositionSubgraphIndex, 0) + +def StableHLOCompositeOptionsAddCompositeAttributes(builder, compositeAttributes): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(compositeAttributes), 0) + +def StableHLOCompositeOptionsStartCompositeAttributesVector(builder, numElems): + return builder.StartVector(1, numElems, 1) + +def StableHLOCompositeOptionsAddCompositeAttributesFormat(builder, compositeAttributesFormat): + builder.PrependInt8Slot(3, compositeAttributesFormat, 0) + +def StableHLOCompositeOptionsAddVersion(builder, version): + builder.PrependInt32Slot(4, version, 0) + +def StableHLOCompositeOptionsEnd(builder): + return builder.EndObject() + + try: from typing import List except: pass -class Uint8VectorT(object): +class StableHLOCompositeOptionsT(object): - # Uint8VectorT + # StableHLOCompositeOptionsT def __init__(self): - self.values = None # type: List[int] + self.name = None # type: str + self.decompositionSubgraphIndex = 0 # type: int + self.compositeAttributes = None # type: List[int] + self.compositeAttributesFormat = 0 # type: int + self.version = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - uint8vector = Uint8Vector() - uint8vector.Init(buf, pos) - return cls.InitFromObj(uint8vector) + stableHlocompositeOptions = StableHLOCompositeOptions() + stableHlocompositeOptions.Init(buf, pos) + return cls.InitFromObj(stableHlocompositeOptions) @classmethod - def InitFromObj(cls, uint8vector): - x = Uint8VectorT() - x._UnPack(uint8vector) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, stableHlocompositeOptions): + x = StableHLOCompositeOptionsT() + x._UnPack(stableHlocompositeOptions) return x - # Uint8VectorT - def _UnPack(self, uint8vector): - if uint8vector is None: + # StableHLOCompositeOptionsT + def _UnPack(self, stableHlocompositeOptions): + if stableHlocompositeOptions is None: return - if not uint8vector.ValuesIsNone(): + self.name = stableHlocompositeOptions.Name() + self.decompositionSubgraphIndex = stableHlocompositeOptions.DecompositionSubgraphIndex() + if not stableHlocompositeOptions.CompositeAttributesIsNone(): if np is None: - self.values = [] - for i in range(uint8vector.ValuesLength()): - self.values.append(uint8vector.Values(i)) + self.compositeAttributes = [] + for i in range(stableHlocompositeOptions.CompositeAttributesLength()): + self.compositeAttributes.append(stableHlocompositeOptions.CompositeAttributes(i)) else: - self.values = uint8vector.ValuesAsNumpy() + self.compositeAttributes = stableHlocompositeOptions.CompositeAttributesAsNumpy() + self.compositeAttributesFormat = 
stableHlocompositeOptions.CompositeAttributesFormat() + self.version = stableHlocompositeOptions.Version() - # Uint8VectorT + # StableHLOCompositeOptionsT def Pack(self, builder): - if self.values is not None: - if np is not None and type(self.values) is np.ndarray: - values = builder.CreateNumpyVector(self.values) + if self.name is not None: + name = builder.CreateString(self.name) + if self.compositeAttributes is not None: + if np is not None and type(self.compositeAttributes) is np.ndarray: + compositeAttributes = builder.CreateNumpyVector(self.compositeAttributes) else: - Uint8VectorStartValuesVector(builder, len(self.values)) - for i in reversed(range(len(self.values))): - builder.PrependUint8(self.values[i]) - values = builder.EndVector() - Uint8VectorStart(builder) - if self.values is not None: - Uint8VectorAddValues(builder, values) - uint8vector = Uint8VectorEnd(builder) - return uint8vector -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + StableHLOCompositeOptionsStartCompositeAttributesVector(builder, len(self.compositeAttributes)) + for i in reversed(range(len(self.compositeAttributes))): + builder.PrependUint8(self.compositeAttributes[i]) + compositeAttributes = builder.EndVector() + StableHLOCompositeOptionsStart(builder) + if self.name is not None: + StableHLOCompositeOptionsAddName(builder, name) + StableHLOCompositeOptionsAddDecompositionSubgraphIndex(builder, self.decompositionSubgraphIndex) + if self.compositeAttributes is not None: + StableHLOCompositeOptionsAddCompositeAttributes(builder, compositeAttributes) + StableHLOCompositeOptionsAddCompositeAttributesFormat(builder, self.compositeAttributesFormat) + StableHLOCompositeOptionsAddVersion(builder, self.version) + stableHlocompositeOptions = StableHLOCompositeOptionsEnd(builder) + return stableHlocompositeOptions -from flatbuffers.compat import import_numpy -np = import_numpy() -class UnidirectionalSequenceLSTMOptions(object): +class Operator(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UnidirectionalSequenceLSTMOptions() + x = Operator() x.Init(buf, n + offset) return x @classmethod - def GetRootAsUnidirectionalSequenceLSTMOptions(cls, buf, offset=0): + def GetRootAsOperator(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def UnidirectionalSequenceLSTMOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def OperatorBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # UnidirectionalSequenceLSTMOptions + # Operator def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # UnidirectionalSequenceLSTMOptions - def FusedActivationFunction(self): + # Operator + def OpcodeIndex(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + + # Operator + def Inputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Operator + def InputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Operator + def InputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def InputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # Operator + def Outputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Operator + def OutputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Operator + def OutputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def OutputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # Operator + def BuiltinOptionsType(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 + + # Operator + def BuiltinOptions(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None + + # Operator + def CustomOptions(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Operator + def CustomOptionsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 + + # Operator + def CustomOptionsLength(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def CustomOptionsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # Operator + def CustomOptionsFormat(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) + return 0 + + # Operator + def MutatingVariableInputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.BoolFlags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 + + # Operator + def MutatingVariableInputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.BoolFlags, o) + return 0 + + # Operator + def MutatingVariableInputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Operator + def MutatingVariableInputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 + + # Operator + def Intermediates(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Operator + def IntermediatesAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Operator + def IntermediatesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + if o != 0: + return self._tab.VectorLen(o) return 0 - # UnidirectionalSequenceLSTMOptions - def CellClip(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 + # Operator + def IntermediatesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(20)) + return o == 0 - # UnidirectionalSequenceLSTMOptions - def ProjClip(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + # Operator + def LargeCustomOptionsOffset(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(22)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Float32Flags, o + self._tab.Pos) - return 0.0 + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 - # UnidirectionalSequenceLSTMOptions - def TimeMajor(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + # Operator + def LargeCustomOptionsSize(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(24)) if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 - # UnidirectionalSequenceLSTMOptions - def AsymmetricQuantizeInputs(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + # Operator + def BuiltinOptions2Type(self): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(26)) if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False + return self._tab.Get(flatbuffers.number_types.Uint8Flags, o + self._tab.Pos) + return 0 - # UnidirectionalSequenceLSTMOptions - def DiagonalRecurrentTensors(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + # Operator + def BuiltinOptions2(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(28)) if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False + from flatbuffers.table import Table + obj = Table(bytearray(), 0) + self._tab.Union(obj, o) + return obj + return None -def UnidirectionalSequenceLSTMOptionsStart(builder): builder.StartObject(6) -def Start(builder): - return UnidirectionalSequenceLSTMOptionsStart(builder) -def UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction): builder.PrependInt8Slot(0, fusedActivationFunction, 0) -def AddFusedActivationFunction(builder, fusedActivationFunction): - return UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, fusedActivationFunction) -def UnidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip): builder.PrependFloat32Slot(1, cellClip, 0.0) -def AddCellClip(builder, cellClip): - return UnidirectionalSequenceLSTMOptionsAddCellClip(builder, cellClip) -def UnidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip): builder.PrependFloat32Slot(2, projClip, 0.0) -def AddProjClip(builder, projClip): - return UnidirectionalSequenceLSTMOptionsAddProjClip(builder, projClip) -def UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor): builder.PrependBoolSlot(3, timeMajor, 0) -def AddTimeMajor(builder, timeMajor): - return UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, timeMajor) -def UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): builder.PrependBoolSlot(4, asymmetricQuantizeInputs, 0) -def AddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs): - return UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, asymmetricQuantizeInputs) -def UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, diagonalRecurrentTensors): builder.PrependBoolSlot(5, diagonalRecurrentTensors, 0) -def AddDiagonalRecurrentTensors(builder, diagonalRecurrentTensors): - return UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, diagonalRecurrentTensors) -def UnidirectionalSequenceLSTMOptionsEnd(builder): return builder.EndObject() -def End(builder): - return UnidirectionalSequenceLSTMOptionsEnd(builder) +def OperatorStart(builder): + builder.StartObject(13) -class UnidirectionalSequenceLSTMOptionsT(object): +def OperatorAddOpcodeIndex(builder, opcodeIndex): + builder.PrependUint32Slot(0, opcodeIndex, 0) - # UnidirectionalSequenceLSTMOptionsT - def __init__(self): - self.fusedActivationFunction = 0 # type: int - self.cellClip = 0.0 # type: float - self.projClip = 0.0 # type: float - self.timeMajor = False # type: bool - self.asymmetricQuantizeInputs = False # type: bool - self.diagonalRecurrentTensors = False # type: bool +def OperatorAddInputs(builder, inputs): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) - @classmethod - def InitFromBuf(cls, buf, pos): - unidirectionalSequenceLstmoptions = UnidirectionalSequenceLSTMOptions() - 
unidirectionalSequenceLstmoptions.Init(buf, pos) - return cls.InitFromObj(unidirectionalSequenceLstmoptions) +def OperatorStartInputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) - @classmethod - def InitFromObj(cls, unidirectionalSequenceLstmoptions): - x = UnidirectionalSequenceLSTMOptionsT() - x._UnPack(unidirectionalSequenceLstmoptions) - return x +def OperatorAddOutputs(builder, outputs): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) - # UnidirectionalSequenceLSTMOptionsT - def _UnPack(self, unidirectionalSequenceLstmoptions): - if unidirectionalSequenceLstmoptions is None: - return - self.fusedActivationFunction = unidirectionalSequenceLstmoptions.FusedActivationFunction() - self.cellClip = unidirectionalSequenceLstmoptions.CellClip() - self.projClip = unidirectionalSequenceLstmoptions.ProjClip() - self.timeMajor = unidirectionalSequenceLstmoptions.TimeMajor() - self.asymmetricQuantizeInputs = unidirectionalSequenceLstmoptions.AsymmetricQuantizeInputs() - self.diagonalRecurrentTensors = unidirectionalSequenceLstmoptions.DiagonalRecurrentTensors() +def OperatorStartOutputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) - # UnidirectionalSequenceLSTMOptionsT - def Pack(self, builder): - UnidirectionalSequenceLSTMOptionsStart(builder) - UnidirectionalSequenceLSTMOptionsAddFusedActivationFunction(builder, self.fusedActivationFunction) - UnidirectionalSequenceLSTMOptionsAddCellClip(builder, self.cellClip) - UnidirectionalSequenceLSTMOptionsAddProjClip(builder, self.projClip) - UnidirectionalSequenceLSTMOptionsAddTimeMajor(builder, self.timeMajor) - UnidirectionalSequenceLSTMOptionsAddAsymmetricQuantizeInputs(builder, self.asymmetricQuantizeInputs) - UnidirectionalSequenceLSTMOptionsAddDiagonalRecurrentTensors(builder, self.diagonalRecurrentTensors) - unidirectionalSequenceLstmoptions = UnidirectionalSequenceLSTMOptionsEnd(builder) - return unidirectionalSequenceLstmoptions -# automatically generated by the FlatBuffers compiler, do not modify +def OperatorAddBuiltinOptionsType(builder, builtinOptionsType): + builder.PrependUint8Slot(3, builtinOptionsType, 0) -# namespace: tflite +def OperatorAddBuiltinOptions(builder, builtinOptions): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(builtinOptions), 0) -from flatbuffers.compat import import_numpy -np = import_numpy() +def OperatorAddCustomOptions(builder, customOptions): + builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(customOptions), 0) -class UniqueOptions(object): - __slots__ = ['_tab'] +def OperatorStartCustomOptionsVector(builder, numElems): + return builder.StartVector(1, numElems, 1) - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UniqueOptions() - x.Init(buf, n + offset) - return x +def OperatorAddCustomOptionsFormat(builder, customOptionsFormat): + builder.PrependInt8Slot(6, customOptionsFormat, 0) - @classmethod - def GetRootAsUniqueOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def UniqueOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) +def OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs): + builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(mutatingVariableInputs), 0) - # UniqueOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) +def OperatorStartMutatingVariableInputsVector(builder, numElems): + return builder.StartVector(1, numElems, 1) - # UniqueOptions - def IdxOutType(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 2 +def OperatorAddIntermediates(builder, intermediates): + builder.PrependUOffsetTRelativeSlot(8, flatbuffers.number_types.UOffsetTFlags.py_type(intermediates), 0) -def UniqueOptionsStart(builder): builder.StartObject(1) -def Start(builder): - return UniqueOptionsStart(builder) -def UniqueOptionsAddIdxOutType(builder, idxOutType): builder.PrependInt8Slot(0, idxOutType, 2) -def AddIdxOutType(builder, idxOutType): - return UniqueOptionsAddIdxOutType(builder, idxOutType) -def UniqueOptionsEnd(builder): return builder.EndObject() -def End(builder): - return UniqueOptionsEnd(builder) +def OperatorStartIntermediatesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) -class UniqueOptionsT(object): +def OperatorAddLargeCustomOptionsOffset(builder, largeCustomOptionsOffset): + builder.PrependUint64Slot(9, largeCustomOptionsOffset, 0) - # UniqueOptionsT +def OperatorAddLargeCustomOptionsSize(builder, largeCustomOptionsSize): + builder.PrependUint64Slot(10, largeCustomOptionsSize, 0) + +def OperatorAddBuiltinOptions2Type(builder, builtinOptions2Type): + builder.PrependUint8Slot(11, builtinOptions2Type, 0) + +def OperatorAddBuiltinOptions2(builder, builtinOptions2): + builder.PrependUOffsetTRelativeSlot(12, flatbuffers.number_types.UOffsetTFlags.py_type(builtinOptions2), 0) + +def OperatorEnd(builder): + return builder.EndObject() + + +try: + from typing import List, Union +except: + pass + +class OperatorT(object): + + # OperatorT def __init__(self): - self.idxOutType = 2 # type: int + self.opcodeIndex = 0 # type: int + self.inputs = None # type: List[int] + self.outputs = None # type: List[int] + self.builtinOptionsType = 0 # type: int + self.builtinOptions = None # type: Union[None, Conv2DOptionsT, DepthwiseConv2DOptionsT, ConcatEmbeddingsOptionsT, LSHProjectionOptionsT, Pool2DOptionsT, SVDFOptionsT, RNNOptionsT, FullyConnectedOptionsT, SoftmaxOptionsT, ConcatenationOptionsT, AddOptionsT, L2NormOptionsT, LocalResponseNormalizationOptionsT, LSTMOptionsT, ResizeBilinearOptionsT, CallOptionsT, ReshapeOptionsT, SkipGramOptionsT, SpaceToDepthOptionsT, EmbeddingLookupSparseOptionsT, MulOptionsT, PadOptionsT, GatherOptionsT, BatchToSpaceNDOptionsT, SpaceToBatchNDOptionsT, TransposeOptionsT, ReducerOptionsT, SubOptionsT, DivOptionsT, SqueezeOptionsT, SequenceRNNOptionsT, StridedSliceOptionsT, ExpOptionsT, TopKV2OptionsT, SplitOptionsT, LogSoftmaxOptionsT, CastOptionsT, DequantizeOptionsT, MaximumMinimumOptionsT, ArgMaxOptionsT, LessOptionsT, NegOptionsT, PadV2OptionsT, GreaterOptionsT, GreaterEqualOptionsT, LessEqualOptionsT, SelectOptionsT, SliceOptionsT, TransposeConvOptionsT, SparseToDenseOptionsT, TileOptionsT, 
ExpandDimsOptionsT, EqualOptionsT, NotEqualOptionsT, ShapeOptionsT, PowOptionsT, ArgMinOptionsT, FakeQuantOptionsT, PackOptionsT, LogicalOrOptionsT, OneHotOptionsT, LogicalAndOptionsT, LogicalNotOptionsT, UnpackOptionsT, FloorDivOptionsT, SquareOptionsT, ZerosLikeOptionsT, FillOptionsT, BidirectionalSequenceLSTMOptionsT, BidirectionalSequenceRNNOptionsT, UnidirectionalSequenceLSTMOptionsT, FloorModOptionsT, RangeOptionsT, ResizeNearestNeighborOptionsT, LeakyReluOptionsT, SquaredDifferenceOptionsT, MirrorPadOptionsT, AbsOptionsT, SplitVOptionsT, UniqueOptionsT, ReverseV2OptionsT, AddNOptionsT, GatherNdOptionsT, CosOptionsT, WhereOptionsT, RankOptionsT, ReverseSequenceOptionsT, MatrixDiagOptionsT, QuantizeOptionsT, MatrixSetDiagOptionsT, HardSwishOptionsT, IfOptionsT, WhileOptionsT, DepthToSpaceOptionsT, NonMaxSuppressionV4OptionsT, NonMaxSuppressionV5OptionsT, ScatterNdOptionsT, SelectV2OptionsT, DensifyOptionsT, SegmentSumOptionsT, BatchMatMulOptionsT, CumsumOptionsT, CallOnceOptionsT, BroadcastToOptionsT, Rfft2dOptionsT, Conv3DOptionsT, HashtableOptionsT, HashtableFindOptionsT, HashtableImportOptionsT, HashtableSizeOptionsT, VarHandleOptionsT, ReadVariableOptionsT, AssignVariableOptionsT, RandomOptionsT, BucketizeOptionsT, GeluOptionsT, DynamicUpdateSliceOptionsT, UnsortedSegmentProdOptionsT, UnsortedSegmentMaxOptionsT, UnsortedSegmentMinOptionsT, UnsortedSegmentSumOptionsT, ATan2OptionsT, SignOptionsT, BitcastOptionsT, BitwiseXorOptionsT, RightShiftOptionsT] + self.customOptions = None # type: List[int] + self.customOptionsFormat = 0 # type: int + self.mutatingVariableInputs = None # type: List[bool] + self.intermediates = None # type: List[int] + self.largeCustomOptionsOffset = 0 # type: int + self.largeCustomOptionsSize = 0 # type: int + self.builtinOptions2Type = 0 # type: int + self.builtinOptions2 = None # type: Union[None, StablehloConcatenateOptionsT, StablehloBroadcastInDimOptionsT, StablehloSliceOptionsT, StablehloConvolutionOptionsT, StablehloCustomCallOptionsT, StablehloReduceOptionsT, StablehloScatterOptionsT, StablehloCompareOptionsT, StablehloDynamicSliceOptionsT, StablehloPadOptionsT, StablehloIotaOptionsT, StablehloDotGeneralOptionsT, StablehloReduceWindowOptionsT, StablehloSortOptionsT, StablehloWhileOptionsT, StablehloGatherOptionsT, StablehloTransposeOptionsT, DilateOptionsT, StablehloRngBitGeneratorOptionsT, ReduceWindowOptionsT, StableHLOCompositeOptionsT] @classmethod def InitFromBuf(cls, buf, pos): - uniqueOptions = UniqueOptions() - uniqueOptions.Init(buf, pos) - return cls.InitFromObj(uniqueOptions) + operator = Operator() + operator.Init(buf, pos) + return cls.InitFromObj(operator) @classmethod - def InitFromObj(cls, uniqueOptions): - x = UniqueOptionsT() - x._UnPack(uniqueOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, operator): + x = OperatorT() + x._UnPack(operator) return x - # UniqueOptionsT - def _UnPack(self, uniqueOptions): - if uniqueOptions is None: + # OperatorT + def _UnPack(self, operator): + if operator is None: return - self.idxOutType = uniqueOptions.IdxOutType() + self.opcodeIndex = operator.OpcodeIndex() + if not operator.InputsIsNone(): + if np is None: + self.inputs = [] + for i in range(operator.InputsLength()): + self.inputs.append(operator.Inputs(i)) + else: + self.inputs = operator.InputsAsNumpy() + if not operator.OutputsIsNone(): + if np is None: + self.outputs = [] + for i in 
range(operator.OutputsLength()): + self.outputs.append(operator.Outputs(i)) + else: + self.outputs = operator.OutputsAsNumpy() + self.builtinOptionsType = operator.BuiltinOptionsType() + self.builtinOptions = BuiltinOptionsCreator(self.builtinOptionsType, operator.BuiltinOptions()) + if not operator.CustomOptionsIsNone(): + if np is None: + self.customOptions = [] + for i in range(operator.CustomOptionsLength()): + self.customOptions.append(operator.CustomOptions(i)) + else: + self.customOptions = operator.CustomOptionsAsNumpy() + self.customOptionsFormat = operator.CustomOptionsFormat() + if not operator.MutatingVariableInputsIsNone(): + if np is None: + self.mutatingVariableInputs = [] + for i in range(operator.MutatingVariableInputsLength()): + self.mutatingVariableInputs.append(operator.MutatingVariableInputs(i)) + else: + self.mutatingVariableInputs = operator.MutatingVariableInputsAsNumpy() + if not operator.IntermediatesIsNone(): + if np is None: + self.intermediates = [] + for i in range(operator.IntermediatesLength()): + self.intermediates.append(operator.Intermediates(i)) + else: + self.intermediates = operator.IntermediatesAsNumpy() + self.largeCustomOptionsOffset = operator.LargeCustomOptionsOffset() + self.largeCustomOptionsSize = operator.LargeCustomOptionsSize() + self.builtinOptions2Type = operator.BuiltinOptions2Type() + self.builtinOptions2 = BuiltinOptions2Creator(self.builtinOptions2Type, operator.BuiltinOptions2()) - # UniqueOptionsT + # OperatorT def Pack(self, builder): - UniqueOptionsStart(builder) - UniqueOptionsAddIdxOutType(builder, self.idxOutType) - uniqueOptions = UniqueOptionsEnd(builder) - return uniqueOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.inputs is not None: + if np is not None and type(self.inputs) is np.ndarray: + inputs = builder.CreateNumpyVector(self.inputs) + else: + OperatorStartInputsVector(builder, len(self.inputs)) + for i in reversed(range(len(self.inputs))): + builder.PrependInt32(self.inputs[i]) + inputs = builder.EndVector() + if self.outputs is not None: + if np is not None and type(self.outputs) is np.ndarray: + outputs = builder.CreateNumpyVector(self.outputs) + else: + OperatorStartOutputsVector(builder, len(self.outputs)) + for i in reversed(range(len(self.outputs))): + builder.PrependInt32(self.outputs[i]) + outputs = builder.EndVector() + if self.builtinOptions is not None: + builtinOptions = self.builtinOptions.Pack(builder) + if self.customOptions is not None: + if np is not None and type(self.customOptions) is np.ndarray: + customOptions = builder.CreateNumpyVector(self.customOptions) + else: + OperatorStartCustomOptionsVector(builder, len(self.customOptions)) + for i in reversed(range(len(self.customOptions))): + builder.PrependUint8(self.customOptions[i]) + customOptions = builder.EndVector() + if self.mutatingVariableInputs is not None: + if np is not None and type(self.mutatingVariableInputs) is np.ndarray: + mutatingVariableInputs = builder.CreateNumpyVector(self.mutatingVariableInputs) + else: + OperatorStartMutatingVariableInputsVector(builder, len(self.mutatingVariableInputs)) + for i in reversed(range(len(self.mutatingVariableInputs))): + builder.PrependBool(self.mutatingVariableInputs[i]) + mutatingVariableInputs = builder.EndVector() + if self.intermediates is not None: + if np is not None and type(self.intermediates) is np.ndarray: + intermediates = builder.CreateNumpyVector(self.intermediates) + else: + OperatorStartIntermediatesVector(builder, 
len(self.intermediates)) + for i in reversed(range(len(self.intermediates))): + builder.PrependInt32(self.intermediates[i]) + intermediates = builder.EndVector() + if self.builtinOptions2 is not None: + builtinOptions2 = self.builtinOptions2.Pack(builder) + OperatorStart(builder) + OperatorAddOpcodeIndex(builder, self.opcodeIndex) + if self.inputs is not None: + OperatorAddInputs(builder, inputs) + if self.outputs is not None: + OperatorAddOutputs(builder, outputs) + OperatorAddBuiltinOptionsType(builder, self.builtinOptionsType) + if self.builtinOptions is not None: + OperatorAddBuiltinOptions(builder, builtinOptions) + if self.customOptions is not None: + OperatorAddCustomOptions(builder, customOptions) + OperatorAddCustomOptionsFormat(builder, self.customOptionsFormat) + if self.mutatingVariableInputs is not None: + OperatorAddMutatingVariableInputs(builder, mutatingVariableInputs) + if self.intermediates is not None: + OperatorAddIntermediates(builder, intermediates) + OperatorAddLargeCustomOptionsOffset(builder, self.largeCustomOptionsOffset) + OperatorAddLargeCustomOptionsSize(builder, self.largeCustomOptionsSize) + OperatorAddBuiltinOptions2Type(builder, self.builtinOptions2Type) + if self.builtinOptions2 is not None: + OperatorAddBuiltinOptions2(builder, builtinOptions2) + operator = OperatorEnd(builder) + return operator -from flatbuffers.compat import import_numpy -np = import_numpy() -class UnpackOptions(object): +class SubGraph(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UnpackOptions() + x = SubGraph() x.Init(buf, n + offset) return x @classmethod - def GetRootAsUnpackOptions(cls, buf, offset=0): + def GetRootAsSubGraph(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def UnpackOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SubGraphBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # UnpackOptions + # SubGraph def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # UnpackOptions - def Num(self): + # SubGraph + def Tensors(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Tensor() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SubGraph + def TensorsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) return 0 - # UnpackOptions - def Axis(self): + # SubGraph + def TensorsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 + + # SubGraph + def Inputs(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) return 0 -def UnpackOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return UnpackOptionsStart(builder) -def UnpackOptionsAddNum(builder, num): builder.PrependInt32Slot(0, num, 0) -def AddNum(builder, num): - return UnpackOptionsAddNum(builder, num) -def UnpackOptionsAddAxis(builder, axis): builder.PrependInt32Slot(1, axis, 0) -def AddAxis(builder, axis): - return UnpackOptionsAddAxis(builder, axis) -def UnpackOptionsEnd(builder): return builder.EndObject() -def End(builder): - return UnpackOptionsEnd(builder) + # SubGraph + def InputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 -class UnpackOptionsT(object): + # SubGraph + def InputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 - # UnpackOptionsT + # SubGraph + def InputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # SubGraph + def Outputs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # SubGraph + def OutputsAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # SubGraph + def OutputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SubGraph + def OutputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # SubGraph + def Operators(self, j): + o = 
flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Operator() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # SubGraph + def OperatorsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # SubGraph + def OperatorsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + return o == 0 + + # SubGraph + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + +def SubGraphStart(builder): + builder.StartObject(5) + +def SubGraphAddTensors(builder, tensors): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(tensors), 0) + +def SubGraphStartTensorsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def SubGraphAddInputs(builder, inputs): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) + +def SubGraphStartInputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def SubGraphAddOutputs(builder, outputs): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) + +def SubGraphStartOutputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def SubGraphAddOperators(builder, operators): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(operators), 0) + +def SubGraphStartOperatorsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) + +def SubGraphAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) + +def SubGraphEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class SubGraphT(object): + + # SubGraphT def __init__(self): - self.num = 0 # type: int - self.axis = 0 # type: int + self.tensors = None # type: List[TensorT] + self.inputs = None # type: List[int] + self.outputs = None # type: List[int] + self.operators = None # type: List[OperatorT] + self.name = None # type: str @classmethod def InitFromBuf(cls, buf, pos): - unpackOptions = UnpackOptions() - unpackOptions.Init(buf, pos) - return cls.InitFromObj(unpackOptions) + subGraph = SubGraph() + subGraph.Init(buf, pos) + return cls.InitFromObj(subGraph) @classmethod - def InitFromObj(cls, unpackOptions): - x = UnpackOptionsT() - x._UnPack(unpackOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, subGraph): + x = SubGraphT() + x._UnPack(subGraph) return x - # UnpackOptionsT - def _UnPack(self, unpackOptions): - if unpackOptions is None: + # SubGraphT + def _UnPack(self, subGraph): + if subGraph is None: return - self.num = unpackOptions.Num() - self.axis = unpackOptions.Axis() + if not subGraph.TensorsIsNone(): + self.tensors = [] + for i in range(subGraph.TensorsLength()): + if subGraph.Tensors(i) is None: + self.tensors.append(None) + else: + tensor_ = TensorT.InitFromObj(subGraph.Tensors(i)) + self.tensors.append(tensor_) + if not subGraph.InputsIsNone(): + if np is None: + self.inputs = [] + for i in range(subGraph.InputsLength()): + 
self.inputs.append(subGraph.Inputs(i)) + else: + self.inputs = subGraph.InputsAsNumpy() + if not subGraph.OutputsIsNone(): + if np is None: + self.outputs = [] + for i in range(subGraph.OutputsLength()): + self.outputs.append(subGraph.Outputs(i)) + else: + self.outputs = subGraph.OutputsAsNumpy() + if not subGraph.OperatorsIsNone(): + self.operators = [] + for i in range(subGraph.OperatorsLength()): + if subGraph.Operators(i) is None: + self.operators.append(None) + else: + operator_ = OperatorT.InitFromObj(subGraph.Operators(i)) + self.operators.append(operator_) + self.name = subGraph.Name() - # UnpackOptionsT + # SubGraphT def Pack(self, builder): - UnpackOptionsStart(builder) - UnpackOptionsAddNum(builder, self.num) - UnpackOptionsAddAxis(builder, self.axis) - unpackOptions = UnpackOptionsEnd(builder) - return unpackOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.tensors is not None: + tensorslist = [] + for i in range(len(self.tensors)): + tensorslist.append(self.tensors[i].Pack(builder)) + SubGraphStartTensorsVector(builder, len(self.tensors)) + for i in reversed(range(len(self.tensors))): + builder.PrependUOffsetTRelative(tensorslist[i]) + tensors = builder.EndVector() + if self.inputs is not None: + if np is not None and type(self.inputs) is np.ndarray: + inputs = builder.CreateNumpyVector(self.inputs) + else: + SubGraphStartInputsVector(builder, len(self.inputs)) + for i in reversed(range(len(self.inputs))): + builder.PrependInt32(self.inputs[i]) + inputs = builder.EndVector() + if self.outputs is not None: + if np is not None and type(self.outputs) is np.ndarray: + outputs = builder.CreateNumpyVector(self.outputs) + else: + SubGraphStartOutputsVector(builder, len(self.outputs)) + for i in reversed(range(len(self.outputs))): + builder.PrependInt32(self.outputs[i]) + outputs = builder.EndVector() + if self.operators is not None: + operatorslist = [] + for i in range(len(self.operators)): + operatorslist.append(self.operators[i].Pack(builder)) + SubGraphStartOperatorsVector(builder, len(self.operators)) + for i in reversed(range(len(self.operators))): + builder.PrependUOffsetTRelative(operatorslist[i]) + operators = builder.EndVector() + if self.name is not None: + name = builder.CreateString(self.name) + SubGraphStart(builder) + if self.tensors is not None: + SubGraphAddTensors(builder, tensors) + if self.inputs is not None: + SubGraphAddInputs(builder, inputs) + if self.outputs is not None: + SubGraphAddOutputs(builder, outputs) + if self.operators is not None: + SubGraphAddOperators(builder, operators) + if self.name is not None: + SubGraphAddName(builder, name) + subGraph = SubGraphEnd(builder) + return subGraph -from flatbuffers.compat import import_numpy -np = import_numpy() -class UnsortedSegmentMaxOptions(object): +class Buffer(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UnsortedSegmentMaxOptions() + x = Buffer() x.Init(buf, n + offset) return x @classmethod - def GetRootAsUnsortedSegmentMaxOptions(cls, buf, offset=0): + def GetRootAsBuffer(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def UnsortedSegmentMaxOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def BufferBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # UnsortedSegmentMaxOptions + # Buffer def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def UnsortedSegmentMaxOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return UnsortedSegmentMaxOptionsStart(builder) -def UnsortedSegmentMaxOptionsEnd(builder): return builder.EndObject() -def End(builder): - return UnsortedSegmentMaxOptionsEnd(builder) + # Buffer + def Data(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Uint8Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1)) + return 0 -class UnsortedSegmentMaxOptionsT(object): + # Buffer + def DataAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o) + return 0 - # UnsortedSegmentMaxOptionsT - def __init__(self): - pass + # Buffer + def DataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.VectorLen(o) + return 0 - @classmethod - def InitFromBuf(cls, buf, pos): - unsortedSegmentMaxOptions = UnsortedSegmentMaxOptions() - unsortedSegmentMaxOptions.Init(buf, pos) - return cls.InitFromObj(unsortedSegmentMaxOptions) + # Buffer + def DataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + return o == 0 - @classmethod - def InitFromObj(cls, unsortedSegmentMaxOptions): - x = UnsortedSegmentMaxOptionsT() - x._UnPack(unsortedSegmentMaxOptions) - return x + # Buffer + def Offset(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 - # UnsortedSegmentMaxOptionsT - def _UnPack(self, unsortedSegmentMaxOptions): - if unsortedSegmentMaxOptions is None: - return + # Buffer + def Size(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint64Flags, o + self._tab.Pos) + return 0 - # UnsortedSegmentMaxOptionsT - def Pack(self, builder): - UnsortedSegmentMaxOptionsStart(builder) - unsortedSegmentMaxOptions = UnsortedSegmentMaxOptionsEnd(builder) - return unsortedSegmentMaxOptions -# automatically generated by the FlatBuffers compiler, do not modify +def BufferStart(builder): + builder.StartObject(3) -# namespace: tflite +def BufferAddData(builder, data): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(data), 0) -from flatbuffers.compat import import_numpy -np = import_numpy() +def BufferStartDataVector(builder, numElems): + return builder.StartVector(1, numElems, 1) -class UnsortedSegmentMinOptions(object): - __slots__ = ['_tab'] +def BufferAddOffset(builder, offset): + builder.PrependUint64Slot(1, offset, 0) - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UnsortedSegmentMinOptions() - x.Init(buf, n + offset) - return x +def BufferAddSize(builder, size): + builder.PrependUint64Slot(2, 
size, 0) - @classmethod - def GetRootAsUnsortedSegmentMinOptions(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def UnsortedSegmentMinOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) +def BufferEnd(builder): + return builder.EndObject() - # UnsortedSegmentMinOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) -def UnsortedSegmentMinOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return UnsortedSegmentMinOptionsStart(builder) -def UnsortedSegmentMinOptionsEnd(builder): return builder.EndObject() -def End(builder): - return UnsortedSegmentMinOptionsEnd(builder) +try: + from typing import List +except: + pass -class UnsortedSegmentMinOptionsT(object): +class BufferT(object): - # UnsortedSegmentMinOptionsT + # BufferT def __init__(self): - pass + self.data = None # type: List[int] + self.offset = 0 # type: int + self.size = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - unsortedSegmentMinOptions = UnsortedSegmentMinOptions() - unsortedSegmentMinOptions.Init(buf, pos) - return cls.InitFromObj(unsortedSegmentMinOptions) + buffer = Buffer() + buffer.Init(buf, pos) + return cls.InitFromObj(buffer) @classmethod - def InitFromObj(cls, unsortedSegmentMinOptions): - x = UnsortedSegmentMinOptionsT() - x._UnPack(unsortedSegmentMinOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, buffer): + x = BufferT() + x._UnPack(buffer) return x - # UnsortedSegmentMinOptionsT - def _UnPack(self, unsortedSegmentMinOptions): - if unsortedSegmentMinOptions is None: + # BufferT + def _UnPack(self, buffer): + if buffer is None: return + if not buffer.DataIsNone(): + if np is None: + self.data = [] + for i in range(buffer.DataLength()): + self.data.append(buffer.Data(i)) + else: + self.data = buffer.DataAsNumpy() + self.offset = buffer.Offset() + self.size = buffer.Size() - # UnsortedSegmentMinOptionsT + # BufferT def Pack(self, builder): - UnsortedSegmentMinOptionsStart(builder) - unsortedSegmentMinOptions = UnsortedSegmentMinOptionsEnd(builder) - return unsortedSegmentMinOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.data is not None: + if np is not None and type(self.data) is np.ndarray: + data = builder.CreateNumpyVector(self.data) + else: + BufferStartDataVector(builder, len(self.data)) + for i in reversed(range(len(self.data))): + builder.PrependUint8(self.data[i]) + data = builder.EndVector() + BufferStart(builder) + if self.data is not None: + BufferAddData(builder, data) + BufferAddOffset(builder, self.offset) + BufferAddSize(builder, self.size) + buffer = BufferEnd(builder) + return buffer -from flatbuffers.compat import import_numpy -np = import_numpy() -class UnsortedSegmentProdOptions(object): +class Metadata(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UnsortedSegmentProdOptions() + x = Metadata() x.Init(buf, n + offset) return x @classmethod - def GetRootAsUnsortedSegmentProdOptions(cls, buf, offset=0): + def GetRootAsMetadata(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def UnsortedSegmentProdOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def MetadataBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # UnsortedSegmentProdOptions + # Metadata def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) -def UnsortedSegmentProdOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return UnsortedSegmentProdOptionsStart(builder) -def UnsortedSegmentProdOptionsEnd(builder): return builder.EndObject() -def End(builder): - return UnsortedSegmentProdOptionsEnd(builder) - -class UnsortedSegmentProdOptionsT(object): - - # UnsortedSegmentProdOptionsT - def __init__(self): - pass - - @classmethod - def InitFromBuf(cls, buf, pos): - unsortedSegmentProdOptions = UnsortedSegmentProdOptions() - unsortedSegmentProdOptions.Init(buf, pos) - return cls.InitFromObj(unsortedSegmentProdOptions) - - @classmethod - def InitFromObj(cls, unsortedSegmentProdOptions): - x = UnsortedSegmentProdOptionsT() - x._UnPack(unsortedSegmentProdOptions) - return x - - # UnsortedSegmentProdOptionsT - def _UnPack(self, unsortedSegmentProdOptions): - if unsortedSegmentProdOptions is None: - return - - # UnsortedSegmentProdOptionsT - def Pack(self, builder): - UnsortedSegmentProdOptionsStart(builder) - unsortedSegmentProdOptions = UnsortedSegmentProdOptionsEnd(builder) - return unsortedSegmentProdOptions -# automatically generated by the FlatBuffers compiler, do not modify + # Metadata + def Name(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None -# namespace: tflite + # Metadata + def Buffer(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 -from flatbuffers.compat import import_numpy -np = import_numpy() +def MetadataStart(builder): + builder.StartObject(2) -class UnsortedSegmentSumOptions(object): - __slots__ = ['_tab'] +def MetadataAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = UnsortedSegmentSumOptions() - x.Init(buf, n + offset) - return x +def MetadataAddBuffer(builder, buffer): + builder.PrependUint32Slot(1, buffer, 0) - @classmethod - def GetRootAsUnsortedSegmentSumOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def UnsortedSegmentSumOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) +def MetadataEnd(builder): + return builder.EndObject() - # UnsortedSegmentSumOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) -def UnsortedSegmentSumOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return UnsortedSegmentSumOptionsStart(builder) -def UnsortedSegmentSumOptionsEnd(builder): return builder.EndObject() -def End(builder): - return UnsortedSegmentSumOptionsEnd(builder) -class UnsortedSegmentSumOptionsT(object): +class MetadataT(object): - # UnsortedSegmentSumOptionsT + # MetadataT def __init__(self): - pass + self.name = None # type: str + self.buffer = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - unsortedSegmentSumOptions = UnsortedSegmentSumOptions() - unsortedSegmentSumOptions.Init(buf, pos) - return cls.InitFromObj(unsortedSegmentSumOptions) + metadata = Metadata() + metadata.Init(buf, pos) + return cls.InitFromObj(metadata) @classmethod - def InitFromObj(cls, unsortedSegmentSumOptions): - x = UnsortedSegmentSumOptionsT() - x._UnPack(unsortedSegmentSumOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, metadata): + x = MetadataT() + x._UnPack(metadata) return x - # UnsortedSegmentSumOptionsT - def _UnPack(self, unsortedSegmentSumOptions): - if unsortedSegmentSumOptions is None: + # MetadataT + def _UnPack(self, metadata): + if metadata is None: return + self.name = metadata.Name() + self.buffer = metadata.Buffer() - # UnsortedSegmentSumOptionsT + # MetadataT def Pack(self, builder): - UnsortedSegmentSumOptionsStart(builder) - unsortedSegmentSumOptions = UnsortedSegmentSumOptionsEnd(builder) - return unsortedSegmentSumOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.name is not None: + name = builder.CreateString(self.name) + MetadataStart(builder) + if self.name is not None: + MetadataAddName(builder, name) + MetadataAddBuffer(builder, self.buffer) + metadata = MetadataEnd(builder) + return metadata -from flatbuffers.compat import import_numpy -np = import_numpy() -class VarHandleOptions(object): +class TensorMap(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = VarHandleOptions() + x = TensorMap() x.Init(buf, n + offset) return x @classmethod - def GetRootAsVarHandleOptions(cls, buf, offset=0): + def GetRootAsTensorMap(cls, buf, offset=0): """This method is deprecated. 
Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def VarHandleOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def TensorMapBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # VarHandleOptions + # TensorMap def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # VarHandleOptions - def Container(self): + # TensorMap + def Name(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.String(o + self._tab.Pos) return None - # VarHandleOptions - def SharedName(self): + # TensorMap + def TensorIndex(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.String(o + self._tab.Pos) - return None + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 -def VarHandleOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return VarHandleOptionsStart(builder) -def VarHandleOptionsAddContainer(builder, container): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(container), 0) -def AddContainer(builder, container): - return VarHandleOptionsAddContainer(builder, container) -def VarHandleOptionsAddSharedName(builder, sharedName): builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(sharedName), 0) -def AddSharedName(builder, sharedName): - return VarHandleOptionsAddSharedName(builder, sharedName) -def VarHandleOptionsEnd(builder): return builder.EndObject() -def End(builder): - return VarHandleOptionsEnd(builder) +def TensorMapStart(builder): + builder.StartObject(2) -class VarHandleOptionsT(object): +def TensorMapAddName(builder, name): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0) - # VarHandleOptionsT +def TensorMapAddTensorIndex(builder, tensorIndex): + builder.PrependUint32Slot(1, tensorIndex, 0) + +def TensorMapEnd(builder): + return builder.EndObject() + + + +class TensorMapT(object): + + # TensorMapT def __init__(self): - self.container = None # type: str - self.sharedName = None # type: str + self.name = None # type: str + self.tensorIndex = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - varHandleOptions = VarHandleOptions() - varHandleOptions.Init(buf, pos) - return cls.InitFromObj(varHandleOptions) + tensorMap = TensorMap() + tensorMap.Init(buf, pos) + return cls.InitFromObj(tensorMap) @classmethod - def InitFromObj(cls, varHandleOptions): - x = VarHandleOptionsT() - x._UnPack(varHandleOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, tensorMap): + x = TensorMapT() + x._UnPack(tensorMap) return x - # VarHandleOptionsT - def _UnPack(self, varHandleOptions): - if varHandleOptions is None: + # TensorMapT + def _UnPack(self, tensorMap): + if tensorMap is None: return - self.container = varHandleOptions.Container() - self.sharedName = varHandleOptions.SharedName() + self.name = tensorMap.Name() + self.tensorIndex = tensorMap.TensorIndex() - # VarHandleOptionsT + # TensorMapT def Pack(self, builder): - if self.container is not None: - container = builder.CreateString(self.container) - if self.sharedName is not None: - sharedName = builder.CreateString(self.sharedName) - 
VarHandleOptionsStart(builder) - if self.container is not None: - VarHandleOptionsAddContainer(builder, container) - if self.sharedName is not None: - VarHandleOptionsAddSharedName(builder, sharedName) - varHandleOptions = VarHandleOptionsEnd(builder) - return varHandleOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.name is not None: + name = builder.CreateString(self.name) + TensorMapStart(builder) + if self.name is not None: + TensorMapAddName(builder, name) + TensorMapAddTensorIndex(builder, self.tensorIndex) + tensorMap = TensorMapEnd(builder) + return tensorMap -from flatbuffers.compat import import_numpy -np = import_numpy() -class VariantSubType(object): +class SignatureDef(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = VariantSubType() + x = SignatureDef() x.Init(buf, n + offset) return x @classmethod - def GetRootAsVariantSubType(cls, buf, offset=0): + def GetRootAsSignatureDef(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def VariantSubTypeBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def SignatureDefBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # VariantSubType + # SignatureDef def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # VariantSubType - def Shape(self, j): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) - if o != 0: - a = self._tab.Vector(o) - return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) - return 0 - - # VariantSubType - def ShapeAsNumpy(self): + # SignatureDef + def Inputs(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: - return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) - return 0 + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = TensorMap() + obj.Init(self._tab.Bytes, x) + return obj + return None - # VariantSubType - def ShapeLength(self): + # SignatureDef + def InputsLength(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) if o != 0: return self._tab.VectorLen(o) return 0 - # VariantSubType - def ShapeIsNone(self): + # SignatureDef + def InputsIsNone(self): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) return o == 0 - # VariantSubType - def Type(self): + # SignatureDef + def Outputs(self, j): o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos) - return 0 + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = TensorMap() + obj.Init(self._tab.Bytes, x) + return obj + return None - # VariantSubType - def HasRank(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + # SignatureDef + def OutputsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) if o != 0: - return bool(self._tab.Get(flatbuffers.number_types.BoolFlags, o + self._tab.Pos)) - return False - -def VariantSubTypeStart(builder): builder.StartObject(3) -def Start(builder): - 
return VariantSubTypeStart(builder) -def VariantSubTypeAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0) -def AddShape(builder, shape): - return VariantSubTypeAddShape(builder, shape) -def VariantSubTypeStartShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4) -def StartShapeVector(builder, numElems): - return VariantSubTypeStartShapeVector(builder, numElems) -def VariantSubTypeAddType(builder, type): builder.PrependInt8Slot(1, type, 0) -def AddType(builder, type): - return VariantSubTypeAddType(builder, type) -def VariantSubTypeAddHasRank(builder, hasRank): builder.PrependBoolSlot(2, hasRank, 0) -def AddHasRank(builder, hasRank): - return VariantSubTypeAddHasRank(builder, hasRank) -def VariantSubTypeEnd(builder): return builder.EndObject() -def End(builder): - return VariantSubTypeEnd(builder) -try: - from typing import List -except: - pass - -class VariantSubTypeT(object): - - # VariantSubTypeT - def __init__(self): - self.shape = None # type: List[int] - self.type = 0 # type: int - self.hasRank = False # type: bool - - @classmethod - def InitFromBuf(cls, buf, pos): - variantSubType = VariantSubType() - variantSubType.Init(buf, pos) - return cls.InitFromObj(variantSubType) - - @classmethod - def InitFromObj(cls, variantSubType): - x = VariantSubTypeT() - x._UnPack(variantSubType) - return x + return self._tab.VectorLen(o) + return 0 - # VariantSubTypeT - def _UnPack(self, variantSubType): - if variantSubType is None: - return - if not variantSubType.ShapeIsNone(): - if np is None: - self.shape = [] - for i in range(variantSubType.ShapeLength()): - self.shape.append(variantSubType.Shape(i)) - else: - self.shape = variantSubType.ShapeAsNumpy() - self.type = variantSubType.Type() - self.hasRank = variantSubType.HasRank() + # SignatureDef + def OutputsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 - # VariantSubTypeT - def Pack(self, builder): - if self.shape is not None: - if np is not None and type(self.shape) is np.ndarray: - shape = builder.CreateNumpyVector(self.shape) - else: - VariantSubTypeStartShapeVector(builder, len(self.shape)) - for i in reversed(range(len(self.shape))): - builder.PrependInt32(self.shape[i]) - shape = builder.EndVector() - VariantSubTypeStart(builder) - if self.shape is not None: - VariantSubTypeAddShape(builder, shape) - VariantSubTypeAddType(builder, self.type) - VariantSubTypeAddHasRank(builder, self.hasRank) - variantSubType = VariantSubTypeEnd(builder) - return variantSubType -# automatically generated by the FlatBuffers compiler, do not modify + # SignatureDef + def SignatureKey(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None -# namespace: tflite + # SignatureDef + def SubgraphIndex(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 -from flatbuffers.compat import import_numpy -np = import_numpy() +def SignatureDefStart(builder): + builder.StartObject(5) -class WhereOptions(object): - __slots__ = ['_tab'] +def SignatureDefAddInputs(builder, inputs): + builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(inputs), 0) - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x 
= WhereOptions() - x.Init(buf, n + offset) - return x +def SignatureDefStartInputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) - @classmethod - def GetRootAsWhereOptions(cls, buf, offset=0): - """This method is deprecated. Please switch to GetRootAs.""" - return cls.GetRootAs(buf, offset) - @classmethod - def WhereOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): - return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) +def SignatureDefAddOutputs(builder, outputs): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(outputs), 0) - # WhereOptions - def Init(self, buf, pos): - self._tab = flatbuffers.table.Table(buf, pos) +def SignatureDefStartOutputsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) -def WhereOptionsStart(builder): builder.StartObject(0) -def Start(builder): - return WhereOptionsStart(builder) -def WhereOptionsEnd(builder): return builder.EndObject() -def End(builder): - return WhereOptionsEnd(builder) +def SignatureDefAddSignatureKey(builder, signatureKey): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(signatureKey), 0) -class WhereOptionsT(object): +def SignatureDefAddSubgraphIndex(builder, subgraphIndex): + builder.PrependUint32Slot(4, subgraphIndex, 0) - # WhereOptionsT +def SignatureDefEnd(builder): + return builder.EndObject() + + +try: + from typing import List +except: + pass + +class SignatureDefT(object): + + # SignatureDefT def __init__(self): - pass + self.inputs = None # type: List[TensorMapT] + self.outputs = None # type: List[TensorMapT] + self.signatureKey = None # type: str + self.subgraphIndex = 0 # type: int @classmethod def InitFromBuf(cls, buf, pos): - whereOptions = WhereOptions() - whereOptions.Init(buf, pos) - return cls.InitFromObj(whereOptions) + signatureDef = SignatureDef() + signatureDef.Init(buf, pos) + return cls.InitFromObj(signatureDef) @classmethod - def InitFromObj(cls, whereOptions): - x = WhereOptionsT() - x._UnPack(whereOptions) + def InitFromPackedBuf(cls, buf, pos=0): + n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos) + return cls.InitFromBuf(buf, pos+n) + + @classmethod + def InitFromObj(cls, signatureDef): + x = SignatureDefT() + x._UnPack(signatureDef) return x - # WhereOptionsT - def _UnPack(self, whereOptions): - if whereOptions is None: + # SignatureDefT + def _UnPack(self, signatureDef): + if signatureDef is None: return + if not signatureDef.InputsIsNone(): + self.inputs = [] + for i in range(signatureDef.InputsLength()): + if signatureDef.Inputs(i) is None: + self.inputs.append(None) + else: + tensorMap_ = TensorMapT.InitFromObj(signatureDef.Inputs(i)) + self.inputs.append(tensorMap_) + if not signatureDef.OutputsIsNone(): + self.outputs = [] + for i in range(signatureDef.OutputsLength()): + if signatureDef.Outputs(i) is None: + self.outputs.append(None) + else: + tensorMap_ = TensorMapT.InitFromObj(signatureDef.Outputs(i)) + self.outputs.append(tensorMap_) + self.signatureKey = signatureDef.SignatureKey() + self.subgraphIndex = signatureDef.SubgraphIndex() - # WhereOptionsT + # SignatureDefT def Pack(self, builder): - WhereOptionsStart(builder) - whereOptions = WhereOptionsEnd(builder) - return whereOptions -# automatically generated by the FlatBuffers compiler, do not modify - -# namespace: tflite + if self.inputs is not None: + inputslist = [] + for i in range(len(self.inputs)): + 
inputslist.append(self.inputs[i].Pack(builder)) + SignatureDefStartInputsVector(builder, len(self.inputs)) + for i in reversed(range(len(self.inputs))): + builder.PrependUOffsetTRelative(inputslist[i]) + inputs = builder.EndVector() + if self.outputs is not None: + outputslist = [] + for i in range(len(self.outputs)): + outputslist.append(self.outputs[i].Pack(builder)) + SignatureDefStartOutputsVector(builder, len(self.outputs)) + for i in reversed(range(len(self.outputs))): + builder.PrependUOffsetTRelative(outputslist[i]) + outputs = builder.EndVector() + if self.signatureKey is not None: + signatureKey = builder.CreateString(self.signatureKey) + SignatureDefStart(builder) + if self.inputs is not None: + SignatureDefAddInputs(builder, inputs) + if self.outputs is not None: + SignatureDefAddOutputs(builder, outputs) + if self.signatureKey is not None: + SignatureDefAddSignatureKey(builder, signatureKey) + SignatureDefAddSubgraphIndex(builder, self.subgraphIndex) + signatureDef = SignatureDefEnd(builder) + return signatureDef -from flatbuffers.compat import import_numpy -np = import_numpy() -class WhileOptions(object): +class Model(object): __slots__ = ['_tab'] @classmethod def GetRootAs(cls, buf, offset=0): n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = WhileOptions() + x = Model() x.Init(buf, n + offset) return x @classmethod - def GetRootAsWhileOptions(cls, buf, offset=0): + def GetRootAsModel(cls, buf, offset=0): """This method is deprecated. Please switch to GetRootAs.""" return cls.GetRootAs(buf, offset) @classmethod - def WhileOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False): + def ModelBufferHasIdentifier(cls, buf, offset, size_prefixed=False): return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed) - # WhileOptions + # Model def Init(self, buf, pos): self._tab = flatbuffers.table.Table(buf, pos) - # WhileOptions - def CondSubgraphIndex(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + # Model + def Version(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4)) + if o != 0: + return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos) + return 0 + + # Model + def OperatorCodes(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = OperatorCode() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def OperatorCodesLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def OperatorCodesIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + return o == 0 + + # Model + def Subgraphs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = SubGraph() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def SubgraphsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def SubgraphsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8)) + return o == 0 + + # Model + def Description(self): + 
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10)) + if o != 0: + return self._tab.String(o + self._tab.Pos) + return None + + # Model + def Buffers(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Buffer() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def BuffersLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def BuffersIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12)) + return o == 0 + + # Model + def MetadataBuffer(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + a = self._tab.Vector(o) + return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4)) + return 0 + + # Model + def MetadataBufferAsNumpy(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o) + return 0 + + # Model + def MetadataBufferLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + if o != 0: + return self._tab.VectorLen(o) + return 0 + + # Model + def MetadataBufferIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(14)) + return o == 0 + + # Model + def Metadata(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + if o != 0: + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = Metadata() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def MetadataLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + return self._tab.VectorLen(o) return 0 - # WhileOptions - def BodySubgraphIndex(self): - o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6)) + # Model + def MetadataIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(16)) + return o == 0 + + # Model + def SignatureDefs(self, j): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) if o != 0: - return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos) + x = self._tab.Vector(o) + x += flatbuffers.number_types.UOffsetTFlags.py_type(j) * 4 + x = self._tab.Indirect(x) + obj = SignatureDef() + obj.Init(self._tab.Bytes, x) + return obj + return None + + # Model + def SignatureDefsLength(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + if o != 0: + return self._tab.VectorLen(o) return 0 -def WhileOptionsStart(builder): builder.StartObject(2) -def Start(builder): - return WhileOptionsStart(builder) -def WhileOptionsAddCondSubgraphIndex(builder, condSubgraphIndex): builder.PrependInt32Slot(0, condSubgraphIndex, 0) -def AddCondSubgraphIndex(builder, condSubgraphIndex): - return WhileOptionsAddCondSubgraphIndex(builder, condSubgraphIndex) -def WhileOptionsAddBodySubgraphIndex(builder, bodySubgraphIndex): builder.PrependInt32Slot(1, bodySubgraphIndex, 0) -def AddBodySubgraphIndex(builder, bodySubgraphIndex): - return WhileOptionsAddBodySubgraphIndex(builder, 
bodySubgraphIndex) -def WhileOptionsEnd(builder): return builder.EndObject() -def End(builder): - return WhileOptionsEnd(builder) + # Model + def SignatureDefsIsNone(self): + o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(18)) + return o == 0 -class WhileOptionsT(object): +def ModelStart(builder): + builder.StartObject(8) - # WhileOptionsT - def __init__(self): - self.condSubgraphIndex = 0 # type: int - self.bodySubgraphIndex = 0 # type: int +def ModelAddVersion(builder, version): + builder.PrependUint32Slot(0, version, 0) - @classmethod - def InitFromBuf(cls, buf, pos): - whileOptions = WhileOptions() - whileOptions.Init(buf, pos) - return cls.InitFromObj(whileOptions) +def ModelAddOperatorCodes(builder, operatorCodes): + builder.PrependUOffsetTRelativeSlot(1, flatbuffers.number_types.UOffsetTFlags.py_type(operatorCodes), 0) - @classmethod - def InitFromObj(cls, whileOptions): - x = WhileOptionsT() - x._UnPack(whileOptions) - return x +def ModelStartOperatorCodesVector(builder, numElems): + return builder.StartVector(4, numElems, 4) - # WhileOptionsT - def _UnPack(self, whileOptions): - if whileOptions is None: - return - self.condSubgraphIndex = whileOptions.CondSubgraphIndex() - self.bodySubgraphIndex = whileOptions.BodySubgraphIndex() +def ModelAddSubgraphs(builder, subgraphs): + builder.PrependUOffsetTRelativeSlot(2, flatbuffers.number_types.UOffsetTFlags.py_type(subgraphs), 0) - # WhileOptionsT - def Pack(self, builder): - WhileOptionsStart(builder) - WhileOptionsAddCondSubgraphIndex(builder, self.condSubgraphIndex) - WhileOptionsAddBodySubgraphIndex(builder, self.bodySubgraphIndex) - whileOptions = WhileOptionsEnd(builder) - return whileOptions -# automatically generated by the FlatBuffers compiler, do not modify +def ModelStartSubgraphsVector(builder, numElems): + return builder.StartVector(4, numElems, 4) -# namespace: tflite +def ModelAddDescription(builder, description): + builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(description), 0) -from flatbuffers.compat import import_numpy -np = import_numpy() +def ModelAddBuffers(builder, buffers): + builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(buffers), 0) -class ZerosLikeOptions(object): - __slots__ = ['_tab'] +def ModelStartBuffersVector(builder, numElems): + return builder.StartVector(4, numElems, 4) - @classmethod - def GetRootAs(cls, buf, offset=0): - n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset) - x = ZerosLikeOptions() - x.Init(buf, n + offset) - return x +def ModelAddMetadataBuffer(builder, metadataBuffer): + builder.PrependUOffsetTRelativeSlot(5, flatbuffers.number_types.UOffsetTFlags.py_type(metadataBuffer), 0) - @classmethod - def GetRootAsZerosLikeOptions(cls, buf, offset=0): - """This method is deprecated. 
Please switch to GetRootAs."""
-        return cls.GetRootAs(buf, offset)
-    @classmethod
-    def ZerosLikeOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
-        return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
+def ModelStartMetadataBufferVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
 
-    # ZerosLikeOptions
-    def Init(self, buf, pos):
-        self._tab = flatbuffers.table.Table(buf, pos)
+def ModelAddMetadata(builder, metadata):
+    builder.PrependUOffsetTRelativeSlot(6, flatbuffers.number_types.UOffsetTFlags.py_type(metadata), 0)
 
-def ZerosLikeOptionsStart(builder): builder.StartObject(0)
-def Start(builder):
-    return ZerosLikeOptionsStart(builder)
-def ZerosLikeOptionsEnd(builder): return builder.EndObject()
-def End(builder):
-    return ZerosLikeOptionsEnd(builder)
+def ModelStartMetadataVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
 
-class ZerosLikeOptionsT(object):
+def ModelAddSignatureDefs(builder, signatureDefs):
+    builder.PrependUOffsetTRelativeSlot(7, flatbuffers.number_types.UOffsetTFlags.py_type(signatureDefs), 0)
 
-    # ZerosLikeOptionsT
+def ModelStartSignatureDefsVector(builder, numElems):
+    return builder.StartVector(4, numElems, 4)
+
+def ModelEnd(builder):
+    return builder.EndObject()
+
+
+try:
+    from typing import List
+except:
+    pass
+
+class ModelT(object):
+
+    # ModelT
     def __init__(self):
-        pass
+        self.version = 0  # type: int
+        self.operatorCodes = None  # type: List[OperatorCodeT]
+        self.subgraphs = None  # type: List[SubGraphT]
+        self.description = None  # type: str
+        self.buffers = None  # type: List[BufferT]
+        self.metadataBuffer = None  # type: List[int]
+        self.metadata = None  # type: List[MetadataT]
+        self.signatureDefs = None  # type: List[SignatureDefT]
 
     @classmethod
     def InitFromBuf(cls, buf, pos):
-        zerosLikeOptions = ZerosLikeOptions()
-        zerosLikeOptions.Init(buf, pos)
-        return cls.InitFromObj(zerosLikeOptions)
+        model = Model()
+        model.Init(buf, pos)
+        return cls.InitFromObj(model)
 
     @classmethod
-    def InitFromObj(cls, zerosLikeOptions):
-        x = ZerosLikeOptionsT()
-        x._UnPack(zerosLikeOptions)
+    def InitFromPackedBuf(cls, buf, pos=0):
+        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, pos)
+        return cls.InitFromBuf(buf, pos+n)
+
+    @classmethod
+    def InitFromObj(cls, model):
+        x = ModelT()
+        x._UnPack(model)
         return x
 
-    # ZerosLikeOptionsT
-    def _UnPack(self, zerosLikeOptions):
-        if zerosLikeOptions is None:
+    # ModelT
+    def _UnPack(self, model):
+        if model is None:
             return
+        self.version = model.Version()
+        if not model.OperatorCodesIsNone():
+            self.operatorCodes = []
+            for i in range(model.OperatorCodesLength()):
+                if model.OperatorCodes(i) is None:
+                    self.operatorCodes.append(None)
+                else:
+                    operatorCode_ = OperatorCodeT.InitFromObj(model.OperatorCodes(i))
+                    self.operatorCodes.append(operatorCode_)
+        if not model.SubgraphsIsNone():
+            self.subgraphs = []
+            for i in range(model.SubgraphsLength()):
+                if model.Subgraphs(i) is None:
+                    self.subgraphs.append(None)
+                else:
+                    subGraph_ = SubGraphT.InitFromObj(model.Subgraphs(i))
+                    self.subgraphs.append(subGraph_)
+        self.description = model.Description()
+        if not model.BuffersIsNone():
+            self.buffers = []
+            for i in range(model.BuffersLength()):
+                if model.Buffers(i) is None:
+                    self.buffers.append(None)
+                else:
+                    buffer_ = BufferT.InitFromObj(model.Buffers(i))
+                    self.buffers.append(buffer_)
+        if not model.MetadataBufferIsNone():
+            if np is None:
+                self.metadataBuffer = []
+                for i in range(model.MetadataBufferLength()):
+                    self.metadataBuffer.append(model.MetadataBuffer(i))
+            else:
+                self.metadataBuffer = model.MetadataBufferAsNumpy()
+        if not model.MetadataIsNone():
+            self.metadata = []
+            for i in range(model.MetadataLength()):
+                if model.Metadata(i) is None:
+                    self.metadata.append(None)
+                else:
+                    metadata_ = MetadataT.InitFromObj(model.Metadata(i))
+                    self.metadata.append(metadata_)
+        if not model.SignatureDefsIsNone():
+            self.signatureDefs = []
+            for i in range(model.SignatureDefsLength()):
+                if model.SignatureDefs(i) is None:
+                    self.signatureDefs.append(None)
+                else:
+                    signatureDef_ = SignatureDefT.InitFromObj(model.SignatureDefs(i))
+                    self.signatureDefs.append(signatureDef_)
 
-    # ZerosLikeOptionsT
+    # ModelT
     def Pack(self, builder):
-        ZerosLikeOptionsStart(builder)
-        zerosLikeOptions = ZerosLikeOptionsEnd(builder)
-        return zerosLikeOptions
+        if self.operatorCodes is not None:
+            operatorCodeslist = []
+            for i in range(len(self.operatorCodes)):
+                operatorCodeslist.append(self.operatorCodes[i].Pack(builder))
+            ModelStartOperatorCodesVector(builder, len(self.operatorCodes))
+            for i in reversed(range(len(self.operatorCodes))):
+                builder.PrependUOffsetTRelative(operatorCodeslist[i])
+            operatorCodes = builder.EndVector()
+        if self.subgraphs is not None:
+            subgraphslist = []
+            for i in range(len(self.subgraphs)):
+                subgraphslist.append(self.subgraphs[i].Pack(builder))
+            ModelStartSubgraphsVector(builder, len(self.subgraphs))
+            for i in reversed(range(len(self.subgraphs))):
+                builder.PrependUOffsetTRelative(subgraphslist[i])
+            subgraphs = builder.EndVector()
+        if self.description is not None:
+            description = builder.CreateString(self.description)
+        if self.buffers is not None:
+            bufferslist = []
+            for i in range(len(self.buffers)):
+                bufferslist.append(self.buffers[i].Pack(builder))
+            ModelStartBuffersVector(builder, len(self.buffers))
+            for i in reversed(range(len(self.buffers))):
+                builder.PrependUOffsetTRelative(bufferslist[i])
+            buffers = builder.EndVector()
+        if self.metadataBuffer is not None:
+            if np is not None and type(self.metadataBuffer) is np.ndarray:
+                metadataBuffer = builder.CreateNumpyVector(self.metadataBuffer)
+            else:
+                ModelStartMetadataBufferVector(builder, len(self.metadataBuffer))
+                for i in reversed(range(len(self.metadataBuffer))):
+                    builder.PrependInt32(self.metadataBuffer[i])
+                metadataBuffer = builder.EndVector()
+        if self.metadata is not None:
+            metadatalist = []
+            for i in range(len(self.metadata)):
+                metadatalist.append(self.metadata[i].Pack(builder))
+            ModelStartMetadataVector(builder, len(self.metadata))
+            for i in reversed(range(len(self.metadata))):
+                builder.PrependUOffsetTRelative(metadatalist[i])
+            metadata = builder.EndVector()
+        if self.signatureDefs is not None:
+            signatureDefslist = []
+            for i in range(len(self.signatureDefs)):
+                signatureDefslist.append(self.signatureDefs[i].Pack(builder))
+            ModelStartSignatureDefsVector(builder, len(self.signatureDefs))
+            for i in reversed(range(len(self.signatureDefs))):
+                builder.PrependUOffsetTRelative(signatureDefslist[i])
+            signatureDefs = builder.EndVector()
+        ModelStart(builder)
+        ModelAddVersion(builder, self.version)
+        if self.operatorCodes is not None:
+            ModelAddOperatorCodes(builder, operatorCodes)
+        if self.subgraphs is not None:
+            ModelAddSubgraphs(builder, subgraphs)
+        if self.description is not None:
+            ModelAddDescription(builder, description)
+        if self.buffers is not None:
+            ModelAddBuffers(builder, buffers)
+        if self.metadataBuffer is not None:
+            ModelAddMetadataBuffer(builder, metadataBuffer)
+        if self.metadata is not None:
+            ModelAddMetadata(builder, metadata)
+        if self.signatureDefs is not None:
+            ModelAddSignatureDefs(builder, signatureDefs)
+        model = ModelEnd(builder)
+        return model
+
+
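For orientation, the ModelT object API that this hunk finishes generating is the usual way to round-trip a .tflite file in Python. A minimal sketch, assuming the generated module is importable as `tflite` (a hypothetical import name) and the `flatbuffers` pip package is installed:

    # Minimal round-trip sketch using the object API generated above.
    # Assumptions: the generated schema module is importable as `tflite`
    # (hypothetical name); the `flatbuffers` runtime package is installed.
    import flatbuffers
    import tflite

    with open("model.tflite", "rb") as f:
        buf = bytearray(f.read())

    # InitFromPackedBuf reads the root offset and unpacks the whole table
    # tree into plain Python objects (ModelT, SubGraphT, BufferT, ...).
    model = tflite.ModelT.InitFromPackedBuf(buf, 0)
    model.description = b"edited copy"

    # Pack() rebuilds the FlatBuffer depth-first; "TFL3" is the file
    # identifier checked by the generated BufferHasIdentifier helpers.
    builder = flatbuffers.Builder(1024)
    builder.Finish(model.Pack(builder), file_identifier=b"TFL3")

    with open("model_edited.tflite", "wb") as f:
        f.write(builder.Output())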
diff --git a/tensorflow/lite/schema/schema.fbs b/tensorflow/lite/schema/schema.fbs
index 4c84646eeb0..7ab78be2673 100644
--- a/tensorflow/lite/schema/schema.fbs
+++ b/tensorflow/lite/schema/schema.fbs
@@ -58,6 +58,7 @@ enum TensorType : byte {
   UINT32 = 15,
   UINT16 = 16,
   INT4 = 17,
+  BFLOAT16 = 18,
 }
 
 // Custom quantization parameters for experimenting with new quantization
@@ -235,8 +236,12 @@ table Tensor {
   // represented with -1.
   shape_signature:[int]; // Optional.
 
-  // If false, the rank or the number of tensor dimensions is unknown.
-  // If false, "shape" must be [].
+  // This field is added to distinguish between scalars and tensors of unknown
+  // rank (both of which have shape []).
+  // For scalars (rank = 0), shape = [] and has_rank = true.
+  // For tensors with known rank (rank > 0) and shape, shape = [...] and
+  // has_rank = true.
+  // For tensors with unknown rank and shape, shape = [] and has_rank = false.
   has_rank: bool = false;
 
   // The nested Tensor types for VARIANT type. This is always empty for
@@ -420,6 +425,53 @@ enum BuiltinOperator : int32 {
   BITCAST = 159,
   BITWISE_XOR = 160,
   RIGHT_SHIFT = 161,
+  // All operators starting with the STABLEHLO_ prefix are subject to change.
+  // Many of the ops below cannot be executed by the TFLite runtime.
+  STABLEHLO_LOGISTIC = 162, // WARNING: No runtime support
+  STABLEHLO_ADD = 163,
+  STABLEHLO_DIVIDE = 164, // WARNING: No runtime support yet
+  STABLEHLO_MULTIPLY = 165,
+  STABLEHLO_MAXIMUM = 166,
+  STABLEHLO_RESHAPE = 167, // WARNING: No runtime support yet
+  STABLEHLO_CLAMP = 168, // WARNING: No runtime support
+  STABLEHLO_CONCATENATE = 169, // WARNING: No runtime support
+  STABLEHLO_BROADCAST_IN_DIM = 170, // WARNING: No runtime support
+  STABLEHLO_CONVOLUTION = 171, // WARNING: No runtime support
+  STABLEHLO_SLICE = 172, // WARNING: No runtime support
+  STABLEHLO_CUSTOM_CALL = 173, // WARNING: No runtime support
+  STABLEHLO_REDUCE = 174, // WARNING: No runtime support
+  STABLEHLO_ABS = 175, // WARNING: No runtime support
+  STABLEHLO_AND = 176, // WARNING: No runtime support
+  STABLEHLO_COSINE = 177, // WARNING: No runtime support
+  STABLEHLO_EXPONENTIAL = 178, // WARNING: No runtime support
+  STABLEHLO_FLOOR = 179, // WARNING: No runtime support
+  STABLEHLO_LOG = 180, // WARNING: No runtime support
+  STABLEHLO_MINIMUM = 181,
+  STABLEHLO_NEGATE = 182, // WARNING: No runtime support
+  STABLEHLO_OR = 183, // WARNING: No runtime support
+  STABLEHLO_POWER = 184, // WARNING: No runtime support
+  STABLEHLO_REMAINDER = 185, // WARNING: No runtime support
+  STABLEHLO_RSQRT = 186, // WARNING: No runtime support
+  STABLEHLO_SELECT = 187, // WARNING: No runtime support
+  STABLEHLO_SUBTRACT = 188, // WARNING: No runtime support
+  STABLEHLO_TANH = 189, // WARNING: No runtime support
+  STABLEHLO_SCATTER = 190,
+  STABLEHLO_COMPARE = 191, // WARNING: No runtime support
+  STABLEHLO_CONVERT = 192, // WARNING: No runtime support
+  STABLEHLO_DYNAMIC_SLICE = 193, // WARNING: No runtime support
+  STABLEHLO_DYNAMIC_UPDATE_SLICE = 194, // WARNING: No runtime support
+  STABLEHLO_PAD = 195,
+  STABLEHLO_IOTA = 196, // WARNING: No runtime support
+  STABLEHLO_DOT_GENERAL = 197, // WARNING: No runtime support
+  STABLEHLO_REDUCE_WINDOW = 198,
+  STABLEHLO_SORT = 199, // WARNING: No runtime support
+  STABLEHLO_WHILE = 200, // WARNING: No runtime support
+  STABLEHLO_GATHER = 201,
+  STABLEHLO_TRANSPOSE = 202, // WARNING: No runtime support
+  DILATE = 203,
+  STABLEHLO_RNG_BIT_GENERATOR = 204,
+  REDUCE_WINDOW = 205 (deprecated),
+  STABLEHLO_COMPOSITE = 206, // WARNING: No runtime support
 }
 // LINT.ThenChange(nnapi_linter/linter.proto)
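Since most of the new STABLEHLO_* builtins are flagged above as having no runtime support yet, a converter or test harness may want to detect them early. A hedged sketch reusing the object API generated earlier in this diff (the `tflite` module name is again hypothetical; note that older models may store small opcodes in deprecated_builtin_code instead):

    import tflite

    def stablehlo_ops_used(model):
        """Return names of STABLEHLO_* builtins referenced by a ModelT."""
        used = {code.builtinCode for code in (model.operatorCodes or [])}
        return sorted(
            name for name, value in vars(tflite.BuiltinOperator).items()
            if isinstance(value, int)
            and name.startswith("STABLEHLO_")
            and value in used)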
@@ -551,6 +603,192 @@ union BuiltinOptions {
   BitcastOptions,
   BitwiseXorOptions,
   RightShiftOptions,
+  // DO NOT add new options to this union: it would break Java API
+  // generation. Instead, add new builtin options to the BuiltinOptions2
+  // union below.
+}
+
+union BuiltinOptions2{
+  StablehloConcatenateOptions,
+  StablehloBroadcastInDimOptions,
+  StablehloSliceOptions,
+  StablehloConvolutionOptions,
+  StablehloCustomCallOptions,
+  StablehloReduceOptions,
+  StablehloScatterOptions,
+  StablehloCompareOptions,
+  StablehloDynamicSliceOptions,
+  StablehloPadOptions,
+  StablehloIotaOptions,
+  StablehloDotGeneralOptions,
+  StablehloReduceWindowOptions,
+  StablehloSortOptions,
+  StablehloWhileOptions,
+  StablehloGatherOptions,
+  StablehloTransposeOptions,
+  DilateOptions,
+  StablehloRngBitGeneratorOptions,
+  ReduceWindowOptions (deprecated),
+  StableHLOCompositeOptions,
+}
+
+table StablehloGatherOptions{
+  offset_dims : [long];
+  collapsed_slice_dims : [long];
+  start_index_map : [long];
+  index_vector_dim : long;
+  slice_sizes : [long];
+  indices_are_sorted : bool;
+}
+
+table StablehloTransposeOptions{
+  permutation : [long];
+}
+
+enum StablehloPrecisionConfig : uint {
+  DEFAULT,
+  HIGH,
+  HIGHEST,
+}
+
+table StablehloDotGeneralOptions{
+  lhs_batching_dimensions : [long];
+  rhs_batching_dimensions : [long];
+  lhs_contracting_dimensions : [long];
+  rhs_contracting_dimensions : [long];
+  precision_config : [StablehloPrecisionConfig];
+}
+
+table StablehloReduceWindowOptions{
+  window_dimensions : [long];
+  window_strides : [long];
+  base_dilations : [long];
+  window_dilations : [long];
+  padding : [long];
+  body_subgraph_index : int;
+}
+
+table StablehloWhileOptions{
+  cond_subgraph_index : int;
+  body_subgraph_index : int;
+}
+
+table StablehloSortOptions{
+  dimension : long;
+  is_stable : bool;
+  comparator_subgraph_index : int;
+}
+
+table StablehloConcatenateOptions {
+  dimension : long;
+}
+
+table StablehloBroadcastInDimOptions{
+  broadcast_dimensions : [long];
+}
+
+enum StablehloComparisonDirection : uint {
+  STABLEHLO_COMPARISON_DIRECTION_EQ,
+  STABLEHLO_COMPARISON_DIRECTION_NE,
+  STABLEHLO_COMPARISON_DIRECTION_GE,
+  STABLEHLO_COMPARISON_DIRECTION_GT,
+  STABLEHLO_COMPARISON_DIRECTION_LE,
+  STABLEHLO_COMPARISON_DIRECTION_LT,
+
+}
+
+enum StablehloComparisonType : uint {
+  STABLEHLO_COMPARISON_TYPE_NOTYPE,
+  STABLEHLO_COMPARISON_TYPE_FLOAT,
+  STABLEHLO_COMPARISON_TYPE_FLOAT_TOTAL_ORDER,
+  STABLEHLO_COMPARISON_TYPE_SIGNED,
+  STABLEHLO_COMPARISON_TYPE_UNSIGNED,
+}
+
+table StablehloCompareOptions{
+  comparison_direction : StablehloComparisonDirection;
+  compare_type : StablehloComparisonType;
+}
+
+table StablehloDynamicSliceOptions{
+  slice_sizes : [long];
+}
+
+table StablehloPadOptions{
+  edge_padding_low : [long];
+  edge_padding_high : [long];
+  interior_padding : [long];
+}
+
+table StablehloIotaOptions{
+  iota_dimension : long;
+}
+
+table StablehloCustomCallOptions {
+  call_target_name : string;
+  has_side_effect : bool;
+  backend_config: string;
+  api_version : int; // will be deprecated
+  called_computations: [int]; // should point to subgraphs of the computations
+  custom_attributes : [ubyte];
+}
+
+table StablehloReduceOptions {
+  dimensions : [long];
+  body_subgraph_index : int;
+}
+
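The tables above all hang off the new BuiltinOptions2 union. A sketch of attaching one of them to an operator with the object API; the `builtinOptions2Type`/`builtinOptions2` attribute names are an assumption based on the generator's camelCase naming convention for union fields:

    import tflite  # hypothetical import name for the generated module

    op = tflite.OperatorT()
    op.opcodeIndex = 0  # index into model.operatorCodes

    gather = tflite.StablehloGatherOptionsT()
    gather.indexVectorDim = 1  # field names assumed camelCased
    gather.sliceSizes = [1, 4]

    # FlatBuffers unions are stored as a (type, value) pair.
    op.builtinOptions2Type = tflite.BuiltinOptions2.StablehloGatherOptions
    op.builtinOptions2 = gather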
+table StablehloSliceOptions{
+  start_indices : [long];
+  limit_indices : [long];
+  strides : [long];
+}
+
+table StablehloConvolutionOptions{
+  window_strides : [long];
+  padding : [long];
+  lhs_dilation : [long];
+  rhs_dilation : [long];
+  window_reversal : [bool];
+  input_batch_dimension : long;
+  input_feature_dimension : long;
+  input_spatial_dimensions : [long];
+  kernel_input_feature_dimension : long;
+  kernel_output_feature_dimension : long;
+  kernel_spatial_dimensions : [long];
+  output_batch_dimension : long;
+  output_feature_dimension : long;
+  output_spatial_dimensions : [long];
+  feature_group_count : long;
+  batch_group_count : long;
+  precision_config : [StablehloPrecisionConfig];
+}
+
+table StablehloScatterOptions {
+  indices_are_sorted: bool;
+  update_window_dims: [long];
+  inserted_window_dims: [long];
+  scatter_dims_to_operand_dims: [long];
+  index_vector_dim: long;
+  unique_indices: bool;
+  update_computation_subgraph_index: int;
+}
+
+enum RngAlgorithm : byte {
+  // An algorithm auto-selected by the system according to device type.
+  DEFAULT = 0,
+  // The Philox algorithm, as described in paper
+  // ['Parallel Random Numbers: As Easy as 1, 2, 3']
+  // (https://www.thesalmons.org/john/random123/papers/random123sc11.pdf)
+  PHILOX = 1,
+  // The ThreeFry algorithm, as described in paper
+  // ['Parallel Random Numbers: As Easy as 1, 2, 3']
+  // (https://www.thesalmons.org/john/random123/papers/random123sc11.pdf)
+  THREEFRY = 2,
+}
+
+table StablehloRngBitGeneratorOptions {
+  algorithm:RngAlgorithm;
 }
 
 // LINT.IfChange
@@ -575,6 +813,9 @@ table Conv2DOptions {
   fused_activation_function:ActivationFunctionType;
   dilation_w_factor:int = 1;
   dilation_h_factor:int = 1;
+  // Parameters for Conv2D version 8 or above.
+  // When set, quantized_bias_type defines the dtype for both bias and accumulator.
+  quantized_bias_type: TensorType;
 }
 
 // Options for both Conv3D and Conv3DTranspose.
@@ -682,6 +923,10 @@ table FullyConnectedOptions {
   // If set to true, then weights-only op will use asymmetric quantization for
   // inputs.
   asymmetric_quantize_inputs: bool;
+
+  // Parameters for FullyConnected version 11 or above.
+  // When set, quantized_bias_type defines the dtype for both bias and accumulator.
+  quantized_bias_type: TensorType;
 }
 
 table SoftmaxOptions {
@@ -941,6 +1186,11 @@ table TransposeConvOptions {
 
   // Parameters supported by version 4:
   fused_activation_function:ActivationFunctionType = NONE;
+
+  // Parameters for TransposeConv version 5 or above.
+  // When set, quantized_bias_type defines the dtype used for both the
+  // bias and the accumulator.
+  quantized_bias_type: TensorType;
 }
 
 table ExpandDimsOptions {
@@ -1198,6 +1448,23 @@ table BitwiseXorOptions {
 table RightShiftOptions {
 }
 
+table DilateOptions {
+}
+
+enum ReduceWindowFunction : int {
+  UNSUPPORTED,
+  ADD,
+  MUL,
+  MINIMUM,
+  MAXIMUM,
+  ALL,
+  ANY,
+}
+
+table ReduceWindowOptions (deprecated) {
+  reduce_function: ReduceWindowFunction;
+}
+
 // An OperatorCode can be an enum value (BuiltinOperator) if the operator is a
 // builtin, or a string if the operator is custom.
 table OperatorCode {
@@ -1222,6 +1489,14 @@ enum CustomOptionsFormat : byte {
   FLEXBUFFERS = 0,
 }
 
+table StableHLOCompositeOptions {
+  name:string;
+  decomposition_subgraph_index:int32;
+  composite_attributes:[ubyte];
+  composite_attributes_format:CustomOptionsFormat;
+  version:int32;
+}
+
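StableHLOCompositeOptions ties a composite op to the subgraph that implements its decomposition. A minimal sketch under the same object-API naming assumptions as above, with a made-up composite name:

    import tflite  # hypothetical import name for the generated module

    comp = tflite.StableHLOCompositeOptionsT()
    comp.name = b"odml.example_attention"  # hypothetical composite name
    comp.decompositionSubgraphIndex = 2    # subgraph holding the decomposition
    comp.compositeAttributesFormat = tflite.CustomOptionsFormat.FLEXBUFFERS
    comp.version = 1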
 // An operator takes tensors as inputs and outputs. The type of operation being
 // performed is determined by an index into the list of valid OperatorCodes,
 // while the specifics of each operations is configured using builtin_options
@@ -1264,6 +1539,11 @@ table Operator {
   // beginning of the file and is only valid if > 1
   large_custom_options_offset: ulong;
   large_custom_options_size: ulong;
+
+  // FlatBuffers unions are limited to 128 members in Java, so a second
+  // union is used; if BuiltinOptions2 ever fills up, a third union can
+  // be added.
+  builtin_options_2 : BuiltinOptions2;
 }
 
 // The root type, defining a subgraph, which typically represents an entire
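Readers dispatch on the second union the same way as on the first: check the type tag, then cast. A sketch against the flat (non-object) reader API; the accessor name `BuiltinOptions2Type()` follows the generator's convention for the builtin_options_2 field and is an assumption:

    import tflite  # hypothetical import name for the generated module

    def options2_name(op):
        """Map an Operator's builtin_options_2 type tag back to its name."""
        tag = op.BuiltinOptions2Type()
        for name, value in vars(tflite.BuiltinOptions2).items():
            if isinstance(value, int) and value == tag:
                return name  # "NONE" (0) means only builtin_options is used
        return "UNKNOWN"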
diff --git a/tensorflow/lite/schema/schema_generated.h b/tensorflow/lite/schema/schema_generated.h
index c9d92f84a08..d72861726c8 100755
--- a/tensorflow/lite/schema/schema_generated.h
+++ b/tensorflow/lite/schema/schema_generated.h
@@ -8,9 +8,9 @@
 
 // Ensure the included flatbuffers.h is the same version as when this file was
 // generated, otherwise it may not be compatible.
-static_assert(FLATBUFFERS_VERSION_MAJOR == 2 &&
-              FLATBUFFERS_VERSION_MINOR == 0 &&
-              FLATBUFFERS_VERSION_REVISION == 6,
+static_assert(FLATBUFFERS_VERSION_MAJOR == 23 &&
+              FLATBUFFERS_VERSION_MINOR == 5 &&
+              FLATBUFFERS_VERSION_REVISION == 26,
              "Non-compatible flatbuffers version included");
 
 namespace tflite {
@@ -51,6 +51,78 @@ struct Tensor;
 struct TensorBuilder;
 struct TensorT;
 
+struct StablehloGatherOptions;
+struct StablehloGatherOptionsBuilder;
+struct StablehloGatherOptionsT;
+
+struct StablehloTransposeOptions;
+struct StablehloTransposeOptionsBuilder;
+struct StablehloTransposeOptionsT;
+
+struct StablehloDotGeneralOptions;
+struct StablehloDotGeneralOptionsBuilder;
+struct StablehloDotGeneralOptionsT;
+
+struct StablehloReduceWindowOptions;
+struct StablehloReduceWindowOptionsBuilder;
+struct StablehloReduceWindowOptionsT;
+
+struct StablehloWhileOptions;
+struct StablehloWhileOptionsBuilder;
+struct StablehloWhileOptionsT;
+
+struct StablehloSortOptions;
+struct StablehloSortOptionsBuilder;
+struct StablehloSortOptionsT;
+
+struct StablehloConcatenateOptions;
+struct StablehloConcatenateOptionsBuilder;
+struct StablehloConcatenateOptionsT;
+
+struct StablehloBroadcastInDimOptions;
+struct StablehloBroadcastInDimOptionsBuilder;
+struct StablehloBroadcastInDimOptionsT;
+
+struct StablehloCompareOptions;
+struct StablehloCompareOptionsBuilder;
+struct StablehloCompareOptionsT;
+
+struct StablehloDynamicSliceOptions;
+struct StablehloDynamicSliceOptionsBuilder;
+struct StablehloDynamicSliceOptionsT;
+
+struct StablehloPadOptions;
+struct StablehloPadOptionsBuilder;
+struct StablehloPadOptionsT;
+
+struct StablehloIotaOptions;
+struct StablehloIotaOptionsBuilder;
+struct StablehloIotaOptionsT;
+
+struct StablehloCustomCallOptions;
+struct StablehloCustomCallOptionsBuilder;
+struct StablehloCustomCallOptionsT;
+
+struct StablehloReduceOptions;
+struct StablehloReduceOptionsBuilder;
+struct StablehloReduceOptionsT;
+
+struct StablehloSliceOptions;
+struct StablehloSliceOptionsBuilder;
+struct StablehloSliceOptionsT;
+
+struct StablehloConvolutionOptions;
+struct StablehloConvolutionOptionsBuilder;
+struct StablehloConvolutionOptionsT;
+
+struct StablehloScatterOptions;
+struct StablehloScatterOptionsBuilder;
+struct StablehloScatterOptionsT;
+
+struct StablehloRngBitGeneratorOptions;
+struct StablehloRngBitGeneratorOptionsBuilder;
+struct StablehloRngBitGeneratorOptionsT;
+
 struct Conv2DOptions;
 struct Conv2DOptionsBuilder;
 struct Conv2DOptionsT;
@@ -555,10 +627,22 @@ struct RightShiftOptions;
 struct RightShiftOptionsBuilder;
 struct RightShiftOptionsT;
 
+struct DilateOptions;
+struct DilateOptionsBuilder;
+struct DilateOptionsT;
+
+struct ReduceWindowOptions;
+struct ReduceWindowOptionsBuilder;
+struct ReduceWindowOptionsT;
+
 struct OperatorCode;
 struct OperatorCodeBuilder;
 struct OperatorCodeT;
 
+struct StableHLOCompositeOptions;
+struct StableHLOCompositeOptionsBuilder;
+struct StableHLOCompositeOptionsT;
+
 struct Operator;
 struct OperatorBuilder;
 struct OperatorT;
@@ -606,11 +690,12 @@ enum TensorType : int8_t {
   TensorType_UINT32 = 15,
   TensorType_UINT16 = 16,
   TensorType_INT4 = 17,
+  TensorType_BFLOAT16 = 18,
   TensorType_MIN = TensorType_FLOAT32,
-  TensorType_MAX = TensorType_INT4
+  TensorType_MAX = TensorType_BFLOAT16
 };
 
-inline const TensorType (&EnumValuesTensorType())[18] {
+inline const TensorType (&EnumValuesTensorType())[19] {
   static const TensorType values[] = {
     TensorType_FLOAT32,
     TensorType_FLOAT16,
@@ -629,13 +714,14 @@ inline const TensorType (&EnumValuesTensorType())[18] {
     TensorType_VARIANT,
     TensorType_UINT32,
     TensorType_UINT16,
-    TensorType_INT4
+    TensorType_INT4,
+    TensorType_BFLOAT16
   };
   return values;
 }
 
 inline const char * const *EnumNamesTensorType() {
-  static const char * const names[19] = {
+  static const char * const names[20] = {
     "FLOAT32",
     "FLOAT16",
     "INT32",
@@ -654,13 +740,14 @@ inline const char * const *EnumNamesTensorType() {
     "UINT32",
     "UINT16",
     "INT4",
+    "BFLOAT16",
     nullptr
   };
   return names;
 }
 
 inline const char *EnumNameTensorType(TensorType e) {
-  if (flatbuffers::IsOutRange(e, TensorType_FLOAT32, TensorType_INT4)) return "";
+  if (::flatbuffers::IsOutRange(e, TensorType_FLOAT32, TensorType_BFLOAT16)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesTensorType()[index];
 }
@@ -690,7 +777,7 @@ inline const char * const *EnumNamesQuantizationDetails() {
 }
 
 inline const char *EnumNameQuantizationDetails(QuantizationDetails e) {
-  if (flatbuffers::IsOutRange(e, QuantizationDetails_NONE, QuantizationDetails_CustomQuantization)) return "";
+  if (::flatbuffers::IsOutRange(e, QuantizationDetails_NONE, QuantizationDetails_CustomQuantization)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesQuantizationDetails()[index];
 }
@@ -738,8 +825,8 @@ struct QuantizationDetailsUnion {
     }
   }
 
-  static void *UnPack(const void *obj, QuantizationDetails type, const flatbuffers::resolver_function_t *resolver);
-  flatbuffers::Offset<void> Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
+  static void *UnPack(const void *obj, QuantizationDetails type, const ::flatbuffers::resolver_function_t *resolver);
+  ::flatbuffers::Offset<void> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
 
   tflite::CustomQuantizationT *AsCustomQuantization() {
     return type == QuantizationDetails_CustomQuantization ?
@@ -751,8 +838,8 @@ struct QuantizationDetailsUnion {
   }
 };
 
-bool VerifyQuantizationDetails(flatbuffers::Verifier &verifier, const void *obj, QuantizationDetails type);
-bool VerifyQuantizationDetailsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types);
+bool VerifyQuantizationDetails(::flatbuffers::Verifier &verifier, const void *obj, QuantizationDetails type);
+bool VerifyQuantizationDetailsVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<uint8_t> *types);
 
 enum DimensionType : int8_t {
   DimensionType_DENSE = 0,
@@ -779,7 +866,7 @@ inline const char * const *EnumNamesDimensionType() {
 }
 
 inline const char *EnumNameDimensionType(DimensionType e) {
-  if (flatbuffers::IsOutRange(e, DimensionType_DENSE, DimensionType_SPARSE_CSR)) return "";
+  if (::flatbuffers::IsOutRange(e, DimensionType_DENSE, DimensionType_SPARSE_CSR)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesDimensionType()[index];
 }
@@ -815,7 +902,7 @@ inline const char * const *EnumNamesSparseIndexVector() {
 }
 
 inline const char *EnumNameSparseIndexVector(SparseIndexVector e) {
-  if (flatbuffers::IsOutRange(e, SparseIndexVector_NONE, SparseIndexVector_Uint8Vector)) return "";
+  if (::flatbuffers::IsOutRange(e, SparseIndexVector_NONE, SparseIndexVector_Uint8Vector)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesSparseIndexVector()[index];
 }
@@ -879,8 +966,8 @@ struct SparseIndexVectorUnion {
     }
   }
 
-  static void *UnPack(const void *obj, SparseIndexVector type, const flatbuffers::resolver_function_t *resolver);
-  flatbuffers::Offset<void> Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
+  static void *UnPack(const void *obj, SparseIndexVector type, const ::flatbuffers::resolver_function_t *resolver);
+  ::flatbuffers::Offset<void> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
 
   tflite::Int32VectorT *AsInt32Vector() {
     return type == SparseIndexVector_Int32Vector ?
@@ -908,8 +995,8 @@ struct SparseIndexVectorUnion {
   }
 };
 
-bool VerifySparseIndexVector(flatbuffers::Verifier &verifier, const void *obj, SparseIndexVector type);
-bool VerifySparseIndexVectorVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types);
+bool VerifySparseIndexVector(::flatbuffers::Verifier &verifier, const void *obj, SparseIndexVector type);
+bool VerifySparseIndexVectorVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<uint8_t> *types);
 
 enum BuiltinOperator : int32_t {
   BuiltinOperator_ADD = 0,
@@ -1074,11 +1161,56 @@ enum BuiltinOperator : int32_t {
   BuiltinOperator_BITCAST = 159,
   BuiltinOperator_BITWISE_XOR = 160,
   BuiltinOperator_RIGHT_SHIFT = 161,
+  BuiltinOperator_STABLEHLO_LOGISTIC = 162,
+  BuiltinOperator_STABLEHLO_ADD = 163,
+  BuiltinOperator_STABLEHLO_DIVIDE = 164,
+  BuiltinOperator_STABLEHLO_MULTIPLY = 165,
+  BuiltinOperator_STABLEHLO_MAXIMUM = 166,
+  BuiltinOperator_STABLEHLO_RESHAPE = 167,
+  BuiltinOperator_STABLEHLO_CLAMP = 168,
+  BuiltinOperator_STABLEHLO_CONCATENATE = 169,
+  BuiltinOperator_STABLEHLO_BROADCAST_IN_DIM = 170,
+  BuiltinOperator_STABLEHLO_CONVOLUTION = 171,
+  BuiltinOperator_STABLEHLO_SLICE = 172,
+  BuiltinOperator_STABLEHLO_CUSTOM_CALL = 173,
+  BuiltinOperator_STABLEHLO_REDUCE = 174,
+  BuiltinOperator_STABLEHLO_ABS = 175,
+  BuiltinOperator_STABLEHLO_AND = 176,
+  BuiltinOperator_STABLEHLO_COSINE = 177,
+  BuiltinOperator_STABLEHLO_EXPONENTIAL = 178,
+  BuiltinOperator_STABLEHLO_FLOOR = 179,
+  BuiltinOperator_STABLEHLO_LOG = 180,
+  BuiltinOperator_STABLEHLO_MINIMUM = 181,
+  BuiltinOperator_STABLEHLO_NEGATE = 182,
+  BuiltinOperator_STABLEHLO_OR = 183,
+  BuiltinOperator_STABLEHLO_POWER = 184,
+  BuiltinOperator_STABLEHLO_REMAINDER = 185,
+  BuiltinOperator_STABLEHLO_RSQRT = 186,
+  BuiltinOperator_STABLEHLO_SELECT = 187,
+  BuiltinOperator_STABLEHLO_SUBTRACT = 188,
+  BuiltinOperator_STABLEHLO_TANH = 189,
+  BuiltinOperator_STABLEHLO_SCATTER = 190,
+  BuiltinOperator_STABLEHLO_COMPARE = 191,
+  BuiltinOperator_STABLEHLO_CONVERT = 192,
+  BuiltinOperator_STABLEHLO_DYNAMIC_SLICE = 193,
+  BuiltinOperator_STABLEHLO_DYNAMIC_UPDATE_SLICE = 194,
+  BuiltinOperator_STABLEHLO_PAD = 195,
+  BuiltinOperator_STABLEHLO_IOTA = 196,
+  BuiltinOperator_STABLEHLO_DOT_GENERAL = 197,
+  BuiltinOperator_STABLEHLO_REDUCE_WINDOW = 198,
+  BuiltinOperator_STABLEHLO_SORT = 199,
+  BuiltinOperator_STABLEHLO_WHILE = 200,
+  BuiltinOperator_STABLEHLO_GATHER = 201,
+  BuiltinOperator_STABLEHLO_TRANSPOSE = 202,
+  BuiltinOperator_DILATE = 203,
+  BuiltinOperator_STABLEHLO_RNG_BIT_GENERATOR = 204,
+  BuiltinOperator_REDUCE_WINDOW = 205,
+  BuiltinOperator_STABLEHLO_COMPOSITE = 206,
   BuiltinOperator_MIN = BuiltinOperator_ADD,
-  BuiltinOperator_MAX = BuiltinOperator_RIGHT_SHIFT
+  BuiltinOperator_MAX = BuiltinOperator_STABLEHLO_COMPOSITE
 };
 
-inline const BuiltinOperator (&EnumValuesBuiltinOperator())[162] {
+inline const BuiltinOperator (&EnumValuesBuiltinOperator())[207] {
   static const BuiltinOperator values[] = {
     BuiltinOperator_ADD,
     BuiltinOperator_AVERAGE_POOL_2D,
@@ -1241,13 +1373,58 @@ inline const BuiltinOperator (&EnumValuesBuiltinOperator())[162] {
     BuiltinOperator_SIGN,
     BuiltinOperator_BITCAST,
     BuiltinOperator_BITWISE_XOR,
-    BuiltinOperator_RIGHT_SHIFT
+    BuiltinOperator_RIGHT_SHIFT,
+    BuiltinOperator_STABLEHLO_LOGISTIC,
+    BuiltinOperator_STABLEHLO_ADD,
+    BuiltinOperator_STABLEHLO_DIVIDE,
+    BuiltinOperator_STABLEHLO_MULTIPLY,
+    BuiltinOperator_STABLEHLO_MAXIMUM,
+    BuiltinOperator_STABLEHLO_RESHAPE,
+    BuiltinOperator_STABLEHLO_CLAMP,
+    BuiltinOperator_STABLEHLO_CONCATENATE,
+    BuiltinOperator_STABLEHLO_BROADCAST_IN_DIM,
+    BuiltinOperator_STABLEHLO_CONVOLUTION,
+    BuiltinOperator_STABLEHLO_SLICE,
+    BuiltinOperator_STABLEHLO_CUSTOM_CALL,
+    BuiltinOperator_STABLEHLO_REDUCE,
+    BuiltinOperator_STABLEHLO_ABS,
+    BuiltinOperator_STABLEHLO_AND,
+    BuiltinOperator_STABLEHLO_COSINE,
+    BuiltinOperator_STABLEHLO_EXPONENTIAL,
+    BuiltinOperator_STABLEHLO_FLOOR,
+    BuiltinOperator_STABLEHLO_LOG,
+    BuiltinOperator_STABLEHLO_MINIMUM,
+    BuiltinOperator_STABLEHLO_NEGATE,
+    BuiltinOperator_STABLEHLO_OR,
+    BuiltinOperator_STABLEHLO_POWER,
+    BuiltinOperator_STABLEHLO_REMAINDER,
+    BuiltinOperator_STABLEHLO_RSQRT,
+    BuiltinOperator_STABLEHLO_SELECT,
+    BuiltinOperator_STABLEHLO_SUBTRACT,
+    BuiltinOperator_STABLEHLO_TANH,
+    BuiltinOperator_STABLEHLO_SCATTER,
+    BuiltinOperator_STABLEHLO_COMPARE,
+    BuiltinOperator_STABLEHLO_CONVERT,
+    BuiltinOperator_STABLEHLO_DYNAMIC_SLICE,
+    BuiltinOperator_STABLEHLO_DYNAMIC_UPDATE_SLICE,
+    BuiltinOperator_STABLEHLO_PAD,
+    BuiltinOperator_STABLEHLO_IOTA,
+    BuiltinOperator_STABLEHLO_DOT_GENERAL,
+    BuiltinOperator_STABLEHLO_REDUCE_WINDOW,
+    BuiltinOperator_STABLEHLO_SORT,
+    BuiltinOperator_STABLEHLO_WHILE,
+    BuiltinOperator_STABLEHLO_GATHER,
+    BuiltinOperator_STABLEHLO_TRANSPOSE,
+    BuiltinOperator_DILATE,
+    BuiltinOperator_STABLEHLO_RNG_BIT_GENERATOR,
+    BuiltinOperator_REDUCE_WINDOW,
+    BuiltinOperator_STABLEHLO_COMPOSITE
   };
   return values;
 }
 
 inline const char * const *EnumNamesBuiltinOperator() {
-  static const char * const names[163] = {
+  static const char * const names[208] = {
     "ADD",
     "AVERAGE_POOL_2D",
     "CONCATENATION",
@@ -1410,13 +1587,58 @@ inline const char * const *EnumNamesBuiltinOperator() {
     "BITCAST",
     "BITWISE_XOR",
     "RIGHT_SHIFT",
+    "STABLEHLO_LOGISTIC",
+    "STABLEHLO_ADD",
+    "STABLEHLO_DIVIDE",
+    "STABLEHLO_MULTIPLY",
+    "STABLEHLO_MAXIMUM",
+    "STABLEHLO_RESHAPE",
+    "STABLEHLO_CLAMP",
+    "STABLEHLO_CONCATENATE",
+    "STABLEHLO_BROADCAST_IN_DIM",
+    "STABLEHLO_CONVOLUTION",
+    "STABLEHLO_SLICE",
+    "STABLEHLO_CUSTOM_CALL",
+    "STABLEHLO_REDUCE",
+    "STABLEHLO_ABS",
+    "STABLEHLO_AND",
+    "STABLEHLO_COSINE",
+    "STABLEHLO_EXPONENTIAL",
+    "STABLEHLO_FLOOR",
+    "STABLEHLO_LOG",
+    "STABLEHLO_MINIMUM",
+    "STABLEHLO_NEGATE",
+    "STABLEHLO_OR",
+    "STABLEHLO_POWER",
+    "STABLEHLO_REMAINDER",
+    "STABLEHLO_RSQRT",
+    "STABLEHLO_SELECT",
+    "STABLEHLO_SUBTRACT",
+    "STABLEHLO_TANH",
+    "STABLEHLO_SCATTER",
+    "STABLEHLO_COMPARE",
+    "STABLEHLO_CONVERT",
+    "STABLEHLO_DYNAMIC_SLICE",
+    "STABLEHLO_DYNAMIC_UPDATE_SLICE",
+    "STABLEHLO_PAD",
+    "STABLEHLO_IOTA",
+    "STABLEHLO_DOT_GENERAL",
+    "STABLEHLO_REDUCE_WINDOW",
+    "STABLEHLO_SORT",
+    "STABLEHLO_WHILE",
+    "STABLEHLO_GATHER",
+    "STABLEHLO_TRANSPOSE",
+    "DILATE",
+    "STABLEHLO_RNG_BIT_GENERATOR",
+    "REDUCE_WINDOW",
+    "STABLEHLO_COMPOSITE",
     nullptr
   };
   return names;
 }
 
 inline const char *EnumNameBuiltinOperator(BuiltinOperator e) {
-  if (flatbuffers::IsOutRange(e, BuiltinOperator_ADD, BuiltinOperator_RIGHT_SHIFT)) return "";
+  if (::flatbuffers::IsOutRange(e, BuiltinOperator_ADD, BuiltinOperator_STABLEHLO_COMPOSITE)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesBuiltinOperator()[index];
 }
@@ -1821,7 +2043,7 @@ inline const char * const *EnumNamesBuiltinOptions() {
 }
 
 inline const char *EnumNameBuiltinOptions(BuiltinOptions e) {
-  if (flatbuffers::IsOutRange(e, BuiltinOptions_NONE, BuiltinOptions_RightShiftOptions)) return "";
+  if (::flatbuffers::IsOutRange(e, BuiltinOptions_NONE, BuiltinOptions_RightShiftOptions)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesBuiltinOptions()[index];
 }
@@ -2869,8 +3091,8 @@ struct BuiltinOptionsUnion {
     }
   }
 
-  static void *UnPack(const void *obj, BuiltinOptions type, const flatbuffers::resolver_function_t *resolver);
-  flatbuffers::Offset<void> Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
+  static void *UnPack(const void *obj, BuiltinOptions type, const ::flatbuffers::resolver_function_t *resolver);
+  ::flatbuffers::Offset<void> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
 
   tflite::Conv2DOptionsT *AsConv2DOptions() {
     return type == BuiltinOptions_Conv2DOptions ?
@@ -3882,8 +4104,624 @@ struct BuiltinOptionsUnion {
   }
 };
 
-bool VerifyBuiltinOptions(flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions type);
-bool VerifyBuiltinOptionsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector<flatbuffers::Offset<void>> *values, const flatbuffers::Vector<uint8_t> *types);
+bool VerifyBuiltinOptions(::flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions type);
+bool VerifyBuiltinOptionsVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<uint8_t> *types);
+
+enum BuiltinOptions2 : uint8_t {
+  BuiltinOptions2_NONE = 0,
+  BuiltinOptions2_StablehloConcatenateOptions = 1,
+  BuiltinOptions2_StablehloBroadcastInDimOptions = 2,
+  BuiltinOptions2_StablehloSliceOptions = 3,
+  BuiltinOptions2_StablehloConvolutionOptions = 4,
+  BuiltinOptions2_StablehloCustomCallOptions = 5,
+  BuiltinOptions2_StablehloReduceOptions = 6,
+  BuiltinOptions2_StablehloScatterOptions = 7,
+  BuiltinOptions2_StablehloCompareOptions = 8,
+  BuiltinOptions2_StablehloDynamicSliceOptions = 9,
+  BuiltinOptions2_StablehloPadOptions = 10,
+  BuiltinOptions2_StablehloIotaOptions = 11,
+  BuiltinOptions2_StablehloDotGeneralOptions = 12,
+  BuiltinOptions2_StablehloReduceWindowOptions = 13,
+  BuiltinOptions2_StablehloSortOptions = 14,
+  BuiltinOptions2_StablehloWhileOptions = 15,
+  BuiltinOptions2_StablehloGatherOptions = 16,
+  BuiltinOptions2_StablehloTransposeOptions = 17,
+  BuiltinOptions2_DilateOptions = 18,
+  BuiltinOptions2_StablehloRngBitGeneratorOptions = 19,
+  BuiltinOptions2_ReduceWindowOptions = 20,
+  BuiltinOptions2_StableHLOCompositeOptions = 21,
+  BuiltinOptions2_MIN = BuiltinOptions2_NONE,
+  BuiltinOptions2_MAX = BuiltinOptions2_StableHLOCompositeOptions
+};
+
+inline const BuiltinOptions2 (&EnumValuesBuiltinOptions2())[22] {
+  static const BuiltinOptions2 values[] = {
+    BuiltinOptions2_NONE,
+    BuiltinOptions2_StablehloConcatenateOptions,
+    BuiltinOptions2_StablehloBroadcastInDimOptions,
+    BuiltinOptions2_StablehloSliceOptions,
+    BuiltinOptions2_StablehloConvolutionOptions,
+    BuiltinOptions2_StablehloCustomCallOptions,
+    BuiltinOptions2_StablehloReduceOptions,
+    BuiltinOptions2_StablehloScatterOptions,
+    BuiltinOptions2_StablehloCompareOptions,
+    BuiltinOptions2_StablehloDynamicSliceOptions,
+    BuiltinOptions2_StablehloPadOptions,
+    BuiltinOptions2_StablehloIotaOptions,
+    BuiltinOptions2_StablehloDotGeneralOptions,
+    BuiltinOptions2_StablehloReduceWindowOptions,
+    BuiltinOptions2_StablehloSortOptions,
+    BuiltinOptions2_StablehloWhileOptions,
+    BuiltinOptions2_StablehloGatherOptions,
+    BuiltinOptions2_StablehloTransposeOptions,
+    BuiltinOptions2_DilateOptions,
+    BuiltinOptions2_StablehloRngBitGeneratorOptions,
+    BuiltinOptions2_ReduceWindowOptions,
+    BuiltinOptions2_StableHLOCompositeOptions
+  };
+  return values;
+}
+
+inline const char * const *EnumNamesBuiltinOptions2() {
+  static const char * const names[23] = {
+    "NONE",
+    "StablehloConcatenateOptions",
+    "StablehloBroadcastInDimOptions",
+    "StablehloSliceOptions",
+    "StablehloConvolutionOptions",
+    "StablehloCustomCallOptions",
+    "StablehloReduceOptions",
+    "StablehloScatterOptions",
+    "StablehloCompareOptions",
+    "StablehloDynamicSliceOptions",
+    "StablehloPadOptions",
+    "StablehloIotaOptions",
+    "StablehloDotGeneralOptions",
+    "StablehloReduceWindowOptions",
+    "StablehloSortOptions",
+    "StablehloWhileOptions",
+    "StablehloGatherOptions",
+    "StablehloTransposeOptions",
+    "DilateOptions",
+    "StablehloRngBitGeneratorOptions",
+    "ReduceWindowOptions",
+    "StableHLOCompositeOptions",
+    nullptr
+  };
+  return names;
+}
+
+inline const char *EnumNameBuiltinOptions2(BuiltinOptions2 e) {
+  if (::flatbuffers::IsOutRange(e, BuiltinOptions2_NONE, BuiltinOptions2_StableHLOCompositeOptions)) return "";
+  const size_t index = static_cast<size_t>(e);
+  return EnumNamesBuiltinOptions2()[index];
+}
+
+template<typename T> struct BuiltinOptions2Traits {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_NONE;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloConcatenateOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloConcatenateOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloBroadcastInDimOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloBroadcastInDimOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloSliceOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloSliceOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloConvolutionOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloConvolutionOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloCustomCallOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloCustomCallOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloReduceOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloReduceOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloScatterOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloScatterOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloCompareOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloCompareOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloDynamicSliceOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloDynamicSliceOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloPadOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloPadOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloIotaOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloIotaOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloDotGeneralOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloDotGeneralOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloReduceWindowOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloReduceWindowOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloSortOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloSortOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloWhileOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloWhileOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloGatherOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloGatherOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloTransposeOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloTransposeOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::DilateOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_DilateOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StablehloRngBitGeneratorOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloRngBitGeneratorOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::ReduceWindowOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_ReduceWindowOptions;
+};
+
+template<> struct BuiltinOptions2Traits<tflite::StableHLOCompositeOptions> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StableHLOCompositeOptions;
+};
+
+template<typename T> struct BuiltinOptions2UnionTraits {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_NONE;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloConcatenateOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloConcatenateOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloBroadcastInDimOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloBroadcastInDimOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloSliceOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloSliceOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloConvolutionOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloConvolutionOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloCustomCallOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloCustomCallOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloReduceOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloReduceOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloScatterOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloScatterOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloCompareOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloCompareOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloDynamicSliceOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloDynamicSliceOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloPadOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloPadOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloIotaOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloIotaOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloDotGeneralOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloDotGeneralOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloReduceWindowOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloReduceWindowOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloSortOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloSortOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloWhileOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloWhileOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloGatherOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloGatherOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloTransposeOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloTransposeOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::DilateOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_DilateOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StablehloRngBitGeneratorOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StablehloRngBitGeneratorOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::ReduceWindowOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_ReduceWindowOptions;
+};
+
+template<> struct BuiltinOptions2UnionTraits<tflite::StableHLOCompositeOptionsT> {
+  static const BuiltinOptions2 enum_value = BuiltinOptions2_StableHLOCompositeOptions;
+};
+
+struct BuiltinOptions2Union {
+  BuiltinOptions2 type;
+  void *value;
+
+  BuiltinOptions2Union() : type(BuiltinOptions2_NONE), value(nullptr) {}
+  BuiltinOptions2Union(BuiltinOptions2Union&& u) FLATBUFFERS_NOEXCEPT :
+    type(BuiltinOptions2_NONE), value(nullptr)
+    { std::swap(type, u.type); std::swap(value, u.value); }
+  BuiltinOptions2Union(const BuiltinOptions2Union &);
+  BuiltinOptions2Union &operator=(const BuiltinOptions2Union &u)
+    { BuiltinOptions2Union t(u); std::swap(type, t.type); std::swap(value, t.value); return *this; }
+  BuiltinOptions2Union &operator=(BuiltinOptions2Union &&u) FLATBUFFERS_NOEXCEPT
+    { std::swap(type, u.type); std::swap(value, u.value); return *this; }
+  ~BuiltinOptions2Union() { Reset(); }
+
+  void Reset();
+
+  template <typename T>
+  void Set(T&& val) {
+    typedef typename std::remove_reference<T>::type RT;
+    Reset();
+    type = BuiltinOptions2UnionTraits<RT>::enum_value;
+    if (type != BuiltinOptions2_NONE) {
+      value = new RT(std::forward<T>(val));
+    }
+  }
+
+  static void *UnPack(const void *obj, BuiltinOptions2 type, const ::flatbuffers::resolver_function_t *resolver);
+  ::flatbuffers::Offset<void> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr) const;
+
+  tflite::StablehloConcatenateOptionsT *AsStablehloConcatenateOptions() {
+    return type == BuiltinOptions2_StablehloConcatenateOptions ?
+        reinterpret_cast<tflite::StablehloConcatenateOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloConcatenateOptionsT *AsStablehloConcatenateOptions() const {
+    return type == BuiltinOptions2_StablehloConcatenateOptions ?
+        reinterpret_cast<const tflite::StablehloConcatenateOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloBroadcastInDimOptionsT *AsStablehloBroadcastInDimOptions() {
+    return type == BuiltinOptions2_StablehloBroadcastInDimOptions ?
+        reinterpret_cast<tflite::StablehloBroadcastInDimOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloBroadcastInDimOptionsT *AsStablehloBroadcastInDimOptions() const {
+    return type == BuiltinOptions2_StablehloBroadcastInDimOptions ?
+        reinterpret_cast<const tflite::StablehloBroadcastInDimOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloSliceOptionsT *AsStablehloSliceOptions() {
+    return type == BuiltinOptions2_StablehloSliceOptions ?
+        reinterpret_cast<tflite::StablehloSliceOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloSliceOptionsT *AsStablehloSliceOptions() const {
+    return type == BuiltinOptions2_StablehloSliceOptions ?
+        reinterpret_cast<const tflite::StablehloSliceOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloConvolutionOptionsT *AsStablehloConvolutionOptions() {
+    return type == BuiltinOptions2_StablehloConvolutionOptions ?
+        reinterpret_cast<tflite::StablehloConvolutionOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloConvolutionOptionsT *AsStablehloConvolutionOptions() const {
+    return type == BuiltinOptions2_StablehloConvolutionOptions ?
+        reinterpret_cast<const tflite::StablehloConvolutionOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloCustomCallOptionsT *AsStablehloCustomCallOptions() {
+    return type == BuiltinOptions2_StablehloCustomCallOptions ?
+        reinterpret_cast<tflite::StablehloCustomCallOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloCustomCallOptionsT *AsStablehloCustomCallOptions() const {
+    return type == BuiltinOptions2_StablehloCustomCallOptions ?
+        reinterpret_cast<const tflite::StablehloCustomCallOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloReduceOptionsT *AsStablehloReduceOptions() {
+    return type == BuiltinOptions2_StablehloReduceOptions ?
+        reinterpret_cast<tflite::StablehloReduceOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloReduceOptionsT *AsStablehloReduceOptions() const {
+    return type == BuiltinOptions2_StablehloReduceOptions ?
+        reinterpret_cast<const tflite::StablehloReduceOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloScatterOptionsT *AsStablehloScatterOptions() {
+    return type == BuiltinOptions2_StablehloScatterOptions ?
+        reinterpret_cast<tflite::StablehloScatterOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloScatterOptionsT *AsStablehloScatterOptions() const {
+    return type == BuiltinOptions2_StablehloScatterOptions ?
+        reinterpret_cast<const tflite::StablehloScatterOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloCompareOptionsT *AsStablehloCompareOptions() {
+    return type == BuiltinOptions2_StablehloCompareOptions ?
+        reinterpret_cast<tflite::StablehloCompareOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloCompareOptionsT *AsStablehloCompareOptions() const {
+    return type == BuiltinOptions2_StablehloCompareOptions ?
+        reinterpret_cast<const tflite::StablehloCompareOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloDynamicSliceOptionsT *AsStablehloDynamicSliceOptions() {
+    return type == BuiltinOptions2_StablehloDynamicSliceOptions ?
+        reinterpret_cast<tflite::StablehloDynamicSliceOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloDynamicSliceOptionsT *AsStablehloDynamicSliceOptions() const {
+    return type == BuiltinOptions2_StablehloDynamicSliceOptions ?
+        reinterpret_cast<const tflite::StablehloDynamicSliceOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloPadOptionsT *AsStablehloPadOptions() {
+    return type == BuiltinOptions2_StablehloPadOptions ?
+        reinterpret_cast<tflite::StablehloPadOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloPadOptionsT *AsStablehloPadOptions() const {
+    return type == BuiltinOptions2_StablehloPadOptions ?
+        reinterpret_cast<const tflite::StablehloPadOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloIotaOptionsT *AsStablehloIotaOptions() {
+    return type == BuiltinOptions2_StablehloIotaOptions ?
+        reinterpret_cast<tflite::StablehloIotaOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloIotaOptionsT *AsStablehloIotaOptions() const {
+    return type == BuiltinOptions2_StablehloIotaOptions ?
+        reinterpret_cast<const tflite::StablehloIotaOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloDotGeneralOptionsT *AsStablehloDotGeneralOptions() {
+    return type == BuiltinOptions2_StablehloDotGeneralOptions ?
+        reinterpret_cast<tflite::StablehloDotGeneralOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloDotGeneralOptionsT *AsStablehloDotGeneralOptions() const {
+    return type == BuiltinOptions2_StablehloDotGeneralOptions ?
+        reinterpret_cast<const tflite::StablehloDotGeneralOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloReduceWindowOptionsT *AsStablehloReduceWindowOptions() {
+    return type == BuiltinOptions2_StablehloReduceWindowOptions ?
+        reinterpret_cast<tflite::StablehloReduceWindowOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloReduceWindowOptionsT *AsStablehloReduceWindowOptions() const {
+    return type == BuiltinOptions2_StablehloReduceWindowOptions ?
+        reinterpret_cast<const tflite::StablehloReduceWindowOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloSortOptionsT *AsStablehloSortOptions() {
+    return type == BuiltinOptions2_StablehloSortOptions ?
+        reinterpret_cast<tflite::StablehloSortOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloSortOptionsT *AsStablehloSortOptions() const {
+    return type == BuiltinOptions2_StablehloSortOptions ?
+        reinterpret_cast<const tflite::StablehloSortOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloWhileOptionsT *AsStablehloWhileOptions() {
+    return type == BuiltinOptions2_StablehloWhileOptions ?
+        reinterpret_cast<tflite::StablehloWhileOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloWhileOptionsT *AsStablehloWhileOptions() const {
+    return type == BuiltinOptions2_StablehloWhileOptions ?
+        reinterpret_cast<const tflite::StablehloWhileOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloGatherOptionsT *AsStablehloGatherOptions() {
+    return type == BuiltinOptions2_StablehloGatherOptions ?
+        reinterpret_cast<tflite::StablehloGatherOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloGatherOptionsT *AsStablehloGatherOptions() const {
+    return type == BuiltinOptions2_StablehloGatherOptions ?
+        reinterpret_cast<const tflite::StablehloGatherOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloTransposeOptionsT *AsStablehloTransposeOptions() {
+    return type == BuiltinOptions2_StablehloTransposeOptions ?
+        reinterpret_cast<tflite::StablehloTransposeOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloTransposeOptionsT *AsStablehloTransposeOptions() const {
+    return type == BuiltinOptions2_StablehloTransposeOptions ?
+        reinterpret_cast<const tflite::StablehloTransposeOptionsT *>(value) : nullptr;
+  }
+  tflite::DilateOptionsT *AsDilateOptions() {
+    return type == BuiltinOptions2_DilateOptions ?
+        reinterpret_cast<tflite::DilateOptionsT *>(value) : nullptr;
+  }
+  const tflite::DilateOptionsT *AsDilateOptions() const {
+    return type == BuiltinOptions2_DilateOptions ?
+        reinterpret_cast<const tflite::DilateOptionsT *>(value) : nullptr;
+  }
+  tflite::StablehloRngBitGeneratorOptionsT *AsStablehloRngBitGeneratorOptions() {
+    return type == BuiltinOptions2_StablehloRngBitGeneratorOptions ?
+        reinterpret_cast<tflite::StablehloRngBitGeneratorOptionsT *>(value) : nullptr;
+  }
+  const tflite::StablehloRngBitGeneratorOptionsT *AsStablehloRngBitGeneratorOptions() const {
+    return type == BuiltinOptions2_StablehloRngBitGeneratorOptions ?
+        reinterpret_cast<const tflite::StablehloRngBitGeneratorOptionsT *>(value) : nullptr;
+  }
+  tflite::ReduceWindowOptionsT *AsReduceWindowOptions() {
+    return type == BuiltinOptions2_ReduceWindowOptions ?
+        reinterpret_cast<tflite::ReduceWindowOptionsT *>(value) : nullptr;
+  }
+  const tflite::ReduceWindowOptionsT *AsReduceWindowOptions() const {
+    return type == BuiltinOptions2_ReduceWindowOptions ?
+        reinterpret_cast<const tflite::ReduceWindowOptionsT *>(value) : nullptr;
+  }
+  tflite::StableHLOCompositeOptionsT *AsStableHLOCompositeOptions() {
+    return type == BuiltinOptions2_StableHLOCompositeOptions ?
+        reinterpret_cast<tflite::StableHLOCompositeOptionsT *>(value) : nullptr;
+  }
+  const tflite::StableHLOCompositeOptionsT *AsStableHLOCompositeOptions() const {
+    return type == BuiltinOptions2_StableHLOCompositeOptions ?
+        reinterpret_cast<const tflite::StableHLOCompositeOptionsT *>(value) : nullptr;
+  }
+};
+
+bool VerifyBuiltinOptions2(::flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions2 type);
+bool VerifyBuiltinOptions2Vector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset<void>> *values, const ::flatbuffers::Vector<uint8_t> *types);
+
+enum StablehloPrecisionConfig : uint32_t {
+  StablehloPrecisionConfig_DEFAULT = 0,
+  StablehloPrecisionConfig_HIGH = 1,
+  StablehloPrecisionConfig_HIGHEST = 2,
+  StablehloPrecisionConfig_MIN = StablehloPrecisionConfig_DEFAULT,
+  StablehloPrecisionConfig_MAX = StablehloPrecisionConfig_HIGHEST
+};
+
+inline const StablehloPrecisionConfig (&EnumValuesStablehloPrecisionConfig())[3] {
+  static const StablehloPrecisionConfig values[] = {
+    StablehloPrecisionConfig_DEFAULT,
+    StablehloPrecisionConfig_HIGH,
+    StablehloPrecisionConfig_HIGHEST
+  };
+  return values;
+}
+
+inline const char * const *EnumNamesStablehloPrecisionConfig() {
+  static const char * const names[4] = {
+    "DEFAULT",
+    "HIGH",
+    "HIGHEST",
+    nullptr
+  };
+  return names;
+}
+
+inline const char *EnumNameStablehloPrecisionConfig(StablehloPrecisionConfig e) {
+  if (::flatbuffers::IsOutRange(e, StablehloPrecisionConfig_DEFAULT, StablehloPrecisionConfig_HIGHEST)) return "";
+  const size_t index = static_cast<size_t>(e);
+  return EnumNamesStablehloPrecisionConfig()[index];
+}
+
+enum StablehloComparisonDirection : uint32_t {
+  StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_EQ = 0,
+  StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_NE = 1,
+  StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_GE = 2,
+  StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_GT = 3,
+  StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_LE = 4,
+  StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_LT = 5,
+  StablehloComparisonDirection_MIN = StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_EQ,
+  StablehloComparisonDirection_MAX = StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_LT
+};
+
+inline const StablehloComparisonDirection (&EnumValuesStablehloComparisonDirection())[6] {
+  static const StablehloComparisonDirection values[] = {
+    StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_EQ,
+    StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_NE,
+    StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_GE,
+    StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_GT,
+    StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_LE,
+    StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_LT
+  };
+  return values;
+}
+
+inline const char * const *EnumNamesStablehloComparisonDirection() {
+  static const char * const names[7] = {
+    "STABLEHLO_COMPARISON_DIRECTION_EQ",
+    "STABLEHLO_COMPARISON_DIRECTION_NE",
+    "STABLEHLO_COMPARISON_DIRECTION_GE",
+    "STABLEHLO_COMPARISON_DIRECTION_GT",
+    "STABLEHLO_COMPARISON_DIRECTION_LE",
+    "STABLEHLO_COMPARISON_DIRECTION_LT",
+    nullptr
+  };
+  return names;
+}
+
+inline const char *EnumNameStablehloComparisonDirection(StablehloComparisonDirection e) {
+  if (::flatbuffers::IsOutRange(e, StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_EQ, StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_LT)) return "";
+  const size_t index = static_cast<size_t>(e);
+  return EnumNamesStablehloComparisonDirection()[index];
+}
+
+enum StablehloComparisonType : uint32_t {
+  StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_NOTYPE = 0,
+  StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_FLOAT = 1,
+  StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_FLOAT_TOTAL_ORDER = 2,
+  StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_SIGNED = 3,
+  StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_UNSIGNED = 4,
+  StablehloComparisonType_MIN = StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_NOTYPE,
+  StablehloComparisonType_MAX = StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_UNSIGNED
+};
+
+inline const StablehloComparisonType (&EnumValuesStablehloComparisonType())[5] {
+  static const StablehloComparisonType values[] = {
+    StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_NOTYPE,
+    StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_FLOAT,
+    StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_FLOAT_TOTAL_ORDER,
+    StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_SIGNED,
+    StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_UNSIGNED
+  };
+  return values;
+}
+
+inline const char * const *EnumNamesStablehloComparisonType() {
+  static const char * const names[6] = {
+    "STABLEHLO_COMPARISON_TYPE_NOTYPE",
+    "STABLEHLO_COMPARISON_TYPE_FLOAT",
+    "STABLEHLO_COMPARISON_TYPE_FLOAT_TOTAL_ORDER",
+    "STABLEHLO_COMPARISON_TYPE_SIGNED",
+    "STABLEHLO_COMPARISON_TYPE_UNSIGNED",
+    nullptr
+  };
+  return names;
+}
+
+inline const char *EnumNameStablehloComparisonType(StablehloComparisonType e) {
+  if (::flatbuffers::IsOutRange(e, StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_NOTYPE, StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_UNSIGNED)) return "";
+  const size_t index = static_cast<size_t>(e);
+  return EnumNamesStablehloComparisonType()[index];
+}
+
+enum RngAlgorithm : int8_t {
+  RngAlgorithm_DEFAULT = 0,
+  RngAlgorithm_PHILOX = 1,
+  RngAlgorithm_THREEFRY = 2,
+  RngAlgorithm_MIN = RngAlgorithm_DEFAULT,
+  RngAlgorithm_MAX = RngAlgorithm_THREEFRY
+};
+
+inline const RngAlgorithm (&EnumValuesRngAlgorithm())[3] {
+  static const RngAlgorithm values[] = {
+    RngAlgorithm_DEFAULT,
+    RngAlgorithm_PHILOX,
+    RngAlgorithm_THREEFRY
+  };
+  return values;
+}
+
+inline const char * const *EnumNamesRngAlgorithm() {
+  static const char * const names[4] = {
+    "DEFAULT",
+    "PHILOX",
+    "THREEFRY",
+    nullptr
+  };
+  return names;
+}
+
+inline const char *EnumNameRngAlgorithm(RngAlgorithm e) {
+  if (::flatbuffers::IsOutRange(e, RngAlgorithm_DEFAULT, RngAlgorithm_THREEFRY)) return "";
+  const size_t index = static_cast<size_t>(e);
+  return EnumNamesRngAlgorithm()[index];
+}
 
 enum Padding : int8_t {
   Padding_SAME = 0,
@@ -3910,7 +4748,7 @@ inline const char * const *EnumNamesPadding() {
 }
 
 inline const char *EnumNamePadding(Padding e) {
-  if (flatbuffers::IsOutRange(e, Padding_SAME, Padding_VALID)) return "";
+  if (::flatbuffers::IsOutRange(e, Padding_SAME, Padding_VALID)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesPadding()[index];
 }
@@ -3952,7 +4790,7 @@ inline const char * const *EnumNamesActivationFunctionType() {
 }
 
 inline const char *EnumNameActivationFunctionType(ActivationFunctionType e) {
-  if (flatbuffers::IsOutRange(e, ActivationFunctionType_NONE, ActivationFunctionType_SIGN_BIT)) return "";
+  if (::flatbuffers::IsOutRange(e, ActivationFunctionType_NONE, ActivationFunctionType_SIGN_BIT)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesActivationFunctionType()[index];
 }
@@ -3985,7 +4823,7 @@ inline const char * const *EnumNamesLSHProjectionType() {
 }
 
 inline const char *EnumNameLSHProjectionType(LSHProjectionType e) {
-  if (flatbuffers::IsOutRange(e, LSHProjectionType_UNKNOWN, LSHProjectionType_DENSE)) return "";
+  if (::flatbuffers::IsOutRange(e, LSHProjectionType_UNKNOWN, LSHProjectionType_DENSE)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesLSHProjectionType()[index];
 }
@@ -4015,7 +4853,7 @@ inline const char * const *EnumNamesFullyConnectedOptionsWeightsFormat() {
 }
 
 inline const char *EnumNameFullyConnectedOptionsWeightsFormat(FullyConnectedOptionsWeightsFormat e) {
-  if (flatbuffers::IsOutRange(e, FullyConnectedOptionsWeightsFormat_DEFAULT, FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8)) return "";
+  if (::flatbuffers::IsOutRange(e, FullyConnectedOptionsWeightsFormat_DEFAULT, FullyConnectedOptionsWeightsFormat_SHUFFLED4x16INT8)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesFullyConnectedOptionsWeightsFormat()[index];
 }
@@ -4045,7 +4883,7 @@ inline const char * const *EnumNamesLSTMKernelType() {
 }
 
 inline const char *EnumNameLSTMKernelType(LSTMKernelType e) {
-  if (flatbuffers::IsOutRange(e, LSTMKernelType_FULL, LSTMKernelType_BASIC)) return "";
+  if (::flatbuffers::IsOutRange(e, LSTMKernelType_FULL, LSTMKernelType_BASIC)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesLSTMKernelType()[index];
 }
@@ -4078,7 +4916,7 @@ inline const char * const *EnumNamesCombinerType() {
 }
 
 inline const char *EnumNameCombinerType(CombinerType e) {
-  if (flatbuffers::IsOutRange(e, CombinerType_SUM, CombinerType_SQRTN)) return "";
+  if (::flatbuffers::IsOutRange(e, CombinerType_SUM, CombinerType_SQRTN)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesCombinerType()[index];
 }
@@ -4108,11 +4946,56 @@ inline const char * const *EnumNamesMirrorPadMode() {
 }
 
 inline const char *EnumNameMirrorPadMode(MirrorPadMode e) {
-  if (flatbuffers::IsOutRange(e, MirrorPadMode_REFLECT, MirrorPadMode_SYMMETRIC)) return "";
+  if (::flatbuffers::IsOutRange(e, MirrorPadMode_REFLECT, MirrorPadMode_SYMMETRIC)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesMirrorPadMode()[index];
 }
 
+enum ReduceWindowFunction : int32_t {
+  ReduceWindowFunction_UNSUPPORTED = 0,
+  ReduceWindowFunction_ADD = 1,
+  ReduceWindowFunction_MUL = 2,
+  ReduceWindowFunction_MINIMUM = 3,
+  ReduceWindowFunction_MAXIMUM = 4,
+  ReduceWindowFunction_ALL = 5,
+  ReduceWindowFunction_ANY = 6,
+  ReduceWindowFunction_MIN = ReduceWindowFunction_UNSUPPORTED,
+  ReduceWindowFunction_MAX = ReduceWindowFunction_ANY
+};
+
+inline const ReduceWindowFunction (&EnumValuesReduceWindowFunction())[7] {
+  static const ReduceWindowFunction values[] = {
+    ReduceWindowFunction_UNSUPPORTED,
+    ReduceWindowFunction_ADD,
+    ReduceWindowFunction_MUL,
+    ReduceWindowFunction_MINIMUM,
+    ReduceWindowFunction_MAXIMUM,
+    ReduceWindowFunction_ALL,
+    ReduceWindowFunction_ANY
+  };
+  return values;
+}
+
+inline const char * const *EnumNamesReduceWindowFunction() {
+  static const char * const names[8] = {
+    "UNSUPPORTED",
+    "ADD",
+    "MUL",
+    "MINIMUM",
+    "MAXIMUM",
+    "ALL",
+    "ANY",
+    nullptr
+  };
+  return names;
+}
+
+inline const char *EnumNameReduceWindowFunction(ReduceWindowFunction e) {
+  if (::flatbuffers::IsOutRange(e, ReduceWindowFunction_UNSUPPORTED, ReduceWindowFunction_ANY)) return "";
+  const size_t index = static_cast<size_t>(e);
+  return EnumNamesReduceWindowFunction()[index];
+}
+
 enum CustomOptionsFormat : int8_t {
   CustomOptionsFormat_FLEXBUFFERS = 0,
   CustomOptionsFormat_MIN = CustomOptionsFormat_FLEXBUFFERS,
@@ -4135,64 +5018,64 @@ inline const char * const *EnumNamesCustomOptionsFormat() {
 }
 
 inline const char *EnumNameCustomOptionsFormat(CustomOptionsFormat e) {
-  if (flatbuffers::IsOutRange(e, CustomOptionsFormat_FLEXBUFFERS, CustomOptionsFormat_FLEXBUFFERS)) return "";
+  if (::flatbuffers::IsOutRange(e, CustomOptionsFormat_FLEXBUFFERS, CustomOptionsFormat_FLEXBUFFERS)) return "";
   const size_t index = static_cast<size_t>(e);
   return EnumNamesCustomOptionsFormat()[index];
 }
 
-struct CustomQuantizationT : public flatbuffers::NativeTable {
+struct CustomQuantizationT : public ::flatbuffers::NativeTable {
   typedef CustomQuantization TableType;
   std::vector<uint8_t> custom{};
 };
 
-struct CustomQuantization FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct CustomQuantization FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef CustomQuantizationT NativeTableType;
   typedef CustomQuantizationBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
     VT_CUSTOM = 4
   };
-  const flatbuffers::Vector<uint8_t> *custom() const {
-    return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_CUSTOM);
+  const ::flatbuffers::Vector<uint8_t> *custom() const {
+    return GetPointer<const ::flatbuffers::Vector<uint8_t> *>(VT_CUSTOM);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_CUSTOM) &&
          verifier.VerifyVector(custom()) &&
           verifier.EndTable();
   }
-  CustomQuantizationT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(CustomQuantizationT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<CustomQuantization> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  CustomQuantizationT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(CustomQuantizationT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<CustomQuantization> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 struct CustomQuantizationBuilder {
   typedef CustomQuantization Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_custom(flatbuffers::Offset<flatbuffers::Vector<uint8_t>> custom) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_custom(::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> custom) {
     fbb_.AddOffset(CustomQuantization::VT_CUSTOM, custom);
   }
-  explicit CustomQuantizationBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit CustomQuantizationBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<CustomQuantization> Finish() {
+  ::flatbuffers::Offset<CustomQuantization> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<CustomQuantization>(end);
+    auto o = ::flatbuffers::Offset<CustomQuantization>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<CustomQuantization> CreateCustomQuantization(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<uint8_t>> custom = 0) {
+inline ::flatbuffers::Offset<CustomQuantization> CreateCustomQuantization(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> custom = 0) {
   CustomQuantizationBuilder builder_(_fbb);
   builder_.add_custom(custom);
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<CustomQuantization> CreateCustomQuantizationDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<CustomQuantization> CreateCustomQuantizationDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
     const std::vector<uint8_t> *custom = nullptr) {
   if (custom) { _fbb.ForceVectorAlignment(custom->size(), sizeof(uint8_t), 16); }
   auto custom__ = custom ? _fbb.CreateVector<uint8_t>(*custom) : 0;
@@ -4201,9 +5084,9 @@ inline flatbuffers::Offset<CustomQuantization> CreateCustomQuantizationDirect(
       custom__);
 }
 
-flatbuffers::Offset<CustomQuantization> CreateCustomQuantization(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<CustomQuantization> CreateCustomQuantization(::flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct QuantizationParametersT : public flatbuffers::NativeTable {
+struct QuantizationParametersT : public ::flatbuffers::NativeTable {
   typedef QuantizationParameters TableType;
   std::vector<float> min{};
   std::vector<float> max{};
@@ -4213,7 +5096,7 @@ struct QuantizationParametersT : public flatbuffers::NativeTable {
   int32_t quantized_dimension = 0;
 };
 
-struct QuantizationParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct QuantizationParameters FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef QuantizationParametersT NativeTableType;
   typedef QuantizationParametersBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
@@ -4225,17 +5108,17 @@ struct QuantizationParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab
     VT_DETAILS = 14,
     VT_QUANTIZED_DIMENSION = 16
   };
-  const flatbuffers::Vector<float> *min() const {
-    return GetPointer<const flatbuffers::Vector<float> *>(VT_MIN);
+  const ::flatbuffers::Vector<float> *min() const {
+    return GetPointer<const ::flatbuffers::Vector<float> *>(VT_MIN);
   }
-  const flatbuffers::Vector<float> *max() const {
-    return GetPointer<const flatbuffers::Vector<float> *>(VT_MAX);
+  const ::flatbuffers::Vector<float> *max() const {
+    return GetPointer<const ::flatbuffers::Vector<float> *>(VT_MAX);
   }
-  const flatbuffers::Vector<float> *scale() const {
-    return GetPointer<const flatbuffers::Vector<float> *>(VT_SCALE);
+  const ::flatbuffers::Vector<float> *scale() const {
    return GetPointer<const ::flatbuffers::Vector<float> *>(VT_SCALE);
   }
-  const flatbuffers::Vector<int64_t> *zero_point() const {
-    return GetPointer<const flatbuffers::Vector<int64_t> *>(VT_ZERO_POINT);
+  const ::flatbuffers::Vector<int64_t> *zero_point() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_ZERO_POINT);
   }
   tflite::QuantizationDetails details_type() const {
     return static_cast<tflite::QuantizationDetails>(GetField<uint8_t>(VT_DETAILS_TYPE, 0));
@@ -4250,7 +5133,7 @@ struct QuantizationParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab
   int32_t quantized_dimension() const {
     return GetField<int32_t>(VT_QUANTIZED_DIMENSION, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
           VerifyOffset(verifier, VT_MIN) &&
           verifier.VerifyVector(min()) &&
@@ -4266,9 +5149,9 @@ struct QuantizationParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Tab
           VerifyField<int32_t>(verifier, VT_QUANTIZED_DIMENSION, 4) &&
           verifier.EndTable();
   }
-  QuantizationParametersT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(QuantizationParametersT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<QuantizationParameters> Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  QuantizationParametersT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(QuantizationParametersT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<QuantizationParameters> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 template<> inline const tflite::CustomQuantization *QuantizationParameters::details_as<tflite::CustomQuantization>() const {
@@ -4277,48 +5160,48 @@ template<> inline const tflite::CustomQuantization *QuantizationParameters::deta
 
 struct QuantizationParametersBuilder {
   typedef QuantizationParameters Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_min(flatbuffers::Offset<flatbuffers::Vector<float>> min) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_min(::flatbuffers::Offset<::flatbuffers::Vector<float>> min) {
    fbb_.AddOffset(QuantizationParameters::VT_MIN, min);
   }
-  void add_max(flatbuffers::Offset<flatbuffers::Vector<float>> max) {
+  void add_max(::flatbuffers::Offset<::flatbuffers::Vector<float>> max) {
    fbb_.AddOffset(QuantizationParameters::VT_MAX, max);
   }
-  void add_scale(flatbuffers::Offset<flatbuffers::Vector<float>> scale) {
+  void add_scale(::flatbuffers::Offset<::flatbuffers::Vector<float>> scale) {
    fbb_.AddOffset(QuantizationParameters::VT_SCALE, scale);
   }
-  void add_zero_point(flatbuffers::Offset<flatbuffers::Vector<int64_t>> zero_point) {
+  void add_zero_point(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> zero_point) {
    fbb_.AddOffset(QuantizationParameters::VT_ZERO_POINT, zero_point);
   }
   void add_details_type(tflite::QuantizationDetails details_type) {
    fbb_.AddElement<uint8_t>(QuantizationParameters::VT_DETAILS_TYPE, static_cast<uint8_t>(details_type), 0);
   }
-  void add_details(flatbuffers::Offset<void> details) {
+  void add_details(::flatbuffers::Offset<void> details) {
    fbb_.AddOffset(QuantizationParameters::VT_DETAILS, details);
   }
   void add_quantized_dimension(int32_t quantized_dimension) {
    fbb_.AddElement<int32_t>(QuantizationParameters::VT_QUANTIZED_DIMENSION, quantized_dimension, 0);
   }
-  explicit QuantizationParametersBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit QuantizationParametersBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<QuantizationParameters> Finish() {
+  ::flatbuffers::Offset<QuantizationParameters> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<QuantizationParameters>(end);
+    auto o = ::flatbuffers::Offset<QuantizationParameters>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<QuantizationParameters> CreateQuantizationParameters(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<float>> min = 0,
-    flatbuffers::Offset<flatbuffers::Vector<float>> max = 0,
-    flatbuffers::Offset<flatbuffers::Vector<float>> scale = 0,
-    flatbuffers::Offset<flatbuffers::Vector<int64_t>> zero_point = 0,
+inline ::flatbuffers::Offset<QuantizationParameters> CreateQuantizationParameters(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<float>> min = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<float>> max = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<float>> scale = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> zero_point = 0,
     tflite::QuantizationDetails details_type = tflite::QuantizationDetails_NONE,
-    flatbuffers::Offset<void> details = 0,
+    ::flatbuffers::Offset<void> details = 0,
     int32_t quantized_dimension = 0) {
   QuantizationParametersBuilder builder_(_fbb);
   builder_.add_quantized_dimension(quantized_dimension);
@@ -4331,14 +5214,14 @@ inline flatbuffers::Offset<QuantizationParameters> CreateQuantizationParameters(
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<QuantizationParameters> CreateQuantizationParametersDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<QuantizationParameters> CreateQuantizationParametersDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
     const std::vector<float> *min = nullptr,
     const std::vector<float> *max = nullptr,
     const std::vector<float> *scale = nullptr,
     const std::vector<int64_t> *zero_point = nullptr,
     tflite::QuantizationDetails details_type = tflite::QuantizationDetails_NONE,
-    flatbuffers::Offset<void> details = 0,
+    ::flatbuffers::Offset<void> details = 0,
     int32_t quantized_dimension = 0) {
   auto min__ = min ? _fbb.CreateVector<float>(*min) : 0;
   auto max__ = max ? _fbb.CreateVector<float>(*max) : 0;
@@ -4355,61 +5238,61 @@ inline flatbuffers::Offset<QuantizationParameters> CreateQuantizationParametersD
       quantized_dimension);
 }
 
-flatbuffers::Offset<QuantizationParameters> CreateQuantizationParameters(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<QuantizationParameters> CreateQuantizationParameters(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct Int32VectorT : public flatbuffers::NativeTable {
+struct Int32VectorT : public ::flatbuffers::NativeTable {
   typedef Int32Vector TableType;
   std::vector<int32_t> values{};
 };
 
-struct Int32Vector FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct Int32Vector FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef Int32VectorT NativeTableType;
   typedef Int32VectorBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_VALUES = 4
  };
-  const flatbuffers::Vector<int32_t> *values() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_VALUES);
+  const ::flatbuffers::Vector<int32_t> *values() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_VALUES);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
          VerifyOffset(verifier, VT_VALUES) &&
          verifier.VerifyVector(values()) &&
         verifier.EndTable();
   }
-  Int32VectorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(Int32VectorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Int32Vector> Pack(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  Int32VectorT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(Int32VectorT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Int32Vector> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 struct Int32VectorBuilder {
   typedef Int32Vector Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_values(flatbuffers::Offset<flatbuffers::Vector<int32_t>> values) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_values(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> values) {
    fbb_.AddOffset(Int32Vector::VT_VALUES, values);
   }
-  explicit Int32VectorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit Int32VectorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<Int32Vector> Finish() {
+  ::flatbuffers::Offset<Int32Vector> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<Int32Vector>(end);
+    auto o = ::flatbuffers::Offset<Int32Vector>(end);
    return o;
   }
 };
 
-inline flatbuffers::Offset<Int32Vector> CreateInt32Vector(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> values = 0) {
+inline ::flatbuffers::Offset<Int32Vector> CreateInt32Vector(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> values = 0) {
   Int32VectorBuilder builder_(_fbb);
   builder_.add_values(values);
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<Int32Vector> CreateInt32VectorDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<Int32Vector> CreateInt32VectorDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<int32_t> *values = nullptr) {
   auto values__ = values ? _fbb.CreateVector<int32_t>(*values) : 0;
   return tflite::CreateInt32Vector(
@@ -4417,61 +5300,61 @@ inline flatbuffers::Offset<Int32Vector> CreateInt32VectorDirect(
      values__);
 }
 
-flatbuffers::Offset<Int32Vector> CreateInt32Vector(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<Int32Vector> CreateInt32Vector(::flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct Uint16VectorT : public flatbuffers::NativeTable {
+struct Uint16VectorT : public ::flatbuffers::NativeTable {
   typedef Uint16Vector TableType;
   std::vector<uint16_t> values{};
 };
 
-struct Uint16Vector FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct Uint16Vector FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef Uint16VectorT NativeTableType;
   typedef Uint16VectorBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_VALUES = 4
  };
-  const flatbuffers::Vector<uint16_t> *values() const {
-    return GetPointer<const flatbuffers::Vector<uint16_t> *>(VT_VALUES);
+  const ::flatbuffers::Vector<uint16_t> *values() const {
+    return GetPointer<const ::flatbuffers::Vector<uint16_t> *>(VT_VALUES);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
          VerifyOffset(verifier, VT_VALUES) &&
          verifier.VerifyVector(values()) &&
         verifier.EndTable();
   }
-  Uint16VectorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(Uint16VectorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Uint16Vector> Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  Uint16VectorT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(Uint16VectorT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Uint16Vector> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 struct Uint16VectorBuilder {
   typedef Uint16Vector Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_values(flatbuffers::Offset<flatbuffers::Vector<uint16_t>> values) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_values(::flatbuffers::Offset<::flatbuffers::Vector<uint16_t>> values) {
    fbb_.AddOffset(Uint16Vector::VT_VALUES, values);
   }
-  explicit Uint16VectorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit Uint16VectorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<Uint16Vector> Finish() {
+  ::flatbuffers::Offset<Uint16Vector> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<Uint16Vector>(end);
+    auto o = ::flatbuffers::Offset<Uint16Vector>(end);
    return o;
   }
 };
 
-inline flatbuffers::Offset<Uint16Vector> CreateUint16Vector(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<uint16_t>> values = 0) {
+inline ::flatbuffers::Offset<Uint16Vector> CreateUint16Vector(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<uint16_t>> values = 0) {
   Uint16VectorBuilder builder_(_fbb);
   builder_.add_values(values);
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<Uint16Vector> CreateUint16VectorDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<Uint16Vector> CreateUint16VectorDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<uint16_t> *values = nullptr) {
   if (values) { _fbb.ForceVectorAlignment(values->size(), sizeof(uint16_t), 4); }
   auto values__ = values ? _fbb.CreateVector<uint16_t>(*values) : 0;
@@ -4480,61 +5363,61 @@ inline flatbuffers::Offset<Uint16Vector> CreateUint16VectorDirect(
      values__);
 }
 
-flatbuffers::Offset<Uint16Vector> CreateUint16Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<Uint16Vector> CreateUint16Vector(::flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct Uint8VectorT : public flatbuffers::NativeTable {
+struct Uint8VectorT : public ::flatbuffers::NativeTable {
   typedef Uint8Vector TableType;
   std::vector<uint8_t> values{};
 };
 
-struct Uint8Vector FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct Uint8Vector FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef Uint8VectorT NativeTableType;
   typedef Uint8VectorBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
    VT_VALUES = 4
  };
-  const flatbuffers::Vector<uint8_t> *values() const {
-    return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_VALUES);
+  const ::flatbuffers::Vector<uint8_t> *values() const {
+    return GetPointer<const ::flatbuffers::Vector<uint8_t> *>(VT_VALUES);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
         VerifyOffset(verifier, VT_VALUES) &&
          verifier.VerifyVector(values()) &&
         verifier.EndTable();
   }
-  Uint8VectorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(Uint8VectorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Uint8Vector> Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  Uint8VectorT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(Uint8VectorT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Uint8Vector> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 struct Uint8VectorBuilder {
   typedef Uint8Vector Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_values(flatbuffers::Offset<flatbuffers::Vector<uint8_t>> values) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_values(::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> values) {
    fbb_.AddOffset(Uint8Vector::VT_VALUES, values);
   }
-  explicit Uint8VectorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit Uint8VectorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<Uint8Vector> Finish() {
+  ::flatbuffers::Offset<Uint8Vector> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<Uint8Vector>(end);
+    auto o = ::flatbuffers::Offset<Uint8Vector>(end);
    return o;
   }
 };
 
-inline flatbuffers::Offset<Uint8Vector> CreateUint8Vector(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<uint8_t>> values = 0) {
+inline ::flatbuffers::Offset<Uint8Vector> CreateUint8Vector(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> values = 0) {
   Uint8VectorBuilder builder_(_fbb);
   builder_.add_values(values);
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<Uint8Vector> CreateUint8VectorDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<Uint8Vector> CreateUint8VectorDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<uint8_t> *values = nullptr) {
   if (values) { _fbb.ForceVectorAlignment(values->size(), sizeof(uint8_t), 4); }
   auto values__ = values ? _fbb.CreateVector<uint8_t>(*values) : 0;
@@ -4543,9 +5426,9 @@ inline flatbuffers::Offset<Uint8Vector> CreateUint8VectorDirect(
      values__);
 }
 
-flatbuffers::Offset<Uint8Vector> CreateUint8Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<Uint8Vector> CreateUint8Vector(::flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct DimensionMetadataT : public flatbuffers::NativeTable {
+struct DimensionMetadataT : public ::flatbuffers::NativeTable {
   typedef DimensionMetadata TableType;
   tflite::DimensionType format = tflite::DimensionType_DENSE;
   int32_t dense_size = 0;
@@ -4553,7 +5436,7 @@ struct DimensionMetadataT : public flatbuffers::NativeTable {
   tflite::SparseIndexVectorUnion array_indices{};
 };
 
-struct DimensionMetadata FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct DimensionMetadata FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef DimensionMetadataT NativeTableType;
   typedef DimensionMetadataBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
@@ -4602,7 +5485,7 @@ struct DimensionMetadata FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
   const tflite::Uint8Vector *array_indices_as_Uint8Vector() const {
    return array_indices_type() == tflite::SparseIndexVector_Uint8Vector ? static_cast<const tflite::Uint8Vector *>(array_indices()) : nullptr;
  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
          VerifyField<int8_t>(verifier, VT_FORMAT, 1) &&
          VerifyField<int32_t>(verifier, VT_DENSE_SIZE, 4) &&
@@ -4614,9 +5497,9 @@ struct DimensionMetadata FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
          VerifySparseIndexVector(verifier, array_indices(), array_indices_type()) &&
          verifier.EndTable();
   }
-  DimensionMetadataT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(DimensionMetadataT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<DimensionMetadata> Pack(flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  DimensionMetadataT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(DimensionMetadataT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<DimensionMetadata> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 template<> inline const tflite::Int32Vector *DimensionMetadata::array_segments_as<tflite::Int32Vector>() const {
@@ -4645,8 +5528,8 @@ template<> inline const tflite::Uint8Vector *DimensionMetadata::array_indices_as
 
 struct DimensionMetadataBuilder {
   typedef DimensionMetadata Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
   void add_format(tflite::DimensionType format) {
    fbb_.AddElement<int8_t>(DimensionMetadata::VT_FORMAT, static_cast<int8_t>(format), 0);
   }
@@ -4656,34 +5539,34 @@ struct DimensionMetadataBuilder {
   void add_array_segments_type(tflite::SparseIndexVector array_segments_type) {
    fbb_.AddElement<uint8_t>(DimensionMetadata::VT_ARRAY_SEGMENTS_TYPE, static_cast<uint8_t>(array_segments_type), 0);
   }
-  void add_array_segments(flatbuffers::Offset<void> array_segments) {
+  void add_array_segments(::flatbuffers::Offset<void> array_segments) {
    fbb_.AddOffset(DimensionMetadata::VT_ARRAY_SEGMENTS, array_segments);
   }
   void add_array_indices_type(tflite::SparseIndexVector array_indices_type) {
    fbb_.AddElement<uint8_t>(DimensionMetadata::VT_ARRAY_INDICES_TYPE, static_cast<uint8_t>(array_indices_type), 0);
   }
-  void add_array_indices(flatbuffers::Offset<void> array_indices) {
+  void add_array_indices(::flatbuffers::Offset<void> array_indices) {
    fbb_.AddOffset(DimensionMetadata::VT_ARRAY_INDICES, array_indices);
   }
-  explicit DimensionMetadataBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit DimensionMetadataBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<DimensionMetadata> Finish() {
+  ::flatbuffers::Offset<DimensionMetadata> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<DimensionMetadata>(end);
+    auto o = ::flatbuffers::Offset<DimensionMetadata>(end);
    return o;
   }
 };
 
-inline flatbuffers::Offset<DimensionMetadata> CreateDimensionMetadata(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<DimensionMetadata> CreateDimensionMetadata(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
    tflite::DimensionType format = tflite::DimensionType_DENSE,
    int32_t dense_size = 0,
    tflite::SparseIndexVector array_segments_type = tflite::SparseIndexVector_NONE,
-    flatbuffers::Offset<void> array_segments = 0,
+    ::flatbuffers::Offset<void> array_segments = 0,
    tflite::SparseIndexVector array_indices_type = tflite::SparseIndexVector_NONE,
-    flatbuffers::Offset<void> array_indices = 0) {
+    ::flatbuffers::Offset<void> array_indices = 0) {
   DimensionMetadataBuilder builder_(_fbb);
   builder_.add_array_indices(array_indices);
   builder_.add_array_segments(array_segments);
@@ -4694,9 +5577,9 @@ inline flatbuffers::Offset<DimensionMetadata> CreateDimensionMetadata(
   return builder_.Finish();
 }
 
-flatbuffers::Offset<DimensionMetadata> CreateDimensionMetadata(flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<DimensionMetadata> CreateDimensionMetadata(::flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct SparsityParametersT : public flatbuffers::NativeTable {
+struct SparsityParametersT : public ::flatbuffers::NativeTable {
   typedef SparsityParameters TableType;
   std::vector<int32_t> traversal_order{};
   std::vector<int32_t> block_map{};
@@ -4707,7 +5590,7 @@ struct SparsityParametersT : public flatbuffers::NativeTable {
   SparsityParametersT &operator=(SparsityParametersT o) FLATBUFFERS_NOEXCEPT;
 };
 
-struct SparsityParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct SparsityParameters FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef SparsityParametersT NativeTableType;
   typedef SparsityParametersBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
@@ -4715,16 +5598,16 @@ struct SparsityParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
    VT_BLOCK_MAP = 6,
    VT_DIM_METADATA = 8
  };
-  const flatbuffers::Vector<int32_t> *traversal_order() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_TRAVERSAL_ORDER);
+  const ::flatbuffers::Vector<int32_t> *traversal_order() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_TRAVERSAL_ORDER);
   }
-  const flatbuffers::Vector<int32_t> *block_map() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_BLOCK_MAP);
+  const ::flatbuffers::Vector<int32_t> *block_map() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_BLOCK_MAP);
   }
-  const flatbuffers::Vector<flatbuffers::Offset<tflite::DimensionMetadata>> *dim_metadata() const {
-    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::DimensionMetadata>> *>(VT_DIM_METADATA);
+  const ::flatbuffers::Vector<::flatbuffers::Offset<tflite::DimensionMetadata>> *dim_metadata() const {
+    return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<tflite::DimensionMetadata>> *>(VT_DIM_METADATA);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
          VerifyOffset(verifier, VT_TRAVERSAL_ORDER) &&
          verifier.VerifyVector(traversal_order()) &&
@@ -4735,40 +5618,40 @@ struct SparsityParameters FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
          verifier.VerifyVectorOfTables(dim_metadata()) &&
          verifier.EndTable();
   }
-  SparsityParametersT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SparsityParametersT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SparsityParameters> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SparsityParametersT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SparsityParametersT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SparsityParameters> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 struct SparsityParametersBuilder {
   typedef SparsityParameters Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_traversal_order(flatbuffers::Offset<flatbuffers::Vector<int32_t>> traversal_order) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_traversal_order(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> traversal_order) {
    fbb_.AddOffset(SparsityParameters::VT_TRAVERSAL_ORDER, traversal_order);
   }
-  void add_block_map(flatbuffers::Offset<flatbuffers::Vector<int32_t>> block_map) {
+  void add_block_map(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> block_map) {
    fbb_.AddOffset(SparsityParameters::VT_BLOCK_MAP, block_map);
   }
-  void add_dim_metadata(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::DimensionMetadata>>> dim_metadata) {
+  void add_dim_metadata(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tflite::DimensionMetadata>>> dim_metadata) {
    fbb_.AddOffset(SparsityParameters::VT_DIM_METADATA, dim_metadata);
   }
-  explicit SparsityParametersBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit SparsityParametersBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SparsityParameters> Finish() {
+  ::flatbuffers::Offset<SparsityParameters> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SparsityParameters>(end);
+    auto o = ::flatbuffers::Offset<SparsityParameters>(end);
    return o;
   }
 };
 
-inline flatbuffers::Offset<SparsityParameters> CreateSparsityParameters(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> traversal_order = 0,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> block_map = 0,
-    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::DimensionMetadata>>> dim_metadata = 0) {
+inline ::flatbuffers::Offset<SparsityParameters> CreateSparsityParameters(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> traversal_order = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> block_map = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tflite::DimensionMetadata>>> dim_metadata = 0) {
   SparsityParametersBuilder builder_(_fbb);
   builder_.add_dim_metadata(dim_metadata);
   builder_.add_block_map(block_map);
@@ -4776,14 +5659,14 @@ inline flatbuffers::Offset<SparsityParameters> CreateSparsityParameters(
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<SparsityParameters> CreateSparsityParametersDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<SparsityParameters> CreateSparsityParametersDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<int32_t> *traversal_order = nullptr,
    const std::vector<int32_t> *block_map = nullptr,
-    const std::vector<flatbuffers::Offset<tflite::DimensionMetadata>> *dim_metadata = nullptr) {
+    const std::vector<::flatbuffers::Offset<tflite::DimensionMetadata>> *dim_metadata = nullptr) {
   auto traversal_order__ = traversal_order ? _fbb.CreateVector<int32_t>(*traversal_order) : 0;
   auto block_map__ = block_map ? _fbb.CreateVector<int32_t>(*block_map) : 0;
-  auto dim_metadata__ = dim_metadata ? _fbb.CreateVector<flatbuffers::Offset<tflite::DimensionMetadata>>(*dim_metadata) : 0;
+  auto dim_metadata__ = dim_metadata ? _fbb.CreateVector<::flatbuffers::Offset<tflite::DimensionMetadata>>(*dim_metadata) : 0;
   return tflite::CreateSparsityParameters(
      _fbb,
      traversal_order__,
@@ -4791,16 +5674,16 @@ inline flatbuffers::Offset<SparsityParameters> CreateSparsityParametersDirect(
      dim_metadata__);
 }
 
-flatbuffers::Offset<SparsityParameters> CreateSparsityParameters(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SparsityParameters> CreateSparsityParameters(::flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct VariantSubTypeT : public flatbuffers::NativeTable {
+struct VariantSubTypeT : public ::flatbuffers::NativeTable {
   typedef VariantSubType TableType;
   std::vector<int32_t> shape{};
   tflite::TensorType type = tflite::TensorType_FLOAT32;
   bool has_rank = false;
 };
 
-struct VariantSubType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct VariantSubType FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef VariantSubTypeT NativeTableType;
   typedef VariantSubTypeBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
@@ -4808,8 +5691,8 @@ struct VariantSubType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
    VT_TYPE = 6,
    VT_HAS_RANK = 8
  };
-  const flatbuffers::Vector<int32_t> *shape() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_SHAPE);
+  const ::flatbuffers::Vector<int32_t> *shape() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_SHAPE);
  }
   tflite::TensorType type() const {
    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_TYPE, 0));
@@ -4817,7 +5700,7 @@ struct VariantSubType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
   bool has_rank() const {
    return GetField<uint8_t>(VT_HAS_RANK, 0) != 0;
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
          VerifyOffset(verifier, VT_SHAPE) &&
          verifier.VerifyVector(shape()) &&
@@ -4825,16 +5708,16 @@ struct VariantSubType FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
          VerifyField<uint8_t>(verifier, VT_HAS_RANK, 1) &&
          verifier.EndTable();
   }
-  VariantSubTypeT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(VariantSubTypeT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<VariantSubType> Pack(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  VariantSubTypeT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(VariantSubTypeT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<VariantSubType> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 struct VariantSubTypeBuilder {
   typedef VariantSubType Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_shape(flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_shape(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape) {
    fbb_.AddOffset(VariantSubType::VT_SHAPE, shape);
   }
   void add_type(tflite::TensorType type) {
@@ -4843,20 +5726,20 @@ struct VariantSubTypeBuilder {
   void add_has_rank(bool has_rank) {
    fbb_.AddElement<uint8_t>(VariantSubType::VT_HAS_RANK, static_cast<uint8_t>(has_rank), 0);
   }
-  explicit VariantSubTypeBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit VariantSubTypeBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<VariantSubType> Finish() {
+  ::flatbuffers::Offset<VariantSubType> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<VariantSubType>(end);
+    auto o = ::flatbuffers::Offset<VariantSubType>(end);
    return o;
  }
 };
 
-inline flatbuffers::Offset<VariantSubType> CreateVariantSubType(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape = 0,
+inline ::flatbuffers::Offset<VariantSubType> CreateVariantSubType(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape = 0,
    tflite::TensorType type = tflite::TensorType_FLOAT32,
    bool has_rank = false) {
   VariantSubTypeBuilder builder_(_fbb);
@@ -4866,8 +5749,8 @@ inline flatbuffers::Offset<VariantSubType> CreateVariantSubType(
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<VariantSubType> CreateVariantSubTypeDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<VariantSubType> CreateVariantSubTypeDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<int32_t> *shape = nullptr,
    tflite::TensorType type = tflite::TensorType_FLOAT32,
    bool has_rank = false) {
@@ -4879,9 +5762,9 @@ inline flatbuffers::Offset<VariantSubType> CreateVariantSubTypeDirect(
      has_rank);
 }
 
-flatbuffers::Offset<VariantSubType> CreateVariantSubType(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<VariantSubType> CreateVariantSubType(::flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct TensorT : public flatbuffers::NativeTable {
+struct TensorT : public ::flatbuffers::NativeTable {
   typedef Tensor TableType;
   std::vector<int32_t> shape{};
   tflite::TensorType type = tflite::TensorType_FLOAT32;
@@ -4899,7 +5782,7 @@ struct TensorT : public flatbuffers::NativeTable {
   TensorT &operator=(TensorT o) FLATBUFFERS_NOEXCEPT;
 };
 
-struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
+struct Tensor FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
   typedef TensorT NativeTableType;
   typedef TensorBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
@@ -4914,8 +5797,8 @@ struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
    VT_HAS_RANK = 20,
    VT_VARIANT_TENSORS = 22
  };
-  const flatbuffers::Vector<int32_t> *shape() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_SHAPE);
+  const ::flatbuffers::Vector<int32_t> *shape() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_SHAPE);
  }
   tflite::TensorType type() const {
    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_TYPE, 0));
@@ -4923,8 +5806,8 @@ struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
   uint32_t buffer() const {
    return GetField<uint32_t>(VT_BUFFER, 0);
   }
-  const flatbuffers::String *name() const {
-    return GetPointer<const flatbuffers::String *>(VT_NAME);
+  const ::flatbuffers::String *name() const {
+    return GetPointer<const ::flatbuffers::String *>(VT_NAME);
  }
   const tflite::QuantizationParameters *quantization() const {
    return GetPointer<const tflite::QuantizationParameters *>(VT_QUANTIZATION);
@@ -4935,16 +5818,16 @@ struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
   const tflite::SparsityParameters *sparsity() const {
    return GetPointer<const tflite::SparsityParameters *>(VT_SPARSITY);
   }
-  const flatbuffers::Vector<int32_t> *shape_signature() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_SHAPE_SIGNATURE);
+  const ::flatbuffers::Vector<int32_t> *shape_signature() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_SHAPE_SIGNATURE);
  }
   bool has_rank() const {
    return GetField<uint8_t>(VT_HAS_RANK, 0) != 0;
   }
-  const flatbuffers::Vector<flatbuffers::Offset<tflite::VariantSubType>> *variant_tensors() const {
-    return GetPointer<const flatbuffers::Vector<flatbuffers::Offset<tflite::VariantSubType>> *>(VT_VARIANT_TENSORS);
+  const ::flatbuffers::Vector<::flatbuffers::Offset<tflite::VariantSubType>> *variant_tensors() const {
+    return GetPointer<const ::flatbuffers::Vector<::flatbuffers::Offset<tflite::VariantSubType>> *>(VT_VARIANT_TENSORS);
  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
          VerifyOffset(verifier, VT_SHAPE) &&
          verifier.VerifyVector(shape()) &&
@@ -4965,16 +5848,16 @@ struct Tensor FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
          verifier.VerifyVectorOfTables(variant_tensors()) &&
          verifier.EndTable();
   }
-  TensorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(TensorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Tensor> Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  TensorT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(TensorT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Tensor> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TensorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
 struct TensorBuilder {
   typedef Tensor Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_shape(flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape) {
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_shape(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape) {
    fbb_.AddOffset(Tensor::VT_SHAPE, shape);
   }
   void add_type(tflite::TensorType type) {
@@ -4983,50 +5866,50 @@ struct TensorBuilder {
   void add_buffer(uint32_t buffer) {
    fbb_.AddElement<uint32_t>(Tensor::VT_BUFFER, buffer, 0);
   }
-  void add_name(flatbuffers::Offset<flatbuffers::String> name) {
+  void add_name(::flatbuffers::Offset<::flatbuffers::String> name) {
    fbb_.AddOffset(Tensor::VT_NAME, name);
   }
-  void add_quantization(flatbuffers::Offset<tflite::QuantizationParameters> quantization) {
+  void add_quantization(::flatbuffers::Offset<tflite::QuantizationParameters> quantization) {
    fbb_.AddOffset(Tensor::VT_QUANTIZATION, quantization);
   }
   void add_is_variable(bool is_variable) {
    fbb_.AddElement<uint8_t>(Tensor::VT_IS_VARIABLE, static_cast<uint8_t>(is_variable), 0);
   }
-  void add_sparsity(flatbuffers::Offset<tflite::SparsityParameters> sparsity) {
+  void add_sparsity(::flatbuffers::Offset<tflite::SparsityParameters> sparsity) {
    fbb_.AddOffset(Tensor::VT_SPARSITY, sparsity);
   }
-  void add_shape_signature(flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape_signature) {
+  void add_shape_signature(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape_signature) {
    fbb_.AddOffset(Tensor::VT_SHAPE_SIGNATURE, shape_signature);
   }
   void add_has_rank(bool has_rank) {
    fbb_.AddElement<uint8_t>(Tensor::VT_HAS_RANK, static_cast<uint8_t>(has_rank), 0);
   }
-  void add_variant_tensors(flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::VariantSubType>>> variant_tensors) {
+  void add_variant_tensors(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tflite::VariantSubType>>> variant_tensors) {
    fbb_.AddOffset(Tensor::VT_VARIANT_TENSORS, variant_tensors);
   }
-  explicit TensorBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit TensorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<Tensor> Finish() {
+  ::flatbuffers::Offset<Tensor> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<Tensor>(end);
+    auto o = ::flatbuffers::Offset<Tensor>(end);
    return o;
   }
 };
 
-inline flatbuffers::Offset<Tensor> CreateTensor(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape = 0,
+inline ::flatbuffers::Offset<Tensor> CreateTensor(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape = 0,
    tflite::TensorType type = tflite::TensorType_FLOAT32,
    uint32_t buffer = 0,
-    flatbuffers::Offset<flatbuffers::String> name = 0,
-    flatbuffers::Offset<tflite::QuantizationParameters> quantization = 0,
+    ::flatbuffers::Offset<::flatbuffers::String> name = 0,
+    ::flatbuffers::Offset<tflite::QuantizationParameters> quantization = 0,
    bool is_variable = false,
-    flatbuffers::Offset<tflite::SparsityParameters> sparsity = 0,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> shape_signature = 0,
+    ::flatbuffers::Offset<tflite::SparsityParameters> sparsity = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> shape_signature = 0,
    bool has_rank = false,
-    flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<tflite::VariantSubType>>> variant_tensors = 0) {
+    ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset<tflite::VariantSubType>>> variant_tensors = 0) {
   TensorBuilder builder_(_fbb);
   builder_.add_variant_tensors(variant_tensors);
   builder_.add_shape_signature(shape_signature);
@@ -5041,22 +5924,22 @@ inline flatbuffers::Offset<Tensor> CreateTensor(
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<Tensor> CreateTensorDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<Tensor> CreateTensorDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
    const std::vector<int32_t> *shape = nullptr,
    tflite::TensorType type = tflite::TensorType_FLOAT32,
    uint32_t buffer = 0,
    const char *name = nullptr,
-    flatbuffers::Offset<tflite::QuantizationParameters> quantization = 0,
+    ::flatbuffers::Offset<tflite::QuantizationParameters> quantization = 0,
    bool is_variable = false,
-    flatbuffers::Offset<tflite::SparsityParameters> sparsity = 0,
+    ::flatbuffers::Offset<tflite::SparsityParameters> sparsity = 0,
    const std::vector<int32_t> *shape_signature = nullptr,
    bool has_rank = false,
-    const std::vector<flatbuffers::Offset<tflite::VariantSubType>> *variant_tensors = nullptr) {
+    const std::vector<::flatbuffers::Offset<tflite::VariantSubType>> *variant_tensors = nullptr) {
   auto shape__ = shape ? _fbb.CreateVector<int32_t>(*shape) : 0;
   auto name__ = name ? _fbb.CreateString(name) : 0;
   auto shape_signature__ = shape_signature ? _fbb.CreateVector<int32_t>(*shape_signature) : 0;
-  auto variant_tensors__ = variant_tensors ? _fbb.CreateVector<flatbuffers::Offset<tflite::VariantSubType>>(*variant_tensors) : 0;
+  auto variant_tensors__ = variant_tensors ? _fbb.CreateVector<::flatbuffers::Offset<tflite::VariantSubType>>(*variant_tensors) : 0;
   return tflite::CreateTensor(
      _fbb,
      shape__,
@@ -5071,2252 +5954,2892 @@ inline flatbuffers::Offset<Tensor> CreateTensorDirect(
      variant_tensors__);
 }
 
-flatbuffers::Offset<Tensor> CreateTensor(flatbuffers::FlatBufferBuilder &_fbb, const TensorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<Tensor> CreateTensor(::flatbuffers::FlatBufferBuilder &_fbb, const TensorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct Conv2DOptionsT : public flatbuffers::NativeTable {
-  typedef Conv2DOptions TableType;
-  tflite::Padding padding = tflite::Padding_SAME;
-  int32_t stride_w = 0;
-  int32_t stride_h = 0;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  int32_t dilation_w_factor = 1;
-  int32_t dilation_h_factor = 1;
+struct StablehloGatherOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloGatherOptions TableType;
+  std::vector<int64_t> offset_dims{};
+  std::vector<int64_t> collapsed_slice_dims{};
+  std::vector<int64_t> start_index_map{};
+  int64_t index_vector_dim = 0;
+  std::vector<int64_t> slice_sizes{};
+  bool indices_are_sorted = false;
 };
 
-struct Conv2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef Conv2DOptionsT NativeTableType;
-  typedef Conv2DOptionsBuilder Builder;
+struct StablehloGatherOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloGatherOptionsT NativeTableType;
+  typedef StablehloGatherOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_PADDING = 4,
-    VT_STRIDE_W = 6,
-    VT_STRIDE_H = 8,
-    VT_FUSED_ACTIVATION_FUNCTION = 10,
-    VT_DILATION_W_FACTOR = 12,
-    VT_DILATION_H_FACTOR = 14
+    VT_OFFSET_DIMS = 4,
+    VT_COLLAPSED_SLICE_DIMS = 6,
+    VT_START_INDEX_MAP = 8,
+    VT_INDEX_VECTOR_DIM = 10,
+    VT_SLICE_SIZES = 12,
+    VT_INDICES_ARE_SORTED = 14
  };
-  tflite::Padding padding() const {
-    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
+  const ::flatbuffers::Vector<int64_t> *offset_dims() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_OFFSET_DIMS);
  }
-  int32_t stride_w() const {
-    return GetField<int32_t>(VT_STRIDE_W, 0);
+  const ::flatbuffers::Vector<int64_t> *collapsed_slice_dims() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_COLLAPSED_SLICE_DIMS);
  }
-  int32_t stride_h() const {
-    return GetField<int32_t>(VT_STRIDE_H, 0);
+  const ::flatbuffers::Vector<int64_t> *start_index_map() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_START_INDEX_MAP);
  }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  int64_t index_vector_dim() const {
+    return GetField<int64_t>(VT_INDEX_VECTOR_DIM, 0);
  }
-  int32_t dilation_w_factor() const {
-    return GetField<int32_t>(VT_DILATION_W_FACTOR, 1);
+  const ::flatbuffers::Vector<int64_t> *slice_sizes() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_SLICE_SIZES);
  }
-  int32_t dilation_h_factor() const {
-    return GetField<int32_t>(VT_DILATION_H_FACTOR, 1);
+  bool indices_are_sorted() const {
+    return GetField<uint8_t>(VT_INDICES_ARE_SORTED, 0) != 0;
  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
    return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR, 4) &&
-           VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR, 4) &&
+           VerifyOffset(verifier, VT_OFFSET_DIMS) &&
+           verifier.VerifyVector(offset_dims()) &&
+           VerifyOffset(verifier, VT_COLLAPSED_SLICE_DIMS) &&
+           verifier.VerifyVector(collapsed_slice_dims()) &&
+           VerifyOffset(verifier, VT_START_INDEX_MAP) &&
+           verifier.VerifyVector(start_index_map()) &&
+           VerifyField<int64_t>(verifier, VT_INDEX_VECTOR_DIM, 8) &&
+           VerifyOffset(verifier, VT_SLICE_SIZES) &&
+           verifier.VerifyVector(slice_sizes()) &&
+           VerifyField<uint8_t>(verifier, VT_INDICES_ARE_SORTED, 1) &&
          verifier.EndTable();
  }
-  Conv2DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(Conv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Conv2DOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloGatherOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloGatherOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloGatherOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloGatherOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct Conv2DOptionsBuilder {
-  typedef Conv2DOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_padding(tflite::Padding padding) {
-    fbb_.AddElement<int8_t>(Conv2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
+struct StablehloGatherOptionsBuilder {
+  typedef StablehloGatherOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_offset_dims(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> offset_dims) {
+    fbb_.AddOffset(StablehloGatherOptions::VT_OFFSET_DIMS, offset_dims);
  }
-  void add_stride_w(int32_t stride_w) {
-    fbb_.AddElement<int32_t>(Conv2DOptions::VT_STRIDE_W, stride_w, 0);
+  void add_collapsed_slice_dims(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> collapsed_slice_dims) {
+    fbb_.AddOffset(StablehloGatherOptions::VT_COLLAPSED_SLICE_DIMS, collapsed_slice_dims);
  }
-  void add_stride_h(int32_t stride_h) {
-    fbb_.AddElement<int32_t>(Conv2DOptions::VT_STRIDE_H, stride_h, 0);
+  void add_start_index_map(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> start_index_map) {
+    fbb_.AddOffset(StablehloGatherOptions::VT_START_INDEX_MAP, start_index_map);
  }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(Conv2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  void add_index_vector_dim(int64_t index_vector_dim) {
+    fbb_.AddElement<int64_t>(StablehloGatherOptions::VT_INDEX_VECTOR_DIM, index_vector_dim, 0);
  }
-  void add_dilation_w_factor(int32_t dilation_w_factor) {
-    fbb_.AddElement<int32_t>(Conv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+  void add_slice_sizes(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> slice_sizes) {
+    fbb_.AddOffset(StablehloGatherOptions::VT_SLICE_SIZES, slice_sizes);
  }
-  void add_dilation_h_factor(int32_t dilation_h_factor) {
-    fbb_.AddElement<int32_t>(Conv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
+  void add_indices_are_sorted(bool indices_are_sorted) {
+    fbb_.AddElement<uint8_t>(StablehloGatherOptions::VT_INDICES_ARE_SORTED, static_cast<uint8_t>(indices_are_sorted), 0);
  }
-  explicit Conv2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloGatherOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<Conv2DOptions> Finish() {
+  ::flatbuffers::Offset<StablehloGatherOptions> Finish() {
    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<Conv2DOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloGatherOptions>(end);
    return o;
  }
 };
 
-inline flatbuffers::Offset<Conv2DOptions> CreateConv2DOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::Padding padding = tflite::Padding_SAME,
-    int32_t stride_w = 0,
-    int32_t stride_h = 0,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    int32_t dilation_w_factor = 1,
-    int32_t dilation_h_factor = 1) {
-  Conv2DOptionsBuilder builder_(_fbb);
-  builder_.add_dilation_h_factor(dilation_h_factor);
-  builder_.add_dilation_w_factor(dilation_w_factor);
-  builder_.add_stride_h(stride_h);
-  builder_.add_stride_w(stride_w);
-  builder_.add_fused_activation_function(fused_activation_function);
-  builder_.add_padding(padding);
+inline ::flatbuffers::Offset<StablehloGatherOptions> CreateStablehloGatherOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> offset_dims = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> collapsed_slice_dims = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> start_index_map = 0,
+    int64_t index_vector_dim = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> slice_sizes = 0,
+    bool indices_are_sorted = false) {
+  StablehloGatherOptionsBuilder builder_(_fbb);
+  builder_.add_index_vector_dim(index_vector_dim);
+  builder_.add_slice_sizes(slice_sizes);
+  builder_.add_start_index_map(start_index_map);
+  builder_.add_collapsed_slice_dims(collapsed_slice_dims);
+  builder_.add_offset_dims(offset_dims);
+  builder_.add_indices_are_sorted(indices_are_sorted);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<Conv2DOptions> CreateConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+inline ::flatbuffers::Offset<StablehloGatherOptions> CreateStablehloGatherOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
std::vector *offset_dims = nullptr, + const std::vector *collapsed_slice_dims = nullptr, + const std::vector *start_index_map = nullptr, + int64_t index_vector_dim = 0, + const std::vector *slice_sizes = nullptr, + bool indices_are_sorted = false) { + auto offset_dims__ = offset_dims ? _fbb.CreateVector(*offset_dims) : 0; + auto collapsed_slice_dims__ = collapsed_slice_dims ? _fbb.CreateVector(*collapsed_slice_dims) : 0; + auto start_index_map__ = start_index_map ? _fbb.CreateVector(*start_index_map) : 0; + auto slice_sizes__ = slice_sizes ? _fbb.CreateVector(*slice_sizes) : 0; + return tflite::CreateStablehloGatherOptions( + _fbb, + offset_dims__, + collapsed_slice_dims__, + start_index_map__, + index_vector_dim, + slice_sizes__, + indices_are_sorted); +} -struct Conv3DOptionsT : public flatbuffers::NativeTable { - typedef Conv3DOptions TableType; - tflite::Padding padding = tflite::Padding_SAME; - int32_t stride_d = 0; - int32_t stride_w = 0; - int32_t stride_h = 0; - tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; - int32_t dilation_d_factor = 1; - int32_t dilation_w_factor = 1; - int32_t dilation_h_factor = 1; +::flatbuffers::Offset CreateStablehloGatherOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloGatherOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct StablehloTransposeOptionsT : public ::flatbuffers::NativeTable { + typedef StablehloTransposeOptions TableType; + std::vector permutation{}; }; -struct Conv3DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef Conv3DOptionsT NativeTableType; - typedef Conv3DOptionsBuilder Builder; +struct StablehloTransposeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef StablehloTransposeOptionsT NativeTableType; + typedef StablehloTransposeOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_PADDING = 4, - VT_STRIDE_D = 6, - VT_STRIDE_W = 8, - VT_STRIDE_H = 10, - VT_FUSED_ACTIVATION_FUNCTION = 12, - VT_DILATION_D_FACTOR = 14, - VT_DILATION_W_FACTOR = 16, - VT_DILATION_H_FACTOR = 18 + VT_PERMUTATION = 4 }; - tflite::Padding padding() const { - return static_cast(GetField(VT_PADDING, 0)); + const ::flatbuffers::Vector *permutation() const { + return GetPointer *>(VT_PERMUTATION); } - int32_t stride_d() const { - return GetField(VT_STRIDE_D, 0); + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_PERMUTATION) && + verifier.VerifyVector(permutation()) && + verifier.EndTable(); } - int32_t stride_w() const { - return GetField(VT_STRIDE_W, 0); + StablehloTransposeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(StablehloTransposeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloTransposeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct StablehloTransposeOptionsBuilder { + typedef StablehloTransposeOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_permutation(::flatbuffers::Offset<::flatbuffers::Vector> permutation) { + fbb_.AddOffset(StablehloTransposeOptions::VT_PERMUTATION, permutation); } - int32_t stride_h() const { - return GetField(VT_STRIDE_H, 0); + explicit StablehloTransposeOptionsBuilder(::flatbuffers::FlatBufferBuilder 
-struct Conv3DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef Conv3DOptionsT NativeTableType;
-  typedef Conv3DOptionsBuilder Builder;
+struct StablehloTransposeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloTransposeOptionsT NativeTableType;
+  typedef StablehloTransposeOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_PADDING = 4,
-    VT_STRIDE_D = 6,
-    VT_STRIDE_W = 8,
-    VT_STRIDE_H = 10,
-    VT_FUSED_ACTIVATION_FUNCTION = 12,
-    VT_DILATION_D_FACTOR = 14,
-    VT_DILATION_W_FACTOR = 16,
-    VT_DILATION_H_FACTOR = 18
+    VT_PERMUTATION = 4
   };
-  tflite::Padding padding() const {
-    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
+  const ::flatbuffers::Vector<int64_t> *permutation() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_PERMUTATION);
   }
-  int32_t stride_d() const {
-    return GetField<int32_t>(VT_STRIDE_D, 0);
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyOffset(verifier, VT_PERMUTATION) &&
+           verifier.VerifyVector(permutation()) &&
+           verifier.EndTable();
   }
-  int32_t stride_w() const {
-    return GetField<int32_t>(VT_STRIDE_W, 0);
+  StablehloTransposeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloTransposeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloTransposeOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloTransposeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct StablehloTransposeOptionsBuilder {
+  typedef StablehloTransposeOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_permutation(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> permutation) {
+    fbb_.AddOffset(StablehloTransposeOptions::VT_PERMUTATION, permutation);
   }
-  int32_t stride_h() const {
-    return GetField<int32_t>(VT_STRIDE_H, 0);
+  explicit StablehloTransposeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  ::flatbuffers::Offset<StablehloTransposeOptions> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<StablehloTransposeOptions>(end);
+    return o;
   }
-  int32_t dilation_d_factor() const {
-    return GetField<int32_t>(VT_DILATION_D_FACTOR, 1);
+};
+
+inline ::flatbuffers::Offset<StablehloTransposeOptions> CreateStablehloTransposeOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> permutation = 0) {
+  StablehloTransposeOptionsBuilder builder_(_fbb);
+  builder_.add_permutation(permutation);
+  return builder_.Finish();
+}
+
+inline ::flatbuffers::Offset<StablehloTransposeOptions> CreateStablehloTransposeOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<int64_t> *permutation = nullptr) {
+  auto permutation__ = permutation ? _fbb.CreateVector<int64_t>(*permutation) : 0;
+  return tflite::CreateStablehloTransposeOptions(
+      _fbb,
+      permutation__);
+}
+
+::flatbuffers::Offset<StablehloTransposeOptions> CreateStablehloTransposeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloTransposeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct StablehloDotGeneralOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloDotGeneralOptions TableType;
+  std::vector<int64_t> lhs_batching_dimensions{};
+  std::vector<int64_t> rhs_batching_dimensions{};
+  std::vector<int64_t> lhs_contracting_dimensions{};
+  std::vector<int64_t> rhs_contracting_dimensions{};
+  std::vector<tflite::StablehloPrecisionConfig> precision_config{};
+};
+
+struct StablehloDotGeneralOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloDotGeneralOptionsT NativeTableType;
+  typedef StablehloDotGeneralOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_LHS_BATCHING_DIMENSIONS = 4,
+    VT_RHS_BATCHING_DIMENSIONS = 6,
+    VT_LHS_CONTRACTING_DIMENSIONS = 8,
+    VT_RHS_CONTRACTING_DIMENSIONS = 10,
+    VT_PRECISION_CONFIG = 12
+  };
+  const ::flatbuffers::Vector<int64_t> *lhs_batching_dimensions() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_LHS_BATCHING_DIMENSIONS);
   }
-  int32_t dilation_w_factor() const {
-    return GetField<int32_t>(VT_DILATION_W_FACTOR, 1);
+  const ::flatbuffers::Vector<int64_t> *rhs_batching_dimensions() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_RHS_BATCHING_DIMENSIONS);
   }
-  int32_t dilation_h_factor() const {
-    return GetField<int32_t>(VT_DILATION_H_FACTOR, 1);
+  const ::flatbuffers::Vector<int64_t> *lhs_contracting_dimensions() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_LHS_CONTRACTING_DIMENSIONS);
+  }
+  const ::flatbuffers::Vector<int64_t> *rhs_contracting_dimensions() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_RHS_CONTRACTING_DIMENSIONS);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  const ::flatbuffers::Vector<uint32_t> *precision_config() const {
+    return GetPointer<const ::flatbuffers::Vector<uint32_t> *>(VT_PRECISION_CONFIG);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_D, 4) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<int32_t>(verifier, VT_DILATION_D_FACTOR, 4) &&
-           VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR, 4) &&
-           VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR, 4) &&
+           VerifyOffset(verifier, VT_LHS_BATCHING_DIMENSIONS) &&
+           verifier.VerifyVector(lhs_batching_dimensions()) &&
+           VerifyOffset(verifier, VT_RHS_BATCHING_DIMENSIONS) &&
+           verifier.VerifyVector(rhs_batching_dimensions()) &&
+           VerifyOffset(verifier, VT_LHS_CONTRACTING_DIMENSIONS) &&
+           verifier.VerifyVector(lhs_contracting_dimensions()) &&
+           VerifyOffset(verifier, VT_RHS_CONTRACTING_DIMENSIONS) &&
+           verifier.VerifyVector(rhs_contracting_dimensions()) &&
+           VerifyOffset(verifier, VT_PRECISION_CONFIG) &&
+           verifier.VerifyVector(precision_config()) &&
            verifier.EndTable();
   }
-  Conv3DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(Conv3DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Conv3DOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloDotGeneralOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloDotGeneralOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloDotGeneralOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDotGeneralOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct Conv3DOptionsBuilder {
-  typedef Conv3DOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_padding(tflite::Padding padding) {
-    fbb_.AddElement<int8_t>(Conv3DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
-  }
-  void add_stride_d(int32_t stride_d) {
-    fbb_.AddElement<int32_t>(Conv3DOptions::VT_STRIDE_D, stride_d, 0);
-  }
-  void add_stride_w(int32_t stride_w) {
-    fbb_.AddElement<int32_t>(Conv3DOptions::VT_STRIDE_W, stride_w, 0);
-  }
-  void add_stride_h(int32_t stride_h) {
-    fbb_.AddElement<int32_t>(Conv3DOptions::VT_STRIDE_H, stride_h, 0);
+struct StablehloDotGeneralOptionsBuilder {
+  typedef StablehloDotGeneralOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_lhs_batching_dimensions(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> lhs_batching_dimensions) {
+    fbb_.AddOffset(StablehloDotGeneralOptions::VT_LHS_BATCHING_DIMENSIONS, lhs_batching_dimensions);
   }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(Conv3DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  void add_rhs_batching_dimensions(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> rhs_batching_dimensions) {
+    fbb_.AddOffset(StablehloDotGeneralOptions::VT_RHS_BATCHING_DIMENSIONS, rhs_batching_dimensions);
   }
-  void add_dilation_d_factor(int32_t dilation_d_factor) {
-    fbb_.AddElement<int32_t>(Conv3DOptions::VT_DILATION_D_FACTOR, dilation_d_factor, 1);
+  void add_lhs_contracting_dimensions(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> lhs_contracting_dimensions) {
+    fbb_.AddOffset(StablehloDotGeneralOptions::VT_LHS_CONTRACTING_DIMENSIONS, lhs_contracting_dimensions);
   }
-  void add_dilation_w_factor(int32_t dilation_w_factor) {
-    fbb_.AddElement<int32_t>(Conv3DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+  void add_rhs_contracting_dimensions(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> rhs_contracting_dimensions) {
+    fbb_.AddOffset(StablehloDotGeneralOptions::VT_RHS_CONTRACTING_DIMENSIONS, rhs_contracting_dimensions);
   }
-  void add_dilation_h_factor(int32_t dilation_h_factor) {
-    fbb_.AddElement<int32_t>(Conv3DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
+  void add_precision_config(::flatbuffers::Offset<::flatbuffers::Vector<uint32_t>> precision_config) {
+    fbb_.AddOffset(StablehloDotGeneralOptions::VT_PRECISION_CONFIG, precision_config);
   }
-  explicit Conv3DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloDotGeneralOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<Conv3DOptions> Finish() {
+  ::flatbuffers::Offset<StablehloDotGeneralOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<Conv3DOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloDotGeneralOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<Conv3DOptions> CreateConv3DOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::Padding padding = tflite::Padding_SAME,
-    int32_t stride_d = 0,
-    int32_t stride_w = 0,
-    int32_t stride_h = 0,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    int32_t dilation_d_factor = 1,
-    int32_t dilation_w_factor = 1,
-    int32_t dilation_h_factor = 1) {
-  Conv3DOptionsBuilder builder_(_fbb);
-  builder_.add_dilation_h_factor(dilation_h_factor);
-  builder_.add_dilation_w_factor(dilation_w_factor);
-  builder_.add_dilation_d_factor(dilation_d_factor);
-  builder_.add_stride_h(stride_h);
-  builder_.add_stride_w(stride_w);
-  builder_.add_stride_d(stride_d);
-  builder_.add_fused_activation_function(fused_activation_function);
-  builder_.add_padding(padding);
+inline ::flatbuffers::Offset<StablehloDotGeneralOptions> CreateStablehloDotGeneralOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> lhs_batching_dimensions = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> rhs_batching_dimensions = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> lhs_contracting_dimensions = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> rhs_contracting_dimensions = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<uint32_t>> precision_config = 0) {
+  StablehloDotGeneralOptionsBuilder builder_(_fbb);
+  builder_.add_precision_config(precision_config);
+  builder_.add_rhs_contracting_dimensions(rhs_contracting_dimensions);
+  builder_.add_lhs_contracting_dimensions(lhs_contracting_dimensions);
+  builder_.add_rhs_batching_dimensions(rhs_batching_dimensions);
+  builder_.add_lhs_batching_dimensions(lhs_batching_dimensions);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<Conv3DOptions> CreateConv3DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-
-struct Pool2DOptionsT : public flatbuffers::NativeTable {
-  typedef Pool2DOptions TableType;
-  tflite::Padding padding = tflite::Padding_SAME;
-  int32_t stride_w = 0;
-  int32_t stride_h = 0;
-  int32_t filter_width = 0;
-  int32_t filter_height = 0;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+inline ::flatbuffers::Offset<StablehloDotGeneralOptions> CreateStablehloDotGeneralOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<int64_t> *lhs_batching_dimensions = nullptr,
+    const std::vector<int64_t> *rhs_batching_dimensions = nullptr,
+    const std::vector<int64_t> *lhs_contracting_dimensions = nullptr,
+    const std::vector<int64_t> *rhs_contracting_dimensions = nullptr,
+    const std::vector<uint32_t> *precision_config = nullptr) {
+  auto lhs_batching_dimensions__ = lhs_batching_dimensions ? _fbb.CreateVector<int64_t>(*lhs_batching_dimensions) : 0;
+  auto rhs_batching_dimensions__ = rhs_batching_dimensions ? _fbb.CreateVector<int64_t>(*rhs_batching_dimensions) : 0;
+  auto lhs_contracting_dimensions__ = lhs_contracting_dimensions ? _fbb.CreateVector<int64_t>(*lhs_contracting_dimensions) : 0;
+  auto rhs_contracting_dimensions__ = rhs_contracting_dimensions ? _fbb.CreateVector<int64_t>(*rhs_contracting_dimensions) : 0;
+  auto precision_config__ = precision_config ? _fbb.CreateVector<uint32_t>(*precision_config) : 0;
+  return tflite::CreateStablehloDotGeneralOptions(
+      _fbb,
+      lhs_batching_dimensions__,
+      rhs_batching_dimensions__,
+      lhs_contracting_dimensions__,
+      rhs_contracting_dimensions__,
+      precision_config__);
+}
+
+::flatbuffers::Offset<StablehloDotGeneralOptions> CreateStablehloDotGeneralOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDotGeneralOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
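The dot_general options encode which axes are batched and which are contracted, mirroring stablehlo.dot_general. A minimal sketch of a call site (illustration only, not part of the patch; the wrapper name is assumed) for a batched matmul where axis 0 is the batch axis and the inner axes contract:

```c++
#include <cstdint>
#include <vector>
#include "tensorflow/lite/schema/schema_generated.h"  // assumed include path

void BuildDotGeneralOptions(::flatbuffers::FlatBufferBuilder &fbb) {
  std::vector<int64_t> lhs_batch = {0}, rhs_batch = {0};
  std::vector<int64_t> lhs_contract = {2}, rhs_contract = {1};
  // precision_config is optional; passing nullptr leaves the field absent.
  auto opts = tflite::CreateStablehloDotGeneralOptionsDirect(
      fbb, &lhs_batch, &rhs_batch, &lhs_contract, &rhs_contract,
      /*precision_config=*/nullptr);
  (void)opts;
}
```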
+
+struct StablehloReduceWindowOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloReduceWindowOptions TableType;
+  std::vector<int64_t> window_dimensions{};
+  std::vector<int64_t> window_strides{};
+  std::vector<int64_t> base_dilations{};
+  std::vector<int64_t> window_dilations{};
+  std::vector<int64_t> padding{};
+  int32_t body_subgraph_index = 0;
 };
 
-struct Pool2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef Pool2DOptionsT NativeTableType;
-  typedef Pool2DOptionsBuilder Builder;
+struct StablehloReduceWindowOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloReduceWindowOptionsT NativeTableType;
+  typedef StablehloReduceWindowOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_PADDING = 4,
-    VT_STRIDE_W = 6,
-    VT_STRIDE_H = 8,
-    VT_FILTER_WIDTH = 10,
-    VT_FILTER_HEIGHT = 12,
-    VT_FUSED_ACTIVATION_FUNCTION = 14
+    VT_WINDOW_DIMENSIONS = 4,
+    VT_WINDOW_STRIDES = 6,
+    VT_BASE_DILATIONS = 8,
+    VT_WINDOW_DILATIONS = 10,
+    VT_PADDING = 12,
+    VT_BODY_SUBGRAPH_INDEX = 14
   };
-  tflite::Padding padding() const {
-    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
+  const ::flatbuffers::Vector<int64_t> *window_dimensions() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_WINDOW_DIMENSIONS);
   }
-  int32_t stride_w() const {
-    return GetField<int32_t>(VT_STRIDE_W, 0);
+  const ::flatbuffers::Vector<int64_t> *window_strides() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_WINDOW_STRIDES);
   }
-  int32_t stride_h() const {
-    return GetField<int32_t>(VT_STRIDE_H, 0);
+  const ::flatbuffers::Vector<int64_t> *base_dilations() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_BASE_DILATIONS);
   }
-  int32_t filter_width() const {
-    return GetField<int32_t>(VT_FILTER_WIDTH, 0);
+  const ::flatbuffers::Vector<int64_t> *window_dilations() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_WINDOW_DILATIONS);
   }
-  int32_t filter_height() const {
-    return GetField<int32_t>(VT_FILTER_HEIGHT, 0);
+  const ::flatbuffers::Vector<int64_t> *padding() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_PADDING);
   }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  int32_t body_subgraph_index() const {
+    return GetField<int32_t>(VT_BODY_SUBGRAPH_INDEX, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
-           VerifyField<int32_t>(verifier, VT_FILTER_WIDTH, 4) &&
-           VerifyField<int32_t>(verifier, VT_FILTER_HEIGHT, 4) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyOffset(verifier, VT_WINDOW_DIMENSIONS) &&
+           verifier.VerifyVector(window_dimensions()) &&
+           VerifyOffset(verifier, VT_WINDOW_STRIDES) &&
+           verifier.VerifyVector(window_strides()) &&
+           VerifyOffset(verifier, VT_BASE_DILATIONS) &&
+           verifier.VerifyVector(base_dilations()) &&
+           VerifyOffset(verifier, VT_WINDOW_DILATIONS) &&
+           verifier.VerifyVector(window_dilations()) &&
+           VerifyOffset(verifier, VT_PADDING) &&
+           verifier.VerifyVector(padding()) &&
+           VerifyField<int32_t>(verifier, VT_BODY_SUBGRAPH_INDEX, 4) &&
            verifier.EndTable();
   }
-  Pool2DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(Pool2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Pool2DOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloReduceWindowOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloReduceWindowOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloReduceWindowOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceWindowOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct Pool2DOptionsBuilder {
-  typedef Pool2DOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_padding(tflite::Padding padding) {
-    fbb_.AddElement<int8_t>(Pool2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
+struct StablehloReduceWindowOptionsBuilder {
+  typedef StablehloReduceWindowOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_window_dimensions(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> window_dimensions) {
+    fbb_.AddOffset(StablehloReduceWindowOptions::VT_WINDOW_DIMENSIONS, window_dimensions);
   }
-  void add_stride_w(int32_t stride_w) {
-    fbb_.AddElement<int32_t>(Pool2DOptions::VT_STRIDE_W, stride_w, 0);
+  void add_window_strides(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> window_strides) {
+    fbb_.AddOffset(StablehloReduceWindowOptions::VT_WINDOW_STRIDES, window_strides);
   }
-  void add_stride_h(int32_t stride_h) {
-    fbb_.AddElement<int32_t>(Pool2DOptions::VT_STRIDE_H, stride_h, 0);
+  void add_base_dilations(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> base_dilations) {
+    fbb_.AddOffset(StablehloReduceWindowOptions::VT_BASE_DILATIONS, base_dilations);
   }
-  void add_filter_width(int32_t filter_width) {
-    fbb_.AddElement<int32_t>(Pool2DOptions::VT_FILTER_WIDTH, filter_width, 0);
+  void add_window_dilations(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> window_dilations) {
+    fbb_.AddOffset(StablehloReduceWindowOptions::VT_WINDOW_DILATIONS, window_dilations);
   }
-  void add_filter_height(int32_t filter_height) {
-    fbb_.AddElement<int32_t>(Pool2DOptions::VT_FILTER_HEIGHT, filter_height, 0);
+  void add_padding(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> padding) {
+    fbb_.AddOffset(StablehloReduceWindowOptions::VT_PADDING, padding);
   }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(Pool2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  void add_body_subgraph_index(int32_t body_subgraph_index) {
+    fbb_.AddElement<int32_t>(StablehloReduceWindowOptions::VT_BODY_SUBGRAPH_INDEX, body_subgraph_index, 0);
   }
-  explicit Pool2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloReduceWindowOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<Pool2DOptions> Finish() {
+  ::flatbuffers::Offset<StablehloReduceWindowOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<Pool2DOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloReduceWindowOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<Pool2DOptions> CreatePool2DOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::Padding padding = tflite::Padding_SAME,
-    int32_t stride_w = 0,
-    int32_t stride_h = 0,
-    int32_t filter_width = 0,
-    int32_t filter_height = 0,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) {
-  Pool2DOptionsBuilder builder_(_fbb);
-  builder_.add_filter_height(filter_height);
-  builder_.add_filter_width(filter_width);
-  builder_.add_stride_h(stride_h);
-  builder_.add_stride_w(stride_w);
-  builder_.add_fused_activation_function(fused_activation_function);
+inline ::flatbuffers::Offset<StablehloReduceWindowOptions> CreateStablehloReduceWindowOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> window_dimensions = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> window_strides = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> base_dilations = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> window_dilations = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> padding = 0,
+    int32_t body_subgraph_index = 0) {
+  StablehloReduceWindowOptionsBuilder builder_(_fbb);
+  builder_.add_body_subgraph_index(body_subgraph_index);
   builder_.add_padding(padding);
+  builder_.add_window_dilations(window_dilations);
+  builder_.add_base_dilations(base_dilations);
+  builder_.add_window_strides(window_strides);
+  builder_.add_window_dimensions(window_dimensions);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<Pool2DOptions> CreatePool2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+inline ::flatbuffers::Offset<StablehloReduceWindowOptions> CreateStablehloReduceWindowOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<int64_t> *window_dimensions = nullptr,
+    const std::vector<int64_t> *window_strides = nullptr,
+    const std::vector<int64_t> *base_dilations = nullptr,
+    const std::vector<int64_t> *window_dilations = nullptr,
+    const std::vector<int64_t> *padding = nullptr,
+    int32_t body_subgraph_index = 0) {
+  auto window_dimensions__ = window_dimensions ? _fbb.CreateVector<int64_t>(*window_dimensions) : 0;
+  auto window_strides__ = window_strides ? _fbb.CreateVector<int64_t>(*window_strides) : 0;
+  auto base_dilations__ = base_dilations ? _fbb.CreateVector<int64_t>(*base_dilations) : 0;
+  auto window_dilations__ = window_dilations ? _fbb.CreateVector<int64_t>(*window_dilations) : 0;
+  auto padding__ = padding ? _fbb.CreateVector<int64_t>(*padding) : 0;
+  return tflite::CreateStablehloReduceWindowOptions(
+      _fbb,
+      window_dimensions__,
+      window_strides__,
+      base_dilations__,
+      window_dilations__,
+      padding__,
+      body_subgraph_index);
+}
 
-struct DepthwiseConv2DOptionsT : public flatbuffers::NativeTable {
-  typedef DepthwiseConv2DOptions TableType;
-  tflite::Padding padding = tflite::Padding_SAME;
-  int32_t stride_w = 0;
-  int32_t stride_h = 0;
-  int32_t depth_multiplier = 0;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  int32_t dilation_w_factor = 1;
-  int32_t dilation_h_factor = 1;
+::flatbuffers::Offset<StablehloReduceWindowOptions> CreateStablehloReduceWindowOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceWindowOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct StablehloWhileOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloWhileOptions TableType;
+  int32_t cond_subgraph_index = 0;
+  int32_t body_subgraph_index = 0;
 };
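Unlike the pooling options it displaces, the reduce_window table carries a `body_subgraph_index`: the reduction computation lives in a separate subgraph rather than in a fused-activation enum. A minimal sketch of a call site (illustration only, not part of the patch; the wrapper name and subgraph index are assumed):

```c++
#include <cstdint>
#include <vector>
#include "tensorflow/lite/schema/schema_generated.h"  // assumed include path

void BuildReduceWindowOptions(::flatbuffers::FlatBufferBuilder &fbb) {
  // A 2x2 window with stride 2 over an NHWC tensor.
  std::vector<int64_t> window_dims = {1, 2, 2, 1};
  std::vector<int64_t> strides = {1, 2, 2, 1};
  auto opts = tflite::CreateStablehloReduceWindowOptionsDirect(
      fbb, &window_dims, &strides,
      /*base_dilations=*/nullptr, /*window_dilations=*/nullptr,
      /*padding=*/nullptr, /*body_subgraph_index=*/1);  // index assumed
  (void)opts;
}
```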
-struct DepthwiseConv2DOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef DepthwiseConv2DOptionsT NativeTableType;
-  typedef DepthwiseConv2DOptionsBuilder Builder;
+struct StablehloWhileOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloWhileOptionsT NativeTableType;
+  typedef StablehloWhileOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_PADDING = 4,
-    VT_STRIDE_W = 6,
-    VT_STRIDE_H = 8,
-    VT_DEPTH_MULTIPLIER = 10,
-    VT_FUSED_ACTIVATION_FUNCTION = 12,
-    VT_DILATION_W_FACTOR = 14,
-    VT_DILATION_H_FACTOR = 16
+    VT_COND_SUBGRAPH_INDEX = 4,
+    VT_BODY_SUBGRAPH_INDEX = 6
   };
-  tflite::Padding padding() const {
-    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
-  }
-  int32_t stride_w() const {
-    return GetField<int32_t>(VT_STRIDE_W, 0);
-  }
-  int32_t stride_h() const {
-    return GetField<int32_t>(VT_STRIDE_H, 0);
-  }
-  int32_t depth_multiplier() const {
-    return GetField<int32_t>(VT_DEPTH_MULTIPLIER, 0);
-  }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
-  }
-  int32_t dilation_w_factor() const {
-    return GetField<int32_t>(VT_DILATION_W_FACTOR, 1);
+  int32_t cond_subgraph_index() const {
+    return GetField<int32_t>(VT_COND_SUBGRAPH_INDEX, 0);
   }
-  int32_t dilation_h_factor() const {
-    return GetField<int32_t>(VT_DILATION_H_FACTOR, 1);
+  int32_t body_subgraph_index() const {
+    return GetField<int32_t>(VT_BODY_SUBGRAPH_INDEX, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
-           VerifyField<int32_t>(verifier, VT_DEPTH_MULTIPLIER, 4) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR, 4) &&
-           VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR, 4) &&
+           VerifyField<int32_t>(verifier, VT_COND_SUBGRAPH_INDEX, 4) &&
+           VerifyField<int32_t>(verifier, VT_BODY_SUBGRAPH_INDEX, 4) &&
            verifier.EndTable();
   }
-  DepthwiseConv2DOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(DepthwiseConv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<DepthwiseConv2DOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloWhileOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloWhileOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloWhileOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloWhileOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct DepthwiseConv2DOptionsBuilder {
-  typedef DepthwiseConv2DOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_padding(tflite::Padding padding) {
-    fbb_.AddElement<int8_t>(DepthwiseConv2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
-  }
-  void add_stride_w(int32_t stride_w) {
-    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_STRIDE_W, stride_w, 0);
-  }
-  void add_stride_h(int32_t stride_h) {
-    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_STRIDE_H, stride_h, 0);
-  }
-  void add_depth_multiplier(int32_t depth_multiplier) {
-    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DEPTH_MULTIPLIER, depth_multiplier, 0);
-  }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(DepthwiseConv2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
-  }
-  void add_dilation_w_factor(int32_t dilation_w_factor) {
-    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+struct StablehloWhileOptionsBuilder {
+  typedef StablehloWhileOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_cond_subgraph_index(int32_t cond_subgraph_index) {
+    fbb_.AddElement<int32_t>(StablehloWhileOptions::VT_COND_SUBGRAPH_INDEX, cond_subgraph_index, 0);
   }
-  void add_dilation_h_factor(int32_t dilation_h_factor) {
-    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
+  void add_body_subgraph_index(int32_t body_subgraph_index) {
+    fbb_.AddElement<int32_t>(StablehloWhileOptions::VT_BODY_SUBGRAPH_INDEX, body_subgraph_index, 0);
   }
-  explicit DepthwiseConv2DOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloWhileOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<DepthwiseConv2DOptions> Finish() {
+  ::flatbuffers::Offset<StablehloWhileOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<DepthwiseConv2DOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloWhileOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::Padding padding = tflite::Padding_SAME,
-    int32_t stride_w = 0,
-    int32_t stride_h = 0,
-    int32_t depth_multiplier = 0,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    int32_t dilation_w_factor = 1,
-    int32_t dilation_h_factor = 1) {
-  DepthwiseConv2DOptionsBuilder builder_(_fbb);
-  builder_.add_dilation_h_factor(dilation_h_factor);
-  builder_.add_dilation_w_factor(dilation_w_factor);
-  builder_.add_depth_multiplier(depth_multiplier);
-  builder_.add_stride_h(stride_h);
-  builder_.add_stride_w(stride_w);
-  builder_.add_fused_activation_function(fused_activation_function);
-  builder_.add_padding(padding);
+inline ::flatbuffers::Offset<StablehloWhileOptions> CreateStablehloWhileOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t cond_subgraph_index = 0,
+    int32_t body_subgraph_index = 0) {
+  StablehloWhileOptionsBuilder builder_(_fbb);
+  builder_.add_body_subgraph_index(body_subgraph_index);
+  builder_.add_cond_subgraph_index(cond_subgraph_index);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<StablehloWhileOptions> CreateStablehloWhileOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloWhileOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct ConcatEmbeddingsOptionsT : public flatbuffers::NativeTable {
-  typedef ConcatEmbeddingsOptions TableType;
-  int32_t num_channels = 0;
-  std::vector<int32_t> num_columns_per_channel{};
-  std::vector<int32_t> embedding_dim_per_channel{};
+struct StablehloSortOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloSortOptions TableType;
+  int64_t dimension = 0;
+  bool is_stable = false;
+  int32_t comparator_subgraph_index = 0;
 };
 
-struct ConcatEmbeddingsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ConcatEmbeddingsOptionsT NativeTableType;
-  typedef ConcatEmbeddingsOptionsBuilder Builder;
+struct StablehloSortOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloSortOptionsT NativeTableType;
+  typedef StablehloSortOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_NUM_CHANNELS = 4,
-    VT_NUM_COLUMNS_PER_CHANNEL = 6,
-    VT_EMBEDDING_DIM_PER_CHANNEL = 8
+    VT_DIMENSION = 4,
+    VT_IS_STABLE = 6,
+    VT_COMPARATOR_SUBGRAPH_INDEX = 8
   };
-  int32_t num_channels() const {
-    return GetField<int32_t>(VT_NUM_CHANNELS, 0);
+  int64_t dimension() const {
+    return GetField<int64_t>(VT_DIMENSION, 0);
   }
-  const flatbuffers::Vector<int32_t> *num_columns_per_channel() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_NUM_COLUMNS_PER_CHANNEL);
+  bool is_stable() const {
+    return GetField<uint8_t>(VT_IS_STABLE, 0) != 0;
   }
-  const flatbuffers::Vector<int32_t> *embedding_dim_per_channel() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_EMBEDDING_DIM_PER_CHANNEL);
+  int32_t comparator_subgraph_index() const {
+    return GetField<int32_t>(VT_COMPARATOR_SUBGRAPH_INDEX, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_NUM_CHANNELS, 4) &&
-           VerifyOffset(verifier, VT_NUM_COLUMNS_PER_CHANNEL) &&
-           verifier.VerifyVector(num_columns_per_channel()) &&
-           VerifyOffset(verifier, VT_EMBEDDING_DIM_PER_CHANNEL) &&
-           verifier.VerifyVector(embedding_dim_per_channel()) &&
+           VerifyField<int64_t>(verifier, VT_DIMENSION, 8) &&
+           VerifyField<uint8_t>(verifier, VT_IS_STABLE, 1) &&
+           VerifyField<int32_t>(verifier, VT_COMPARATOR_SUBGRAPH_INDEX, 4) &&
            verifier.EndTable();
   }
-  ConcatEmbeddingsOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ConcatEmbeddingsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ConcatEmbeddingsOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloSortOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloSortOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloSortOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSortOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct ConcatEmbeddingsOptionsBuilder {
-  typedef ConcatEmbeddingsOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_num_channels(int32_t num_channels) {
-    fbb_.AddElement<int32_t>(ConcatEmbeddingsOptions::VT_NUM_CHANNELS, num_channels, 0);
+struct StablehloSortOptionsBuilder {
+  typedef StablehloSortOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_dimension(int64_t dimension) {
+    fbb_.AddElement<int64_t>(StablehloSortOptions::VT_DIMENSION, dimension, 0);
   }
-  void add_num_columns_per_channel(flatbuffers::Offset<flatbuffers::Vector<int32_t>> num_columns_per_channel) {
-    fbb_.AddOffset(ConcatEmbeddingsOptions::VT_NUM_COLUMNS_PER_CHANNEL, num_columns_per_channel);
+  void add_is_stable(bool is_stable) {
+    fbb_.AddElement<uint8_t>(StablehloSortOptions::VT_IS_STABLE, static_cast<uint8_t>(is_stable), 0);
   }
-  void add_embedding_dim_per_channel(flatbuffers::Offset<flatbuffers::Vector<int32_t>> embedding_dim_per_channel) {
-    fbb_.AddOffset(ConcatEmbeddingsOptions::VT_EMBEDDING_DIM_PER_CHANNEL, embedding_dim_per_channel);
+  void add_comparator_subgraph_index(int32_t comparator_subgraph_index) {
+    fbb_.AddElement<int32_t>(StablehloSortOptions::VT_COMPARATOR_SUBGRAPH_INDEX, comparator_subgraph_index, 0);
   }
-  explicit ConcatEmbeddingsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloSortOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<ConcatEmbeddingsOptions> Finish() {
+  ::flatbuffers::Offset<StablehloSortOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ConcatEmbeddingsOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloSortOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t num_channels = 0,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> num_columns_per_channel = 0,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> embedding_dim_per_channel = 0) {
-  ConcatEmbeddingsOptionsBuilder builder_(_fbb);
-  builder_.add_embedding_dim_per_channel(embedding_dim_per_channel);
-  builder_.add_num_columns_per_channel(num_columns_per_channel);
-  builder_.add_num_channels(num_channels);
+inline ::flatbuffers::Offset<StablehloSortOptions> CreateStablehloSortOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int64_t dimension = 0,
+    bool is_stable = false,
+    int32_t comparator_subgraph_index = 0) {
+  StablehloSortOptionsBuilder builder_(_fbb);
+  builder_.add_dimension(dimension);
+  builder_.add_comparator_subgraph_index(comparator_subgraph_index);
+  builder_.add_is_stable(is_stable);
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptionsDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t num_channels = 0,
-    const std::vector<int32_t> *num_columns_per_channel = nullptr,
-    const std::vector<int32_t> *embedding_dim_per_channel = nullptr) {
-  auto num_columns_per_channel__ = num_columns_per_channel ? _fbb.CreateVector<int32_t>(*num_columns_per_channel) : 0;
-  auto embedding_dim_per_channel__ = embedding_dim_per_channel ? _fbb.CreateVector<int32_t>(*embedding_dim_per_channel) : 0;
-  return tflite::CreateConcatEmbeddingsOptions(
-      _fbb,
-      num_channels,
-      num_columns_per_channel__,
-      embedding_dim_per_channel__);
-}
-
-flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<StablehloSortOptions> CreateStablehloSortOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSortOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct LSHProjectionOptionsT : public flatbuffers::NativeTable {
-  typedef LSHProjectionOptions TableType;
-  tflite::LSHProjectionType type = tflite::LSHProjectionType_UNKNOWN;
+struct StablehloConcatenateOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloConcatenateOptions TableType;
+  int64_t dimension = 0;
 };
 
-struct LSHProjectionOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef LSHProjectionOptionsT NativeTableType;
-  typedef LSHProjectionOptionsBuilder Builder;
+struct StablehloConcatenateOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloConcatenateOptionsT NativeTableType;
+  typedef StablehloConcatenateOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_TYPE = 4
+    VT_DIMENSION = 4
   };
-  tflite::LSHProjectionType type() const {
-    return static_cast<tflite::LSHProjectionType>(GetField<int8_t>(VT_TYPE, 0));
+  int64_t dimension() const {
+    return GetField<int64_t>(VT_DIMENSION, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_TYPE, 1) &&
+           VerifyField<int64_t>(verifier, VT_DIMENSION, 8) &&
            verifier.EndTable();
   }
-  LSHProjectionOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(LSHProjectionOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<LSHProjectionOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloConcatenateOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloConcatenateOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloConcatenateOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConcatenateOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct LSHProjectionOptionsBuilder {
-  typedef LSHProjectionOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_type(tflite::LSHProjectionType type) {
-    fbb_.AddElement<int8_t>(LSHProjectionOptions::VT_TYPE, static_cast<int8_t>(type), 0);
+struct StablehloConcatenateOptionsBuilder {
+  typedef StablehloConcatenateOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_dimension(int64_t dimension) {
+    fbb_.AddElement<int64_t>(StablehloConcatenateOptions::VT_DIMENSION, dimension, 0);
   }
-  explicit LSHProjectionOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloConcatenateOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<LSHProjectionOptions> Finish() {
+  ::flatbuffers::Offset<StablehloConcatenateOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<LSHProjectionOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloConcatenateOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<LSHProjectionOptions> CreateLSHProjectionOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::LSHProjectionType type = tflite::LSHProjectionType_UNKNOWN) {
-  LSHProjectionOptionsBuilder builder_(_fbb);
-  builder_.add_type(type);
+inline ::flatbuffers::Offset<StablehloConcatenateOptions> CreateStablehloConcatenateOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int64_t dimension = 0) {
+  StablehloConcatenateOptionsBuilder builder_(_fbb);
+  builder_.add_dimension(dimension);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<LSHProjectionOptions> CreateLSHProjectionOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<StablehloConcatenateOptions> CreateStablehloConcatenateOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConcatenateOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
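Scalar-only tables like the concatenate options need no `*Direct` variant; the plain `Create` helper writes the one field directly. A one-line sketch of a call site (illustration only, not part of the patch):

```c++
// Concatenate along axis 1; the builder-based path via
// StablehloConcatenateOptionsBuilder would produce the same table.
auto opts = tflite::CreateStablehloConcatenateOptions(fbb, /*dimension=*/1);
```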
 
-struct SVDFOptionsT : public flatbuffers::NativeTable {
-  typedef SVDFOptions TableType;
-  int32_t rank = 0;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  bool asymmetric_quantize_inputs = false;
+struct StablehloBroadcastInDimOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloBroadcastInDimOptions TableType;
+  std::vector<int64_t> broadcast_dimensions{};
 };
 
-struct SVDFOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SVDFOptionsT NativeTableType;
-  typedef SVDFOptionsBuilder Builder;
+struct StablehloBroadcastInDimOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloBroadcastInDimOptionsT NativeTableType;
+  typedef StablehloBroadcastInDimOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_RANK = 4,
-    VT_FUSED_ACTIVATION_FUNCTION = 6,
-    VT_ASYMMETRIC_QUANTIZE_INPUTS = 8
+    VT_BROADCAST_DIMENSIONS = 4
   };
-  int32_t rank() const {
-    return GetField<int32_t>(VT_RANK, 0);
-  }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  const ::flatbuffers::Vector<int64_t> *broadcast_dimensions() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_BROADCAST_DIMENSIONS);
   }
-  bool asymmetric_quantize_inputs() const {
-    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_RANK, 4) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
+           VerifyOffset(verifier, VT_BROADCAST_DIMENSIONS) &&
+           verifier.VerifyVector(broadcast_dimensions()) &&
            verifier.EndTable();
   }
-  SVDFOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SVDFOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SVDFOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloBroadcastInDimOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloBroadcastInDimOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloBroadcastInDimOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloBroadcastInDimOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct SVDFOptionsBuilder {
-  typedef SVDFOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_rank(int32_t rank) {
-    fbb_.AddElement<int32_t>(SVDFOptions::VT_RANK, rank, 0);
-  }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(SVDFOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
-  }
-  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
-    fbb_.AddElement<uint8_t>(SVDFOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+struct StablehloBroadcastInDimOptionsBuilder {
+  typedef StablehloBroadcastInDimOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_broadcast_dimensions(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> broadcast_dimensions) {
+    fbb_.AddOffset(StablehloBroadcastInDimOptions::VT_BROADCAST_DIMENSIONS, broadcast_dimensions);
   }
-  explicit SVDFOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloBroadcastInDimOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SVDFOptions> Finish() {
+  ::flatbuffers::Offset<StablehloBroadcastInDimOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SVDFOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloBroadcastInDimOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<SVDFOptions> CreateSVDFOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t rank = 0,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    bool asymmetric_quantize_inputs = false) {
-  SVDFOptionsBuilder builder_(_fbb);
-  builder_.add_rank(rank);
-  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
-  builder_.add_fused_activation_function(fused_activation_function);
+inline ::flatbuffers::Offset<StablehloBroadcastInDimOptions> CreateStablehloBroadcastInDimOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> broadcast_dimensions = 0) {
+  StablehloBroadcastInDimOptionsBuilder builder_(_fbb);
+  builder_.add_broadcast_dimensions(broadcast_dimensions);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<SVDFOptions> CreateSVDFOptions(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+inline ::flatbuffers::Offset<StablehloBroadcastInDimOptions> CreateStablehloBroadcastInDimOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<int64_t> *broadcast_dimensions = nullptr) {
+  auto broadcast_dimensions__ = broadcast_dimensions ? _fbb.CreateVector<int64_t>(*broadcast_dimensions) : 0;
+  return tflite::CreateStablehloBroadcastInDimOptions(
+      _fbb,
+      broadcast_dimensions__);
+}
 
-struct RNNOptionsT : public flatbuffers::NativeTable {
-  typedef RNNOptions TableType;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  bool asymmetric_quantize_inputs = false;
+::flatbuffers::Offset<StablehloBroadcastInDimOptions> CreateStablehloBroadcastInDimOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloBroadcastInDimOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct StablehloCompareOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloCompareOptions TableType;
+  tflite::StablehloComparisonDirection comparison_direction = tflite::StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_EQ;
+  tflite::StablehloComparisonType compare_type = tflite::StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_NOTYPE;
 };
 
-struct RNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef RNNOptionsT NativeTableType;
-  typedef RNNOptionsBuilder Builder;
+struct StablehloCompareOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloCompareOptionsT NativeTableType;
+  typedef StablehloCompareOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_FUSED_ACTIVATION_FUNCTION = 4,
-    VT_ASYMMETRIC_QUANTIZE_INPUTS = 6
+    VT_COMPARISON_DIRECTION = 4,
+    VT_COMPARE_TYPE = 6
   };
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  tflite::StablehloComparisonDirection comparison_direction() const {
+    return static_cast<tflite::StablehloComparisonDirection>(GetField<uint32_t>(VT_COMPARISON_DIRECTION, 0));
   }
-  bool asymmetric_quantize_inputs() const {
-    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
+  tflite::StablehloComparisonType compare_type() const {
+    return static_cast<tflite::StablehloComparisonType>(GetField<uint32_t>(VT_COMPARE_TYPE, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
+           VerifyField<uint32_t>(verifier, VT_COMPARISON_DIRECTION, 4) &&
+           VerifyField<uint32_t>(verifier, VT_COMPARE_TYPE, 4) &&
            verifier.EndTable();
   }
-  RNNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(RNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<RNNOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloCompareOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloCompareOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloCompareOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCompareOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct RNNOptionsBuilder {
-  typedef RNNOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(RNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+struct StablehloCompareOptionsBuilder {
+  typedef StablehloCompareOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_comparison_direction(tflite::StablehloComparisonDirection comparison_direction) {
+    fbb_.AddElement<uint32_t>(StablehloCompareOptions::VT_COMPARISON_DIRECTION, static_cast<uint32_t>(comparison_direction), 0);
   }
-  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
-    fbb_.AddElement<uint8_t>(RNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  void add_compare_type(tflite::StablehloComparisonType compare_type) {
+    fbb_.AddElement<uint32_t>(StablehloCompareOptions::VT_COMPARE_TYPE, static_cast<uint32_t>(compare_type), 0);
   }
-  explicit RNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloCompareOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<RNNOptions> Finish() {
+  ::flatbuffers::Offset<StablehloCompareOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<RNNOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloCompareOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<RNNOptions> CreateRNNOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    bool asymmetric_quantize_inputs = false) {
-  RNNOptionsBuilder builder_(_fbb);
-  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
-  builder_.add_fused_activation_function(fused_activation_function);
+inline ::flatbuffers::Offset<StablehloCompareOptions> CreateStablehloCompareOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::StablehloComparisonDirection comparison_direction = tflite::StablehloComparisonDirection_STABLEHLO_COMPARISON_DIRECTION_EQ,
+    tflite::StablehloComparisonType compare_type = tflite::StablehloComparisonType_STABLEHLO_COMPARISON_TYPE_NOTYPE) {
+  StablehloCompareOptionsBuilder builder_(_fbb);
+  builder_.add_compare_type(compare_type);
+  builder_.add_comparison_direction(comparison_direction);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<RNNOptions> CreateRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<StablehloCompareOptions> CreateStablehloCompareOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCompareOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct SequenceRNNOptionsT : public flatbuffers::NativeTable {
-  typedef SequenceRNNOptions TableType;
-  bool time_major = false;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  bool asymmetric_quantize_inputs = false;
+struct StablehloDynamicSliceOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloDynamicSliceOptions TableType;
+  std::vector<int64_t> slice_sizes{};
 };
 
-struct SequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SequenceRNNOptionsT NativeTableType;
-  typedef SequenceRNNOptionsBuilder Builder;
+struct StablehloDynamicSliceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloDynamicSliceOptionsT NativeTableType;
+  typedef StablehloDynamicSliceOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_TIME_MAJOR = 4,
-    VT_FUSED_ACTIVATION_FUNCTION = 6,
-    VT_ASYMMETRIC_QUANTIZE_INPUTS = 8
+    VT_SLICE_SIZES = 4
   };
-  bool time_major() const {
-    return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0;
-  }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  const ::flatbuffers::Vector<int64_t> *slice_sizes() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_SLICE_SIZES);
   }
-  bool asymmetric_quantize_inputs() const {
-    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<uint8_t>(verifier, VT_TIME_MAJOR, 1) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
+           VerifyOffset(verifier, VT_SLICE_SIZES) &&
+           verifier.VerifyVector(slice_sizes()) &&
            verifier.EndTable();
   }
-  SequenceRNNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SequenceRNNOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloDynamicSliceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloDynamicSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloDynamicSliceOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDynamicSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct SequenceRNNOptionsBuilder {
-  typedef SequenceRNNOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_time_major(bool time_major) {
-    fbb_.AddElement<uint8_t>(SequenceRNNOptions::VT_TIME_MAJOR, static_cast<uint8_t>(time_major), 0);
-  }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(SequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
-  }
-  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
-    fbb_.AddElement<uint8_t>(SequenceRNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+struct StablehloDynamicSliceOptionsBuilder {
+  typedef StablehloDynamicSliceOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_slice_sizes(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> slice_sizes) {
+    fbb_.AddOffset(StablehloDynamicSliceOptions::VT_SLICE_SIZES, slice_sizes);
   }
-  explicit SequenceRNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloDynamicSliceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SequenceRNNOptions> Finish() {
+  ::flatbuffers::Offset<StablehloDynamicSliceOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SequenceRNNOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloDynamicSliceOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    bool time_major = false,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    bool asymmetric_quantize_inputs = false) {
-  SequenceRNNOptionsBuilder builder_(_fbb);
-  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
-  builder_.add_fused_activation_function(fused_activation_function);
-  builder_.add_time_major(time_major);
+inline ::flatbuffers::Offset<StablehloDynamicSliceOptions> CreateStablehloDynamicSliceOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> slice_sizes = 0) {
+  StablehloDynamicSliceOptionsBuilder builder_(_fbb);
+  builder_.add_slice_sizes(slice_sizes);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+inline ::flatbuffers::Offset<StablehloDynamicSliceOptions> CreateStablehloDynamicSliceOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<int64_t> *slice_sizes = nullptr) {
+  auto slice_sizes__ = slice_sizes ? _fbb.CreateVector<int64_t>(*slice_sizes) : 0;
+  return tflite::CreateStablehloDynamicSliceOptions(
+      _fbb,
+      slice_sizes__);
+}
 
-struct BidirectionalSequenceRNNOptionsT : public flatbuffers::NativeTable {
-  typedef BidirectionalSequenceRNNOptions TableType;
-  bool time_major = false;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  bool merge_outputs = false;
-  bool asymmetric_quantize_inputs = false;
+::flatbuffers::Offset<StablehloDynamicSliceOptions> CreateStablehloDynamicSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDynamicSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct StablehloPadOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloPadOptions TableType;
+  std::vector<int64_t> edge_padding_low{};
+  std::vector<int64_t> edge_padding_high{};
+  std::vector<int64_t> interior_padding{};
 };
 
-struct BidirectionalSequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef BidirectionalSequenceRNNOptionsT NativeTableType;
-  typedef BidirectionalSequenceRNNOptionsBuilder Builder;
+struct StablehloPadOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloPadOptionsT NativeTableType;
+  typedef StablehloPadOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_TIME_MAJOR = 4,
-    VT_FUSED_ACTIVATION_FUNCTION = 6,
-    VT_MERGE_OUTPUTS = 8,
-    VT_ASYMMETRIC_QUANTIZE_INPUTS = 10
+    VT_EDGE_PADDING_LOW = 4,
+    VT_EDGE_PADDING_HIGH = 6,
+    VT_INTERIOR_PADDING = 8
   };
-  bool time_major() const {
-    return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0;
-  }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  const ::flatbuffers::Vector<int64_t> *edge_padding_low() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_EDGE_PADDING_LOW);
   }
-  bool merge_outputs() const {
-    return GetField<uint8_t>(VT_MERGE_OUTPUTS, 0) != 0;
+  const ::flatbuffers::Vector<int64_t> *edge_padding_high() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_EDGE_PADDING_HIGH);
   }
-  bool asymmetric_quantize_inputs() const {
-    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
+  const ::flatbuffers::Vector<int64_t> *interior_padding() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_INTERIOR_PADDING);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<uint8_t>(verifier, VT_TIME_MAJOR, 1) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<uint8_t>(verifier, VT_MERGE_OUTPUTS, 1) &&
-           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
+           VerifyOffset(verifier, VT_EDGE_PADDING_LOW) &&
+           verifier.VerifyVector(edge_padding_low()) &&
+           VerifyOffset(verifier, VT_EDGE_PADDING_HIGH) &&
+           verifier.VerifyVector(edge_padding_high()) &&
+           VerifyOffset(verifier, VT_INTERIOR_PADDING) &&
+           verifier.VerifyVector(interior_padding()) &&
            verifier.EndTable();
   }
-  BidirectionalSequenceRNNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<BidirectionalSequenceRNNOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloPadOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloPadOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloPadOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloPadOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct BidirectionalSequenceRNNOptionsBuilder {
-  typedef BidirectionalSequenceRNNOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_time_major(bool time_major) {
-    fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_TIME_MAJOR, static_cast<uint8_t>(time_major), 0);
+struct StablehloPadOptionsBuilder {
+  typedef StablehloPadOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_edge_padding_low(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> edge_padding_low) {
+    fbb_.AddOffset(StablehloPadOptions::VT_EDGE_PADDING_LOW, edge_padding_low);
   }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(BidirectionalSequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  void add_edge_padding_high(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> edge_padding_high) {
+    fbb_.AddOffset(StablehloPadOptions::VT_EDGE_PADDING_HIGH, edge_padding_high);
   }
-  void add_merge_outputs(bool merge_outputs) {
-    fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_MERGE_OUTPUTS, static_cast<uint8_t>(merge_outputs), 0);
+  void add_interior_padding(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> interior_padding) {
+    fbb_.AddOffset(StablehloPadOptions::VT_INTERIOR_PADDING, interior_padding);
   }
-  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
-    fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  explicit StablehloPadOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+      : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
+  }
+  ::flatbuffers::Offset<StablehloPadOptions> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<StablehloPadOptions>(end);
+    return o;
+  }
+};
+
+inline ::flatbuffers::Offset<StablehloPadOptions> CreateStablehloPadOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> edge_padding_low = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> edge_padding_high = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int64_t>> interior_padding = 0) {
+  StablehloPadOptionsBuilder builder_(_fbb);
+  builder_.add_interior_padding(interior_padding);
+  builder_.add_edge_padding_high(edge_padding_high);
+  builder_.add_edge_padding_low(edge_padding_low);
+  return builder_.Finish();
+}
+
+inline ::flatbuffers::Offset<StablehloPadOptions> CreateStablehloPadOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<int64_t> *edge_padding_low = nullptr,
+    const std::vector<int64_t> *edge_padding_high = nullptr,
+    const std::vector<int64_t> *interior_padding = nullptr) {
+  auto edge_padding_low__ = edge_padding_low ? _fbb.CreateVector<int64_t>(*edge_padding_low) : 0;
+  auto edge_padding_high__ = edge_padding_high ? _fbb.CreateVector<int64_t>(*edge_padding_high) : 0;
+  auto interior_padding__ = interior_padding ? _fbb.CreateVector<int64_t>(*interior_padding) : 0;
+
+struct StablehloIotaOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloIotaOptions TableType;
+  int64_t iota_dimension = 0;
+};
+
+struct StablehloIotaOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloIotaOptionsT NativeTableType;
+  typedef StablehloIotaOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_IOTA_DIMENSION = 4
+  };
+  int64_t iota_dimension() const {
+    return GetField<int64_t>(VT_IOTA_DIMENSION, 0);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyField<int64_t>(verifier, VT_IOTA_DIMENSION, 8) &&
+           verifier.EndTable();
+  }
+  StablehloIotaOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloIotaOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloIotaOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloIotaOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct StablehloIotaOptionsBuilder {
+  typedef StablehloIotaOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_iota_dimension(int64_t iota_dimension) {
+    fbb_.AddElement<int64_t>(StablehloIotaOptions::VT_IOTA_DIMENSION, iota_dimension, 0);
   }
-  explicit BidirectionalSequenceRNNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloIotaOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<BidirectionalSequenceRNNOptions> Finish() {
+  ::flatbuffers::Offset<StablehloIotaOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<BidirectionalSequenceRNNOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloIotaOptions>(end);
     return o;
   }
 };

-inline flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalSequenceRNNOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    bool time_major = false,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    bool merge_outputs = false,
-    bool asymmetric_quantize_inputs = false) {
-  BidirectionalSequenceRNNOptionsBuilder builder_(_fbb);
-  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
-  builder_.add_merge_outputs(merge_outputs);
-  builder_.add_fused_activation_function(fused_activation_function);
-  builder_.add_time_major(time_major);
+inline ::flatbuffers::Offset<StablehloIotaOptions> CreateStablehloIotaOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int64_t iota_dimension = 0) {
+  StablehloIotaOptionsBuilder builder_(_fbb);
+  builder_.add_iota_dimension(iota_dimension);
   return builder_.Finish();
 }

-flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<StablehloIotaOptions> CreateStablehloIotaOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloIotaOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);

-struct FullyConnectedOptionsT : public flatbuffers::NativeTable {
-  typedef FullyConnectedOptions TableType;
-  tflite::ActivationFunctionType fused_activation_function =
tflite::ActivationFunctionType_NONE; - tflite::FullyConnectedOptionsWeightsFormat weights_format = tflite::FullyConnectedOptionsWeightsFormat_DEFAULT; - bool keep_num_dims = false; - bool asymmetric_quantize_inputs = false; +struct StablehloCustomCallOptionsT : public ::flatbuffers::NativeTable { + typedef StablehloCustomCallOptions TableType; + std::string call_target_name{}; + bool has_side_effect = false; + std::string backend_config{}; + int32_t api_version = 0; + std::vector called_computations{}; + std::vector custom_attributes{}; }; -struct FullyConnectedOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef FullyConnectedOptionsT NativeTableType; - typedef FullyConnectedOptionsBuilder Builder; +struct StablehloCustomCallOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef StablehloCustomCallOptionsT NativeTableType; + typedef StablehloCustomCallOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_FUSED_ACTIVATION_FUNCTION = 4, - VT_WEIGHTS_FORMAT = 6, - VT_KEEP_NUM_DIMS = 8, - VT_ASYMMETRIC_QUANTIZE_INPUTS = 10 + VT_CALL_TARGET_NAME = 4, + VT_HAS_SIDE_EFFECT = 6, + VT_BACKEND_CONFIG = 8, + VT_API_VERSION = 10, + VT_CALLED_COMPUTATIONS = 12, + VT_CUSTOM_ATTRIBUTES = 14 }; - tflite::ActivationFunctionType fused_activation_function() const { - return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + const ::flatbuffers::String *call_target_name() const { + return GetPointer(VT_CALL_TARGET_NAME); } - tflite::FullyConnectedOptionsWeightsFormat weights_format() const { - return static_cast(GetField(VT_WEIGHTS_FORMAT, 0)); + bool has_side_effect() const { + return GetField(VT_HAS_SIDE_EFFECT, 0) != 0; } - bool keep_num_dims() const { - return GetField(VT_KEEP_NUM_DIMS, 0) != 0; + const ::flatbuffers::String *backend_config() const { + return GetPointer(VT_BACKEND_CONFIG); } - bool asymmetric_quantize_inputs() const { - return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + int32_t api_version() const { + return GetField(VT_API_VERSION, 0); + } + const ::flatbuffers::Vector *called_computations() const { + return GetPointer *>(VT_CALLED_COMPUTATIONS); } - bool Verify(flatbuffers::Verifier &verifier) const { + const ::flatbuffers::Vector *custom_attributes() const { + return GetPointer *>(VT_CUSTOM_ATTRIBUTES); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && - VerifyField(verifier, VT_WEIGHTS_FORMAT, 1) && - VerifyField(verifier, VT_KEEP_NUM_DIMS, 1) && - VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + VerifyOffset(verifier, VT_CALL_TARGET_NAME) && + verifier.VerifyString(call_target_name()) && + VerifyField(verifier, VT_HAS_SIDE_EFFECT, 1) && + VerifyOffset(verifier, VT_BACKEND_CONFIG) && + verifier.VerifyString(backend_config()) && + VerifyField(verifier, VT_API_VERSION, 4) && + VerifyOffset(verifier, VT_CALLED_COMPUTATIONS) && + verifier.VerifyVector(called_computations()) && + VerifyOffset(verifier, VT_CUSTOM_ATTRIBUTES) && + verifier.VerifyVector(custom_attributes()) && verifier.EndTable(); } - FullyConnectedOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(FullyConnectedOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + 
StablehloCustomCallOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(StablehloCustomCallOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCustomCallOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct FullyConnectedOptionsBuilder { - typedef FullyConnectedOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { - fbb_.AddElement(FullyConnectedOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); +struct StablehloCustomCallOptionsBuilder { + typedef StablehloCustomCallOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_call_target_name(::flatbuffers::Offset<::flatbuffers::String> call_target_name) { + fbb_.AddOffset(StablehloCustomCallOptions::VT_CALL_TARGET_NAME, call_target_name); } - void add_weights_format(tflite::FullyConnectedOptionsWeightsFormat weights_format) { - fbb_.AddElement(FullyConnectedOptions::VT_WEIGHTS_FORMAT, static_cast(weights_format), 0); + void add_has_side_effect(bool has_side_effect) { + fbb_.AddElement(StablehloCustomCallOptions::VT_HAS_SIDE_EFFECT, static_cast(has_side_effect), 0); } - void add_keep_num_dims(bool keep_num_dims) { - fbb_.AddElement(FullyConnectedOptions::VT_KEEP_NUM_DIMS, static_cast(keep_num_dims), 0); + void add_backend_config(::flatbuffers::Offset<::flatbuffers::String> backend_config) { + fbb_.AddOffset(StablehloCustomCallOptions::VT_BACKEND_CONFIG, backend_config); } - void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { - fbb_.AddElement(FullyConnectedOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + void add_api_version(int32_t api_version) { + fbb_.AddElement(StablehloCustomCallOptions::VT_API_VERSION, api_version, 0); + } + void add_called_computations(::flatbuffers::Offset<::flatbuffers::Vector> called_computations) { + fbb_.AddOffset(StablehloCustomCallOptions::VT_CALLED_COMPUTATIONS, called_computations); } - explicit FullyConnectedOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + void add_custom_attributes(::flatbuffers::Offset<::flatbuffers::Vector> custom_attributes) { + fbb_.AddOffset(StablehloCustomCallOptions::VT_CUSTOM_ATTRIBUTES, custom_attributes); + } + explicit StablehloCustomCallOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateFullyConnectedOptions( - flatbuffers::FlatBufferBuilder &_fbb, - tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, - tflite::FullyConnectedOptionsWeightsFormat weights_format = tflite::FullyConnectedOptionsWeightsFormat_DEFAULT, - bool keep_num_dims = false, - bool asymmetric_quantize_inputs = false) { - FullyConnectedOptionsBuilder builder_(_fbb); - builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); - builder_.add_keep_num_dims(keep_num_dims); - builder_.add_weights_format(weights_format); - builder_.add_fused_activation_function(fused_activation_function); +inline ::flatbuffers::Offset 
CreateStablehloCustomCallOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> call_target_name = 0, + bool has_side_effect = false, + ::flatbuffers::Offset<::flatbuffers::String> backend_config = 0, + int32_t api_version = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> called_computations = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> custom_attributes = 0) { + StablehloCustomCallOptionsBuilder builder_(_fbb); + builder_.add_custom_attributes(custom_attributes); + builder_.add_called_computations(called_computations); + builder_.add_api_version(api_version); + builder_.add_backend_config(backend_config); + builder_.add_call_target_name(call_target_name); + builder_.add_has_side_effect(has_side_effect); return builder_.Finish(); } -flatbuffers::Offset CreateFullyConnectedOptions(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline ::flatbuffers::Offset CreateStablehloCustomCallOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const char *call_target_name = nullptr, + bool has_side_effect = false, + const char *backend_config = nullptr, + int32_t api_version = 0, + const std::vector *called_computations = nullptr, + const std::vector *custom_attributes = nullptr) { + auto call_target_name__ = call_target_name ? _fbb.CreateString(call_target_name) : 0; + auto backend_config__ = backend_config ? _fbb.CreateString(backend_config) : 0; + auto called_computations__ = called_computations ? _fbb.CreateVector(*called_computations) : 0; + auto custom_attributes__ = custom_attributes ? _fbb.CreateVector(*custom_attributes) : 0; + return tflite::CreateStablehloCustomCallOptions( + _fbb, + call_target_name__, + has_side_effect, + backend_config__, + api_version, + called_computations__, + custom_attributes__); +} -struct SoftmaxOptionsT : public flatbuffers::NativeTable { - typedef SoftmaxOptions TableType; - float beta = 0.0f; +::flatbuffers::Offset CreateStablehloCustomCallOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCustomCallOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct StablehloReduceOptionsT : public ::flatbuffers::NativeTable { + typedef StablehloReduceOptions TableType; + std::vector dimensions{}; + int32_t body_subgraph_index = 0; }; -struct SoftmaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SoftmaxOptionsT NativeTableType; - typedef SoftmaxOptionsBuilder Builder; +struct StablehloReduceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef StablehloReduceOptionsT NativeTableType; + typedef StablehloReduceOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_BETA = 4 + VT_DIMENSIONS = 4, + VT_BODY_SUBGRAPH_INDEX = 6 }; - float beta() const { - return GetField(VT_BETA, 0.0f); + const ::flatbuffers::Vector *dimensions() const { + return GetPointer *>(VT_DIMENSIONS); } - bool Verify(flatbuffers::Verifier &verifier) const { + int32_t body_subgraph_index() const { + return GetField(VT_BODY_SUBGRAPH_INDEX, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_BETA, 4) && + VerifyOffset(verifier, VT_DIMENSIONS) && + verifier.VerifyVector(dimensions()) && + VerifyField(verifier, VT_BODY_SUBGRAPH_INDEX, 4) && verifier.EndTable(); } - SoftmaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void 
UnPackTo(SoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + StablehloReduceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(StablehloReduceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SoftmaxOptionsBuilder { - typedef SoftmaxOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_beta(float beta) { - fbb_.AddElement(SoftmaxOptions::VT_BETA, beta, 0.0f); +struct StablehloReduceOptionsBuilder { + typedef StablehloReduceOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_dimensions(::flatbuffers::Offset<::flatbuffers::Vector> dimensions) { + fbb_.AddOffset(StablehloReduceOptions::VT_DIMENSIONS, dimensions); + } + void add_body_subgraph_index(int32_t body_subgraph_index) { + fbb_.AddElement(StablehloReduceOptions::VT_BODY_SUBGRAPH_INDEX, body_subgraph_index, 0); } - explicit SoftmaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit StablehloReduceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSoftmaxOptions( - flatbuffers::FlatBufferBuilder &_fbb, - float beta = 0.0f) { - SoftmaxOptionsBuilder builder_(_fbb); - builder_.add_beta(beta); +inline ::flatbuffers::Offset CreateStablehloReduceOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector> dimensions = 0, + int32_t body_subgraph_index = 0) { + StablehloReduceOptionsBuilder builder_(_fbb); + builder_.add_body_subgraph_index(body_subgraph_index); + builder_.add_dimensions(dimensions); return builder_.Finish(); } -flatbuffers::Offset CreateSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline ::flatbuffers::Offset CreateStablehloReduceOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *dimensions = nullptr, + int32_t body_subgraph_index = 0) { + auto dimensions__ = dimensions ? 
_fbb.CreateVector<int64_t>(*dimensions) : 0;
+  return tflite::CreateStablehloReduceOptions(
+      _fbb,
+      dimensions__,
+      body_subgraph_index);
+}

-struct ConcatenationOptionsT : public flatbuffers::NativeTable {
-  typedef ConcatenationOptions TableType;
-  int32_t axis = 0;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+::flatbuffers::Offset<StablehloReduceOptions> CreateStablehloReduceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct StablehloSliceOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloSliceOptions TableType;
+  std::vector<int64_t> start_indices{};
+  std::vector<int64_t> limit_indices{};
+  std::vector<int64_t> strides{};
 };

-struct ConcatenationOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ConcatenationOptionsT NativeTableType;
-  typedef ConcatenationOptionsBuilder Builder;
+struct StablehloSliceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloSliceOptionsT NativeTableType;
+  typedef StablehloSliceOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_AXIS = 4,
-    VT_FUSED_ACTIVATION_FUNCTION = 6
+    VT_START_INDICES = 4,
+    VT_LIMIT_INDICES = 6,
+    VT_STRIDES = 8
   };
-  int32_t axis() const {
-    return GetField<int32_t>(VT_AXIS, 0);
+  const ::flatbuffers::Vector<int64_t> *start_indices() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_START_INDICES);
   }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  const ::flatbuffers::Vector<int64_t> *limit_indices() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_LIMIT_INDICES);
+  }
+  const ::flatbuffers::Vector<int64_t> *strides() const {
+    return GetPointer<const ::flatbuffers::Vector<int64_t> *>(VT_STRIDES);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_AXIS, 4) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyOffset(verifier, VT_START_INDICES) &&
+           verifier.VerifyVector(start_indices()) &&
+           VerifyOffset(verifier, VT_LIMIT_INDICES) &&
+           verifier.VerifyVector(limit_indices()) &&
+           VerifyOffset(verifier, VT_STRIDES) &&
+           verifier.VerifyVector(strides()) &&
            verifier.EndTable();
   }
-  ConcatenationOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ConcatenationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ConcatenationOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloSliceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloSliceOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };

-struct ConcatenationOptionsBuilder {
-  typedef ConcatenationOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_axis(int32_t axis) {
-    fbb_.AddElement<int32_t>(ConcatenationOptions::VT_AXIS, axis, 0);
+struct StablehloSliceOptionsBuilder {
+  typedef StablehloSliceOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_start_indices(::flatbuffers::Offset<::flatbuffers::Vector<int64_t>>
start_indices) { + fbb_.AddOffset(StablehloSliceOptions::VT_START_INDICES, start_indices); } - void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { - fbb_.AddElement(ConcatenationOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + void add_limit_indices(::flatbuffers::Offset<::flatbuffers::Vector> limit_indices) { + fbb_.AddOffset(StablehloSliceOptions::VT_LIMIT_INDICES, limit_indices); } - explicit ConcatenationOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + void add_strides(::flatbuffers::Offset<::flatbuffers::Vector> strides) { + fbb_.AddOffset(StablehloSliceOptions::VT_STRIDES, strides); + } + explicit StablehloSliceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateConcatenationOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t axis = 0, - tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { - ConcatenationOptionsBuilder builder_(_fbb); - builder_.add_axis(axis); - builder_.add_fused_activation_function(fused_activation_function); +inline ::flatbuffers::Offset CreateStablehloSliceOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector> start_indices = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> limit_indices = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> strides = 0) { + StablehloSliceOptionsBuilder builder_(_fbb); + builder_.add_strides(strides); + builder_.add_limit_indices(limit_indices); + builder_.add_start_indices(start_indices); return builder_.Finish(); } -flatbuffers::Offset CreateConcatenationOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); - -struct AddOptionsT : public flatbuffers::NativeTable { - typedef AddOptions TableType; - tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; - bool pot_scale_int16 = true; -}; - -struct AddOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef AddOptionsT NativeTableType; - typedef AddOptionsBuilder Builder; +inline ::flatbuffers::Offset CreateStablehloSliceOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *start_indices = nullptr, + const std::vector *limit_indices = nullptr, + const std::vector *strides = nullptr) { + auto start_indices__ = start_indices ? _fbb.CreateVector(*start_indices) : 0; + auto limit_indices__ = limit_indices ? _fbb.CreateVector(*limit_indices) : 0; + auto strides__ = strides ? 
_fbb.CreateVector(*strides) : 0; + return tflite::CreateStablehloSliceOptions( + _fbb, + start_indices__, + limit_indices__, + strides__); +} + +::flatbuffers::Offset CreateStablehloSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct StablehloConvolutionOptionsT : public ::flatbuffers::NativeTable { + typedef StablehloConvolutionOptions TableType; + std::vector window_strides{}; + std::vector padding{}; + std::vector lhs_dilation{}; + std::vector rhs_dilation{}; + std::vector window_reversal{}; + int64_t input_batch_dimension = 0; + int64_t input_feature_dimension = 0; + std::vector input_spatial_dimensions{}; + int64_t kernel_input_feature_dimension = 0; + int64_t kernel_output_feature_dimension = 0; + std::vector kernel_spatial_dimensions{}; + int64_t output_batch_dimension = 0; + int64_t output_feature_dimension = 0; + std::vector output_spatial_dimensions{}; + int64_t feature_group_count = 0; + int64_t batch_group_count = 0; + std::vector precision_config{}; +}; + +struct StablehloConvolutionOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef StablehloConvolutionOptionsT NativeTableType; + typedef StablehloConvolutionOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_FUSED_ACTIVATION_FUNCTION = 4, - VT_POT_SCALE_INT16 = 6 + VT_WINDOW_STRIDES = 4, + VT_PADDING = 6, + VT_LHS_DILATION = 8, + VT_RHS_DILATION = 10, + VT_WINDOW_REVERSAL = 12, + VT_INPUT_BATCH_DIMENSION = 14, + VT_INPUT_FEATURE_DIMENSION = 16, + VT_INPUT_SPATIAL_DIMENSIONS = 18, + VT_KERNEL_INPUT_FEATURE_DIMENSION = 20, + VT_KERNEL_OUTPUT_FEATURE_DIMENSION = 22, + VT_KERNEL_SPATIAL_DIMENSIONS = 24, + VT_OUTPUT_BATCH_DIMENSION = 26, + VT_OUTPUT_FEATURE_DIMENSION = 28, + VT_OUTPUT_SPATIAL_DIMENSIONS = 30, + VT_FEATURE_GROUP_COUNT = 32, + VT_BATCH_GROUP_COUNT = 34, + VT_PRECISION_CONFIG = 36 }; - tflite::ActivationFunctionType fused_activation_function() const { - return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + const ::flatbuffers::Vector *window_strides() const { + return GetPointer *>(VT_WINDOW_STRIDES); } - bool pot_scale_int16() const { - return GetField(VT_POT_SCALE_INT16, 1) != 0; + const ::flatbuffers::Vector *padding() const { + return GetPointer *>(VT_PADDING); + } + const ::flatbuffers::Vector *lhs_dilation() const { + return GetPointer *>(VT_LHS_DILATION); + } + const ::flatbuffers::Vector *rhs_dilation() const { + return GetPointer *>(VT_RHS_DILATION); + } + const ::flatbuffers::Vector *window_reversal() const { + return GetPointer *>(VT_WINDOW_REVERSAL); + } + int64_t input_batch_dimension() const { + return GetField(VT_INPUT_BATCH_DIMENSION, 0); + } + int64_t input_feature_dimension() const { + return GetField(VT_INPUT_FEATURE_DIMENSION, 0); + } + const ::flatbuffers::Vector *input_spatial_dimensions() const { + return GetPointer *>(VT_INPUT_SPATIAL_DIMENSIONS); + } + int64_t kernel_input_feature_dimension() const { + return GetField(VT_KERNEL_INPUT_FEATURE_DIMENSION, 0); + } + int64_t kernel_output_feature_dimension() const { + return GetField(VT_KERNEL_OUTPUT_FEATURE_DIMENSION, 0); + } + const ::flatbuffers::Vector *kernel_spatial_dimensions() const { + return GetPointer *>(VT_KERNEL_SPATIAL_DIMENSIONS); + } + int64_t output_batch_dimension() const { + return GetField(VT_OUTPUT_BATCH_DIMENSION, 0); + } + int64_t output_feature_dimension() const { + return GetField(VT_OUTPUT_FEATURE_DIMENSION, 0); + } + const 
::flatbuffers::Vector *output_spatial_dimensions() const { + return GetPointer *>(VT_OUTPUT_SPATIAL_DIMENSIONS); + } + int64_t feature_group_count() const { + return GetField(VT_FEATURE_GROUP_COUNT, 0); + } + int64_t batch_group_count() const { + return GetField(VT_BATCH_GROUP_COUNT, 0); + } + const ::flatbuffers::Vector *precision_config() const { + return GetPointer *>(VT_PRECISION_CONFIG); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && - VerifyField(verifier, VT_POT_SCALE_INT16, 1) && + VerifyOffset(verifier, VT_WINDOW_STRIDES) && + verifier.VerifyVector(window_strides()) && + VerifyOffset(verifier, VT_PADDING) && + verifier.VerifyVector(padding()) && + VerifyOffset(verifier, VT_LHS_DILATION) && + verifier.VerifyVector(lhs_dilation()) && + VerifyOffset(verifier, VT_RHS_DILATION) && + verifier.VerifyVector(rhs_dilation()) && + VerifyOffset(verifier, VT_WINDOW_REVERSAL) && + verifier.VerifyVector(window_reversal()) && + VerifyField(verifier, VT_INPUT_BATCH_DIMENSION, 8) && + VerifyField(verifier, VT_INPUT_FEATURE_DIMENSION, 8) && + VerifyOffset(verifier, VT_INPUT_SPATIAL_DIMENSIONS) && + verifier.VerifyVector(input_spatial_dimensions()) && + VerifyField(verifier, VT_KERNEL_INPUT_FEATURE_DIMENSION, 8) && + VerifyField(verifier, VT_KERNEL_OUTPUT_FEATURE_DIMENSION, 8) && + VerifyOffset(verifier, VT_KERNEL_SPATIAL_DIMENSIONS) && + verifier.VerifyVector(kernel_spatial_dimensions()) && + VerifyField(verifier, VT_OUTPUT_BATCH_DIMENSION, 8) && + VerifyField(verifier, VT_OUTPUT_FEATURE_DIMENSION, 8) && + VerifyOffset(verifier, VT_OUTPUT_SPATIAL_DIMENSIONS) && + verifier.VerifyVector(output_spatial_dimensions()) && + VerifyField(verifier, VT_FEATURE_GROUP_COUNT, 8) && + VerifyField(verifier, VT_BATCH_GROUP_COUNT, 8) && + VerifyOffset(verifier, VT_PRECISION_CONFIG) && + verifier.VerifyVector(precision_config()) && verifier.EndTable(); } - AddOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(AddOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + StablehloConvolutionOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(StablehloConvolutionOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConvolutionOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct AddOptionsBuilder { - typedef AddOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { - fbb_.AddElement(AddOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); +struct StablehloConvolutionOptionsBuilder { + typedef StablehloConvolutionOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_window_strides(::flatbuffers::Offset<::flatbuffers::Vector> window_strides) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_WINDOW_STRIDES, window_strides); } - void add_pot_scale_int16(bool pot_scale_int16) { - fbb_.AddElement(AddOptions::VT_POT_SCALE_INT16, 
static_cast(pot_scale_int16), 1); + void add_padding(::flatbuffers::Offset<::flatbuffers::Vector> padding) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_PADDING, padding); + } + void add_lhs_dilation(::flatbuffers::Offset<::flatbuffers::Vector> lhs_dilation) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_LHS_DILATION, lhs_dilation); + } + void add_rhs_dilation(::flatbuffers::Offset<::flatbuffers::Vector> rhs_dilation) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_RHS_DILATION, rhs_dilation); + } + void add_window_reversal(::flatbuffers::Offset<::flatbuffers::Vector> window_reversal) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_WINDOW_REVERSAL, window_reversal); + } + void add_input_batch_dimension(int64_t input_batch_dimension) { + fbb_.AddElement(StablehloConvolutionOptions::VT_INPUT_BATCH_DIMENSION, input_batch_dimension, 0); + } + void add_input_feature_dimension(int64_t input_feature_dimension) { + fbb_.AddElement(StablehloConvolutionOptions::VT_INPUT_FEATURE_DIMENSION, input_feature_dimension, 0); + } + void add_input_spatial_dimensions(::flatbuffers::Offset<::flatbuffers::Vector> input_spatial_dimensions) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_INPUT_SPATIAL_DIMENSIONS, input_spatial_dimensions); + } + void add_kernel_input_feature_dimension(int64_t kernel_input_feature_dimension) { + fbb_.AddElement(StablehloConvolutionOptions::VT_KERNEL_INPUT_FEATURE_DIMENSION, kernel_input_feature_dimension, 0); + } + void add_kernel_output_feature_dimension(int64_t kernel_output_feature_dimension) { + fbb_.AddElement(StablehloConvolutionOptions::VT_KERNEL_OUTPUT_FEATURE_DIMENSION, kernel_output_feature_dimension, 0); } - explicit AddOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + void add_kernel_spatial_dimensions(::flatbuffers::Offset<::flatbuffers::Vector> kernel_spatial_dimensions) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_KERNEL_SPATIAL_DIMENSIONS, kernel_spatial_dimensions); + } + void add_output_batch_dimension(int64_t output_batch_dimension) { + fbb_.AddElement(StablehloConvolutionOptions::VT_OUTPUT_BATCH_DIMENSION, output_batch_dimension, 0); + } + void add_output_feature_dimension(int64_t output_feature_dimension) { + fbb_.AddElement(StablehloConvolutionOptions::VT_OUTPUT_FEATURE_DIMENSION, output_feature_dimension, 0); + } + void add_output_spatial_dimensions(::flatbuffers::Offset<::flatbuffers::Vector> output_spatial_dimensions) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_OUTPUT_SPATIAL_DIMENSIONS, output_spatial_dimensions); + } + void add_feature_group_count(int64_t feature_group_count) { + fbb_.AddElement(StablehloConvolutionOptions::VT_FEATURE_GROUP_COUNT, feature_group_count, 0); + } + void add_batch_group_count(int64_t batch_group_count) { + fbb_.AddElement(StablehloConvolutionOptions::VT_BATCH_GROUP_COUNT, batch_group_count, 0); + } + void add_precision_config(::flatbuffers::Offset<::flatbuffers::Vector> precision_config) { + fbb_.AddOffset(StablehloConvolutionOptions::VT_PRECISION_CONFIG, precision_config); + } + explicit StablehloConvolutionOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateAddOptions( - flatbuffers::FlatBufferBuilder &_fbb, - tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, - bool 
pot_scale_int16 = true) { - AddOptionsBuilder builder_(_fbb); - builder_.add_pot_scale_int16(pot_scale_int16); - builder_.add_fused_activation_function(fused_activation_function); +inline ::flatbuffers::Offset CreateStablehloConvolutionOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector> window_strides = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> padding = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> lhs_dilation = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> rhs_dilation = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> window_reversal = 0, + int64_t input_batch_dimension = 0, + int64_t input_feature_dimension = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> input_spatial_dimensions = 0, + int64_t kernel_input_feature_dimension = 0, + int64_t kernel_output_feature_dimension = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> kernel_spatial_dimensions = 0, + int64_t output_batch_dimension = 0, + int64_t output_feature_dimension = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> output_spatial_dimensions = 0, + int64_t feature_group_count = 0, + int64_t batch_group_count = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> precision_config = 0) { + StablehloConvolutionOptionsBuilder builder_(_fbb); + builder_.add_batch_group_count(batch_group_count); + builder_.add_feature_group_count(feature_group_count); + builder_.add_output_feature_dimension(output_feature_dimension); + builder_.add_output_batch_dimension(output_batch_dimension); + builder_.add_kernel_output_feature_dimension(kernel_output_feature_dimension); + builder_.add_kernel_input_feature_dimension(kernel_input_feature_dimension); + builder_.add_input_feature_dimension(input_feature_dimension); + builder_.add_input_batch_dimension(input_batch_dimension); + builder_.add_precision_config(precision_config); + builder_.add_output_spatial_dimensions(output_spatial_dimensions); + builder_.add_kernel_spatial_dimensions(kernel_spatial_dimensions); + builder_.add_input_spatial_dimensions(input_spatial_dimensions); + builder_.add_window_reversal(window_reversal); + builder_.add_rhs_dilation(rhs_dilation); + builder_.add_lhs_dilation(lhs_dilation); + builder_.add_padding(padding); + builder_.add_window_strides(window_strides); return builder_.Finish(); } -flatbuffers::Offset CreateAddOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); - -struct MulOptionsT : public flatbuffers::NativeTable { - typedef MulOptions TableType; - tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; -}; - -struct MulOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef MulOptionsT NativeTableType; - typedef MulOptionsBuilder Builder; +inline ::flatbuffers::Offset CreateStablehloConvolutionOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *window_strides = nullptr, + const std::vector *padding = nullptr, + const std::vector *lhs_dilation = nullptr, + const std::vector *rhs_dilation = nullptr, + const std::vector *window_reversal = nullptr, + int64_t input_batch_dimension = 0, + int64_t input_feature_dimension = 0, + const std::vector *input_spatial_dimensions = nullptr, + int64_t kernel_input_feature_dimension = 0, + int64_t kernel_output_feature_dimension = 0, + const std::vector *kernel_spatial_dimensions = nullptr, + int64_t output_batch_dimension = 0, + int64_t output_feature_dimension = 0, + const std::vector 
*output_spatial_dimensions = nullptr, + int64_t feature_group_count = 0, + int64_t batch_group_count = 0, + const std::vector *precision_config = nullptr) { + auto window_strides__ = window_strides ? _fbb.CreateVector(*window_strides) : 0; + auto padding__ = padding ? _fbb.CreateVector(*padding) : 0; + auto lhs_dilation__ = lhs_dilation ? _fbb.CreateVector(*lhs_dilation) : 0; + auto rhs_dilation__ = rhs_dilation ? _fbb.CreateVector(*rhs_dilation) : 0; + auto window_reversal__ = window_reversal ? _fbb.CreateVector(*window_reversal) : 0; + auto input_spatial_dimensions__ = input_spatial_dimensions ? _fbb.CreateVector(*input_spatial_dimensions) : 0; + auto kernel_spatial_dimensions__ = kernel_spatial_dimensions ? _fbb.CreateVector(*kernel_spatial_dimensions) : 0; + auto output_spatial_dimensions__ = output_spatial_dimensions ? _fbb.CreateVector(*output_spatial_dimensions) : 0; + auto precision_config__ = precision_config ? _fbb.CreateVector(*precision_config) : 0; + return tflite::CreateStablehloConvolutionOptions( + _fbb, + window_strides__, + padding__, + lhs_dilation__, + rhs_dilation__, + window_reversal__, + input_batch_dimension, + input_feature_dimension, + input_spatial_dimensions__, + kernel_input_feature_dimension, + kernel_output_feature_dimension, + kernel_spatial_dimensions__, + output_batch_dimension, + output_feature_dimension, + output_spatial_dimensions__, + feature_group_count, + batch_group_count, + precision_config__); +} + +::flatbuffers::Offset CreateStablehloConvolutionOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConvolutionOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct StablehloScatterOptionsT : public ::flatbuffers::NativeTable { + typedef StablehloScatterOptions TableType; + bool indices_are_sorted = false; + std::vector update_window_dims{}; + std::vector inserted_window_dims{}; + std::vector scatter_dims_to_operand_dims{}; + int64_t index_vector_dim = 0; + bool unique_indices = false; + int32_t update_computation_subgraph_index = 0; +}; + +struct StablehloScatterOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef StablehloScatterOptionsT NativeTableType; + typedef StablehloScatterOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_FUSED_ACTIVATION_FUNCTION = 4 + VT_INDICES_ARE_SORTED = 4, + VT_UPDATE_WINDOW_DIMS = 6, + VT_INSERTED_WINDOW_DIMS = 8, + VT_SCATTER_DIMS_TO_OPERAND_DIMS = 10, + VT_INDEX_VECTOR_DIM = 12, + VT_UNIQUE_INDICES = 14, + VT_UPDATE_COMPUTATION_SUBGRAPH_INDEX = 16 }; - tflite::ActivationFunctionType fused_activation_function() const { - return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + bool indices_are_sorted() const { + return GetField(VT_INDICES_ARE_SORTED, 0) != 0; } - bool Verify(flatbuffers::Verifier &verifier) const { + const ::flatbuffers::Vector *update_window_dims() const { + return GetPointer *>(VT_UPDATE_WINDOW_DIMS); + } + const ::flatbuffers::Vector *inserted_window_dims() const { + return GetPointer *>(VT_INSERTED_WINDOW_DIMS); + } + const ::flatbuffers::Vector *scatter_dims_to_operand_dims() const { + return GetPointer *>(VT_SCATTER_DIMS_TO_OPERAND_DIMS); + } + int64_t index_vector_dim() const { + return GetField(VT_INDEX_VECTOR_DIM, 0); + } + bool unique_indices() const { + return GetField(VT_UNIQUE_INDICES, 0) != 0; + } + int32_t update_computation_subgraph_index() const { + return GetField(VT_UPDATE_COMPUTATION_SUBGRAPH_INDEX, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { 
return VerifyTableStart(verifier) && - VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_INDICES_ARE_SORTED, 1) && + VerifyOffset(verifier, VT_UPDATE_WINDOW_DIMS) && + verifier.VerifyVector(update_window_dims()) && + VerifyOffset(verifier, VT_INSERTED_WINDOW_DIMS) && + verifier.VerifyVector(inserted_window_dims()) && + VerifyOffset(verifier, VT_SCATTER_DIMS_TO_OPERAND_DIMS) && + verifier.VerifyVector(scatter_dims_to_operand_dims()) && + VerifyField(verifier, VT_INDEX_VECTOR_DIM, 8) && + VerifyField(verifier, VT_UNIQUE_INDICES, 1) && + VerifyField(verifier, VT_UPDATE_COMPUTATION_SUBGRAPH_INDEX, 4) && verifier.EndTable(); } - MulOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(MulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + StablehloScatterOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(StablehloScatterOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloScatterOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct MulOptionsBuilder { - typedef MulOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { - fbb_.AddElement(MulOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); +struct StablehloScatterOptionsBuilder { + typedef StablehloScatterOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_indices_are_sorted(bool indices_are_sorted) { + fbb_.AddElement(StablehloScatterOptions::VT_INDICES_ARE_SORTED, static_cast(indices_are_sorted), 0); + } + void add_update_window_dims(::flatbuffers::Offset<::flatbuffers::Vector> update_window_dims) { + fbb_.AddOffset(StablehloScatterOptions::VT_UPDATE_WINDOW_DIMS, update_window_dims); + } + void add_inserted_window_dims(::flatbuffers::Offset<::flatbuffers::Vector> inserted_window_dims) { + fbb_.AddOffset(StablehloScatterOptions::VT_INSERTED_WINDOW_DIMS, inserted_window_dims); + } + void add_scatter_dims_to_operand_dims(::flatbuffers::Offset<::flatbuffers::Vector> scatter_dims_to_operand_dims) { + fbb_.AddOffset(StablehloScatterOptions::VT_SCATTER_DIMS_TO_OPERAND_DIMS, scatter_dims_to_operand_dims); + } + void add_index_vector_dim(int64_t index_vector_dim) { + fbb_.AddElement(StablehloScatterOptions::VT_INDEX_VECTOR_DIM, index_vector_dim, 0); + } + void add_unique_indices(bool unique_indices) { + fbb_.AddElement(StablehloScatterOptions::VT_UNIQUE_INDICES, static_cast(unique_indices), 0); } - explicit MulOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + void add_update_computation_subgraph_index(int32_t update_computation_subgraph_index) { + fbb_.AddElement(StablehloScatterOptions::VT_UPDATE_COMPUTATION_SUBGRAPH_INDEX, update_computation_subgraph_index, 0); + } + explicit StablehloScatterOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = 
::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateMulOptions( - flatbuffers::FlatBufferBuilder &_fbb, - tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { - MulOptionsBuilder builder_(_fbb); - builder_.add_fused_activation_function(fused_activation_function); +inline ::flatbuffers::Offset CreateStablehloScatterOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + bool indices_are_sorted = false, + ::flatbuffers::Offset<::flatbuffers::Vector> update_window_dims = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> inserted_window_dims = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> scatter_dims_to_operand_dims = 0, + int64_t index_vector_dim = 0, + bool unique_indices = false, + int32_t update_computation_subgraph_index = 0) { + StablehloScatterOptionsBuilder builder_(_fbb); + builder_.add_index_vector_dim(index_vector_dim); + builder_.add_update_computation_subgraph_index(update_computation_subgraph_index); + builder_.add_scatter_dims_to_operand_dims(scatter_dims_to_operand_dims); + builder_.add_inserted_window_dims(inserted_window_dims); + builder_.add_update_window_dims(update_window_dims); + builder_.add_unique_indices(unique_indices); + builder_.add_indices_are_sorted(indices_are_sorted); return builder_.Finish(); } -flatbuffers::Offset CreateMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline ::flatbuffers::Offset CreateStablehloScatterOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + bool indices_are_sorted = false, + const std::vector *update_window_dims = nullptr, + const std::vector *inserted_window_dims = nullptr, + const std::vector *scatter_dims_to_operand_dims = nullptr, + int64_t index_vector_dim = 0, + bool unique_indices = false, + int32_t update_computation_subgraph_index = 0) { + auto update_window_dims__ = update_window_dims ? _fbb.CreateVector(*update_window_dims) : 0; + auto inserted_window_dims__ = inserted_window_dims ? _fbb.CreateVector(*inserted_window_dims) : 0; + auto scatter_dims_to_operand_dims__ = scatter_dims_to_operand_dims ? 
_fbb.CreateVector<int64_t>(*scatter_dims_to_operand_dims) : 0;
+  return tflite::CreateStablehloScatterOptions(
+      _fbb,
+      indices_are_sorted,
+      update_window_dims__,
+      inserted_window_dims__,
+      scatter_dims_to_operand_dims__,
+      index_vector_dim,
+      unique_indices,
+      update_computation_subgraph_index);
+}

-struct L2NormOptionsT : public flatbuffers::NativeTable {
-  typedef L2NormOptions TableType;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+::flatbuffers::Offset<StablehloScatterOptions> CreateStablehloScatterOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloScatterOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+
+struct StablehloRngBitGeneratorOptionsT : public ::flatbuffers::NativeTable {
+  typedef StablehloRngBitGeneratorOptions TableType;
+  tflite::RngAlgorithm algorithm = tflite::RngAlgorithm_DEFAULT;
 };
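// ---------------------------------------------------------------------------
// [Editor's note -- not part of the diff] Each table gains two APIs: the
// builder API, which writes straight into a FlatBufferBuilder, and the
// object ("T") API, whose Pack()/UnPack() convert between the buffer and a
// plain C++ struct. A short sketch for StablehloRngBitGeneratorOptions
// (RngAlgorithm_PHILOX and RngAlgorithm_THREEFRY are assumed from the
// schema's RngAlgorithm enum; only RngAlgorithm_DEFAULT appears above):
//
//   ::flatbuffers::FlatBufferBuilder fbb;
//   // Builder API:
//   auto rng = tflite::CreateStablehloRngBitGeneratorOptions(
//       fbb, tflite::RngAlgorithm_PHILOX);
//   // Object API:
//   tflite::StablehloRngBitGeneratorOptionsT rng_t;
//   rng_t.algorithm = tflite::RngAlgorithm_THREEFRY;
//   auto rng2 = tflite::StablehloRngBitGeneratorOptions::Pack(fbb, &rng_t);
// ---------------------------------------------------------------------------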
-struct L2NormOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef L2NormOptionsT NativeTableType;
-  typedef L2NormOptionsBuilder Builder;
+struct StablehloRngBitGeneratorOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StablehloRngBitGeneratorOptionsT NativeTableType;
+  typedef StablehloRngBitGeneratorOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_FUSED_ACTIVATION_FUNCTION = 4
+    VT_ALGORITHM = 4
   };
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  tflite::RngAlgorithm algorithm() const {
+    return static_cast<tflite::RngAlgorithm>(GetField<int8_t>(VT_ALGORITHM, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<int8_t>(verifier, VT_ALGORITHM, 1) &&
            verifier.EndTable();
   }
-  L2NormOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(L2NormOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<L2NormOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StablehloRngBitGeneratorOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StablehloRngBitGeneratorOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StablehloRngBitGeneratorOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloRngBitGeneratorOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };

-struct L2NormOptionsBuilder {
-  typedef L2NormOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(L2NormOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+struct StablehloRngBitGeneratorOptionsBuilder {
+  typedef StablehloRngBitGeneratorOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_algorithm(tflite::RngAlgorithm algorithm) {
+    fbb_.AddElement<int8_t>(StablehloRngBitGeneratorOptions::VT_ALGORITHM, static_cast<int8_t>(algorithm), 0);
   }
-  explicit L2NormOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit StablehloRngBitGeneratorOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<L2NormOptions> Finish() {
+  ::flatbuffers::Offset<StablehloRngBitGeneratorOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<L2NormOptions>(end);
+    auto o = ::flatbuffers::Offset<StablehloRngBitGeneratorOptions>(end);
     return o;
   }
 };

-inline flatbuffers::Offset<L2NormOptions> CreateL2NormOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) {
-  L2NormOptionsBuilder builder_(_fbb);
-  builder_.add_fused_activation_function(fused_activation_function);
+inline ::flatbuffers::Offset<StablehloRngBitGeneratorOptions> CreateStablehloRngBitGeneratorOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::RngAlgorithm algorithm = tflite::RngAlgorithm_DEFAULT) {
+  StablehloRngBitGeneratorOptionsBuilder builder_(_fbb);
+  builder_.add_algorithm(algorithm);
   return builder_.Finish();
 }

-flatbuffers::Offset<L2NormOptions> CreateL2NormOptions(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<StablehloRngBitGeneratorOptions> CreateStablehloRngBitGeneratorOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloRngBitGeneratorOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);

-struct LocalResponseNormalizationOptionsT : public flatbuffers::NativeTable {
-  typedef LocalResponseNormalizationOptions TableType;
-  int32_t radius = 0;
-  float bias = 0.0f;
-  float alpha = 0.0f;
-  float beta = 0.0f;
+struct Conv2DOptionsT : public ::flatbuffers::NativeTable {
+  typedef Conv2DOptions TableType;
+  tflite::Padding padding = tflite::Padding_SAME;
+  int32_t stride_w = 0;
+  int32_t stride_h = 0;
+  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+  int32_t dilation_w_factor = 1;
+  int32_t dilation_h_factor = 1;
+  tflite::TensorType quantized_bias_type = tflite::TensorType_FLOAT32;
 };

-struct LocalResponseNormalizationOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef LocalResponseNormalizationOptionsT NativeTableType;
-  typedef LocalResponseNormalizationOptionsBuilder Builder;
+struct Conv2DOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef Conv2DOptionsT NativeTableType;
+  typedef Conv2DOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_RADIUS = 4,
-    VT_BIAS = 6,
-    VT_ALPHA = 8,
-    VT_BETA = 10
+    VT_PADDING = 4,
+    VT_STRIDE_W = 6,
+    VT_STRIDE_H = 8,
+    VT_FUSED_ACTIVATION_FUNCTION = 10,
+    VT_DILATION_W_FACTOR = 12,
+    VT_DILATION_H_FACTOR = 14,
+    VT_QUANTIZED_BIAS_TYPE = 16
   };
-  int32_t radius() const {
-    return GetField<int32_t>(VT_RADIUS, 0);
+  tflite::Padding padding() const {
+    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
   }
-  float bias() const {
-    return GetField<float>(VT_BIAS, 0.0f);
+  int32_t stride_w() const {
+    return GetField<int32_t>(VT_STRIDE_W, 0);
   }
-  float alpha() const {
-    return GetField<float>(VT_ALPHA, 0.0f);
+  int32_t stride_h() const {
+    return GetField<int32_t>(VT_STRIDE_H, 0);
   }
-  float beta() const {
-    return GetField<float>(VT_BETA, 0.0f);
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  }
+  int32_t dilation_w_factor() const {
+    return GetField<int32_t>(VT_DILATION_W_FACTOR, 1);
+  }
+  int32_t dilation_h_factor() const {
+    return GetField<int32_t>(VT_DILATION_H_FACTOR, 1);
+  }
+  tflite::TensorType quantized_bias_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_QUANTIZED_BIAS_TYPE, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_RADIUS, 4)
&&
-           VerifyField<float>(verifier, VT_BIAS, 4) &&
-           VerifyField<float>(verifier, VT_ALPHA, 4) &&
-           VerifyField<float>(verifier, VT_BETA, 4) &&
+           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
+           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR, 4) &&
+           VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR, 4) &&
+           VerifyField<int8_t>(verifier, VT_QUANTIZED_BIAS_TYPE, 1) &&
            verifier.EndTable();
   }
-  LocalResponseNormalizationOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(LocalResponseNormalizationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<LocalResponseNormalizationOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  Conv2DOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(Conv2DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Conv2DOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct LocalResponseNormalizationOptionsBuilder {
-  typedef LocalResponseNormalizationOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_radius(int32_t radius) {
-    fbb_.AddElement<int32_t>(LocalResponseNormalizationOptions::VT_RADIUS, radius, 0);
+struct Conv2DOptionsBuilder {
+  typedef Conv2DOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_padding(tflite::Padding padding) {
+    fbb_.AddElement<int8_t>(Conv2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
   }
-  void add_bias(float bias) {
-    fbb_.AddElement<float>(LocalResponseNormalizationOptions::VT_BIAS, bias, 0.0f);
+  void add_stride_w(int32_t stride_w) {
+    fbb_.AddElement<int32_t>(Conv2DOptions::VT_STRIDE_W, stride_w, 0);
   }
-  void add_alpha(float alpha) {
-    fbb_.AddElement<float>(LocalResponseNormalizationOptions::VT_ALPHA, alpha, 0.0f);
+  void add_stride_h(int32_t stride_h) {
+    fbb_.AddElement<int32_t>(Conv2DOptions::VT_STRIDE_H, stride_h, 0);
   }
-  void add_beta(float beta) {
-    fbb_.AddElement<float>(LocalResponseNormalizationOptions::VT_BETA, beta, 0.0f);
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(Conv2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  }
+  void add_dilation_w_factor(int32_t dilation_w_factor) {
+    fbb_.AddElement<int32_t>(Conv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+  }
+  void add_dilation_h_factor(int32_t dilation_h_factor) {
+    fbb_.AddElement<int32_t>(Conv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
   }
-  explicit LocalResponseNormalizationOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  void add_quantized_bias_type(tflite::TensorType quantized_bias_type) {
+    fbb_.AddElement<int8_t>(Conv2DOptions::VT_QUANTIZED_BIAS_TYPE, static_cast<int8_t>(quantized_bias_type), 0);
+  }
+  explicit Conv2DOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<LocalResponseNormalizationOptions> Finish() {
+  ::flatbuffers::Offset<Conv2DOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<LocalResponseNormalizationOptions>(end);
+    auto o = ::flatbuffers::Offset<Conv2DOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<LocalResponseNormalizationOptions> CreateLocalResponseNormalizationOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t radius = 0,
-    float bias = 0.0f,
-    float alpha = 0.0f,
-    float beta = 0.0f) {
-  LocalResponseNormalizationOptionsBuilder builder_(_fbb);
-  builder_.add_beta(beta);
-  builder_.add_alpha(alpha);
-  builder_.add_bias(bias);
-  builder_.add_radius(radius);
+inline ::flatbuffers::Offset<Conv2DOptions> CreateConv2DOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::Padding padding = tflite::Padding_SAME,
+    int32_t stride_w = 0,
+    int32_t stride_h = 0,
+    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
+    int32_t dilation_w_factor = 1,
+    int32_t dilation_h_factor = 1,
+    tflite::TensorType quantized_bias_type = tflite::TensorType_FLOAT32) {
+  Conv2DOptionsBuilder builder_(_fbb);
+  builder_.add_dilation_h_factor(dilation_h_factor);
+  builder_.add_dilation_w_factor(dilation_w_factor);
+  builder_.add_stride_h(stride_h);
+  builder_.add_stride_w(stride_w);
+  builder_.add_quantized_bias_type(quantized_bias_type);
+  builder_.add_fused_activation_function(fused_activation_function);
+  builder_.add_padding(padding);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<LocalResponseNormalizationOptions> CreateLocalResponseNormalizationOptions(flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<Conv2DOptions> CreateConv2DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct LSTMOptionsT : public flatbuffers::NativeTable {
-  typedef LSTMOptions TableType;
+struct Conv3DOptionsT : public ::flatbuffers::NativeTable {
+  typedef Conv3DOptions TableType;
+  tflite::Padding padding = tflite::Padding_SAME;
+  int32_t stride_d = 0;
+  int32_t stride_w = 0;
+  int32_t stride_h = 0;
   tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  float cell_clip = 0.0f;
-  float proj_clip = 0.0f;
-  tflite::LSTMKernelType kernel_type = tflite::LSTMKernelType_FULL;
-  bool asymmetric_quantize_inputs = false;
+  int32_t dilation_d_factor = 1;
+  int32_t dilation_w_factor = 1;
+  int32_t dilation_h_factor = 1;
 };
 
-struct LSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef LSTMOptionsT NativeTableType;
-  typedef LSTMOptionsBuilder Builder;
+struct Conv3DOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef Conv3DOptionsT NativeTableType;
+  typedef Conv3DOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_FUSED_ACTIVATION_FUNCTION = 4,
-    VT_CELL_CLIP = 6,
-    VT_PROJ_CLIP = 8,
-    VT_KERNEL_TYPE = 10,
-    VT_ASYMMETRIC_QUANTIZE_INPUTS = 12
+    VT_PADDING = 4,
+    VT_STRIDE_D = 6,
+    VT_STRIDE_W = 8,
+    VT_STRIDE_H = 10,
+    VT_FUSED_ACTIVATION_FUNCTION = 12,
+    VT_DILATION_D_FACTOR = 14,
+    VT_DILATION_W_FACTOR = 16,
+    VT_DILATION_H_FACTOR = 18
   };
+  tflite::Padding padding() const {
+    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
+  }
+  int32_t stride_d() const {
+    return GetField<int32_t>(VT_STRIDE_D, 0);
+  }
+  int32_t stride_w() const {
+    return GetField<int32_t>(VT_STRIDE_W, 0);
+  }
+  int32_t stride_h() const {
+    return GetField<int32_t>(VT_STRIDE_H, 0);
+  }
   tflite::ActivationFunctionType fused_activation_function() const {
     return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
   }
-  float cell_clip() const {
-    return GetField<float>(VT_CELL_CLIP, 0.0f);
-  }
-  float proj_clip() const {
-    return GetField<float>(VT_PROJ_CLIP, 0.0f);
+  int32_t dilation_d_factor() const {
+    return GetField<int32_t>(VT_DILATION_D_FACTOR, 1);
   }
-  tflite::LSTMKernelType kernel_type() const {
-    return static_cast<tflite::LSTMKernelType>(GetField<int8_t>(VT_KERNEL_TYPE, 0));
+  int32_t dilation_w_factor() const {
+    return GetField<int32_t>(VT_DILATION_W_FACTOR, 1);
   }
-  bool asymmetric_quantize_inputs() const {
-    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
+  int32_t dilation_h_factor() const {
+    return GetField<int32_t>(VT_DILATION_H_FACTOR, 1);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_D, 4) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
            VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<float>(verifier, VT_CELL_CLIP, 4) &&
-           VerifyField<float>(verifier, VT_PROJ_CLIP, 4) &&
-           VerifyField<int8_t>(verifier, VT_KERNEL_TYPE, 1) &&
-           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
+           VerifyField<int32_t>(verifier, VT_DILATION_D_FACTOR, 4) &&
+           VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR, 4) &&
+           VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR, 4) &&
            verifier.EndTable();
   }
-  LSTMOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(LSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<LSTMOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  Conv3DOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(Conv3DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Conv3DOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct LSTMOptionsBuilder {
-  typedef LSTMOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(LSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+struct Conv3DOptionsBuilder {
+  typedef Conv3DOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_padding(tflite::Padding padding) {
+    fbb_.AddElement<int8_t>(Conv3DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
   }
-  void add_cell_clip(float cell_clip) {
-    fbb_.AddElement<float>(LSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f);
+  void add_stride_d(int32_t stride_d) {
+    fbb_.AddElement<int32_t>(Conv3DOptions::VT_STRIDE_D, stride_d, 0);
   }
-  void add_proj_clip(float proj_clip) {
-    fbb_.AddElement<float>(LSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f);
+  void add_stride_w(int32_t stride_w) {
+    fbb_.AddElement<int32_t>(Conv3DOptions::VT_STRIDE_W, stride_w, 0);
   }
-  void add_kernel_type(tflite::LSTMKernelType kernel_type) {
-    fbb_.AddElement<int8_t>(LSTMOptions::VT_KERNEL_TYPE, static_cast<int8_t>(kernel_type), 0);
+  void add_stride_h(int32_t stride_h) {
+    fbb_.AddElement<int32_t>(Conv3DOptions::VT_STRIDE_H, stride_h, 0);
   }
-  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
-    fbb_.AddElement<uint8_t>(LSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(Conv3DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  }
+  void add_dilation_d_factor(int32_t dilation_d_factor) {
+    fbb_.AddElement<int32_t>(Conv3DOptions::VT_DILATION_D_FACTOR, dilation_d_factor, 1);
+  }
+  void add_dilation_w_factor(int32_t dilation_w_factor) {
+    fbb_.AddElement<int32_t>(Conv3DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+  }
+  void add_dilation_h_factor(int32_t dilation_h_factor) {
+    fbb_.AddElement<int32_t>(Conv3DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
   }
-  explicit LSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit Conv3DOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<LSTMOptions> Finish() {
+  ::flatbuffers::Offset<Conv3DOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<LSTMOptions>(end);
+    auto o = ::flatbuffers::Offset<Conv3DOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<LSTMOptions> CreateLSTMOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<Conv3DOptions> CreateConv3DOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::Padding padding = tflite::Padding_SAME,
+    int32_t stride_d = 0,
+    int32_t stride_w = 0,
+    int32_t stride_h = 0,
     tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    float cell_clip = 0.0f,
-    float proj_clip = 0.0f,
-    tflite::LSTMKernelType kernel_type = tflite::LSTMKernelType_FULL,
-    bool asymmetric_quantize_inputs = false) {
-  LSTMOptionsBuilder builder_(_fbb);
-  builder_.add_proj_clip(proj_clip);
-  builder_.add_cell_clip(cell_clip);
-  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
-  builder_.add_kernel_type(kernel_type);
+    int32_t dilation_d_factor = 1,
+    int32_t dilation_w_factor = 1,
+    int32_t dilation_h_factor = 1) {
+  Conv3DOptionsBuilder builder_(_fbb);
+  builder_.add_dilation_h_factor(dilation_h_factor);
+  builder_.add_dilation_w_factor(dilation_w_factor);
+  builder_.add_dilation_d_factor(dilation_d_factor);
+  builder_.add_stride_h(stride_h);
+  builder_.add_stride_w(stride_w);
+  builder_.add_stride_d(stride_d);
   builder_.add_fused_activation_function(fused_activation_function);
+  builder_.add_padding(padding);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<LSTMOptions> CreateLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<Conv3DOptions> CreateConv3DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct UnidirectionalSequenceLSTMOptionsT : public flatbuffers::NativeTable {
-  typedef UnidirectionalSequenceLSTMOptions TableType;
+struct Pool2DOptionsT : public ::flatbuffers::NativeTable {
+  typedef Pool2DOptions TableType;
+  tflite::Padding padding = tflite::Padding_SAME;
+  int32_t stride_w = 0;
+  int32_t stride_h = 0;
+  int32_t filter_width = 0;
+  int32_t filter_height = 0;
   tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  float cell_clip = 0.0f;
-  float proj_clip = 0.0f;
-  bool time_major = false;
-  bool asymmetric_quantize_inputs = false;
-  bool diagonal_recurrent_tensors = false;
 };
 
-struct UnidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef UnidirectionalSequenceLSTMOptionsT NativeTableType;
-  typedef UnidirectionalSequenceLSTMOptionsBuilder Builder;
+struct Pool2DOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef Pool2DOptionsT NativeTableType;
+  typedef Pool2DOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_FUSED_ACTIVATION_FUNCTION = 4,
-    VT_CELL_CLIP = 6,
-    VT_PROJ_CLIP = 8,
-    VT_TIME_MAJOR = 10,
-    VT_ASYMMETRIC_QUANTIZE_INPUTS = 12,
-    VT_DIAGONAL_RECURRENT_TENSORS = 14
-  };
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
-  }
-  float cell_clip() const {
-    return GetField<float>(VT_CELL_CLIP, 0.0f);
+    VT_PADDING = 4,
+    VT_STRIDE_W = 6,
+    VT_STRIDE_H = 8,
+    VT_FILTER_WIDTH = 10,
+    VT_FILTER_HEIGHT = 12,
+    VT_FUSED_ACTIVATION_FUNCTION = 14
+  };
+  tflite::Padding padding() const {
+    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
   }
-  float proj_clip() const {
-    return GetField<float>(VT_PROJ_CLIP, 0.0f);
+  int32_t stride_w() const {
+    return GetField<int32_t>(VT_STRIDE_W, 0);
   }
-  bool time_major() const {
-    return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0;
+  int32_t stride_h() const {
+    return GetField<int32_t>(VT_STRIDE_H, 0);
   }
-  bool asymmetric_quantize_inputs() const {
-    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
+  int32_t filter_width() const {
+    return GetField<int32_t>(VT_FILTER_WIDTH, 0);
   }
-  bool diagonal_recurrent_tensors() const {
-    return GetField<uint8_t>(VT_DIAGONAL_RECURRENT_TENSORS, 0) != 0;
+  int32_t filter_height() const {
+    return GetField<int32_t>(VT_FILTER_HEIGHT, 0);
+  }
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
+           VerifyField<int32_t>(verifier, VT_FILTER_WIDTH, 4) &&
+           VerifyField<int32_t>(verifier, VT_FILTER_HEIGHT, 4) &&
            VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<float>(verifier, VT_CELL_CLIP, 4) &&
-           VerifyField<float>(verifier, VT_PROJ_CLIP, 4) &&
-           VerifyField<uint8_t>(verifier, VT_TIME_MAJOR, 1) &&
-           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
-           VerifyField<uint8_t>(verifier, VT_DIAGONAL_RECURRENT_TENSORS, 1) &&
            verifier.EndTable();
   }
-  UnidirectionalSequenceLSTMOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  Pool2DOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(Pool2DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Pool2DOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct UnidirectionalSequenceLSTMOptionsBuilder {
-  typedef UnidirectionalSequenceLSTMOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(UnidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+struct Pool2DOptionsBuilder {
+  typedef Pool2DOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_padding(tflite::Padding padding) {
+    fbb_.AddElement<int8_t>(Pool2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
   }
-  void add_cell_clip(float cell_clip) {
-    fbb_.AddElement<float>(UnidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f);
+  void add_stride_w(int32_t stride_w) {
+    fbb_.AddElement<int32_t>(Pool2DOptions::VT_STRIDE_W, stride_w, 0);
   }
-  void add_proj_clip(float proj_clip) {
-    fbb_.AddElement<float>(UnidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f);
+  void add_stride_h(int32_t stride_h) {
+    fbb_.AddElement<int32_t>(Pool2DOptions::VT_STRIDE_H, stride_h, 0);
   }
-  void add_time_major(bool time_major) {
-    fbb_.AddElement<uint8_t>(UnidirectionalSequenceLSTMOptions::VT_TIME_MAJOR, static_cast<uint8_t>(time_major), 0);
+  void add_filter_width(int32_t filter_width) {
+    fbb_.AddElement<int32_t>(Pool2DOptions::VT_FILTER_WIDTH, filter_width, 0);
   }
-  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
-    fbb_.AddElement<uint8_t>(UnidirectionalSequenceLSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  void add_filter_height(int32_t filter_height) {
+    fbb_.AddElement<int32_t>(Pool2DOptions::VT_FILTER_HEIGHT, filter_height, 0);
   }
-  void add_diagonal_recurrent_tensors(bool diagonal_recurrent_tensors) {
-    fbb_.AddElement<uint8_t>(UnidirectionalSequenceLSTMOptions::VT_DIAGONAL_RECURRENT_TENSORS, static_cast<uint8_t>(diagonal_recurrent_tensors), 0);
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(Pool2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
   }
-  explicit UnidirectionalSequenceLSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit Pool2DOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> Finish() {
+  ::flatbuffers::Offset<Pool2DOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<UnidirectionalSequenceLSTMOptions>(end);
+    auto o = ::flatbuffers::Offset<Pool2DOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> CreateUnidirectionalSequenceLSTMOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    float cell_clip = 0.0f,
-    float proj_clip = 0.0f,
-    bool time_major = false,
-    bool asymmetric_quantize_inputs = false,
-    bool diagonal_recurrent_tensors = false) {
-  UnidirectionalSequenceLSTMOptionsBuilder builder_(_fbb);
-  builder_.add_proj_clip(proj_clip);
-  builder_.add_cell_clip(cell_clip);
-  builder_.add_diagonal_recurrent_tensors(diagonal_recurrent_tensors);
-  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
-  builder_.add_time_major(time_major);
+inline ::flatbuffers::Offset<Pool2DOptions> CreatePool2DOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::Padding padding = tflite::Padding_SAME,
+    int32_t stride_w = 0,
+    int32_t stride_h = 0,
+    int32_t filter_width = 0,
+    int32_t filter_height = 0,
+    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) {
+  Pool2DOptionsBuilder builder_(_fbb);
+  builder_.add_filter_height(filter_height);
+  builder_.add_filter_width(filter_width);
+  builder_.add_stride_h(stride_h);
+  builder_.add_stride_w(stride_w);
   builder_.add_fused_activation_function(fused_activation_function);
+  builder_.add_padding(padding);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> CreateUnidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<Pool2DOptions> CreatePool2DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct BidirectionalSequenceLSTMOptionsT : public flatbuffers::NativeTable {
-  typedef BidirectionalSequenceLSTMOptions TableType;
+struct DepthwiseConv2DOptionsT : public ::flatbuffers::NativeTable {
+  typedef DepthwiseConv2DOptions TableType;
+  tflite::Padding padding = tflite::Padding_SAME;
+  int32_t stride_w = 0;
+  int32_t stride_h = 0;
+  int32_t depth_multiplier = 0;
   tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
-  float cell_clip = 0.0f;
-  float proj_clip = 0.0f;
-  bool merge_outputs = false;
-  bool time_major = true;
-  bool asymmetric_quantize_inputs = false;
+  int32_t dilation_w_factor = 1;
+  int32_t dilation_h_factor = 1;
 };
 
-struct BidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef BidirectionalSequenceLSTMOptionsT NativeTableType;
-  typedef BidirectionalSequenceLSTMOptionsBuilder Builder;
+struct DepthwiseConv2DOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef DepthwiseConv2DOptionsT NativeTableType;
+  typedef DepthwiseConv2DOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_FUSED_ACTIVATION_FUNCTION = 4,
-    VT_CELL_CLIP = 6,
-    VT_PROJ_CLIP = 8,
-    VT_MERGE_OUTPUTS = 10,
-    VT_TIME_MAJOR = 12,
-    VT_ASYMMETRIC_QUANTIZE_INPUTS = 14
+    VT_PADDING = 4,
+    VT_STRIDE_W = 6,
+    VT_STRIDE_H = 8,
+    VT_DEPTH_MULTIPLIER = 10,
+    VT_FUSED_ACTIVATION_FUNCTION = 12,
+    VT_DILATION_W_FACTOR = 14,
+    VT_DILATION_H_FACTOR = 16
   };
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  tflite::Padding padding() const {
+    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
   }
-  float cell_clip() const {
-    return GetField<float>(VT_CELL_CLIP, 0.0f);
+  int32_t stride_w() const {
+    return GetField<int32_t>(VT_STRIDE_W, 0);
   }
-  float proj_clip() const {
-    return GetField<float>(VT_PROJ_CLIP, 0.0f);
+  int32_t stride_h() const {
+    return GetField<int32_t>(VT_STRIDE_H, 0);
   }
-  bool merge_outputs() const {
-    return GetField<uint8_t>(VT_MERGE_OUTPUTS, 0) != 0;
+  int32_t depth_multiplier() const {
+    return GetField<int32_t>(VT_DEPTH_MULTIPLIER, 0);
   }
-  bool time_major() const {
-    return GetField<uint8_t>(VT_TIME_MAJOR, 1) != 0;
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
   }
-  bool asymmetric_quantize_inputs() const {
-    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
+  int32_t dilation_w_factor() const {
+    return GetField<int32_t>(VT_DILATION_W_FACTOR, 1);
+  }
+  int32_t dilation_h_factor() const {
+    return GetField<int32_t>(VT_DILATION_H_FACTOR, 1);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
+           VerifyField<int32_t>(verifier, VT_DEPTH_MULTIPLIER, 4) &&
            VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
-           VerifyField<float>(verifier, VT_CELL_CLIP, 4) &&
-           VerifyField<float>(verifier, VT_PROJ_CLIP, 4) &&
-           VerifyField<uint8_t>(verifier, VT_MERGE_OUTPUTS, 1) &&
-           VerifyField<uint8_t>(verifier, VT_TIME_MAJOR, 1) &&
-           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
+           VerifyField<int32_t>(verifier, VT_DILATION_W_FACTOR, 4) &&
+           VerifyField<int32_t>(verifier, VT_DILATION_H_FACTOR, 4) &&
            verifier.EndTable();
   }
-  BidirectionalSequenceLSTMOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<BidirectionalSequenceLSTMOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  DepthwiseConv2DOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(DepthwiseConv2DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<DepthwiseConv2DOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct BidirectionalSequenceLSTMOptionsBuilder {
-  typedef BidirectionalSequenceLSTMOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(BidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+struct DepthwiseConv2DOptionsBuilder {
+  typedef DepthwiseConv2DOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_padding(tflite::Padding padding) {
+    fbb_.AddElement<int8_t>(DepthwiseConv2DOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
   }
-  void add_cell_clip(float cell_clip) {
-    fbb_.AddElement<float>(BidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f);
+  void add_stride_w(int32_t stride_w) {
+    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_STRIDE_W, stride_w, 0);
   }
-  void add_proj_clip(float proj_clip) {
-    fbb_.AddElement<float>(BidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f);
+  void add_stride_h(int32_t stride_h) {
+    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_STRIDE_H, stride_h, 0);
   }
-  void add_merge_outputs(bool merge_outputs) {
-    fbb_.AddElement<uint8_t>(BidirectionalSequenceLSTMOptions::VT_MERGE_OUTPUTS, static_cast<uint8_t>(merge_outputs), 0);
+  void add_depth_multiplier(int32_t depth_multiplier) {
+    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DEPTH_MULTIPLIER, depth_multiplier, 0);
   }
-  void add_time_major(bool time_major) {
-    fbb_.AddElement<uint8_t>(BidirectionalSequenceLSTMOptions::VT_TIME_MAJOR, static_cast<uint8_t>(time_major), 1);
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(DepthwiseConv2DOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
   }
-  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
-    fbb_.AddElement<uint8_t>(BidirectionalSequenceLSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  void add_dilation_w_factor(int32_t dilation_w_factor) {
+    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DILATION_W_FACTOR, dilation_w_factor, 1);
+  }
+  void add_dilation_h_factor(int32_t dilation_h_factor) {
+    fbb_.AddElement<int32_t>(DepthwiseConv2DOptions::VT_DILATION_H_FACTOR, dilation_h_factor, 1);
   }
-  explicit BidirectionalSequenceLSTMOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit DepthwiseConv2DOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
       : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<BidirectionalSequenceLSTMOptions> Finish() {
+  ::flatbuffers::Offset<DepthwiseConv2DOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<BidirectionalSequenceLSTMOptions>(end);
+    auto o = ::flatbuffers::Offset<DepthwiseConv2DOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<BidirectionalSequenceLSTMOptions> CreateBidirectionalSequenceLSTMOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
+inline ::flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::Padding padding = tflite::Padding_SAME,
+    int32_t stride_w = 0,
+    int32_t stride_h = 0,
+    int32_t depth_multiplier = 0,
     tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
-    float cell_clip = 0.0f,
-    float proj_clip = 0.0f,
-    bool merge_outputs = false,
-    bool time_major = true,
-    bool asymmetric_quantize_inputs = false) {
-  BidirectionalSequenceLSTMOptionsBuilder builder_(_fbb);
-  builder_.add_proj_clip(proj_clip);
-  builder_.add_cell_clip(cell_clip);
-  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
-  builder_.add_time_major(time_major);
-  builder_.add_merge_outputs(merge_outputs);
+    int32_t dilation_w_factor = 1,
+    int32_t dilation_h_factor = 1) {
+  DepthwiseConv2DOptionsBuilder builder_(_fbb);
+  builder_.add_dilation_h_factor(dilation_h_factor);
+  builder_.add_dilation_w_factor(dilation_w_factor);
+  builder_.add_depth_multiplier(depth_multiplier);
+  builder_.add_stride_h(stride_h);
+  builder_.add_stride_w(stride_w);
   builder_.add_fused_activation_function(fused_activation_function);
+  builder_.add_padding(padding);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<BidirectionalSequenceLSTMOptions> CreateBidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct ResizeBilinearOptionsT : public flatbuffers::NativeTable {
-  typedef ResizeBilinearOptions TableType;
-  bool align_corners = false;
-  bool half_pixel_centers = false;
+struct ConcatEmbeddingsOptionsT : public ::flatbuffers::NativeTable {
+  typedef ConcatEmbeddingsOptions TableType;
+  int32_t num_channels = 0;
+  std::vector<int32_t> num_columns_per_channel{};
+  std::vector<int32_t> embedding_dim_per_channel{};
 };
 
-struct ResizeBilinearOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ResizeBilinearOptionsT NativeTableType;
-  typedef ResizeBilinearOptionsBuilder Builder;
+struct ConcatEmbeddingsOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ConcatEmbeddingsOptionsT NativeTableType;
+  typedef ConcatEmbeddingsOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_ALIGN_CORNERS = 8,
-    VT_HALF_PIXEL_CENTERS = 10
+    VT_NUM_CHANNELS = 4,
+    VT_NUM_COLUMNS_PER_CHANNEL = 6,
+    VT_EMBEDDING_DIM_PER_CHANNEL = 8
   };
-  bool align_corners() const {
-    return GetField<uint8_t>(VT_ALIGN_CORNERS, 0) != 0;
+  int32_t num_channels() const {
+    return GetField<int32_t>(VT_NUM_CHANNELS, 0);
   }
-  bool half_pixel_centers() const {
-    return GetField<uint8_t>(VT_HALF_PIXEL_CENTERS, 0) != 0;
+  const ::flatbuffers::Vector<int32_t> *num_columns_per_channel() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_NUM_COLUMNS_PER_CHANNEL);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  const ::flatbuffers::Vector<int32_t> *embedding_dim_per_channel() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_EMBEDDING_DIM_PER_CHANNEL);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<uint8_t>(verifier, VT_ALIGN_CORNERS, 1) &&
-           VerifyField<uint8_t>(verifier, VT_HALF_PIXEL_CENTERS, 1) &&
+           VerifyField<int32_t>(verifier, VT_NUM_CHANNELS, 4) &&
+           VerifyOffset(verifier, VT_NUM_COLUMNS_PER_CHANNEL) &&
+           verifier.VerifyVector(num_columns_per_channel()) &&
+           VerifyOffset(verifier, VT_EMBEDDING_DIM_PER_CHANNEL) &&
+           verifier.VerifyVector(embedding_dim_per_channel()) &&
            verifier.EndTable();
   }
-  ResizeBilinearOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ResizeBilinearOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ResizeBilinearOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ConcatEmbeddingsOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ConcatEmbeddingsOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ConcatEmbeddingsOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct ResizeBilinearOptionsBuilder {
-  typedef ResizeBilinearOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_align_corners(bool align_corners) {
-    fbb_.AddElement<uint8_t>(ResizeBilinearOptions::VT_ALIGN_CORNERS, static_cast<uint8_t>(align_corners), 0);
+struct ConcatEmbeddingsOptionsBuilder {
+  typedef ConcatEmbeddingsOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_num_channels(int32_t num_channels) {
+    fbb_.AddElement<int32_t>(ConcatEmbeddingsOptions::VT_NUM_CHANNELS, num_channels, 0);
   }
-  void add_half_pixel_centers(bool half_pixel_centers) {
-    fbb_.AddElement<uint8_t>(ResizeBilinearOptions::VT_HALF_PIXEL_CENTERS, static_cast<uint8_t>(half_pixel_centers), 0);
+  void add_num_columns_per_channel(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> num_columns_per_channel) {
+    fbb_.AddOffset(ConcatEmbeddingsOptions::VT_NUM_COLUMNS_PER_CHANNEL, num_columns_per_channel);
   }
-  explicit ResizeBilinearOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
-      : fbb_(_fbb) {
+  void add_embedding_dim_per_channel(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> embedding_dim_per_channel) {
+    fbb_.AddOffset(ConcatEmbeddingsOptions::VT_EMBEDDING_DIM_PER_CHANNEL, embedding_dim_per_channel);
+  }
+  explicit ConcatEmbeddingsOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<ResizeBilinearOptions> Finish() {
+  ::flatbuffers::Offset<ConcatEmbeddingsOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ResizeBilinearOptions>(end);
+    auto o = ::flatbuffers::Offset<ConcatEmbeddingsOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<ResizeBilinearOptions> CreateResizeBilinearOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    bool align_corners = false,
-    bool half_pixel_centers = false) {
-  ResizeBilinearOptionsBuilder builder_(_fbb);
-  builder_.add_half_pixel_centers(half_pixel_centers);
-  builder_.add_align_corners(align_corners);
+inline ::flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t num_channels = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> num_columns_per_channel = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> embedding_dim_per_channel = 0) {
+  ConcatEmbeddingsOptionsBuilder builder_(_fbb);
+  builder_.add_embedding_dim_per_channel(embedding_dim_per_channel);
+  builder_.add_num_columns_per_channel(num_columns_per_channel);
+  builder_.add_num_channels(num_channels);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<ResizeBilinearOptions> CreateResizeBilinearOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+inline ::flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t num_channels = 0,
+    const std::vector<int32_t> *num_columns_per_channel = nullptr,
+    const std::vector<int32_t> *embedding_dim_per_channel = nullptr) {
+  auto num_columns_per_channel__ = num_columns_per_channel ? _fbb.CreateVector<int32_t>(*num_columns_per_channel) : 0;
+  auto embedding_dim_per_channel__ = embedding_dim_per_channel ? _fbb.CreateVector<int32_t>(*embedding_dim_per_channel) : 0;
+  return tflite::CreateConcatEmbeddingsOptions(
+      _fbb,
+      num_channels,
+      num_columns_per_channel__,
+      embedding_dim_per_channel__);
+}
+
+::flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct ResizeNearestNeighborOptionsT : public flatbuffers::NativeTable {
-  typedef ResizeNearestNeighborOptions TableType;
-  bool align_corners = false;
-  bool half_pixel_centers = false;
+struct LSHProjectionOptionsT : public ::flatbuffers::NativeTable {
+  typedef LSHProjectionOptions TableType;
+  tflite::LSHProjectionType type = tflite::LSHProjectionType_UNKNOWN;
 };
 
-struct ResizeNearestNeighborOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ResizeNearestNeighborOptionsT NativeTableType;
-  typedef ResizeNearestNeighborOptionsBuilder Builder;
+struct LSHProjectionOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef LSHProjectionOptionsT NativeTableType;
+  typedef LSHProjectionOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_ALIGN_CORNERS = 4,
-    VT_HALF_PIXEL_CENTERS = 6
+    VT_TYPE = 4
   };
-  bool align_corners() const {
-    return GetField<uint8_t>(VT_ALIGN_CORNERS, 0) != 0;
-  }
-  bool half_pixel_centers() const {
-    return GetField<uint8_t>(VT_HALF_PIXEL_CENTERS, 0) != 0;
+  tflite::LSHProjectionType type() const {
+    return static_cast<tflite::LSHProjectionType>(GetField<int8_t>(VT_TYPE, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<uint8_t>(verifier, VT_ALIGN_CORNERS, 1) &&
-           VerifyField<uint8_t>(verifier, VT_HALF_PIXEL_CENTERS, 1) &&
+           VerifyField<int8_t>(verifier, VT_TYPE, 1) &&
           verifier.EndTable();
   }
-  ResizeNearestNeighborOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ResizeNearestNeighborOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ResizeNearestNeighborOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  LSHProjectionOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(LSHProjectionOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<LSHProjectionOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct ResizeNearestNeighborOptionsBuilder {
-  typedef ResizeNearestNeighborOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_align_corners(bool align_corners) {
-    fbb_.AddElement<uint8_t>(ResizeNearestNeighborOptions::VT_ALIGN_CORNERS, static_cast<uint8_t>(align_corners), 0);
-  }
-  void add_half_pixel_centers(bool half_pixel_centers) {
-    fbb_.AddElement<uint8_t>(ResizeNearestNeighborOptions::VT_HALF_PIXEL_CENTERS, static_cast<uint8_t>(half_pixel_centers), 0);
+struct LSHProjectionOptionsBuilder {
+  typedef LSHProjectionOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_type(tflite::LSHProjectionType type) {
+    fbb_.AddElement<int8_t>(LSHProjectionOptions::VT_TYPE, static_cast<int8_t>(type), 0);
   }
-  explicit ResizeNearestNeighborOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit LSHProjectionOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<ResizeNearestNeighborOptions> Finish() {
+  ::flatbuffers::Offset<LSHProjectionOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ResizeNearestNeighborOptions>(end);
+    auto o = ::flatbuffers::Offset<LSHProjectionOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<ResizeNearestNeighborOptions> CreateResizeNearestNeighborOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    bool align_corners = false,
-    bool half_pixel_centers = false) {
-  ResizeNearestNeighborOptionsBuilder builder_(_fbb);
-  builder_.add_half_pixel_centers(half_pixel_centers);
-  builder_.add_align_corners(align_corners);
+inline ::flatbuffers::Offset<LSHProjectionOptions> CreateLSHProjectionOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::LSHProjectionType type = tflite::LSHProjectionType_UNKNOWN) {
+  LSHProjectionOptionsBuilder builder_(_fbb);
+  builder_.add_type(type);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<ResizeNearestNeighborOptions> CreateResizeNearestNeighborOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<LSHProjectionOptions> CreateLSHProjectionOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct CallOptionsT : public flatbuffers::NativeTable {
-  typedef CallOptions TableType;
-  uint32_t subgraph = 0;
+struct SVDFOptionsT : public ::flatbuffers::NativeTable {
+  typedef SVDFOptions TableType;
+  int32_t rank = 0;
+  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+  bool asymmetric_quantize_inputs = false;
 };
 
-struct CallOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef CallOptionsT NativeTableType;
-  typedef CallOptionsBuilder Builder;
+struct SVDFOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SVDFOptionsT NativeTableType;
+  typedef SVDFOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_SUBGRAPH = 4
+    VT_RANK = 4,
+    VT_FUSED_ACTIVATION_FUNCTION = 6,
+    VT_ASYMMETRIC_QUANTIZE_INPUTS = 8
  };
-  uint32_t subgraph() const {
-    return GetField<uint32_t>(VT_SUBGRAPH, 0);
+  int32_t rank() const {
+    return GetField<int32_t>(VT_RANK, 0);
+  }
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  }
+  bool asymmetric_quantize_inputs() const {
+    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<uint32_t>(verifier, VT_SUBGRAPH, 4) &&
+           VerifyField<int32_t>(verifier, VT_RANK, 4) &&
+           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
            verifier.EndTable();
   }
-  CallOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(CallOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<CallOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SVDFOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SVDFOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SVDFOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct CallOptionsBuilder {
-  typedef CallOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_subgraph(uint32_t subgraph) {
-    fbb_.AddElement<uint32_t>(CallOptions::VT_SUBGRAPH, subgraph, 0);
+struct SVDFOptionsBuilder {
+  typedef SVDFOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_rank(int32_t rank) {
+    fbb_.AddElement<int32_t>(SVDFOptions::VT_RANK, rank, 0);
+  }
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(SVDFOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  }
+  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
+    fbb_.AddElement<uint8_t>(SVDFOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
   }
-  explicit CallOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit SVDFOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<CallOptions> Finish() {
+  ::flatbuffers::Offset<SVDFOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<CallOptions>(end);
+    auto o = ::flatbuffers::Offset<SVDFOptions>(end);
    return o;
   }
 };
 
-inline flatbuffers::Offset<CallOptions> CreateCallOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    uint32_t subgraph = 0) {
-  CallOptionsBuilder builder_(_fbb);
-  builder_.add_subgraph(subgraph);
+inline ::flatbuffers::Offset<SVDFOptions> CreateSVDFOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t rank = 0,
+    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
+    bool asymmetric_quantize_inputs = false) {
+  SVDFOptionsBuilder builder_(_fbb);
+  builder_.add_rank(rank);
+  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
+  builder_.add_fused_activation_function(fused_activation_function);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<CallOptions> CreateCallOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SVDFOptions> CreateSVDFOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct PadOptionsT : public flatbuffers::NativeTable {
-  typedef PadOptions TableType;
+struct RNNOptionsT : public ::flatbuffers::NativeTable {
+  typedef RNNOptions TableType;
+  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+  bool asymmetric_quantize_inputs = false;
 };
 
-struct PadOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef PadOptionsT NativeTableType;
-  typedef PadOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct RNNOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef RNNOptionsT NativeTableType;
+  typedef RNNOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_FUSED_ACTIVATION_FUNCTION = 4,
+    VT_ASYMMETRIC_QUANTIZE_INPUTS = 6
+  };
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  }
+  bool asymmetric_quantize_inputs() const {
+    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
            verifier.EndTable();
   }
-  PadOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(PadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<PadOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  RNNOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(RNNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<RNNOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct PadOptionsBuilder {
-  typedef PadOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit PadOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct RNNOptionsBuilder {
+  typedef RNNOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(RNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  }
+  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
+    fbb_.AddElement<uint8_t>(RNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  }
+  explicit RNNOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<PadOptions> Finish() {
+  ::flatbuffers::Offset<RNNOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<PadOptions>(end);
+    auto o = ::flatbuffers::Offset<RNNOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<PadOptions> CreatePadOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  PadOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<RNNOptions> CreateRNNOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
+    bool asymmetric_quantize_inputs = false) {
+  RNNOptionsBuilder builder_(_fbb);
+  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
+  builder_.add_fused_activation_function(fused_activation_function);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<PadOptions> CreatePadOptions(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<RNNOptions> CreateRNNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct PadV2OptionsT : public flatbuffers::NativeTable {
-  typedef PadV2Options TableType;
+struct SequenceRNNOptionsT : public ::flatbuffers::NativeTable {
+  typedef SequenceRNNOptions TableType;
+  bool time_major = false;
+  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+  bool asymmetric_quantize_inputs = false;
 };
 
-struct PadV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef PadV2OptionsT NativeTableType;
-  typedef PadV2OptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct SequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SequenceRNNOptionsT NativeTableType;
+  typedef SequenceRNNOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_TIME_MAJOR = 4,
+    VT_FUSED_ACTIVATION_FUNCTION = 6,
+    VT_ASYMMETRIC_QUANTIZE_INPUTS = 8
+  };
+  bool time_major() const {
+    return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0;
+  }
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  }
+  bool asymmetric_quantize_inputs() const {
+    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<uint8_t>(verifier, VT_TIME_MAJOR, 1) &&
+           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
           verifier.EndTable();
   }
-  PadV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(PadV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<PadV2Options> Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SequenceRNNOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SequenceRNNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SequenceRNNOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct PadV2OptionsBuilder {
-  typedef PadV2Options Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit PadV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct SequenceRNNOptionsBuilder {
+  typedef SequenceRNNOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_time_major(bool time_major) {
+    fbb_.AddElement<uint8_t>(SequenceRNNOptions::VT_TIME_MAJOR, static_cast<uint8_t>(time_major), 0);
+  }
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(SequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  }
+  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
+    fbb_.AddElement<uint8_t>(SequenceRNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  }
+  explicit SequenceRNNOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<PadV2Options> Finish() {
+  ::flatbuffers::Offset<SequenceRNNOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<PadV2Options>(end);
+    auto o = ::flatbuffers::Offset<SequenceRNNOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<PadV2Options> CreatePadV2Options(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  PadV2OptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    bool time_major = false,
+    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
+    bool asymmetric_quantize_inputs = false) {
+  SequenceRNNOptionsBuilder builder_(_fbb);
+  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
+  builder_.add_fused_activation_function(fused_activation_function);
+  builder_.add_time_major(time_major);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<PadV2Options> CreatePadV2Options(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct ReshapeOptionsT : public flatbuffers::NativeTable {
-  typedef ReshapeOptions TableType;
-  std::vector<int32_t> new_shape{};
+struct BidirectionalSequenceRNNOptionsT : public ::flatbuffers::NativeTable {
+  typedef BidirectionalSequenceRNNOptions TableType;
+  bool time_major = false;
+  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+  bool merge_outputs = false;
+  bool asymmetric_quantize_inputs = false;
 };
 
-struct ReshapeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ReshapeOptionsT NativeTableType;
-  typedef ReshapeOptionsBuilder Builder;
+struct BidirectionalSequenceRNNOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef BidirectionalSequenceRNNOptionsT NativeTableType;
+  typedef BidirectionalSequenceRNNOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_NEW_SHAPE = 4
+    VT_TIME_MAJOR = 4,
+    VT_FUSED_ACTIVATION_FUNCTION = 6,
+    VT_MERGE_OUTPUTS = 8,
+    VT_ASYMMETRIC_QUANTIZE_INPUTS = 10
   };
-  const flatbuffers::Vector<int32_t> *new_shape() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_NEW_SHAPE);
+  bool time_major() const {
+    return GetField<uint8_t>(VT_TIME_MAJOR, 0) != 0;
+  }
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  }
+  bool merge_outputs() const {
+    return GetField<uint8_t>(VT_MERGE_OUTPUTS, 0) != 0;
+  }
+  bool asymmetric_quantize_inputs() const {
+    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyOffset(verifier, VT_NEW_SHAPE) &&
-           verifier.VerifyVector(new_shape()) &&
+           VerifyField<uint8_t>(verifier, VT_TIME_MAJOR, 1) &&
+           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<uint8_t>(verifier, VT_MERGE_OUTPUTS, 1) &&
+           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
           verifier.EndTable();
   }
-  ReshapeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ReshapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ReshapeOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  BidirectionalSequenceRNNOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(BidirectionalSequenceRNNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<BidirectionalSequenceRNNOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct ReshapeOptionsBuilder {
-  typedef ReshapeOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_new_shape(flatbuffers::Offset<flatbuffers::Vector<int32_t>> new_shape) {
-    fbb_.AddOffset(ReshapeOptions::VT_NEW_SHAPE, new_shape);
+struct BidirectionalSequenceRNNOptionsBuilder {
+  typedef BidirectionalSequenceRNNOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_time_major(bool time_major) {
+    fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_TIME_MAJOR, static_cast<uint8_t>(time_major), 0);
+  }
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(BidirectionalSequenceRNNOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  }
+  void add_merge_outputs(bool merge_outputs) {
+    fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_MERGE_OUTPUTS, static_cast<uint8_t>(merge_outputs), 0);
+  }
+  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
+    fbb_.AddElement<uint8_t>(BidirectionalSequenceRNNOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
   }
-  explicit ReshapeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit BidirectionalSequenceRNNOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<ReshapeOptions> Finish() {
+  ::flatbuffers::Offset<BidirectionalSequenceRNNOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ReshapeOptions>(end);
+    auto o = ::flatbuffers::Offset<BidirectionalSequenceRNNOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<ReshapeOptions> CreateReshapeOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<int32_t>> new_shape = 0) {
-  ReshapeOptionsBuilder builder_(_fbb);
-  builder_.add_new_shape(new_shape);
-  return builder_.Finish();
-}
-
-inline flatbuffers::Offset<ReshapeOptions> CreateReshapeOptionsDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    const std::vector<int32_t> *new_shape = nullptr) {
-  auto new_shape__ = new_shape ? _fbb.CreateVector<int32_t>(*new_shape) : 0;
-  return tflite::CreateReshapeOptions(
-      _fbb,
-      new_shape__);
-}
-
-flatbuffers::Offset<ReshapeOptions> CreateReshapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-
-struct SpaceToBatchNDOptionsT : public flatbuffers::NativeTable {
-  typedef SpaceToBatchNDOptions TableType;
-};
-
-struct SpaceToBatchNDOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SpaceToBatchNDOptionsT NativeTableType;
-  typedef SpaceToBatchNDOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
-    return VerifyTableStart(verifier) &&
-           verifier.EndTable();
-  }
-  SpaceToBatchNDOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SpaceToBatchNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SpaceToBatchNDOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-};
-
-struct SpaceToBatchNDOptionsBuilder {
-  typedef SpaceToBatchNDOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit SpaceToBatchNDOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
-      : fbb_(_fbb) {
-    start_ = fbb_.StartTable();
-  }
-  flatbuffers::Offset<SpaceToBatchNDOptions> Finish() {
-    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SpaceToBatchNDOptions>(end);
-    return o;
-  }
-};
-
-inline flatbuffers::Offset<SpaceToBatchNDOptions> CreateSpaceToBatchNDOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  SpaceToBatchNDOptionsBuilder builder_(_fbb);
-  return builder_.Finish();
-}
-
-flatbuffers::Offset<SpaceToBatchNDOptions> CreateSpaceToBatchNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-
-struct BatchToSpaceNDOptionsT : public flatbuffers::NativeTable {
-  typedef BatchToSpaceNDOptions TableType;
-};
-
-struct BatchToSpaceNDOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef BatchToSpaceNDOptionsT NativeTableType;
-  typedef BatchToSpaceNDOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
-    return VerifyTableStart(verifier) &&
-           verifier.EndTable();
-  }
-  BatchToSpaceNDOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(BatchToSpaceNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<BatchToSpaceNDOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-};
-
-struct BatchToSpaceNDOptionsBuilder {
-  typedef BatchToSpaceNDOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit BatchToSpaceNDOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
-      : fbb_(_fbb) {
-    start_ = fbb_.StartTable();
-  }
-  flatbuffers::Offset<BatchToSpaceNDOptions> Finish() {
-    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<BatchToSpaceNDOptions>(end);
-    return o;
-  }
-};
-
-inline flatbuffers::Offset<BatchToSpaceNDOptions> CreateBatchToSpaceNDOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  BatchToSpaceNDOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalSequenceRNNOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    bool time_major = false,
+    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
+    bool merge_outputs = false,
+    bool asymmetric_quantize_inputs = false) {
+  BidirectionalSequenceRNNOptionsBuilder builder_(_fbb);
+  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
+  builder_.add_merge_outputs(merge_outputs);
+  builder_.add_fused_activation_function(fused_activation_function);
+  builder_.add_time_major(time_major);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<BatchToSpaceNDOptions> CreateBatchToSpaceNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalSequenceRNNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct SkipGramOptionsT : public flatbuffers::NativeTable {
-  typedef SkipGramOptions TableType;
-  int32_t ngram_size = 0;
-  int32_t max_skip_size = 0;
-  bool include_all_ngrams = false;
+struct FullyConnectedOptionsT : public ::flatbuffers::NativeTable {
+  typedef FullyConnectedOptions TableType;
+  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+  tflite::FullyConnectedOptionsWeightsFormat weights_format = tflite::FullyConnectedOptionsWeightsFormat_DEFAULT;
+  bool keep_num_dims = false;
+  bool asymmetric_quantize_inputs = false;
+  tflite::TensorType quantized_bias_type = tflite::TensorType_FLOAT32;
 };
 
-struct SkipGramOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SkipGramOptionsT NativeTableType;
-  typedef SkipGramOptionsBuilder Builder;
+struct FullyConnectedOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef FullyConnectedOptionsT NativeTableType;
+  typedef FullyConnectedOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_NGRAM_SIZE = 4,
-    VT_MAX_SKIP_SIZE = 6,
-    VT_INCLUDE_ALL_NGRAMS = 8
+    VT_FUSED_ACTIVATION_FUNCTION = 4,
+    VT_WEIGHTS_FORMAT = 6,
+    VT_KEEP_NUM_DIMS = 8,
+    VT_ASYMMETRIC_QUANTIZE_INPUTS = 10,
+    VT_QUANTIZED_BIAS_TYPE = 12
   };
-  int32_t ngram_size() const {
-    return GetField<int32_t>(VT_NGRAM_SIZE, 0);
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
   }
-  int32_t max_skip_size() const {
-    return GetField<int32_t>(VT_MAX_SKIP_SIZE, 0);
+  tflite::FullyConnectedOptionsWeightsFormat weights_format() const {
+    return static_cast<tflite::FullyConnectedOptionsWeightsFormat>(GetField<int8_t>(VT_WEIGHTS_FORMAT, 0));
   }
-  bool include_all_ngrams() const {
-    return GetField<uint8_t>(VT_INCLUDE_ALL_NGRAMS, 0) != 0;
+  bool keep_num_dims() const {
+    return GetField<uint8_t>(VT_KEEP_NUM_DIMS, 0) != 0;
+  }
+  bool asymmetric_quantize_inputs() const {
+    return GetField<uint8_t>(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0;
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  tflite::TensorType quantized_bias_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_QUANTIZED_BIAS_TYPE, 0));
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_NGRAM_SIZE, 4) &&
-           VerifyField<int32_t>(verifier, VT_MAX_SKIP_SIZE, 4) &&
-           VerifyField<uint8_t>(verifier, VT_INCLUDE_ALL_NGRAMS, 1) &&
+           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<int8_t>(verifier, VT_WEIGHTS_FORMAT, 1) &&
+           VerifyField<uint8_t>(verifier, VT_KEEP_NUM_DIMS, 1) &&
+           VerifyField<uint8_t>(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) &&
+           VerifyField<int8_t>(verifier, VT_QUANTIZED_BIAS_TYPE, 1) &&
           verifier.EndTable();
   }
-  SkipGramOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SkipGramOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SkipGramOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  FullyConnectedOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(FullyConnectedOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<FullyConnectedOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct SkipGramOptionsBuilder {
-  typedef SkipGramOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_ngram_size(int32_t ngram_size) {
-    fbb_.AddElement<int32_t>(SkipGramOptions::VT_NGRAM_SIZE, ngram_size, 0);
+struct FullyConnectedOptionsBuilder {
+  typedef FullyConnectedOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(FullyConnectedOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
   }
-  void add_max_skip_size(int32_t max_skip_size) {
-    fbb_.AddElement<int32_t>(SkipGramOptions::VT_MAX_SKIP_SIZE, max_skip_size, 0);
+  void add_weights_format(tflite::FullyConnectedOptionsWeightsFormat weights_format) {
+    fbb_.AddElement<int8_t>(FullyConnectedOptions::VT_WEIGHTS_FORMAT, static_cast<int8_t>(weights_format), 0);
   }
-  void add_include_all_ngrams(bool include_all_ngrams) {
-    fbb_.AddElement<uint8_t>(SkipGramOptions::VT_INCLUDE_ALL_NGRAMS, static_cast<uint8_t>(include_all_ngrams), 0);
+  void add_keep_num_dims(bool keep_num_dims) {
+    fbb_.AddElement<uint8_t>(FullyConnectedOptions::VT_KEEP_NUM_DIMS, static_cast<uint8_t>(keep_num_dims), 0);
   }
-  explicit SkipGramOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) {
+    fbb_.AddElement<uint8_t>(FullyConnectedOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast<uint8_t>(asymmetric_quantize_inputs), 0);
+  }
+  void add_quantized_bias_type(tflite::TensorType quantized_bias_type) {
+    fbb_.AddElement<int8_t>(FullyConnectedOptions::VT_QUANTIZED_BIAS_TYPE, static_cast<int8_t>(quantized_bias_type), 0);
+  }
+  explicit FullyConnectedOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
      : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SkipGramOptions> Finish() {
+  ::flatbuffers::Offset<FullyConnectedOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SkipGramOptions>(end);
+    auto o = ::flatbuffers::Offset<FullyConnectedOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<SkipGramOptions> CreateSkipGramOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t ngram_size = 0,
-    int32_t max_skip_size = 0,
-    bool include_all_ngrams = false) {
-  SkipGramOptionsBuilder builder_(_fbb);
-  builder_.add_max_skip_size(max_skip_size);
-  builder_.add_ngram_size(ngram_size);
-  builder_.add_include_all_ngrams(include_all_ngrams);
+inline ::flatbuffers::Offset<FullyConnectedOptions> CreateFullyConnectedOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE,
+    tflite::FullyConnectedOptionsWeightsFormat weights_format = tflite::FullyConnectedOptionsWeightsFormat_DEFAULT,
+    bool keep_num_dims = false,
+    bool asymmetric_quantize_inputs = false,
+    tflite::TensorType quantized_bias_type = tflite::TensorType_FLOAT32) {
+  FullyConnectedOptionsBuilder builder_(_fbb);
+  builder_.add_quantized_bias_type(quantized_bias_type);
+  builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs);
+  builder_.add_keep_num_dims(keep_num_dims);
+  builder_.add_weights_format(weights_format);
+  builder_.add_fused_activation_function(fused_activation_function);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<SkipGramOptions> CreateSkipGramOptions(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<FullyConnectedOptions> CreateFullyConnectedOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct SpaceToDepthOptionsT : public flatbuffers::NativeTable {
-  typedef SpaceToDepthOptions TableType;
-  int32_t block_size = 0;
+struct SoftmaxOptionsT : public ::flatbuffers::NativeTable {
+  typedef SoftmaxOptions TableType;
+  float beta = 0.0f;
 };
 
-struct SpaceToDepthOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SpaceToDepthOptionsT NativeTableType;
-  typedef SpaceToDepthOptionsBuilder Builder;
+struct SoftmaxOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SoftmaxOptionsT NativeTableType;
+  typedef SoftmaxOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_BLOCK_SIZE = 4
+    VT_BETA = 4
   };
-  int32_t block_size() const {
-    return GetField<int32_t>(VT_BLOCK_SIZE, 0);
+  float beta() const {
+    return GetField<float>(VT_BETA, 0.0f);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_BLOCK_SIZE, 4) &&
+           VerifyField<float>(verifier, VT_BETA, 4) &&
           verifier.EndTable();
   }
-  SpaceToDepthOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SpaceToDepthOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SpaceToDepthOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SoftmaxOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void
UnPackTo(SoftmaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SpaceToDepthOptionsBuilder { - typedef SpaceToDepthOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_block_size(int32_t block_size) { - fbb_.AddElement(SpaceToDepthOptions::VT_BLOCK_SIZE, block_size, 0); +struct SoftmaxOptionsBuilder { + typedef SoftmaxOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_beta(float beta) { + fbb_.AddElement(SoftmaxOptions::VT_BETA, beta, 0.0f); } - explicit SpaceToDepthOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit SoftmaxOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSpaceToDepthOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t block_size = 0) { - SpaceToDepthOptionsBuilder builder_(_fbb); - builder_.add_block_size(block_size); +inline ::flatbuffers::Offset CreateSoftmaxOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + float beta = 0.0f) { + SoftmaxOptionsBuilder builder_(_fbb); + builder_.add_beta(beta); return builder_.Finish(); } -flatbuffers::Offset CreateSpaceToDepthOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSoftmaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct DepthToSpaceOptionsT : public flatbuffers::NativeTable { - typedef DepthToSpaceOptions TableType; - int32_t block_size = 0; +struct ConcatenationOptionsT : public ::flatbuffers::NativeTable { + typedef ConcatenationOptions TableType; + int32_t axis = 0; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; }; -struct DepthToSpaceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef DepthToSpaceOptionsT NativeTableType; - typedef DepthToSpaceOptionsBuilder Builder; +struct ConcatenationOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ConcatenationOptionsT NativeTableType; + typedef ConcatenationOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_BLOCK_SIZE = 4 + VT_AXIS = 4, + VT_FUSED_ACTIVATION_FUNCTION = 6 }; - int32_t block_size() const { - return GetField(VT_BLOCK_SIZE, 0); + int32_t axis() const { + return GetField(VT_AXIS, 0); + } + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_BLOCK_SIZE, 4) && + VerifyField(verifier, VT_AXIS, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && verifier.EndTable(); } - DepthToSpaceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(DepthToSpaceOptionsT *_o, const flatbuffers::resolver_function_t 
*_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ConcatenationOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ConcatenationOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct DepthToSpaceOptionsBuilder { - typedef DepthToSpaceOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_block_size(int32_t block_size) { - fbb_.AddElement(DepthToSpaceOptions::VT_BLOCK_SIZE, block_size, 0); +struct ConcatenationOptionsBuilder { + typedef ConcatenationOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_axis(int32_t axis) { + fbb_.AddElement(ConcatenationOptions::VT_AXIS, axis, 0); } - explicit DepthToSpaceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(ConcatenationOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit ConcatenationOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDepthToSpaceOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t block_size = 0) { - DepthToSpaceOptionsBuilder builder_(_fbb); - builder_.add_block_size(block_size); +inline ::flatbuffers::Offset CreateConcatenationOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t axis = 0, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + ConcatenationOptionsBuilder builder_(_fbb); + builder_.add_axis(axis); + builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateDepthToSpaceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateConcatenationOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct SubOptionsT : public flatbuffers::NativeTable { - typedef SubOptions TableType; +struct AddOptionsT : public ::flatbuffers::NativeTable { + typedef AddOptions TableType; tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; bool pot_scale_int16 = true; }; -struct SubOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SubOptionsT NativeTableType; - typedef SubOptionsBuilder Builder; +struct AddOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef AddOptionsT NativeTableType; + typedef AddOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_FUSED_ACTIVATION_FUNCTION = 4, VT_POT_SCALE_INT16 = 6 @@ -7327,6635 +8850,8781 @@ struct SubOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { bool pot_scale_int16() const { return 
GetField(VT_POT_SCALE_INT16, 1) != 0; } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && VerifyField(verifier, VT_POT_SCALE_INT16, 1) && verifier.EndTable(); } - SubOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SubOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + AddOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(AddOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SubOptionsBuilder { - typedef SubOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; +struct AddOptionsBuilder { + typedef AddOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { - fbb_.AddElement(SubOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + fbb_.AddElement(AddOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); } void add_pot_scale_int16(bool pot_scale_int16) { - fbb_.AddElement(SubOptions::VT_POT_SCALE_INT16, static_cast(pot_scale_int16), 1); + fbb_.AddElement(AddOptions::VT_POT_SCALE_INT16, static_cast(pot_scale_int16), 1); } - explicit SubOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit AddOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSubOptions( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateAddOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, bool pot_scale_int16 = true) { - SubOptionsBuilder builder_(_fbb); + AddOptionsBuilder builder_(_fbb); builder_.add_pot_scale_int16(pot_scale_int16); builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateSubOptions(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateAddOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct DivOptionsT : public flatbuffers::NativeTable { - typedef DivOptions TableType; +struct MulOptionsT : public ::flatbuffers::NativeTable { + typedef MulOptions TableType; tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; }; -struct DivOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef DivOptionsT NativeTableType; - typedef DivOptionsBuilder Builder; +struct MulOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef 
MulOptionsT NativeTableType; + typedef MulOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { VT_FUSED_ACTIVATION_FUNCTION = 4 }; tflite::ActivationFunctionType fused_activation_function() const { return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && verifier.EndTable(); } - DivOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(DivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + MulOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MulOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct DivOptionsBuilder { - typedef DivOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; +struct MulOptionsBuilder { + typedef MulOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { - fbb_.AddElement(DivOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + fbb_.AddElement(MulOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); } - explicit DivOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit MulOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDivOptions( - flatbuffers::FlatBufferBuilder &_fbb, +inline ::flatbuffers::Offset CreateMulOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { - DivOptionsBuilder builder_(_fbb); + MulOptionsBuilder builder_(_fbb); builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateMulOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct TopKV2OptionsT : public flatbuffers::NativeTable { - typedef TopKV2Options TableType; +struct L2NormOptionsT : public ::flatbuffers::NativeTable { + typedef L2NormOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; }; -struct TopKV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef TopKV2OptionsT NativeTableType; - typedef TopKV2OptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct L2NormOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef 
L2NormOptionsT NativeTableType; + typedef L2NormOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && verifier.EndTable(); } - TopKV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(TopKV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + L2NormOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(L2NormOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct TopKV2OptionsBuilder { - typedef TopKV2Options Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit TopKV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct L2NormOptionsBuilder { + typedef L2NormOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(L2NormOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + explicit L2NormOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateTopKV2Options( - flatbuffers::FlatBufferBuilder &_fbb) { - TopKV2OptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateL2NormOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + L2NormOptionsBuilder builder_(_fbb); + builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateTopKV2Options(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateL2NormOptions(::flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct EmbeddingLookupSparseOptionsT : public flatbuffers::NativeTable { - typedef EmbeddingLookupSparseOptions TableType; - tflite::CombinerType combiner = tflite::CombinerType_SUM; +struct LocalResponseNormalizationOptionsT : public ::flatbuffers::NativeTable { + typedef LocalResponseNormalizationOptions TableType; + int32_t radius = 0; + float bias = 0.0f; + float alpha = 0.0f; + float beta = 0.0f; }; -struct EmbeddingLookupSparseOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef EmbeddingLookupSparseOptionsT NativeTableType; - typedef EmbeddingLookupSparseOptionsBuilder Builder; +struct LocalResponseNormalizationOptions 
FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef LocalResponseNormalizationOptionsT NativeTableType; + typedef LocalResponseNormalizationOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_COMBINER = 4 + VT_RADIUS = 4, + VT_BIAS = 6, + VT_ALPHA = 8, + VT_BETA = 10 }; - tflite::CombinerType combiner() const { - return static_cast(GetField(VT_COMBINER, 0)); + int32_t radius() const { + return GetField(VT_RADIUS, 0); + } + float bias() const { + return GetField(VT_BIAS, 0.0f); + } + float alpha() const { + return GetField(VT_ALPHA, 0.0f); } - bool Verify(flatbuffers::Verifier &verifier) const { + float beta() const { + return GetField(VT_BETA, 0.0f); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_COMBINER, 1) && + VerifyField(verifier, VT_RADIUS, 4) && + VerifyField(verifier, VT_BIAS, 4) && + VerifyField(verifier, VT_ALPHA, 4) && + VerifyField(verifier, VT_BETA, 4) && verifier.EndTable(); } - EmbeddingLookupSparseOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(EmbeddingLookupSparseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + LocalResponseNormalizationOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LocalResponseNormalizationOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct EmbeddingLookupSparseOptionsBuilder { - typedef EmbeddingLookupSparseOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_combiner(tflite::CombinerType combiner) { - fbb_.AddElement(EmbeddingLookupSparseOptions::VT_COMBINER, static_cast(combiner), 0); +struct LocalResponseNormalizationOptionsBuilder { + typedef LocalResponseNormalizationOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_radius(int32_t radius) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_RADIUS, radius, 0); + } + void add_bias(float bias) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_BIAS, bias, 0.0f); + } + void add_alpha(float alpha) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_ALPHA, alpha, 0.0f); + } + void add_beta(float beta) { + fbb_.AddElement(LocalResponseNormalizationOptions::VT_BETA, beta, 0.0f); } - explicit EmbeddingLookupSparseOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit LocalResponseNormalizationOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateEmbeddingLookupSparseOptions( - flatbuffers::FlatBufferBuilder &_fbb, - tflite::CombinerType combiner = tflite::CombinerType_SUM) { - EmbeddingLookupSparseOptionsBuilder builder_(_fbb); - builder_.add_combiner(combiner); +inline ::flatbuffers::Offset CreateLocalResponseNormalizationOptions( + 
::flatbuffers::FlatBufferBuilder &_fbb, + int32_t radius = 0, + float bias = 0.0f, + float alpha = 0.0f, + float beta = 0.0f) { + LocalResponseNormalizationOptionsBuilder builder_(_fbb); + builder_.add_beta(beta); + builder_.add_alpha(alpha); + builder_.add_bias(bias); + builder_.add_radius(radius); return builder_.Finish(); } -flatbuffers::Offset CreateEmbeddingLookupSparseOptions(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateLocalResponseNormalizationOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct GatherOptionsT : public flatbuffers::NativeTable { - typedef GatherOptions TableType; - int32_t axis = 0; - int32_t batch_dims = 0; +struct LSTMOptionsT : public ::flatbuffers::NativeTable { + typedef LSTMOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + float cell_clip = 0.0f; + float proj_clip = 0.0f; + tflite::LSTMKernelType kernel_type = tflite::LSTMKernelType_FULL; + bool asymmetric_quantize_inputs = false; }; -struct GatherOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef GatherOptionsT NativeTableType; - typedef GatherOptionsBuilder Builder; +struct LSTMOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef LSTMOptionsT NativeTableType; + typedef LSTMOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_AXIS = 4, - VT_BATCH_DIMS = 6 + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_CELL_CLIP = 6, + VT_PROJ_CLIP = 8, + VT_KERNEL_TYPE = 10, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 12 }; - int32_t axis() const { - return GetField(VT_AXIS, 0); + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); } - int32_t batch_dims() const { - return GetField(VT_BATCH_DIMS, 0); + float cell_clip() const { + return GetField(VT_CELL_CLIP, 0.0f); + } + float proj_clip() const { + return GetField(VT_PROJ_CLIP, 0.0f); } - bool Verify(flatbuffers::Verifier &verifier) const { + tflite::LSTMKernelType kernel_type() const { + return static_cast(GetField(VT_KERNEL_TYPE, 0)); + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_AXIS, 4) && - VerifyField(verifier, VT_BATCH_DIMS, 4) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_CELL_CLIP, 4) && + VerifyField(verifier, VT_PROJ_CLIP, 4) && + VerifyField(verifier, VT_KERNEL_TYPE, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && verifier.EndTable(); } - GatherOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(GatherOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + LSTMOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LSTMOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const 
LSTMOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct GatherOptionsBuilder { - typedef GatherOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_axis(int32_t axis) { - fbb_.AddElement(GatherOptions::VT_AXIS, axis, 0); +struct LSTMOptionsBuilder { + typedef LSTMOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(LSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); } - void add_batch_dims(int32_t batch_dims) { - fbb_.AddElement(GatherOptions::VT_BATCH_DIMS, batch_dims, 0); + void add_cell_clip(float cell_clip) { + fbb_.AddElement(LSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f); } - explicit GatherOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); + void add_proj_clip(float proj_clip) { + fbb_.AddElement(LSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f); } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; + void add_kernel_type(tflite::LSTMKernelType kernel_type) { + fbb_.AddElement(LSTMOptions::VT_KERNEL_TYPE, static_cast(kernel_type), 0); } -}; - -inline flatbuffers::Offset CreateGatherOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t axis = 0, - int32_t batch_dims = 0) { - GatherOptionsBuilder builder_(_fbb); - builder_.add_batch_dims(batch_dims); - builder_.add_axis(axis); - return builder_.Finish(); -} - -flatbuffers::Offset CreateGatherOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); - -struct TransposeOptionsT : public flatbuffers::NativeTable { - typedef TransposeOptions TableType; -}; - -struct TransposeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef TransposeOptionsT NativeTableType; - typedef TransposeOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && - verifier.EndTable(); + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(LSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); } - TransposeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(TransposeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); -}; - -struct TransposeOptionsBuilder { - typedef TransposeOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit TransposeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit LSTMOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateTransposeOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - TransposeOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateLSTMOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = 
tflite::ActivationFunctionType_NONE, + float cell_clip = 0.0f, + float proj_clip = 0.0f, + tflite::LSTMKernelType kernel_type = tflite::LSTMKernelType_FULL, + bool asymmetric_quantize_inputs = false) { + LSTMOptionsBuilder builder_(_fbb); + builder_.add_proj_clip(proj_clip); + builder_.add_cell_clip(cell_clip); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_kernel_type(kernel_type); + builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateTransposeOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateLSTMOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ExpOptionsT : public flatbuffers::NativeTable { - typedef ExpOptions TableType; +struct UnidirectionalSequenceLSTMOptionsT : public ::flatbuffers::NativeTable { + typedef UnidirectionalSequenceLSTMOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + float cell_clip = 0.0f; + float proj_clip = 0.0f; + bool time_major = false; + bool asymmetric_quantize_inputs = false; + bool diagonal_recurrent_tensors = false; }; -struct ExpOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ExpOptionsT NativeTableType; - typedef ExpOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct UnidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef UnidirectionalSequenceLSTMOptionsT NativeTableType; + typedef UnidirectionalSequenceLSTMOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_CELL_CLIP = 6, + VT_PROJ_CLIP = 8, + VT_TIME_MAJOR = 10, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 12, + VT_DIAGONAL_RECURRENT_TENSORS = 14 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + float cell_clip() const { + return GetField(VT_CELL_CLIP, 0.0f); + } + float proj_clip() const { + return GetField(VT_PROJ_CLIP, 0.0f); + } + bool time_major() const { + return GetField(VT_TIME_MAJOR, 0) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool diagonal_recurrent_tensors() const { + return GetField(VT_DIAGONAL_RECURRENT_TENSORS, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_CELL_CLIP, 4) && + VerifyField(verifier, VT_PROJ_CLIP, 4) && + VerifyField(verifier, VT_TIME_MAJOR, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + VerifyField(verifier, VT_DIAGONAL_RECURRENT_TENSORS, 1) && verifier.EndTable(); } - ExpOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ExpOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + UnidirectionalSequenceLSTMOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnidirectionalSequenceLSTMOptionsT *_o, const 
::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct ExpOptionsBuilder { - typedef ExpOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit ExpOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct UnidirectionalSequenceLSTMOptionsBuilder { + typedef UnidirectionalSequenceLSTMOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_cell_clip(float cell_clip) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f); + } + void add_proj_clip(float proj_clip) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f); + } + void add_time_major(bool time_major) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_TIME_MAJOR, static_cast(time_major), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + void add_diagonal_recurrent_tensors(bool diagonal_recurrent_tensors) { + fbb_.AddElement(UnidirectionalSequenceLSTMOptions::VT_DIAGONAL_RECURRENT_TENSORS, static_cast(diagonal_recurrent_tensors), 0); + } + explicit UnidirectionalSequenceLSTMOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateExpOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - ExpOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateUnidirectionalSequenceLSTMOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + float cell_clip = 0.0f, + float proj_clip = 0.0f, + bool time_major = false, + bool asymmetric_quantize_inputs = false, + bool diagonal_recurrent_tensors = false) { + UnidirectionalSequenceLSTMOptionsBuilder builder_(_fbb); + builder_.add_proj_clip(proj_clip); + builder_.add_cell_clip(cell_clip); + builder_.add_diagonal_recurrent_tensors(diagonal_recurrent_tensors); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_time_major(time_major); + builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateExpOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); - -struct CosOptionsT : public flatbuffers::NativeTable { - typedef CosOptions TableType; -}; +::flatbuffers::Offset CreateUnidirectionalSequenceLSTMOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct CosOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef CosOptionsT NativeTableType; - typedef CosOptionsBuilder Builder; - bool 
Verify(flatbuffers::Verifier &verifier) const { +struct BidirectionalSequenceLSTMOptionsT : public ::flatbuffers::NativeTable { + typedef BidirectionalSequenceLSTMOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + float cell_clip = 0.0f; + float proj_clip = 0.0f; + bool merge_outputs = false; + bool time_major = true; + bool asymmetric_quantize_inputs = false; +}; + +struct BidirectionalSequenceLSTMOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef BidirectionalSequenceLSTMOptionsT NativeTableType; + typedef BidirectionalSequenceLSTMOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_CELL_CLIP = 6, + VT_PROJ_CLIP = 8, + VT_MERGE_OUTPUTS = 10, + VT_TIME_MAJOR = 12, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 14 + }; + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + float cell_clip() const { + return GetField(VT_CELL_CLIP, 0.0f); + } + float proj_clip() const { + return GetField(VT_PROJ_CLIP, 0.0f); + } + bool merge_outputs() const { + return GetField(VT_MERGE_OUTPUTS, 0) != 0; + } + bool time_major() const { + return GetField(VT_TIME_MAJOR, 1) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_CELL_CLIP, 4) && + VerifyField(verifier, VT_PROJ_CLIP, 4) && + VerifyField(verifier, VT_MERGE_OUTPUTS, 1) && + VerifyField(verifier, VT_TIME_MAJOR, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && verifier.EndTable(); } - CosOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(CosOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + BidirectionalSequenceLSTMOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BidirectionalSequenceLSTMOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct CosOptionsBuilder { - typedef CosOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit CosOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct BidirectionalSequenceLSTMOptionsBuilder { + typedef BidirectionalSequenceLSTMOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_cell_clip(float cell_clip) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_CELL_CLIP, cell_clip, 0.0f); + } + void add_proj_clip(float proj_clip) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_PROJ_CLIP, proj_clip, 0.0f); + } + void add_merge_outputs(bool merge_outputs) { + 
fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_MERGE_OUTPUTS, static_cast(merge_outputs), 0); + } + void add_time_major(bool time_major) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_TIME_MAJOR, static_cast(time_major), 1); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(BidirectionalSequenceLSTMOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit BidirectionalSequenceLSTMOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateCosOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - CosOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateBidirectionalSequenceLSTMOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + float cell_clip = 0.0f, + float proj_clip = 0.0f, + bool merge_outputs = false, + bool time_major = true, + bool asymmetric_quantize_inputs = false) { + BidirectionalSequenceLSTMOptionsBuilder builder_(_fbb); + builder_.add_proj_clip(proj_clip); + builder_.add_cell_clip(cell_clip); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_time_major(time_major); + builder_.add_merge_outputs(merge_outputs); + builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateCosOptions(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateBidirectionalSequenceLSTMOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ReducerOptionsT : public flatbuffers::NativeTable { - typedef ReducerOptions TableType; - bool keep_dims = false; +struct ResizeBilinearOptionsT : public ::flatbuffers::NativeTable { + typedef ResizeBilinearOptions TableType; + bool align_corners = false; + bool half_pixel_centers = false; }; -struct ReducerOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ReducerOptionsT NativeTableType; - typedef ReducerOptionsBuilder Builder; +struct ResizeBilinearOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ResizeBilinearOptionsT NativeTableType; + typedef ResizeBilinearOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_KEEP_DIMS = 4 + VT_ALIGN_CORNERS = 8, + VT_HALF_PIXEL_CENTERS = 10 }; - bool keep_dims() const { - return GetField(VT_KEEP_DIMS, 0) != 0; + bool align_corners() const { + return GetField(VT_ALIGN_CORNERS, 0) != 0; + } + bool half_pixel_centers() const { + return GetField(VT_HALF_PIXEL_CENTERS, 0) != 0; } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_KEEP_DIMS, 1) && + VerifyField(verifier, VT_ALIGN_CORNERS, 1) && + VerifyField(verifier, VT_HALF_PIXEL_CENTERS, 1) && verifier.EndTable(); } - ReducerOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ReducerOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = 
nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ResizeBilinearOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ResizeBilinearOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct ReducerOptionsBuilder { - typedef ReducerOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_keep_dims(bool keep_dims) { - fbb_.AddElement(ReducerOptions::VT_KEEP_DIMS, static_cast(keep_dims), 0); +struct ResizeBilinearOptionsBuilder { + typedef ResizeBilinearOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_align_corners(bool align_corners) { + fbb_.AddElement(ResizeBilinearOptions::VT_ALIGN_CORNERS, static_cast(align_corners), 0); + } + void add_half_pixel_centers(bool half_pixel_centers) { + fbb_.AddElement(ResizeBilinearOptions::VT_HALF_PIXEL_CENTERS, static_cast(half_pixel_centers), 0); } - explicit ReducerOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ResizeBilinearOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateReducerOptions( - flatbuffers::FlatBufferBuilder &_fbb, - bool keep_dims = false) { - ReducerOptionsBuilder builder_(_fbb); - builder_.add_keep_dims(keep_dims); +inline ::flatbuffers::Offset CreateResizeBilinearOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + bool align_corners = false, + bool half_pixel_centers = false) { + ResizeBilinearOptionsBuilder builder_(_fbb); + builder_.add_half_pixel_centers(half_pixel_centers); + builder_.add_align_corners(align_corners); return builder_.Finish(); } -flatbuffers::Offset CreateReducerOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateResizeBilinearOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct SqueezeOptionsT : public flatbuffers::NativeTable { - typedef SqueezeOptions TableType; - std::vector squeeze_dims{}; +struct ResizeNearestNeighborOptionsT : public ::flatbuffers::NativeTable { + typedef ResizeNearestNeighborOptions TableType; + bool align_corners = false; + bool half_pixel_centers = false; }; -struct SqueezeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SqueezeOptionsT NativeTableType; - typedef SqueezeOptionsBuilder Builder; +struct ResizeNearestNeighborOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ResizeNearestNeighborOptionsT NativeTableType; + typedef ResizeNearestNeighborOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_SQUEEZE_DIMS = 4 + VT_ALIGN_CORNERS = 4, + VT_HALF_PIXEL_CENTERS = 6 }; - const flatbuffers::Vector *squeeze_dims() const { - return GetPointer *>(VT_SQUEEZE_DIMS); + bool align_corners() const { + return 
GetField(VT_ALIGN_CORNERS, 0) != 0; } - bool Verify(flatbuffers::Verifier &verifier) const { + bool half_pixel_centers() const { + return GetField(VT_HALF_PIXEL_CENTERS, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyOffset(verifier, VT_SQUEEZE_DIMS) && - verifier.VerifyVector(squeeze_dims()) && + VerifyField(verifier, VT_ALIGN_CORNERS, 1) && + VerifyField(verifier, VT_HALF_PIXEL_CENTERS, 1) && verifier.EndTable(); } - SqueezeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SqueezeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ResizeNearestNeighborOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ResizeNearestNeighborOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SqueezeOptionsBuilder { - typedef SqueezeOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_squeeze_dims(flatbuffers::Offset> squeeze_dims) { - fbb_.AddOffset(SqueezeOptions::VT_SQUEEZE_DIMS, squeeze_dims); +struct ResizeNearestNeighborOptionsBuilder { + typedef ResizeNearestNeighborOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_align_corners(bool align_corners) { + fbb_.AddElement(ResizeNearestNeighborOptions::VT_ALIGN_CORNERS, static_cast(align_corners), 0); + } + void add_half_pixel_centers(bool half_pixel_centers) { + fbb_.AddElement(ResizeNearestNeighborOptions::VT_HALF_PIXEL_CENTERS, static_cast(half_pixel_centers), 0); } - explicit SqueezeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ResizeNearestNeighborOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSqueezeOptions( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset> squeeze_dims = 0) { - SqueezeOptionsBuilder builder_(_fbb); - builder_.add_squeeze_dims(squeeze_dims); +inline ::flatbuffers::Offset CreateResizeNearestNeighborOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + bool align_corners = false, + bool half_pixel_centers = false) { + ResizeNearestNeighborOptionsBuilder builder_(_fbb); + builder_.add_half_pixel_centers(half_pixel_centers); + builder_.add_align_corners(align_corners); return builder_.Finish(); } -inline flatbuffers::Offset CreateSqueezeOptionsDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector *squeeze_dims = nullptr) { - auto squeeze_dims__ = squeeze_dims ? 
_fbb.CreateVector(*squeeze_dims) : 0; - return tflite::CreateSqueezeOptions( - _fbb, - squeeze_dims__); -} +::flatbuffers::Offset CreateResizeNearestNeighborOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -flatbuffers::Offset CreateSqueezeOptions(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); - -struct SplitOptionsT : public flatbuffers::NativeTable { - typedef SplitOptions TableType; - int32_t num_splits = 0; +struct CallOptionsT : public ::flatbuffers::NativeTable { + typedef CallOptions TableType; + uint32_t subgraph = 0; }; -struct SplitOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SplitOptionsT NativeTableType; - typedef SplitOptionsBuilder Builder; +struct CallOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef CallOptionsT NativeTableType; + typedef CallOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_NUM_SPLITS = 4 + VT_SUBGRAPH = 4 }; - int32_t num_splits() const { - return GetField(VT_NUM_SPLITS, 0); + uint32_t subgraph() const { + return GetField(VT_SUBGRAPH, 0); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_NUM_SPLITS, 4) && + VerifyField(verifier, VT_SUBGRAPH, 4) && verifier.EndTable(); } - SplitOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SplitOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + CallOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CallOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SplitOptionsBuilder { - typedef SplitOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_num_splits(int32_t num_splits) { - fbb_.AddElement(SplitOptions::VT_NUM_SPLITS, num_splits, 0); +struct CallOptionsBuilder { + typedef CallOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_subgraph(uint32_t subgraph) { + fbb_.AddElement(CallOptions::VT_SUBGRAPH, subgraph, 0); } - explicit SplitOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit CallOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSplitOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t num_splits = 0) { - SplitOptionsBuilder builder_(_fbb); - builder_.add_num_splits(num_splits); +inline ::flatbuffers::Offset CreateCallOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + uint32_t subgraph = 0) { + CallOptionsBuilder builder_(_fbb); + builder_.add_subgraph(subgraph); return builder_.Finish(); } -flatbuffers::Offset 
CreateSplitOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateCallOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct SplitVOptionsT : public flatbuffers::NativeTable { - typedef SplitVOptions TableType; - int32_t num_splits = 0; +struct PadOptionsT : public ::flatbuffers::NativeTable { + typedef PadOptions TableType; }; -struct SplitVOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SplitVOptionsT NativeTableType; - typedef SplitVOptionsBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_NUM_SPLITS = 4 - }; - int32_t num_splits() const { - return GetField(VT_NUM_SPLITS, 0); - } - bool Verify(flatbuffers::Verifier &verifier) const { +struct PadOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef PadOptionsT NativeTableType; + typedef PadOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_NUM_SPLITS, 4) && verifier.EndTable(); } - SplitVOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SplitVOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + PadOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(PadOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SplitVOptionsBuilder { - typedef SplitVOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_num_splits(int32_t num_splits) { - fbb_.AddElement(SplitVOptions::VT_NUM_SPLITS, num_splits, 0); - } - explicit SplitVOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct PadOptionsBuilder { + typedef PadOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit PadOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSplitVOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t num_splits = 0) { - SplitVOptionsBuilder builder_(_fbb); - builder_.add_num_splits(num_splits); +inline ::flatbuffers::Offset CreatePadOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + PadOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateSplitVOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreatePadOptions(::flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct StridedSliceOptionsT : public flatbuffers::NativeTable { - typedef StridedSliceOptions TableType; - int32_t begin_mask = 0; - int32_t end_mask = 0; - int32_t 
ellipsis_mask = 0; - int32_t new_axis_mask = 0; - int32_t shrink_axis_mask = 0; - bool offset = false; +struct PadV2OptionsT : public ::flatbuffers::NativeTable { + typedef PadV2Options TableType; }; -struct StridedSliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef StridedSliceOptionsT NativeTableType; - typedef StridedSliceOptionsBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_BEGIN_MASK = 4, - VT_END_MASK = 6, - VT_ELLIPSIS_MASK = 8, - VT_NEW_AXIS_MASK = 10, - VT_SHRINK_AXIS_MASK = 12, - VT_OFFSET = 14 - }; - int32_t begin_mask() const { - return GetField(VT_BEGIN_MASK, 0); +struct PadV2Options FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef PadV2OptionsT NativeTableType; + typedef PadV2OptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - int32_t end_mask() const { - return GetField(VT_END_MASK, 0); + PadV2OptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(PadV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct PadV2OptionsBuilder { + typedef PadV2Options Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit PadV2OptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - int32_t ellipsis_mask() const { - return GetField(VT_ELLIPSIS_MASK, 0); - } - int32_t new_axis_mask() const { - return GetField(VT_NEW_AXIS_MASK, 0); - } - int32_t shrink_axis_mask() const { - return GetField(VT_SHRINK_AXIS_MASK, 0); + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - bool offset() const { - return GetField(VT_OFFSET, 0) != 0; +}; + +inline ::flatbuffers::Offset CreatePadV2Options( + ::flatbuffers::FlatBufferBuilder &_fbb) { + PadV2OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreatePadV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReshapeOptionsT : public ::flatbuffers::NativeTable { + typedef ReshapeOptions TableType; + std::vector new_shape{}; +}; + +struct ReshapeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ReshapeOptionsT NativeTableType; + typedef ReshapeOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NEW_SHAPE = 4 + }; + const ::flatbuffers::Vector *new_shape() const { + return GetPointer *>(VT_NEW_SHAPE); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_BEGIN_MASK, 4) && - VerifyField(verifier, VT_END_MASK, 4) && - VerifyField(verifier, VT_ELLIPSIS_MASK, 4) && - VerifyField(verifier, VT_NEW_AXIS_MASK, 4) && - VerifyField(verifier, VT_SHRINK_AXIS_MASK, 4) && - VerifyField(verifier, VT_OFFSET, 1) && + VerifyOffset(verifier, VT_NEW_SHAPE) && + verifier.VerifyVector(new_shape()) && verifier.EndTable(); } - StridedSliceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(StridedSliceOptionsT *_o, const 
flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ReshapeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReshapeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct StridedSliceOptionsBuilder { - typedef StridedSliceOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_begin_mask(int32_t begin_mask) { - fbb_.AddElement(StridedSliceOptions::VT_BEGIN_MASK, begin_mask, 0); - } - void add_end_mask(int32_t end_mask) { - fbb_.AddElement(StridedSliceOptions::VT_END_MASK, end_mask, 0); - } - void add_ellipsis_mask(int32_t ellipsis_mask) { - fbb_.AddElement(StridedSliceOptions::VT_ELLIPSIS_MASK, ellipsis_mask, 0); - } - void add_new_axis_mask(int32_t new_axis_mask) { - fbb_.AddElement(StridedSliceOptions::VT_NEW_AXIS_MASK, new_axis_mask, 0); - } - void add_shrink_axis_mask(int32_t shrink_axis_mask) { - fbb_.AddElement(StridedSliceOptions::VT_SHRINK_AXIS_MASK, shrink_axis_mask, 0); - } - void add_offset(bool offset) { - fbb_.AddElement(StridedSliceOptions::VT_OFFSET, static_cast(offset), 0); +struct ReshapeOptionsBuilder { + typedef ReshapeOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_new_shape(::flatbuffers::Offset<::flatbuffers::Vector> new_shape) { + fbb_.AddOffset(ReshapeOptions::VT_NEW_SHAPE, new_shape); } - explicit StridedSliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit ReshapeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateStridedSliceOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t begin_mask = 0, - int32_t end_mask = 0, - int32_t ellipsis_mask = 0, - int32_t new_axis_mask = 0, - int32_t shrink_axis_mask = 0, - bool offset = false) { - StridedSliceOptionsBuilder builder_(_fbb); - builder_.add_shrink_axis_mask(shrink_axis_mask); - builder_.add_new_axis_mask(new_axis_mask); - builder_.add_ellipsis_mask(ellipsis_mask); - builder_.add_end_mask(end_mask); - builder_.add_begin_mask(begin_mask); - builder_.add_offset(offset); +inline ::flatbuffers::Offset CreateReshapeOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector> new_shape = 0) { + ReshapeOptionsBuilder builder_(_fbb); + builder_.add_new_shape(new_shape); return builder_.Finish(); } -flatbuffers::Offset CreateStridedSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline ::flatbuffers::Offset CreateReshapeOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *new_shape = nullptr) { + auto new_shape__ = new_shape ? 
_fbb.CreateVector(*new_shape) : 0; + return tflite::CreateReshapeOptions( + _fbb, + new_shape__); +} + +::flatbuffers::Offset CreateReshapeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct LogSoftmaxOptionsT : public flatbuffers::NativeTable { - typedef LogSoftmaxOptions TableType; +struct SpaceToBatchNDOptionsT : public ::flatbuffers::NativeTable { + typedef SpaceToBatchNDOptions TableType; }; -struct LogSoftmaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef LogSoftmaxOptionsT NativeTableType; - typedef LogSoftmaxOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct SpaceToBatchNDOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SpaceToBatchNDOptionsT NativeTableType; + typedef SpaceToBatchNDOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - LogSoftmaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(LogSoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + SpaceToBatchNDOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SpaceToBatchNDOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct LogSoftmaxOptionsBuilder { - typedef LogSoftmaxOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit LogSoftmaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct SpaceToBatchNDOptionsBuilder { + typedef SpaceToBatchNDOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit SpaceToBatchNDOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateLogSoftmaxOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - LogSoftmaxOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateSpaceToBatchNDOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + SpaceToBatchNDOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateLogSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSpaceToBatchNDOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct CastOptionsT : public flatbuffers::NativeTable { - typedef CastOptions TableType; - tflite::TensorType in_data_type = tflite::TensorType_FLOAT32; - tflite::TensorType out_data_type = tflite::TensorType_FLOAT32; +struct BatchToSpaceNDOptionsT : public ::flatbuffers::NativeTable { + typedef BatchToSpaceNDOptions TableType; }; -struct CastOptions FLATBUFFERS_FINAL_CLASS : private 
flatbuffers::Table { - typedef CastOptionsT NativeTableType; - typedef CastOptionsBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_IN_DATA_TYPE = 4, - VT_OUT_DATA_TYPE = 6 - }; - tflite::TensorType in_data_type() const { - return static_cast(GetField(VT_IN_DATA_TYPE, 0)); - } - tflite::TensorType out_data_type() const { - return static_cast(GetField(VT_OUT_DATA_TYPE, 0)); - } - bool Verify(flatbuffers::Verifier &verifier) const { +struct BatchToSpaceNDOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef BatchToSpaceNDOptionsT NativeTableType; + typedef BatchToSpaceNDOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_IN_DATA_TYPE, 1) && - VerifyField(verifier, VT_OUT_DATA_TYPE, 1) && verifier.EndTable(); } - CastOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(CastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + BatchToSpaceNDOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BatchToSpaceNDOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct CastOptionsBuilder { - typedef CastOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_in_data_type(tflite::TensorType in_data_type) { - fbb_.AddElement(CastOptions::VT_IN_DATA_TYPE, static_cast(in_data_type), 0); - } - void add_out_data_type(tflite::TensorType out_data_type) { - fbb_.AddElement(CastOptions::VT_OUT_DATA_TYPE, static_cast(out_data_type), 0); - } - explicit CastOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct BatchToSpaceNDOptionsBuilder { + typedef BatchToSpaceNDOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit BatchToSpaceNDOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateCastOptions( - flatbuffers::FlatBufferBuilder &_fbb, - tflite::TensorType in_data_type = tflite::TensorType_FLOAT32, - tflite::TensorType out_data_type = tflite::TensorType_FLOAT32) { - CastOptionsBuilder builder_(_fbb); - builder_.add_out_data_type(out_data_type); - builder_.add_in_data_type(in_data_type); +inline ::flatbuffers::Offset CreateBatchToSpaceNDOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + BatchToSpaceNDOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateCastOptions(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateBatchToSpaceNDOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct DequantizeOptionsT : public flatbuffers::NativeTable { - typedef DequantizeOptions 
TableType; +struct SkipGramOptionsT : public ::flatbuffers::NativeTable { + typedef SkipGramOptions TableType; + int32_t ngram_size = 0; + int32_t max_skip_size = 0; + bool include_all_ngrams = false; }; -struct DequantizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef DequantizeOptionsT NativeTableType; - typedef DequantizeOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct SkipGramOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SkipGramOptionsT NativeTableType; + typedef SkipGramOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NGRAM_SIZE = 4, + VT_MAX_SKIP_SIZE = 6, + VT_INCLUDE_ALL_NGRAMS = 8 + }; + int32_t ngram_size() const { + return GetField(VT_NGRAM_SIZE, 0); + } + int32_t max_skip_size() const { + return GetField(VT_MAX_SKIP_SIZE, 0); + } + bool include_all_ngrams() const { + return GetField(VT_INCLUDE_ALL_NGRAMS, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_NGRAM_SIZE, 4) && + VerifyField(verifier, VT_MAX_SKIP_SIZE, 4) && + VerifyField(verifier, VT_INCLUDE_ALL_NGRAMS, 1) && verifier.EndTable(); } - DequantizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(DequantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + SkipGramOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SkipGramOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct DequantizeOptionsBuilder { - typedef DequantizeOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit DequantizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct SkipGramOptionsBuilder { + typedef SkipGramOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_ngram_size(int32_t ngram_size) { + fbb_.AddElement(SkipGramOptions::VT_NGRAM_SIZE, ngram_size, 0); + } + void add_max_skip_size(int32_t max_skip_size) { + fbb_.AddElement(SkipGramOptions::VT_MAX_SKIP_SIZE, max_skip_size, 0); + } + void add_include_all_ngrams(bool include_all_ngrams) { + fbb_.AddElement(SkipGramOptions::VT_INCLUDE_ALL_NGRAMS, static_cast(include_all_ngrams), 0); + } + explicit SkipGramOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDequantizeOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - DequantizeOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateSkipGramOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t ngram_size = 0, + int32_t max_skip_size = 0, + bool include_all_ngrams = false) { + SkipGramOptionsBuilder builder_(_fbb); + builder_.add_max_skip_size(max_skip_size); + builder_.add_ngram_size(ngram_size); + 
builder_.add_include_all_ngrams(include_all_ngrams); return builder_.Finish(); } -flatbuffers::Offset CreateDequantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSkipGramOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct MaximumMinimumOptionsT : public flatbuffers::NativeTable { - typedef MaximumMinimumOptions TableType; +struct SpaceToDepthOptionsT : public ::flatbuffers::NativeTable { + typedef SpaceToDepthOptions TableType; + int32_t block_size = 0; }; -struct MaximumMinimumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef MaximumMinimumOptionsT NativeTableType; - typedef MaximumMinimumOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct SpaceToDepthOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SpaceToDepthOptionsT NativeTableType; + typedef SpaceToDepthOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BLOCK_SIZE = 4 + }; + int32_t block_size() const { + return GetField(VT_BLOCK_SIZE, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_BLOCK_SIZE, 4) && verifier.EndTable(); } - MaximumMinimumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(MaximumMinimumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + SpaceToDepthOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SpaceToDepthOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct MaximumMinimumOptionsBuilder { - typedef MaximumMinimumOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit MaximumMinimumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct SpaceToDepthOptionsBuilder { + typedef SpaceToDepthOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_block_size(int32_t block_size) { + fbb_.AddElement(SpaceToDepthOptions::VT_BLOCK_SIZE, block_size, 0); + } + explicit SpaceToDepthOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateMaximumMinimumOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - MaximumMinimumOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateSpaceToDepthOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t block_size = 0) { + SpaceToDepthOptionsBuilder builder_(_fbb); + builder_.add_block_size(block_size); return builder_.Finish(); } -flatbuffers::Offset CreateMaximumMinimumOptions(flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSpaceToDepthOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct TileOptionsT : public flatbuffers::NativeTable { - typedef TileOptions TableType; +struct DepthToSpaceOptionsT : public ::flatbuffers::NativeTable { + typedef DepthToSpaceOptions TableType; + int32_t block_size = 0; }; -struct TileOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef TileOptionsT NativeTableType; - typedef TileOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct DepthToSpaceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef DepthToSpaceOptionsT NativeTableType; + typedef DepthToSpaceOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_BLOCK_SIZE = 4 + }; + int32_t block_size() const { + return GetField(VT_BLOCK_SIZE, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_BLOCK_SIZE, 4) && verifier.EndTable(); } - TileOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(TileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + DepthToSpaceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DepthToSpaceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct TileOptionsBuilder { - typedef TileOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit TileOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct DepthToSpaceOptionsBuilder { + typedef DepthToSpaceOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_block_size(int32_t block_size) { + fbb_.AddElement(DepthToSpaceOptions::VT_BLOCK_SIZE, block_size, 0); + } + explicit DepthToSpaceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateTileOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - TileOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateDepthToSpaceOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t block_size = 0) { + DepthToSpaceOptionsBuilder builder_(_fbb); + builder_.add_block_size(block_size); return builder_.Finish(); } -flatbuffers::Offset CreateTileOptions(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateDepthToSpaceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ArgMaxOptionsT : public flatbuffers::NativeTable { - typedef ArgMaxOptions TableType; - tflite::TensorType output_type = 
tflite::TensorType_FLOAT32; +struct SubOptionsT : public ::flatbuffers::NativeTable { + typedef SubOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; + bool pot_scale_int16 = true; }; -struct ArgMaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ArgMaxOptionsT NativeTableType; - typedef ArgMaxOptionsBuilder Builder; +struct SubOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SubOptionsT NativeTableType; + typedef SubOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_OUTPUT_TYPE = 4 + VT_FUSED_ACTIVATION_FUNCTION = 4, + VT_POT_SCALE_INT16 = 6 }; - tflite::TensorType output_type() const { - return static_cast(GetField(VT_OUTPUT_TYPE, 0)); + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); + } + bool pot_scale_int16() const { + return GetField(VT_POT_SCALE_INT16, 1) != 0; } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_OUTPUT_TYPE, 1) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && + VerifyField(verifier, VT_POT_SCALE_INT16, 1) && verifier.EndTable(); } - ArgMaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ArgMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + SubOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SubOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct ArgMaxOptionsBuilder { - typedef ArgMaxOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_output_type(tflite::TensorType output_type) { - fbb_.AddElement(ArgMaxOptions::VT_OUTPUT_TYPE, static_cast(output_type), 0); +struct SubOptionsBuilder { + typedef SubOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(SubOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); + } + void add_pot_scale_int16(bool pot_scale_int16) { + fbb_.AddElement(SubOptions::VT_POT_SCALE_INT16, static_cast(pot_scale_int16), 1); } - explicit ArgMaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit SubOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateArgMaxOptions( - flatbuffers::FlatBufferBuilder &_fbb, - tflite::TensorType output_type = tflite::TensorType_FLOAT32) { - ArgMaxOptionsBuilder builder_(_fbb); - builder_.add_output_type(output_type); +inline ::flatbuffers::Offset CreateSubOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + 
tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + bool pot_scale_int16 = true) { + SubOptionsBuilder builder_(_fbb); + builder_.add_pot_scale_int16(pot_scale_int16); + builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateArgMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSubOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ArgMinOptionsT : public flatbuffers::NativeTable { - typedef ArgMinOptions TableType; - tflite::TensorType output_type = tflite::TensorType_FLOAT32; +struct DivOptionsT : public ::flatbuffers::NativeTable { + typedef DivOptions TableType; + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE; }; -struct ArgMinOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ArgMinOptionsT NativeTableType; - typedef ArgMinOptionsBuilder Builder; +struct DivOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef DivOptionsT NativeTableType; + typedef DivOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_OUTPUT_TYPE = 4 + VT_FUSED_ACTIVATION_FUNCTION = 4 }; - tflite::TensorType output_type() const { - return static_cast(GetField(VT_OUTPUT_TYPE, 0)); + tflite::ActivationFunctionType fused_activation_function() const { + return static_cast(GetField(VT_FUSED_ACTIVATION_FUNCTION, 0)); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_OUTPUT_TYPE, 1) && + VerifyField(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) && verifier.EndTable(); } - ArgMinOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ArgMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + DivOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DivOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct ArgMinOptionsBuilder { - typedef ArgMinOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_output_type(tflite::TensorType output_type) { - fbb_.AddElement(ArgMinOptions::VT_OUTPUT_TYPE, static_cast(output_type), 0); +struct DivOptionsBuilder { + typedef DivOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) { + fbb_.AddElement(DivOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast(fused_activation_function), 0); } - explicit ArgMinOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit DivOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); 
- auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateArgMinOptions( - flatbuffers::FlatBufferBuilder &_fbb, - tflite::TensorType output_type = tflite::TensorType_FLOAT32) { - ArgMinOptionsBuilder builder_(_fbb); - builder_.add_output_type(output_type); +inline ::flatbuffers::Offset CreateDivOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) { + DivOptionsBuilder builder_(_fbb); + builder_.add_fused_activation_function(fused_activation_function); return builder_.Finish(); } -flatbuffers::Offset CreateArgMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateDivOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct GreaterOptionsT : public flatbuffers::NativeTable { - typedef GreaterOptions TableType; +struct TopKV2OptionsT : public ::flatbuffers::NativeTable { + typedef TopKV2Options TableType; }; -struct GreaterOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef GreaterOptionsT NativeTableType; - typedef GreaterOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct TopKV2Options FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef TopKV2OptionsT NativeTableType; + typedef TopKV2OptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - GreaterOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(GreaterOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + TopKV2OptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TopKV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct GreaterOptionsBuilder { - typedef GreaterOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit GreaterOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct TopKV2OptionsBuilder { + typedef TopKV2Options Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit TopKV2OptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateGreaterOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - GreaterOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateTopKV2Options( + ::flatbuffers::FlatBufferBuilder &_fbb) { + TopKV2OptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateGreaterOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset 
CreateTopKV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct GreaterEqualOptionsT : public flatbuffers::NativeTable { - typedef GreaterEqualOptions TableType; +struct EmbeddingLookupSparseOptionsT : public ::flatbuffers::NativeTable { + typedef EmbeddingLookupSparseOptions TableType; + tflite::CombinerType combiner = tflite::CombinerType_SUM; }; -struct GreaterEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef GreaterEqualOptionsT NativeTableType; - typedef GreaterEqualOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct EmbeddingLookupSparseOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef EmbeddingLookupSparseOptionsT NativeTableType; + typedef EmbeddingLookupSparseOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_COMBINER = 4 + }; + tflite::CombinerType combiner() const { + return static_cast(GetField(VT_COMBINER, 0)); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_COMBINER, 1) && verifier.EndTable(); } - GreaterEqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(GreaterEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + EmbeddingLookupSparseOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(EmbeddingLookupSparseOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct GreaterEqualOptionsBuilder { - typedef GreaterEqualOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit GreaterEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct EmbeddingLookupSparseOptionsBuilder { + typedef EmbeddingLookupSparseOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_combiner(tflite::CombinerType combiner) { + fbb_.AddElement(EmbeddingLookupSparseOptions::VT_COMBINER, static_cast(combiner), 0); + } + explicit EmbeddingLookupSparseOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateGreaterEqualOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - GreaterEqualOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateEmbeddingLookupSparseOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::CombinerType combiner = tflite::CombinerType_SUM) { + EmbeddingLookupSparseOptionsBuilder builder_(_fbb); + builder_.add_combiner(combiner); return builder_.Finish(); } -flatbuffers::Offset CreateGreaterEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset 
CreateEmbeddingLookupSparseOptions(::flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct LessOptionsT : public flatbuffers::NativeTable { - typedef LessOptions TableType; +struct GatherOptionsT : public ::flatbuffers::NativeTable { + typedef GatherOptions TableType; + int32_t axis = 0; + int32_t batch_dims = 0; }; -struct LessOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef LessOptionsT NativeTableType; - typedef LessOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct GatherOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef GatherOptionsT NativeTableType; + typedef GatherOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_AXIS = 4, + VT_BATCH_DIMS = 6 + }; + int32_t axis() const { + return GetField(VT_AXIS, 0); + } + int32_t batch_dims() const { + return GetField(VT_BATCH_DIMS, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_AXIS, 4) && + VerifyField(verifier, VT_BATCH_DIMS, 4) && verifier.EndTable(); } - LessOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(LessOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + GatherOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(GatherOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct LessOptionsBuilder { - typedef LessOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit LessOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct GatherOptionsBuilder { + typedef GatherOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_axis(int32_t axis) { + fbb_.AddElement(GatherOptions::VT_AXIS, axis, 0); + } + void add_batch_dims(int32_t batch_dims) { + fbb_.AddElement(GatherOptions::VT_BATCH_DIMS, batch_dims, 0); + } + explicit GatherOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateLessOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - LessOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateGatherOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t axis = 0, + int32_t batch_dims = 0) { + GatherOptionsBuilder builder_(_fbb); + builder_.add_batch_dims(batch_dims); + builder_.add_axis(axis); return builder_.Finish(); } -flatbuffers::Offset CreateLessOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateGatherOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct 
LessEqualOptionsT : public flatbuffers::NativeTable { - typedef LessEqualOptions TableType; +struct TransposeOptionsT : public ::flatbuffers::NativeTable { + typedef TransposeOptions TableType; }; -struct LessEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef LessEqualOptionsT NativeTableType; - typedef LessEqualOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct TransposeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef TransposeOptionsT NativeTableType; + typedef TransposeOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - LessEqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(LessEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + TransposeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TransposeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct LessEqualOptionsBuilder { - typedef LessEqualOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit LessEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct TransposeOptionsBuilder { + typedef TransposeOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit TransposeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateLessEqualOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - LessEqualOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateTransposeOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + TransposeOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateLessEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateTransposeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct NegOptionsT : public flatbuffers::NativeTable { - typedef NegOptions TableType; +struct ExpOptionsT : public ::flatbuffers::NativeTable { + typedef ExpOptions TableType; }; -struct NegOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef NegOptionsT NativeTableType; - typedef NegOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct ExpOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ExpOptionsT NativeTableType; + typedef ExpOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - NegOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void 
UnPackTo(NegOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ExpOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ExpOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct NegOptionsBuilder { - typedef NegOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit NegOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct ExpOptionsBuilder { + typedef ExpOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit ExpOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateNegOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - NegOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateExpOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + ExpOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateNegOptions(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateExpOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct SelectOptionsT : public flatbuffers::NativeTable { - typedef SelectOptions TableType; +struct CosOptionsT : public ::flatbuffers::NativeTable { + typedef CosOptions TableType; }; -struct SelectOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SelectOptionsT NativeTableType; - typedef SelectOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct CosOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef CosOptionsT NativeTableType; + typedef CosOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - SelectOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SelectOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + CosOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CosOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SelectOptionsBuilder { - typedef SelectOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit SelectOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct CosOptionsBuilder { + typedef CosOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + 
+  ::flatbuffers::uoffset_t start_;
+  explicit CosOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SelectOptions> Finish() {
+  ::flatbuffers::Offset<CosOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SelectOptions>(end);
+    auto o = ::flatbuffers::Offset<CosOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<SelectOptions> CreateSelectOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  SelectOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<CosOptions> CreateCosOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  CosOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<SelectOptions> CreateSelectOptions(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<CosOptions> CreateCosOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct SliceOptionsT : public flatbuffers::NativeTable {
-  typedef SliceOptions TableType;
+struct ReducerOptionsT : public ::flatbuffers::NativeTable {
+  typedef ReducerOptions TableType;
+  bool keep_dims = false;
 };
-struct SliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SliceOptionsT NativeTableType;
-  typedef SliceOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct ReducerOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ReducerOptionsT NativeTableType;
+  typedef ReducerOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_KEEP_DIMS = 4
+  };
+  bool keep_dims() const {
+    return GetField<uint8_t>(VT_KEEP_DIMS, 0) != 0;
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<uint8_t>(verifier, VT_KEEP_DIMS, 1) &&
            verifier.EndTable();
   }
-  SliceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SliceOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ReducerOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ReducerOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ReducerOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct SliceOptionsBuilder {
-  typedef SliceOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit SliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct ReducerOptionsBuilder {
+  typedef ReducerOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_keep_dims(bool keep_dims) {
+    fbb_.AddElement<uint8_t>(ReducerOptions::VT_KEEP_DIMS, static_cast<uint8_t>(keep_dims), 0);
+  }
+  explicit ReducerOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SliceOptions> Finish() {
+  ::flatbuffers::Offset<ReducerOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SliceOptions>(end);
+    auto o = ::flatbuffers::Offset<ReducerOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<SliceOptions> CreateSliceOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  SliceOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<ReducerOptions> CreateReducerOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    bool keep_dims = false) {
+  ReducerOptionsBuilder builder_(_fbb);
+  builder_.add_keep_dims(keep_dims);
   return builder_.Finish();
 }
-flatbuffers::Offset<SliceOptions> CreateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<ReducerOptions> CreateReducerOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct TransposeConvOptionsT : public flatbuffers::NativeTable {
-  typedef TransposeConvOptions TableType;
-  tflite::Padding padding = tflite::Padding_SAME;
-  int32_t stride_w = 0;
-  int32_t stride_h = 0;
-  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+struct SqueezeOptionsT : public ::flatbuffers::NativeTable {
+  typedef SqueezeOptions TableType;
+  std::vector<int32_t> squeeze_dims{};
 };
-struct TransposeConvOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef TransposeConvOptionsT NativeTableType;
-  typedef TransposeConvOptionsBuilder Builder;
+struct SqueezeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SqueezeOptionsT NativeTableType;
+  typedef SqueezeOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_PADDING = 4,
-    VT_STRIDE_W = 6,
-    VT_STRIDE_H = 8,
-    VT_FUSED_ACTIVATION_FUNCTION = 10
+    VT_SQUEEZE_DIMS = 4
   };
-  tflite::Padding padding() const {
-    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
-  }
-  int32_t stride_w() const {
-    return GetField<int32_t>(VT_STRIDE_W, 0);
+  const ::flatbuffers::Vector<int32_t> *squeeze_dims() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_SQUEEZE_DIMS);
   }
-  int32_t stride_h() const {
-    return GetField<int32_t>(VT_STRIDE_H, 0);
-  }
-  tflite::ActivationFunctionType fused_activation_function() const {
-    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
-           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
-           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyOffset(verifier, VT_SQUEEZE_DIMS) &&
+           verifier.VerifyVector(squeeze_dims()) &&
            verifier.EndTable();
   }
-  TransposeConvOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(TransposeConvOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<TransposeConvOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SqueezeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SqueezeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SqueezeOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct TransposeConvOptionsBuilder {
-  typedef TransposeConvOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_padding(tflite::Padding padding) {
-    fbb_.AddElement<int8_t>(TransposeConvOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
-  }
-  void add_stride_w(int32_t stride_w) {
-    fbb_.AddElement<int32_t>(TransposeConvOptions::VT_STRIDE_W, stride_w, 0);
-  }
-  void add_stride_h(int32_t stride_h) {
-    fbb_.AddElement<int32_t>(TransposeConvOptions::VT_STRIDE_H, stride_h, 0);
-  }
-  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
-    fbb_.AddElement<int8_t>(TransposeConvOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+struct SqueezeOptionsBuilder {
+  typedef SqueezeOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_squeeze_dims(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> squeeze_dims) {
+    fbb_.AddOffset(SqueezeOptions::VT_SQUEEZE_DIMS, squeeze_dims);
   }
-  explicit TransposeConvOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit SqueezeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<TransposeConvOptions> Finish() {
+  ::flatbuffers::Offset<SqueezeOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<TransposeConvOptions>(end);
+    auto o = ::flatbuffers::Offset<SqueezeOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<TransposeConvOptions> CreateTransposeConvOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::Padding padding = tflite::Padding_SAME,
-    int32_t stride_w = 0,
-    int32_t stride_h = 0,
-    tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE) {
-  TransposeConvOptionsBuilder builder_(_fbb);
-  builder_.add_stride_h(stride_h);
-  builder_.add_stride_w(stride_w);
-  builder_.add_fused_activation_function(fused_activation_function);
-  builder_.add_padding(padding);
+inline ::flatbuffers::Offset<SqueezeOptions> CreateSqueezeOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> squeeze_dims = 0) {
+  SqueezeOptionsBuilder builder_(_fbb);
+  builder_.add_squeeze_dims(squeeze_dims);
   return builder_.Finish();
 }
-flatbuffers::Offset<TransposeConvOptions> CreateTransposeConvOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+inline ::flatbuffers::Offset<SqueezeOptions> CreateSqueezeOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<int32_t> *squeeze_dims = nullptr) {
+  auto squeeze_dims__ = squeeze_dims ? _fbb.CreateVector<int32_t>(*squeeze_dims) : 0;
+  return tflite::CreateSqueezeOptions(
+      _fbb,
+      squeeze_dims__);
+}
+
+::flatbuffers::Offset<SqueezeOptions> CreateSqueezeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct ExpandDimsOptionsT : public flatbuffers::NativeTable {
-  typedef ExpandDimsOptions TableType;
+struct SplitOptionsT : public ::flatbuffers::NativeTable {
+  typedef SplitOptions TableType;
+  int32_t num_splits = 0;
 };
-struct ExpandDimsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ExpandDimsOptionsT NativeTableType;
-  typedef ExpandDimsOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct SplitOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SplitOptionsT NativeTableType;
+  typedef SplitOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_NUM_SPLITS = 4
+  };
+  int32_t num_splits() const {
+    return GetField<int32_t>(VT_NUM_SPLITS, 0);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int32_t>(verifier, VT_NUM_SPLITS, 4) &&
            verifier.EndTable();
   }
-  ExpandDimsOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ExpandDimsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ExpandDimsOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SplitOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SplitOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SplitOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct ExpandDimsOptionsBuilder {
-  typedef ExpandDimsOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit ExpandDimsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
-      : fbb_(_fbb) {
-    start_ = fbb_.StartTable();
+struct SplitOptionsBuilder {
+  typedef SplitOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_num_splits(int32_t num_splits) {
+    fbb_.AddElement<int32_t>(SplitOptions::VT_NUM_SPLITS, num_splits, 0);
   }
-  flatbuffers::Offset<ExpandDimsOptions> Finish() {
+  explicit SplitOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
+  }
+  ::flatbuffers::Offset<SplitOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ExpandDimsOptions>(end);
+    auto o = ::flatbuffers::Offset<SplitOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<ExpandDimsOptions> CreateExpandDimsOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  ExpandDimsOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<SplitOptions> CreateSplitOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t num_splits = 0) {
+  SplitOptionsBuilder builder_(_fbb);
+  builder_.add_num_splits(num_splits);
   return builder_.Finish();
 }
-flatbuffers::Offset<ExpandDimsOptions> CreateExpandDimsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SplitOptions> CreateSplitOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
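For reference, a minimal usage sketch of the regenerated ReducerOptions and SqueezeOptions helpers above — hypothetical caller code, not part of the generated header, assuming the header and flatbuffers are on the include path:

    // Hypothetical caller code (all values illustrative).
    flatbuffers::FlatBufferBuilder fbb;
    // ReducerOptions carries a single scalar field, keep_dims.
    auto reducer = tflite::CreateReducerOptions(fbb, /*keep_dims=*/true);
    // The *Direct overload copies a std::vector into the flatbuffer.
    std::vector<int32_t> dims = {1, 2};
    auto squeeze = tflite::CreateSqueezeOptionsDirect(fbb, &dims);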
-struct SparseToDenseOptionsT : public flatbuffers::NativeTable {
-  typedef SparseToDenseOptions TableType;
-  bool validate_indices = false;
+struct SplitVOptionsT : public ::flatbuffers::NativeTable {
+  typedef SplitVOptions TableType;
+  int32_t num_splits = 0;
 };
-struct SparseToDenseOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SparseToDenseOptionsT NativeTableType;
-  typedef SparseToDenseOptionsBuilder Builder;
+struct SplitVOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SplitVOptionsT NativeTableType;
+  typedef SplitVOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_VALIDATE_INDICES = 4
+    VT_NUM_SPLITS = 4
   };
-  bool validate_indices() const {
-    return GetField<uint8_t>(VT_VALIDATE_INDICES, 0) != 0;
+  int32_t num_splits() const {
+    return GetField<int32_t>(VT_NUM_SPLITS, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<uint8_t>(verifier, VT_VALIDATE_INDICES, 1) &&
+           VerifyField<int32_t>(verifier, VT_NUM_SPLITS, 4) &&
            verifier.EndTable();
   }
-  SparseToDenseOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SparseToDenseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SparseToDenseOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SplitVOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SplitVOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SplitVOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct SparseToDenseOptionsBuilder {
-  typedef SparseToDenseOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_validate_indices(bool validate_indices) {
-    fbb_.AddElement<uint8_t>(SparseToDenseOptions::VT_VALIDATE_INDICES, static_cast<uint8_t>(validate_indices), 0);
+struct SplitVOptionsBuilder {
+  typedef SplitVOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_num_splits(int32_t num_splits) {
+    fbb_.AddElement<int32_t>(SplitVOptions::VT_NUM_SPLITS, num_splits, 0);
   }
-  explicit SparseToDenseOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit SplitVOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SparseToDenseOptions> Finish() {
+  ::flatbuffers::Offset<SplitVOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SparseToDenseOptions>(end);
+    auto o = ::flatbuffers::Offset<SplitVOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<SparseToDenseOptions> CreateSparseToDenseOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    bool validate_indices = false) {
-  SparseToDenseOptionsBuilder builder_(_fbb);
-  builder_.add_validate_indices(validate_indices);
+inline ::flatbuffers::Offset<SplitVOptions> CreateSplitVOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t num_splits = 0) {
+  SplitVOptionsBuilder builder_(_fbb);
+  builder_.add_num_splits(num_splits);
   return builder_.Finish();
 }
-flatbuffers::Offset<SparseToDenseOptions> CreateSparseToDenseOptions(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SplitVOptions> CreateSplitVOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct EqualOptionsT : public flatbuffers::NativeTable {
-  typedef EqualOptions TableType;
+struct StridedSliceOptionsT : public ::flatbuffers::NativeTable {
+  typedef StridedSliceOptions TableType;
+  int32_t begin_mask = 0;
+  int32_t end_mask = 0;
+  int32_t ellipsis_mask = 0;
+  int32_t new_axis_mask = 0;
+  int32_t shrink_axis_mask = 0;
+  bool offset = false;
 };
-struct EqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef EqualOptionsT NativeTableType;
-  typedef EqualOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct StridedSliceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef StridedSliceOptionsT NativeTableType;
+  typedef StridedSliceOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_BEGIN_MASK = 4,
+    VT_END_MASK = 6,
+    VT_ELLIPSIS_MASK = 8,
+    VT_NEW_AXIS_MASK = 10,
+    VT_SHRINK_AXIS_MASK = 12,
+    VT_OFFSET = 14
+  };
+  int32_t begin_mask() const {
+    return GetField<int32_t>(VT_BEGIN_MASK, 0);
+  }
+  int32_t end_mask() const {
+    return GetField<int32_t>(VT_END_MASK, 0);
+  }
+  int32_t ellipsis_mask() const {
+    return GetField<int32_t>(VT_ELLIPSIS_MASK, 0);
+  }
+  int32_t new_axis_mask() const {
+    return GetField<int32_t>(VT_NEW_AXIS_MASK, 0);
+  }
+  int32_t shrink_axis_mask() const {
+    return GetField<int32_t>(VT_SHRINK_AXIS_MASK, 0);
+  }
+  bool offset() const {
+    return GetField<uint8_t>(VT_OFFSET, 0) != 0;
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int32_t>(verifier, VT_BEGIN_MASK, 4) &&
+           VerifyField<int32_t>(verifier, VT_END_MASK, 4) &&
+           VerifyField<int32_t>(verifier, VT_ELLIPSIS_MASK, 4) &&
+           VerifyField<int32_t>(verifier, VT_NEW_AXIS_MASK, 4) &&
+           VerifyField<int32_t>(verifier, VT_SHRINK_AXIS_MASK, 4) &&
+           VerifyField<uint8_t>(verifier, VT_OFFSET, 1) &&
            verifier.EndTable();
   }
-  EqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(EqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<EqualOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  StridedSliceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(StridedSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<StridedSliceOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct EqualOptionsBuilder {
-  typedef EqualOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit EqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct StridedSliceOptionsBuilder {
+  typedef StridedSliceOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_begin_mask(int32_t begin_mask) {
+    fbb_.AddElement<int32_t>(StridedSliceOptions::VT_BEGIN_MASK, begin_mask, 0);
+  }
+  void add_end_mask(int32_t end_mask) {
+    fbb_.AddElement<int32_t>(StridedSliceOptions::VT_END_MASK, end_mask, 0);
+  }
+  void add_ellipsis_mask(int32_t ellipsis_mask) {
+    fbb_.AddElement<int32_t>(StridedSliceOptions::VT_ELLIPSIS_MASK, ellipsis_mask, 0);
+  }
+  void add_new_axis_mask(int32_t new_axis_mask) {
+    fbb_.AddElement<int32_t>(StridedSliceOptions::VT_NEW_AXIS_MASK, new_axis_mask, 0);
+  }
+  void add_shrink_axis_mask(int32_t shrink_axis_mask) {
+    fbb_.AddElement<int32_t>(StridedSliceOptions::VT_SHRINK_AXIS_MASK, shrink_axis_mask, 0);
+  }
+  void add_offset(bool offset) {
+    fbb_.AddElement<uint8_t>(StridedSliceOptions::VT_OFFSET, static_cast<uint8_t>(offset), 0);
+  }
+  explicit StridedSliceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<EqualOptions> Finish() {
+  ::flatbuffers::Offset<StridedSliceOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<EqualOptions>(end);
+    auto o = ::flatbuffers::Offset<StridedSliceOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<EqualOptions> CreateEqualOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  EqualOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<StridedSliceOptions> CreateStridedSliceOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t begin_mask = 0,
+    int32_t end_mask = 0,
+    int32_t ellipsis_mask = 0,
+    int32_t new_axis_mask = 0,
+    int32_t shrink_axis_mask = 0,
+    bool offset = false) {
+  StridedSliceOptionsBuilder builder_(_fbb);
+  builder_.add_shrink_axis_mask(shrink_axis_mask);
+  builder_.add_new_axis_mask(new_axis_mask);
+  builder_.add_ellipsis_mask(ellipsis_mask);
+  builder_.add_end_mask(end_mask);
+  builder_.add_begin_mask(begin_mask);
+  builder_.add_offset(offset);
   return builder_.Finish();
 }
-flatbuffers::Offset<EqualOptions> CreateEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<StridedSliceOptions> CreateStridedSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
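A hypothetical sketch of the StridedSliceOptions factory above, including the `offset` field that this regeneration adds alongside the five existing masks (all values illustrative):

    flatbuffers::FlatBufferBuilder fbb;
    // Each mask is a bit field over the slice dimensions.
    auto strided = tflite::CreateStridedSliceOptions(
        fbb,
        /*begin_mask=*/1, /*end_mask=*/1, /*ellipsis_mask=*/0,
        /*new_axis_mask=*/0, /*shrink_axis_mask=*/0, /*offset=*/true);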
-struct NotEqualOptionsT : public flatbuffers::NativeTable {
-  typedef NotEqualOptions TableType;
+struct LogSoftmaxOptionsT : public ::flatbuffers::NativeTable {
+  typedef LogSoftmaxOptions TableType;
 };
-struct NotEqualOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef NotEqualOptionsT NativeTableType;
-  typedef NotEqualOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct LogSoftmaxOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef LogSoftmaxOptionsT NativeTableType;
+  typedef LogSoftmaxOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  NotEqualOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(NotEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<NotEqualOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  LogSoftmaxOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(LogSoftmaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<LogSoftmaxOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct NotEqualOptionsBuilder {
-  typedef NotEqualOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit NotEqualOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct LogSoftmaxOptionsBuilder {
+  typedef LogSoftmaxOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit LogSoftmaxOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<NotEqualOptions> Finish() {
+  ::flatbuffers::Offset<LogSoftmaxOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<NotEqualOptions>(end);
+    auto o = ::flatbuffers::Offset<LogSoftmaxOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<NotEqualOptions> CreateNotEqualOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  NotEqualOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<LogSoftmaxOptions> CreateLogSoftmaxOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  LogSoftmaxOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<NotEqualOptions> CreateNotEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<LogSoftmaxOptions> CreateLogSoftmaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct ShapeOptionsT : public flatbuffers::NativeTable {
-  typedef ShapeOptions TableType;
-  tflite::TensorType out_type = tflite::TensorType_FLOAT32;
+struct CastOptionsT : public ::flatbuffers::NativeTable {
+  typedef CastOptions TableType;
+  tflite::TensorType in_data_type = tflite::TensorType_FLOAT32;
+  tflite::TensorType out_data_type = tflite::TensorType_FLOAT32;
 };
-struct ShapeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ShapeOptionsT NativeTableType;
-  typedef ShapeOptionsBuilder Builder;
+struct CastOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef CastOptionsT NativeTableType;
+  typedef CastOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_OUT_TYPE = 4
+    VT_IN_DATA_TYPE = 4,
+    VT_OUT_DATA_TYPE = 6
   };
-  tflite::TensorType out_type() const {
-    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_OUT_TYPE, 0));
+  tflite::TensorType in_data_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_IN_DATA_TYPE, 0));
+  }
+  tflite::TensorType out_data_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_OUT_DATA_TYPE, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_OUT_TYPE, 1) &&
+           VerifyField<int8_t>(verifier, VT_IN_DATA_TYPE, 1) &&
+           VerifyField<int8_t>(verifier, VT_OUT_DATA_TYPE, 1) &&
            verifier.EndTable();
   }
-  ShapeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ShapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ShapeOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  CastOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(CastOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<CastOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct ShapeOptionsBuilder {
-  typedef ShapeOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_out_type(tflite::TensorType out_type) {
-    fbb_.AddElement<int8_t>(ShapeOptions::VT_OUT_TYPE, static_cast<int8_t>(out_type), 0);
+struct CastOptionsBuilder {
+  typedef CastOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_in_data_type(tflite::TensorType in_data_type) {
+    fbb_.AddElement<int8_t>(CastOptions::VT_IN_DATA_TYPE, static_cast<int8_t>(in_data_type), 0);
+  }
+  void add_out_data_type(tflite::TensorType out_data_type) {
+    fbb_.AddElement<int8_t>(CastOptions::VT_OUT_DATA_TYPE, static_cast<int8_t>(out_data_type), 0);
   }
-  explicit ShapeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit CastOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<ShapeOptions> Finish() {
+  ::flatbuffers::Offset<CastOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ShapeOptions>(end);
+    auto o = ::flatbuffers::Offset<CastOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<ShapeOptions> CreateShapeOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::TensorType out_type = tflite::TensorType_FLOAT32) {
-  ShapeOptionsBuilder builder_(_fbb);
-  builder_.add_out_type(out_type);
+inline ::flatbuffers::Offset<CastOptions> CreateCastOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::TensorType in_data_type = tflite::TensorType_FLOAT32,
+    tflite::TensorType out_data_type = tflite::TensorType_FLOAT32) {
+  CastOptionsBuilder builder_(_fbb);
+  builder_.add_out_data_type(out_data_type);
+  builder_.add_in_data_type(in_data_type);
   return builder_.Finish();
 }
-flatbuffers::Offset<ShapeOptions> CreateShapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<CastOptions> CreateCastOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
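A sketch of the CastOptions helper above; the in/out tensor types are illustrative, and both default to FLOAT32:

    flatbuffers::FlatBufferBuilder fbb;
    auto cast = tflite::CreateCastOptions(
        fbb, /*in_data_type=*/tflite::TensorType_FLOAT32,
        /*out_data_type=*/tflite::TensorType_INT8);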
-struct RankOptionsT : public flatbuffers::NativeTable {
-  typedef RankOptions TableType;
+struct DequantizeOptionsT : public ::flatbuffers::NativeTable {
+  typedef DequantizeOptions TableType;
 };
-struct RankOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef RankOptionsT NativeTableType;
-  typedef RankOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct DequantizeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef DequantizeOptionsT NativeTableType;
+  typedef DequantizeOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  RankOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(RankOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<RankOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  DequantizeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(DequantizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<DequantizeOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct RankOptionsBuilder {
-  typedef RankOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit RankOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct DequantizeOptionsBuilder {
+  typedef DequantizeOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit DequantizeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<RankOptions> Finish() {
+  ::flatbuffers::Offset<DequantizeOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<RankOptions>(end);
+    auto o = ::flatbuffers::Offset<DequantizeOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<RankOptions> CreateRankOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  RankOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<DequantizeOptions> CreateDequantizeOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  DequantizeOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<RankOptions> CreateRankOptions(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<DequantizeOptions> CreateDequantizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct PowOptionsT : public flatbuffers::NativeTable {
-  typedef PowOptions TableType;
+struct MaximumMinimumOptionsT : public ::flatbuffers::NativeTable {
+  typedef MaximumMinimumOptions TableType;
 };
-struct PowOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef PowOptionsT NativeTableType;
-  typedef PowOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct MaximumMinimumOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef MaximumMinimumOptionsT NativeTableType;
+  typedef MaximumMinimumOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  PowOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(PowOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<PowOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  MaximumMinimumOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(MaximumMinimumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<MaximumMinimumOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct PowOptionsBuilder {
-  typedef PowOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit PowOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct MaximumMinimumOptionsBuilder {
+  typedef MaximumMinimumOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit MaximumMinimumOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<PowOptions> Finish() {
+  ::flatbuffers::Offset<MaximumMinimumOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<PowOptions>(end);
+    auto o = ::flatbuffers::Offset<MaximumMinimumOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<PowOptions> CreatePowOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  PowOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<MaximumMinimumOptions> CreateMaximumMinimumOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  MaximumMinimumOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<PowOptions> CreatePowOptions(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<MaximumMinimumOptions> CreateMaximumMinimumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct FakeQuantOptionsT : public flatbuffers::NativeTable {
-  typedef FakeQuantOptions TableType;
-  float min = 0.0f;
-  float max = 0.0f;
-  int32_t num_bits = 0;
-  bool narrow_range = false;
+struct TileOptionsT : public ::flatbuffers::NativeTable {
+  typedef TileOptions TableType;
 };
-struct FakeQuantOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef FakeQuantOptionsT NativeTableType;
-  typedef FakeQuantOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_MIN = 4,
-    VT_MAX = 6,
-    VT_NUM_BITS = 8,
-    VT_NARROW_RANGE = 10
-  };
-  float min() const {
-    return GetField<float>(VT_MIN, 0.0f);
-  }
-  float max() const {
-    return GetField<float>(VT_MAX, 0.0f);
-  }
-  int32_t num_bits() const {
-    return GetField<int32_t>(VT_NUM_BITS, 0);
-  }
-  bool narrow_range() const {
-    return GetField<uint8_t>(VT_NARROW_RANGE, 0) != 0;
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct TileOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef TileOptionsT NativeTableType;
+  typedef TileOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<float>(verifier, VT_MIN, 4) &&
-           VerifyField<float>(verifier, VT_MAX, 4) &&
-           VerifyField<int32_t>(verifier, VT_NUM_BITS, 4) &&
-           VerifyField<uint8_t>(verifier, VT_NARROW_RANGE, 1) &&
            verifier.EndTable();
   }
-  FakeQuantOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(FakeQuantOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<FakeQuantOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  TileOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(TileOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<TileOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct FakeQuantOptionsBuilder {
-  typedef FakeQuantOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_min(float min) {
-    fbb_.AddElement<float>(FakeQuantOptions::VT_MIN, min, 0.0f);
-  }
-  void add_max(float max) {
-    fbb_.AddElement<float>(FakeQuantOptions::VT_MAX, max, 0.0f);
-  }
-  void add_num_bits(int32_t num_bits) {
-    fbb_.AddElement<int32_t>(FakeQuantOptions::VT_NUM_BITS, num_bits, 0);
-  }
-  void add_narrow_range(bool narrow_range) {
-    fbb_.AddElement<uint8_t>(FakeQuantOptions::VT_NARROW_RANGE, static_cast<uint8_t>(narrow_range), 0);
-  }
-  explicit FakeQuantOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct TileOptionsBuilder {
+  typedef TileOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit TileOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<FakeQuantOptions> Finish() {
+  ::flatbuffers::Offset<TileOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<FakeQuantOptions>(end);
+    auto o = ::flatbuffers::Offset<TileOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<FakeQuantOptions> CreateFakeQuantOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    float min = 0.0f,
-    float max = 0.0f,
-    int32_t num_bits = 0,
-    bool narrow_range = false) {
-  FakeQuantOptionsBuilder builder_(_fbb);
-  builder_.add_num_bits(num_bits);
-  builder_.add_max(max);
-  builder_.add_min(min);
-  builder_.add_narrow_range(narrow_range);
+inline ::flatbuffers::Offset<TileOptions> CreateTileOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  TileOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<FakeQuantOptions> CreateFakeQuantOptions(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<TileOptions> CreateTileOptions(::flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct PackOptionsT : public flatbuffers::NativeTable {
-  typedef PackOptions TableType;
-  int32_t values_count = 0;
-  int32_t axis = 0;
+struct ArgMaxOptionsT : public ::flatbuffers::NativeTable {
+  typedef ArgMaxOptions TableType;
+  tflite::TensorType output_type = tflite::TensorType_FLOAT32;
 };
-struct PackOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef PackOptionsT NativeTableType;
-  typedef PackOptionsBuilder Builder;
+struct ArgMaxOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ArgMaxOptionsT NativeTableType;
+  typedef ArgMaxOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_VALUES_COUNT = 4,
-    VT_AXIS = 6
+    VT_OUTPUT_TYPE = 4
   };
-  int32_t values_count() const {
-    return GetField<int32_t>(VT_VALUES_COUNT, 0);
-  }
-  int32_t axis() const {
-    return GetField<int32_t>(VT_AXIS, 0);
+  tflite::TensorType output_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_OUTPUT_TYPE, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_VALUES_COUNT, 4) &&
-           VerifyField<int32_t>(verifier, VT_AXIS, 4) &&
+           VerifyField<int8_t>(verifier, VT_OUTPUT_TYPE, 1) &&
            verifier.EndTable();
   }
-  PackOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(PackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<PackOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ArgMaxOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ArgMaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ArgMaxOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct PackOptionsBuilder {
-  typedef PackOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_values_count(int32_t values_count) {
-    fbb_.AddElement<int32_t>(PackOptions::VT_VALUES_COUNT, values_count, 0);
-  }
-  void add_axis(int32_t axis) {
-    fbb_.AddElement<int32_t>(PackOptions::VT_AXIS, axis, 0);
+struct ArgMaxOptionsBuilder {
+  typedef ArgMaxOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_output_type(tflite::TensorType output_type) {
+    fbb_.AddElement<int8_t>(ArgMaxOptions::VT_OUTPUT_TYPE, static_cast<int8_t>(output_type), 0);
   }
-  explicit PackOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit ArgMaxOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<PackOptions> Finish() {
+  ::flatbuffers::Offset<ArgMaxOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<PackOptions>(end);
+    auto o = ::flatbuffers::Offset<ArgMaxOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<PackOptions> CreatePackOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t values_count = 0,
-    int32_t axis = 0) {
-  PackOptionsBuilder builder_(_fbb);
-  builder_.add_axis(axis);
-  builder_.add_values_count(values_count);
+inline ::flatbuffers::Offset<ArgMaxOptions> CreateArgMaxOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::TensorType output_type = tflite::TensorType_FLOAT32) {
+  ArgMaxOptionsBuilder builder_(_fbb);
+  builder_.add_output_type(output_type);
   return builder_.Finish();
 }
-flatbuffers::Offset<PackOptions> CreatePackOptions(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<ArgMaxOptions> CreateArgMaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct LogicalOrOptionsT : public flatbuffers::NativeTable {
-  typedef LogicalOrOptions TableType;
+struct ArgMinOptionsT : public ::flatbuffers::NativeTable {
+  typedef ArgMinOptions TableType;
+  tflite::TensorType output_type = tflite::TensorType_FLOAT32;
 };
-struct LogicalOrOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef LogicalOrOptionsT NativeTableType;
-  typedef LogicalOrOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct ArgMinOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ArgMinOptionsT NativeTableType;
+  typedef ArgMinOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_OUTPUT_TYPE = 4
+  };
+  tflite::TensorType output_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_OUTPUT_TYPE, 0));
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int8_t>(verifier, VT_OUTPUT_TYPE, 1) &&
            verifier.EndTable();
   }
-  LogicalOrOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(LogicalOrOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<LogicalOrOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ArgMinOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ArgMinOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ArgMinOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct LogicalOrOptionsBuilder {
-  typedef LogicalOrOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit LogicalOrOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct ArgMinOptionsBuilder {
+  typedef ArgMinOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_output_type(tflite::TensorType output_type) {
+    fbb_.AddElement<int8_t>(ArgMinOptions::VT_OUTPUT_TYPE, static_cast<int8_t>(output_type), 0);
+  }
+  explicit ArgMinOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<LogicalOrOptions> Finish() {
+  ::flatbuffers::Offset<ArgMinOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<LogicalOrOptions>(end);
+    auto o = ::flatbuffers::Offset<ArgMinOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<LogicalOrOptions> CreateLogicalOrOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  LogicalOrOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<ArgMinOptions> CreateArgMinOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::TensorType output_type = tflite::TensorType_FLOAT32) {
+  ArgMinOptionsBuilder builder_(_fbb);
+  builder_.add_output_type(output_type);
   return builder_.Finish();
 }
-flatbuffers::Offset<LogicalOrOptions> CreateLogicalOrOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<ArgMinOptions> CreateArgMinOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
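A hypothetical sketch of the ArgMax/ArgMin factories above; the chosen index types are illustrative:

    flatbuffers::FlatBufferBuilder fbb;
    // output_type selects the element type of the produced index tensor.
    auto arg_max = tflite::CreateArgMaxOptions(fbb, tflite::TensorType_INT32);
    auto arg_min = tflite::CreateArgMinOptions(fbb, tflite::TensorType_INT64);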
-struct OneHotOptionsT : public flatbuffers::NativeTable {
-  typedef OneHotOptions TableType;
-  int32_t axis = 0;
+struct GreaterOptionsT : public ::flatbuffers::NativeTable {
+  typedef GreaterOptions TableType;
 };
-struct OneHotOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef OneHotOptionsT NativeTableType;
-  typedef OneHotOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_AXIS = 4
-  };
-  int32_t axis() const {
-    return GetField<int32_t>(VT_AXIS, 0);
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct GreaterOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef GreaterOptionsT NativeTableType;
+  typedef GreaterOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_AXIS, 4) &&
            verifier.EndTable();
   }
-  OneHotOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(OneHotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<OneHotOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  GreaterOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(GreaterOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<GreaterOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct OneHotOptionsBuilder {
-  typedef OneHotOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_axis(int32_t axis) {
-    fbb_.AddElement<int32_t>(OneHotOptions::VT_AXIS, axis, 0);
-  }
-  explicit OneHotOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct GreaterOptionsBuilder {
+  typedef GreaterOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit GreaterOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<OneHotOptions> Finish() {
+  ::flatbuffers::Offset<GreaterOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<OneHotOptions>(end);
+    auto o = ::flatbuffers::Offset<GreaterOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<OneHotOptions> CreateOneHotOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t axis = 0) {
-  OneHotOptionsBuilder builder_(_fbb);
-  builder_.add_axis(axis);
+inline ::flatbuffers::Offset<GreaterOptions> CreateGreaterOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  GreaterOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<OneHotOptions> CreateOneHotOptions(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<GreaterOptions> CreateGreaterOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct AbsOptionsT : public flatbuffers::NativeTable {
-  typedef AbsOptions TableType;
+struct GreaterEqualOptionsT : public ::flatbuffers::NativeTable {
+  typedef GreaterEqualOptions TableType;
 };
-struct AbsOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef AbsOptionsT NativeTableType;
-  typedef AbsOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct GreaterEqualOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef GreaterEqualOptionsT NativeTableType;
+  typedef GreaterEqualOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  AbsOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(AbsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<AbsOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  GreaterEqualOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(GreaterEqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<GreaterEqualOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct AbsOptionsBuilder {
-  typedef AbsOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit AbsOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct GreaterEqualOptionsBuilder {
+  typedef GreaterEqualOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit GreaterEqualOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<AbsOptions> Finish() {
+  ::flatbuffers::Offset<GreaterEqualOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<AbsOptions>(end);
+    auto o = ::flatbuffers::Offset<GreaterEqualOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<AbsOptions> CreateAbsOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  AbsOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<GreaterEqualOptions> CreateGreaterEqualOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  GreaterEqualOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<AbsOptions> CreateAbsOptions(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<GreaterEqualOptions> CreateGreaterEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct HardSwishOptionsT : public flatbuffers::NativeTable {
-  typedef HardSwishOptions TableType;
+struct LessOptionsT : public ::flatbuffers::NativeTable {
+  typedef LessOptions TableType;
 };
-struct HardSwishOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef HardSwishOptionsT NativeTableType;
-  typedef HardSwishOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct LessOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef LessOptionsT NativeTableType;
+  typedef LessOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  HardSwishOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(HardSwishOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<HardSwishOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  LessOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(LessOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<LessOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct HardSwishOptionsBuilder {
-  typedef HardSwishOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit HardSwishOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct LessOptionsBuilder {
+  typedef LessOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit LessOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<HardSwishOptions> Finish() {
+  ::flatbuffers::Offset<LessOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<HardSwishOptions>(end);
+    auto o = ::flatbuffers::Offset<LessOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<HardSwishOptions> CreateHardSwishOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  HardSwishOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<LessOptions> CreateLessOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  LessOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<HardSwishOptions> CreateHardSwishOptions(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<LessOptions> CreateLessOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct LogicalAndOptionsT : public flatbuffers::NativeTable {
-  typedef LogicalAndOptions TableType;
+struct LessEqualOptionsT : public ::flatbuffers::NativeTable {
+  typedef LessEqualOptions TableType;
 };
-struct LogicalAndOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef LogicalAndOptionsT NativeTableType;
-  typedef LogicalAndOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct LessEqualOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef LessEqualOptionsT NativeTableType;
+  typedef LessEqualOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  LogicalAndOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(LogicalAndOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<LogicalAndOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  LessEqualOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(LessEqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<LessEqualOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct LogicalAndOptionsBuilder {
-  typedef LogicalAndOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit LogicalAndOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct LessEqualOptionsBuilder {
+  typedef LessEqualOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit LessEqualOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<LogicalAndOptions> Finish() {
+  ::flatbuffers::Offset<LessEqualOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<LogicalAndOptions>(end);
+    auto o = ::flatbuffers::Offset<LessEqualOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<LogicalAndOptions> CreateLogicalAndOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  LogicalAndOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<LessEqualOptions> CreateLessEqualOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  LessEqualOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<LogicalAndOptions> CreateLogicalAndOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<LessEqualOptions> CreateLessEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct LogicalNotOptionsT : public flatbuffers::NativeTable {
-  typedef LogicalNotOptions TableType;
+struct NegOptionsT : public ::flatbuffers::NativeTable {
+  typedef NegOptions TableType;
 };
-struct LogicalNotOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef LogicalNotOptionsT NativeTableType;
-  typedef LogicalNotOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct NegOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef NegOptionsT NativeTableType;
+  typedef NegOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  LogicalNotOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(LogicalNotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<LogicalNotOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  NegOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(NegOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<NegOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct LogicalNotOptionsBuilder {
-  typedef LogicalNotOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit LogicalNotOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct NegOptionsBuilder {
+  typedef NegOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit NegOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<LogicalNotOptions> Finish() {
+  ::flatbuffers::Offset<NegOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<LogicalNotOptions>(end);
+    auto o = ::flatbuffers::Offset<NegOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<LogicalNotOptions> CreateLogicalNotOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  LogicalNotOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<NegOptions> CreateNegOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  NegOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<LogicalNotOptions> CreateLogicalNotOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<NegOptions> CreateNegOptions(::flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct UnpackOptionsT : public flatbuffers::NativeTable {
-  typedef UnpackOptions TableType;
-  int32_t num = 0;
-  int32_t axis = 0;
+struct SelectOptionsT : public ::flatbuffers::NativeTable {
+  typedef SelectOptions TableType;
 };
-struct UnpackOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef UnpackOptionsT NativeTableType;
-  typedef UnpackOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_NUM = 4,
-    VT_AXIS = 6
-  };
-  int32_t num() const {
-    return GetField<int32_t>(VT_NUM, 0);
-  }
-  int32_t axis() const {
-    return GetField<int32_t>(VT_AXIS, 0);
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct SelectOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SelectOptionsT NativeTableType;
+  typedef SelectOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_NUM, 4) &&
-           VerifyField<int32_t>(verifier, VT_AXIS, 4) &&
            verifier.EndTable();
   }
-  UnpackOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(UnpackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<UnpackOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SelectOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SelectOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SelectOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct UnpackOptionsBuilder {
-  typedef UnpackOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_num(int32_t num) {
-    fbb_.AddElement<int32_t>(UnpackOptions::VT_NUM, num, 0);
-  }
-  void add_axis(int32_t axis) {
-    fbb_.AddElement<int32_t>(UnpackOptions::VT_AXIS, axis, 0);
-  }
-  explicit UnpackOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct SelectOptionsBuilder {
+  typedef SelectOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit SelectOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<UnpackOptions> Finish() {
+  ::flatbuffers::Offset<SelectOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<UnpackOptions>(end);
+    auto o = ::flatbuffers::Offset<SelectOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t num = 0,
-    int32_t axis = 0) {
-  UnpackOptionsBuilder builder_(_fbb);
-  builder_.add_axis(axis);
-  builder_.add_num(num);
+inline ::flatbuffers::Offset<SelectOptions> CreateSelectOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  SelectOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SelectOptions> CreateSelectOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct FloorDivOptionsT : public flatbuffers::NativeTable {
-  typedef FloorDivOptions TableType;
+struct SliceOptionsT : public ::flatbuffers::NativeTable {
+  typedef SliceOptions TableType;
 };
-struct FloorDivOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef FloorDivOptionsT NativeTableType;
-  typedef FloorDivOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct SliceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SliceOptionsT NativeTableType;
+  typedef SliceOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  FloorDivOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(FloorDivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<FloorDivOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SliceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SliceOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct FloorDivOptionsBuilder {
-  typedef FloorDivOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit FloorDivOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct SliceOptionsBuilder {
+  typedef SliceOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit SliceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<FloorDivOptions> Finish() {
+  ::flatbuffers::Offset<SliceOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<FloorDivOptions>(end);
+    auto o = ::flatbuffers::Offset<SliceOptions>(end);
     return o;
   }
 };
-inline flatbuffers::Offset<FloorDivOptions> CreateFloorDivOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  FloorDivOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<SliceOptions> CreateSliceOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  SliceOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
-flatbuffers::Offset<FloorDivOptions> CreateFloorDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SliceOptions> CreateSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
-struct SquareOptionsT : public flatbuffers::NativeTable {
-  typedef SquareOptions TableType;
+struct TransposeConvOptionsT : public ::flatbuffers::NativeTable {
+  typedef TransposeConvOptions TableType;
+  tflite::Padding padding = tflite::Padding_SAME;
+  int32_t stride_w = 0;
+  int32_t stride_h = 0;
+  tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE;
+  tflite::TensorType quantized_bias_type = tflite::TensorType_FLOAT32;
 };
-struct SquareOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SquareOptionsT NativeTableType;
-  typedef SquareOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct TransposeConvOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef TransposeConvOptionsT NativeTableType;
+  typedef TransposeConvOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_PADDING = 4,
+    VT_STRIDE_W = 6,
+    VT_STRIDE_H = 8,
+    VT_FUSED_ACTIVATION_FUNCTION = 10,
+    VT_QUANTIZED_BIAS_TYPE = 12
+  };
+  tflite::Padding padding() const {
+    return static_cast<tflite::Padding>(GetField<int8_t>(VT_PADDING, 0));
+  }
+  int32_t stride_w() const {
+    return GetField<int32_t>(VT_STRIDE_W, 0);
+  }
+  int32_t stride_h() const {
+    return GetField<int32_t>(VT_STRIDE_H, 0);
+  }
+  tflite::ActivationFunctionType fused_activation_function() const {
+    return static_cast<tflite::ActivationFunctionType>(GetField<int8_t>(VT_FUSED_ACTIVATION_FUNCTION, 0));
+  }
+  tflite::TensorType quantized_bias_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_QUANTIZED_BIAS_TYPE, 0));
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int8_t>(verifier, VT_PADDING, 1) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_W, 4) &&
+           VerifyField<int32_t>(verifier, VT_STRIDE_H, 4) &&
+           VerifyField<int8_t>(verifier, VT_FUSED_ACTIVATION_FUNCTION, 1) &&
+           VerifyField<int8_t>(verifier, VT_QUANTIZED_BIAS_TYPE, 1) &&
            verifier.EndTable();
   }
-  SquareOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SquareOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SquareOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  TransposeConvOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(TransposeConvOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<TransposeConvOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
-struct SquareOptionsBuilder {
-  typedef SquareOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit SquareOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct TransposeConvOptionsBuilder {
+  typedef TransposeConvOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_padding(tflite::Padding padding) {
+    fbb_.AddElement<int8_t>(TransposeConvOptions::VT_PADDING, static_cast<int8_t>(padding), 0);
+  }
+  void add_stride_w(int32_t stride_w) {
+    fbb_.AddElement<int32_t>(TransposeConvOptions::VT_STRIDE_W, stride_w, 0);
+  }
+  void add_stride_h(int32_t stride_h) {
+    fbb_.AddElement<int32_t>(TransposeConvOptions::VT_STRIDE_H, stride_h, 0);
+  }
+  void add_fused_activation_function(tflite::ActivationFunctionType fused_activation_function) {
+    fbb_.AddElement<int8_t>(TransposeConvOptions::VT_FUSED_ACTIVATION_FUNCTION, static_cast<int8_t>(fused_activation_function), 0);
+  }
+  void add_quantized_bias_type(tflite::TensorType quantized_bias_type) {
+    fbb_.AddElement<int8_t>(TransposeConvOptions::VT_QUANTIZED_BIAS_TYPE, static_cast<int8_t>(quantized_bias_type), 0);
+  }
+  explicit TransposeConvOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<SquareOptions> Finish() {
+  ::flatbuffers::Offset<TransposeConvOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SquareOptions>(end);
+    auto o = ::flatbuffers::Offset<TransposeConvOptions>(end);
     return o;
   }
 };
tflite::ActivationFunctionType fused_activation_function = tflite::ActivationFunctionType_NONE, + tflite::TensorType quantized_bias_type = tflite::TensorType_FLOAT32) { + TransposeConvOptionsBuilder builder_(_fbb); + builder_.add_stride_h(stride_h); + builder_.add_stride_w(stride_w); + builder_.add_quantized_bias_type(quantized_bias_type); + builder_.add_fused_activation_function(fused_activation_function); + builder_.add_padding(padding); return builder_.Finish(); } -flatbuffers::Offset CreateSquareOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateTransposeConvOptions(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ZerosLikeOptionsT : public flatbuffers::NativeTable { - typedef ZerosLikeOptions TableType; +struct ExpandDimsOptionsT : public ::flatbuffers::NativeTable { + typedef ExpandDimsOptions TableType; }; -struct ZerosLikeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ZerosLikeOptionsT NativeTableType; - typedef ZerosLikeOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct ExpandDimsOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ExpandDimsOptionsT NativeTableType; + typedef ExpandDimsOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - ZerosLikeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ZerosLikeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ExpandDimsOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ExpandDimsOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct ZerosLikeOptionsBuilder { - typedef ZerosLikeOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit ZerosLikeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { +struct ExpandDimsOptionsBuilder { + typedef ExpandDimsOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit ExpandDimsOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateZerosLikeOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - ZerosLikeOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateExpandDimsOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + ExpandDimsOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateZerosLikeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateExpandDimsOptions(::flatbuffers::FlatBufferBuilder 
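 
The TransposeConvOptions hunk above is the one substantive schema change in this stretch: alongside the padding, strides, and fused activation, the regenerated table now carries a quantized_bias_type field (default tflite::TensorType_FLOAT32) declaring the storage type of the bias when the op is quantized. A minimal usage sketch of the regenerated API; the include path is the usual TFLM location and is an assumption here:

    // Sketch only: builds a TransposeConvOptions table with the new field.
    #include "tensorflow/lite/schema/schema_generated.h"

    ::flatbuffers::FlatBufferBuilder fbb;
    auto opts = tflite::CreateTransposeConvOptions(
        fbb, tflite::Padding_SAME, /*stride_w=*/2, /*stride_h=*/2,
        tflite::ActivationFunctionType_NONE,
        /*quantized_bias_type=*/tflite::TensorType_INT32);
    fbb.Finish(opts);  // the offset can then be attached to an Operator table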
 
-struct ZerosLikeOptionsT : public flatbuffers::NativeTable {
-  typedef ZerosLikeOptions TableType;
+struct ExpandDimsOptionsT : public ::flatbuffers::NativeTable {
+  typedef ExpandDimsOptions TableType;
 };
 
-struct ZerosLikeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ZerosLikeOptionsT NativeTableType;
-  typedef ZerosLikeOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct ExpandDimsOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ExpandDimsOptionsT NativeTableType;
+  typedef ExpandDimsOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  ZerosLikeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ZerosLikeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ZerosLikeOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ExpandDimsOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ExpandDimsOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ExpandDimsOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct ZerosLikeOptionsBuilder {
-  typedef ZerosLikeOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit ZerosLikeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
-        : fbb_(_fbb) {
+struct ExpandDimsOptionsBuilder {
+  typedef ExpandDimsOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit ExpandDimsOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<ZerosLikeOptions> Finish() {
+  ::flatbuffers::Offset<ExpandDimsOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ZerosLikeOptions>(end);
+    auto o = ::flatbuffers::Offset<ExpandDimsOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<ZerosLikeOptions> CreateZerosLikeOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  ZerosLikeOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<ExpandDimsOptions> CreateExpandDimsOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  ExpandDimsOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<ZerosLikeOptions> CreateZerosLikeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<ExpandDimsOptions> CreateExpandDimsOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct FillOptionsT : public flatbuffers::NativeTable {
-  typedef FillOptions TableType;
+struct SparseToDenseOptionsT : public ::flatbuffers::NativeTable {
+  typedef SparseToDenseOptions TableType;
+  bool validate_indices = false;
 };
 
-struct FillOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef FillOptionsT NativeTableType;
-  typedef FillOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct SparseToDenseOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SparseToDenseOptionsT NativeTableType;
+  typedef SparseToDenseOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_VALIDATE_INDICES = 4
+  };
+  bool validate_indices() const {
+    return GetField<uint8_t>(VT_VALIDATE_INDICES, 0) != 0;
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<uint8_t>(verifier, VT_VALIDATE_INDICES, 1) &&
            verifier.EndTable();
   }
-  FillOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(FillOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<FillOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SparseToDenseOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SparseToDenseOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SparseToDenseOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct FillOptionsBuilder {
-  typedef FillOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit FillOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct SparseToDenseOptionsBuilder {
+  typedef SparseToDenseOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_validate_indices(bool validate_indices) {
+    fbb_.AddElement<uint8_t>(SparseToDenseOptions::VT_VALIDATE_INDICES, static_cast<uint8_t>(validate_indices), 0);
+  }
+  explicit SparseToDenseOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<FillOptions> Finish() {
+  ::flatbuffers::Offset<SparseToDenseOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<FillOptions>(end);
+    auto o = ::flatbuffers::Offset<SparseToDenseOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<FillOptions> CreateFillOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  FillOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<SparseToDenseOptions> CreateSparseToDenseOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    bool validate_indices = false) {
+  SparseToDenseOptionsBuilder builder_(_fbb);
+  builder_.add_validate_indices(validate_indices);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<FillOptions> CreateFillOptions(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SparseToDenseOptions> CreateSparseToDenseOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct FloorModOptionsT : public flatbuffers::NativeTable {
-  typedef FloorModOptions TableType;
+struct EqualOptionsT : public ::flatbuffers::NativeTable {
+  typedef EqualOptions TableType;
 };
 
-struct FloorModOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef FloorModOptionsT NativeTableType;
-  typedef FloorModOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct EqualOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef EqualOptionsT NativeTableType;
+  typedef EqualOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  FloorModOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(FloorModOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<FloorModOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  EqualOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(EqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<EqualOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct FloorModOptionsBuilder {
-  typedef FloorModOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit FloorModOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct EqualOptionsBuilder {
+  typedef EqualOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit EqualOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<FloorModOptions> Finish() {
+  ::flatbuffers::Offset<EqualOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<FloorModOptions>(end);
+    auto o = ::flatbuffers::Offset<EqualOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<FloorModOptions> CreateFloorModOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  FloorModOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<EqualOptions> CreateEqualOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  EqualOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<FloorModOptions> CreateFloorModOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<EqualOptions> CreateEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct RangeOptionsT : public flatbuffers::NativeTable {
-  typedef RangeOptions TableType;
+struct NotEqualOptionsT : public ::flatbuffers::NativeTable {
+  typedef NotEqualOptions TableType;
 };
 
-struct RangeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef RangeOptionsT NativeTableType;
-  typedef RangeOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct NotEqualOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef NotEqualOptionsT NativeTableType;
+  typedef NotEqualOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  RangeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(RangeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<RangeOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  NotEqualOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(NotEqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<NotEqualOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct RangeOptionsBuilder {
-  typedef RangeOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit RangeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct NotEqualOptionsBuilder {
+  typedef NotEqualOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit NotEqualOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<RangeOptions> Finish() {
+  ::flatbuffers::Offset<NotEqualOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<RangeOptions>(end);
+    auto o = ::flatbuffers::Offset<NotEqualOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<RangeOptions> CreateRangeOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  RangeOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<NotEqualOptions> CreateNotEqualOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  NotEqualOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<RangeOptions> CreateRangeOptions(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<NotEqualOptions> CreateNotEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct LeakyReluOptionsT : public flatbuffers::NativeTable {
-  typedef LeakyReluOptions TableType;
-  float alpha = 0.0f;
+struct ShapeOptionsT : public ::flatbuffers::NativeTable {
+  typedef ShapeOptions TableType;
+  tflite::TensorType out_type = tflite::TensorType_FLOAT32;
 };
 
-struct LeakyReluOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef LeakyReluOptionsT NativeTableType;
-  typedef LeakyReluOptionsBuilder Builder;
+struct ShapeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ShapeOptionsT NativeTableType;
+  typedef ShapeOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_ALPHA = 4
+    VT_OUT_TYPE = 4
   };
-  float alpha() const {
-    return GetField<float>(VT_ALPHA, 0.0f);
+  tflite::TensorType out_type() const {
+    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_OUT_TYPE, 0));
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<float>(verifier, VT_ALPHA, 4) &&
+           VerifyField<int8_t>(verifier, VT_OUT_TYPE, 1) &&
            verifier.EndTable();
   }
-  LeakyReluOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(LeakyReluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<LeakyReluOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ShapeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ShapeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ShapeOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct LeakyReluOptionsBuilder {
-  typedef LeakyReluOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_alpha(float alpha) {
-    fbb_.AddElement<float>(LeakyReluOptions::VT_ALPHA, alpha, 0.0f);
+struct ShapeOptionsBuilder {
+  typedef ShapeOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_out_type(tflite::TensorType out_type) {
+    fbb_.AddElement<int8_t>(ShapeOptions::VT_OUT_TYPE, static_cast<int8_t>(out_type), 0);
   }
-  explicit LeakyReluOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit ShapeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<LeakyReluOptions> Finish() {
+  ::flatbuffers::Offset<ShapeOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<LeakyReluOptions>(end);
+    auto o = ::flatbuffers::Offset<ShapeOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<LeakyReluOptions> CreateLeakyReluOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    float alpha = 0.0f) {
-  LeakyReluOptionsBuilder builder_(_fbb);
-  builder_.add_alpha(alpha);
+inline ::flatbuffers::Offset<ShapeOptions> CreateShapeOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    tflite::TensorType out_type = tflite::TensorType_FLOAT32) {
+  ShapeOptionsBuilder builder_(_fbb);
+  builder_.add_out_type(out_type);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<LeakyReluOptions> CreateLeakyReluOptions(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<ShapeOptions> CreateShapeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct SquaredDifferenceOptionsT : public flatbuffers::NativeTable {
-  typedef SquaredDifferenceOptions TableType;
+struct RankOptionsT : public ::flatbuffers::NativeTable {
+  typedef RankOptions TableType;
 };
 
-struct SquaredDifferenceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef SquaredDifferenceOptionsT NativeTableType;
-  typedef SquaredDifferenceOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct RankOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef RankOptionsT NativeTableType;
+  typedef RankOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  SquaredDifferenceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(SquaredDifferenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<SquaredDifferenceOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  RankOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(RankOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<RankOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct SquaredDifferenceOptionsBuilder {
-  typedef SquaredDifferenceOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit SquaredDifferenceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct RankOptionsBuilder {
+  typedef RankOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit RankOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<SquaredDifferenceOptions> Finish() {
+  ::flatbuffers::Offset<RankOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<SquaredDifferenceOptions>(end);
+    auto o = ::flatbuffers::Offset<RankOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<SquaredDifferenceOptions> CreateSquaredDifferenceOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  SquaredDifferenceOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<RankOptions> CreateRankOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  RankOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<SquaredDifferenceOptions> CreateSquaredDifferenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<RankOptions> CreateRankOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct MirrorPadOptionsT : public flatbuffers::NativeTable {
-  typedef MirrorPadOptions TableType;
-  tflite::MirrorPadMode mode = tflite::MirrorPadMode_REFLECT;
+struct PowOptionsT : public ::flatbuffers::NativeTable {
+  typedef PowOptions TableType;
 };
 
-struct MirrorPadOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef MirrorPadOptionsT NativeTableType;
-  typedef MirrorPadOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_MODE = 4
-  };
-  tflite::MirrorPadMode mode() const {
-    return static_cast<tflite::MirrorPadMode>(GetField<int8_t>(VT_MODE, 0));
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct PowOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef PowOptionsT NativeTableType;
+  typedef PowOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_MODE, 1) &&
           verifier.EndTable();
  }
-  MirrorPadOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(MirrorPadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<MirrorPadOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  PowOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(PowOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<PowOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct MirrorPadOptionsBuilder {
-  typedef MirrorPadOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_mode(tflite::MirrorPadMode mode) {
-    fbb_.AddElement<int8_t>(MirrorPadOptions::VT_MODE, static_cast<int8_t>(mode), 0);
-  }
-  explicit MirrorPadOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct PowOptionsBuilder {
+  typedef PowOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit PowOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<MirrorPadOptions> Finish() {
+  ::flatbuffers::Offset<PowOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<MirrorPadOptions>(end);
+    auto o = ::flatbuffers::Offset<PowOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<MirrorPadOptions> CreateMirrorPadOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::MirrorPadMode mode = tflite::MirrorPadMode_REFLECT) {
-  MirrorPadOptionsBuilder builder_(_fbb);
-  builder_.add_mode(mode);
+inline ::flatbuffers::Offset<PowOptions> CreatePowOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  PowOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<MirrorPadOptions> CreateMirrorPadOptions(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<PowOptions> CreatePowOptions(::flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct UniqueOptionsT : public flatbuffers::NativeTable {
-  typedef UniqueOptions TableType;
-  tflite::TensorType idx_out_type = tflite::TensorType_INT32;
+struct FakeQuantOptionsT : public ::flatbuffers::NativeTable {
+  typedef FakeQuantOptions TableType;
+  float min = 0.0f;
+  float max = 0.0f;
+  int32_t num_bits = 0;
+  bool narrow_range = false;
 };
 
-struct UniqueOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef UniqueOptionsT NativeTableType;
-  typedef UniqueOptionsBuilder Builder;
+struct FakeQuantOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef FakeQuantOptionsT NativeTableType;
+  typedef FakeQuantOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_IDX_OUT_TYPE = 4
+    VT_MIN = 4,
+    VT_MAX = 6,
+    VT_NUM_BITS = 8,
+    VT_NARROW_RANGE = 10
   };
-  tflite::TensorType idx_out_type() const {
-    return static_cast<tflite::TensorType>(GetField<int8_t>(VT_IDX_OUT_TYPE, 2));
+  float min() const {
+    return GetField<float>(VT_MIN, 0.0f);
+  }
+  float max() const {
+    return GetField<float>(VT_MAX, 0.0f);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  int32_t num_bits() const {
+    return GetField<int32_t>(VT_NUM_BITS, 0);
+  }
+  bool narrow_range() const {
+    return GetField<uint8_t>(VT_NARROW_RANGE, 0) != 0;
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int8_t>(verifier, VT_IDX_OUT_TYPE, 1) &&
+           VerifyField<float>(verifier, VT_MIN, 4) &&
+           VerifyField<float>(verifier, VT_MAX, 4) &&
+           VerifyField<int32_t>(verifier, VT_NUM_BITS, 4) &&
+           VerifyField<uint8_t>(verifier, VT_NARROW_RANGE, 1) &&
            verifier.EndTable();
   }
-  UniqueOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(UniqueOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<UniqueOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  FakeQuantOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(FakeQuantOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<FakeQuantOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct UniqueOptionsBuilder {
-  typedef UniqueOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_idx_out_type(tflite::TensorType idx_out_type) {
-    fbb_.AddElement<int8_t>(UniqueOptions::VT_IDX_OUT_TYPE, static_cast<int8_t>(idx_out_type), 2);
+struct FakeQuantOptionsBuilder {
+  typedef FakeQuantOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_min(float min) {
+    fbb_.AddElement<float>(FakeQuantOptions::VT_MIN, min, 0.0f);
+  }
+  void add_max(float max) {
+    fbb_.AddElement<float>(FakeQuantOptions::VT_MAX, max, 0.0f);
+  }
+  void add_num_bits(int32_t num_bits) {
+    fbb_.AddElement<int32_t>(FakeQuantOptions::VT_NUM_BITS, num_bits, 0);
+  }
+  void add_narrow_range(bool narrow_range) {
+    fbb_.AddElement<uint8_t>(FakeQuantOptions::VT_NARROW_RANGE, static_cast<uint8_t>(narrow_range), 0);
   }
-  explicit UniqueOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit FakeQuantOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<UniqueOptions> Finish() {
+  ::flatbuffers::Offset<FakeQuantOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<UniqueOptions>(end);
+    auto o = ::flatbuffers::Offset<FakeQuantOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<UniqueOptions> CreateUniqueOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    tflite::TensorType idx_out_type = tflite::TensorType_INT32) {
-  UniqueOptionsBuilder builder_(_fbb);
-  builder_.add_idx_out_type(idx_out_type);
+inline ::flatbuffers::Offset<FakeQuantOptions> CreateFakeQuantOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    float min = 0.0f,
+    float max = 0.0f,
+    int32_t num_bits = 0,
+    bool narrow_range = false) {
+  FakeQuantOptionsBuilder builder_(_fbb);
+  builder_.add_num_bits(num_bits);
+  builder_.add_max(max);
+  builder_.add_min(min);
+  builder_.add_narrow_range(narrow_range);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<UniqueOptions> CreateUniqueOptions(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<FakeQuantOptions> CreateFakeQuantOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
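 
One detail worth noting in the CreateFakeQuantOptions helper just above: the generated add_* calls run from the 4-byte fields (num_bits, max, min) down to the 1-byte narrow_range, because flatc orders them by decreasing field size to minimize alignment padding in the serialized table; the call order affects only layout, never the stored values. Building the same table by hand with the builder shown in this hunk:

    // Sketch: hand-rolled equivalent; any add_* order produces the same fields.
    ::flatbuffers::FlatBufferBuilder fbb;
    tflite::FakeQuantOptionsBuilder builder(fbb);
    builder.add_num_bits(8);
    builder.add_max(6.0f);
    builder.add_min(-6.0f);
    builder.add_narrow_range(true);
    fbb.Finish(builder.Finish());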
 
-struct ReverseV2OptionsT : public flatbuffers::NativeTable {
-  typedef ReverseV2Options TableType;
+struct PackOptionsT : public ::flatbuffers::NativeTable {
+  typedef PackOptions TableType;
+  int32_t values_count = 0;
+  int32_t axis = 0;
 };
 
-struct ReverseV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ReverseV2OptionsT NativeTableType;
-  typedef ReverseV2OptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct PackOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef PackOptionsT NativeTableType;
+  typedef PackOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_VALUES_COUNT = 4,
+    VT_AXIS = 6
+  };
+  int32_t values_count() const {
+    return GetField<int32_t>(VT_VALUES_COUNT, 0);
+  }
+  int32_t axis() const {
+    return GetField<int32_t>(VT_AXIS, 0);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int32_t>(verifier, VT_VALUES_COUNT, 4) &&
+           VerifyField<int32_t>(verifier, VT_AXIS, 4) &&
            verifier.EndTable();
   }
-  ReverseV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ReverseV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ReverseV2Options> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  PackOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(PackOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<PackOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct ReverseV2OptionsBuilder {
-  typedef ReverseV2Options Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit ReverseV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct PackOptionsBuilder {
+  typedef PackOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_values_count(int32_t values_count) {
+    fbb_.AddElement<int32_t>(PackOptions::VT_VALUES_COUNT, values_count, 0);
+  }
+  void add_axis(int32_t axis) {
+    fbb_.AddElement<int32_t>(PackOptions::VT_AXIS, axis, 0);
+  }
+  explicit PackOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<ReverseV2Options> Finish() {
+  ::flatbuffers::Offset<PackOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ReverseV2Options>(end);
+    auto o = ::flatbuffers::Offset<PackOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<ReverseV2Options> CreateReverseV2Options(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  ReverseV2OptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<PackOptions> CreatePackOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t values_count = 0,
+    int32_t axis = 0) {
+  PackOptionsBuilder builder_(_fbb);
+  builder_.add_axis(axis);
+  builder_.add_values_count(values_count);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<ReverseV2Options> CreateReverseV2Options(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<PackOptions> CreatePackOptions(::flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
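 
Every table in this regenerated header keeps its object-API twin (the T-suffixed NativeTable) with Pack/UnPack, now spelled against the fully qualified ::flatbuffers namespace. A round-trip sketch using the PackOptions pair defined above:

    #include <memory>

    // Serialize -> unpack to the mutable native object -> repack (sketch only).
    ::flatbuffers::FlatBufferBuilder fbb;
    fbb.Finish(tflite::CreatePackOptions(fbb, /*values_count=*/2, /*axis=*/0));
    const auto* table =
        ::flatbuffers::GetRoot<tflite::PackOptions>(fbb.GetBufferPointer());
    std::unique_ptr<tflite::PackOptionsT> native(table->UnPack());  // heap copy
    native->axis = 1;
    ::flatbuffers::FlatBufferBuilder repacked;
    repacked.Finish(tflite::PackOptions::Pack(repacked, native.get()));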
 
-struct AddNOptionsT : public flatbuffers::NativeTable {
-  typedef AddNOptions TableType;
+struct LogicalOrOptionsT : public ::flatbuffers::NativeTable {
+  typedef LogicalOrOptions TableType;
 };
 
-struct AddNOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef AddNOptionsT NativeTableType;
-  typedef AddNOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct LogicalOrOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef LogicalOrOptionsT NativeTableType;
+  typedef LogicalOrOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  AddNOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(AddNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<AddNOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  LogicalOrOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(LogicalOrOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<LogicalOrOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct AddNOptionsBuilder {
-  typedef AddNOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit AddNOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct LogicalOrOptionsBuilder {
+  typedef LogicalOrOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit LogicalOrOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<AddNOptions> Finish() {
+  ::flatbuffers::Offset<LogicalOrOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<AddNOptions>(end);
+    auto o = ::flatbuffers::Offset<LogicalOrOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<AddNOptions> CreateAddNOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  AddNOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<LogicalOrOptions> CreateLogicalOrOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  LogicalOrOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<AddNOptions> CreateAddNOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<LogicalOrOptions> CreateLogicalOrOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct GatherNdOptionsT : public flatbuffers::NativeTable {
-  typedef GatherNdOptions TableType;
+struct OneHotOptionsT : public ::flatbuffers::NativeTable {
+  typedef OneHotOptions TableType;
+  int32_t axis = 0;
 };
 
-struct GatherNdOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef GatherNdOptionsT NativeTableType;
-  typedef GatherNdOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct OneHotOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef OneHotOptionsT NativeTableType;
+  typedef OneHotOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_AXIS = 4
+  };
+  int32_t axis() const {
+    return GetField<int32_t>(VT_AXIS, 0);
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
+           VerifyField<int32_t>(verifier, VT_AXIS, 4) &&
            verifier.EndTable();
   }
-  GatherNdOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(GatherNdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<GatherNdOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  OneHotOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(OneHotOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<OneHotOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct GatherNdOptionsBuilder {
-  typedef GatherNdOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit GatherNdOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct OneHotOptionsBuilder {
+  typedef OneHotOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_axis(int32_t axis) {
+    fbb_.AddElement<int32_t>(OneHotOptions::VT_AXIS, axis, 0);
+  }
+  explicit OneHotOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<GatherNdOptions> Finish() {
+  ::flatbuffers::Offset<OneHotOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<GatherNdOptions>(end);
+    auto o = ::flatbuffers::Offset<OneHotOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<GatherNdOptions> CreateGatherNdOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  GatherNdOptionsBuilder builder_(_fbb);
-  return builder_.Finish();
-}
-
-flatbuffers::Offset<GatherNdOptions> CreateGatherNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-
-struct WhereOptionsT : public flatbuffers::NativeTable {
-  typedef WhereOptions TableType;
-};
-
-struct WhereOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef WhereOptionsT NativeTableType;
-  typedef WhereOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
-    return VerifyTableStart(verifier) &&
-           verifier.EndTable();
-  }
-  WhereOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(WhereOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<WhereOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-};
-
-struct WhereOptionsBuilder {
-  typedef WhereOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit WhereOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
-        : fbb_(_fbb) {
-    start_ = fbb_.StartTable();
-  }
-  flatbuffers::Offset<WhereOptions> Finish() {
-    const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<WhereOptions>(end);
-    return o;
-  }
-};
-
-inline flatbuffers::Offset<WhereOptions> CreateWhereOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  WhereOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<OneHotOptions> CreateOneHotOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t axis = 0) {
+  OneHotOptionsBuilder builder_(_fbb);
+  builder_.add_axis(axis);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<WhereOptions> CreateWhereOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<OneHotOptions> CreateOneHotOptions(::flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct ReverseSequenceOptionsT : public flatbuffers::NativeTable {
-  typedef ReverseSequenceOptions TableType;
-  int32_t seq_dim = 0;
-  int32_t batch_dim = 0;
+struct AbsOptionsT : public ::flatbuffers::NativeTable {
+  typedef AbsOptions TableType;
 };
 
-struct ReverseSequenceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef ReverseSequenceOptionsT NativeTableType;
-  typedef ReverseSequenceOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_SEQ_DIM = 4,
-    VT_BATCH_DIM = 6
-  };
-  int32_t seq_dim() const {
-    return GetField<int32_t>(VT_SEQ_DIM, 0);
-  }
-  int32_t batch_dim() const {
-    return GetField<int32_t>(VT_BATCH_DIM, 0);
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct AbsOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef AbsOptionsT NativeTableType;
+  typedef AbsOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_SEQ_DIM, 4) &&
-           VerifyField<int32_t>(verifier, VT_BATCH_DIM, 4) &&
            verifier.EndTable();
   }
-  ReverseSequenceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(ReverseSequenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<ReverseSequenceOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  AbsOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(AbsOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<AbsOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct ReverseSequenceOptionsBuilder {
-  typedef ReverseSequenceOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_seq_dim(int32_t seq_dim) {
-    fbb_.AddElement<int32_t>(ReverseSequenceOptions::VT_SEQ_DIM, seq_dim, 0);
-  }
-  void add_batch_dim(int32_t batch_dim) {
-    fbb_.AddElement<int32_t>(ReverseSequenceOptions::VT_BATCH_DIM, batch_dim, 0);
-  }
-  explicit ReverseSequenceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct AbsOptionsBuilder {
+  typedef AbsOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit AbsOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<ReverseSequenceOptions> Finish() {
+  ::flatbuffers::Offset<AbsOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<ReverseSequenceOptions>(end);
+    auto o = ::flatbuffers::Offset<AbsOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<ReverseSequenceOptions> CreateReverseSequenceOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t seq_dim = 0,
-    int32_t batch_dim = 0) {
-  ReverseSequenceOptionsBuilder builder_(_fbb);
-  builder_.add_batch_dim(batch_dim);
-  builder_.add_seq_dim(seq_dim);
+inline ::flatbuffers::Offset<AbsOptions> CreateAbsOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  AbsOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<ReverseSequenceOptions> CreateReverseSequenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<AbsOptions> CreateAbsOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct MatrixDiagOptionsT : public flatbuffers::NativeTable {
-  typedef MatrixDiagOptions TableType;
+struct HardSwishOptionsT : public ::flatbuffers::NativeTable {
+  typedef HardSwishOptions TableType;
 };
 
-struct MatrixDiagOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef MatrixDiagOptionsT NativeTableType;
-  typedef MatrixDiagOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct HardSwishOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef HardSwishOptionsT NativeTableType;
+  typedef HardSwishOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  MatrixDiagOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(MatrixDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<MatrixDiagOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  HardSwishOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(HardSwishOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<HardSwishOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct MatrixDiagOptionsBuilder {
-  typedef MatrixDiagOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit MatrixDiagOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct HardSwishOptionsBuilder {
+  typedef HardSwishOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit HardSwishOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<MatrixDiagOptions> Finish() {
+  ::flatbuffers::Offset<HardSwishOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<MatrixDiagOptions>(end);
+    auto o = ::flatbuffers::Offset<HardSwishOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<MatrixDiagOptions> CreateMatrixDiagOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  MatrixDiagOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<HardSwishOptions> CreateHardSwishOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  HardSwishOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<MatrixDiagOptions> CreateMatrixDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<HardSwishOptions> CreateHardSwishOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct QuantizeOptionsT : public flatbuffers::NativeTable {
-  typedef QuantizeOptions TableType;
+struct LogicalAndOptionsT : public ::flatbuffers::NativeTable {
+  typedef LogicalAndOptions TableType;
 };
 
-struct QuantizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef QuantizeOptionsT NativeTableType;
-  typedef QuantizeOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct LogicalAndOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef LogicalAndOptionsT NativeTableType;
+  typedef LogicalAndOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  QuantizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(QuantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<QuantizeOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  LogicalAndOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(LogicalAndOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<LogicalAndOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct QuantizeOptionsBuilder {
-  typedef QuantizeOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit QuantizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct LogicalAndOptionsBuilder {
+  typedef LogicalAndOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit LogicalAndOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<QuantizeOptions> Finish() {
+  ::flatbuffers::Offset<LogicalAndOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<QuantizeOptions>(end);
+    auto o = ::flatbuffers::Offset<LogicalAndOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<QuantizeOptions> CreateQuantizeOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  QuantizeOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<LogicalAndOptions> CreateLogicalAndOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  LogicalAndOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<QuantizeOptions> CreateQuantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<LogicalAndOptions> CreateLogicalAndOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct MatrixSetDiagOptionsT : public flatbuffers::NativeTable {
-  typedef MatrixSetDiagOptions TableType;
+struct LogicalNotOptionsT : public ::flatbuffers::NativeTable {
+  typedef LogicalNotOptions TableType;
 };
 
-struct MatrixSetDiagOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef MatrixSetDiagOptionsT NativeTableType;
-  typedef MatrixSetDiagOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct LogicalNotOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef LogicalNotOptionsT NativeTableType;
+  typedef LogicalNotOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  MatrixSetDiagOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(MatrixSetDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<MatrixSetDiagOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  LogicalNotOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(LogicalNotOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<LogicalNotOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct MatrixSetDiagOptionsBuilder {
-  typedef MatrixSetDiagOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit MatrixSetDiagOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct LogicalNotOptionsBuilder {
+  typedef LogicalNotOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit LogicalNotOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<MatrixSetDiagOptions> Finish() {
+  ::flatbuffers::Offset<LogicalNotOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<MatrixSetDiagOptions>(end);
+    auto o = ::flatbuffers::Offset<LogicalNotOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<MatrixSetDiagOptions> CreateMatrixSetDiagOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  MatrixSetDiagOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<LogicalNotOptions> CreateLogicalNotOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  LogicalNotOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<MatrixSetDiagOptions> CreateMatrixSetDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<LogicalNotOptions> CreateLogicalNotOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct IfOptionsT : public flatbuffers::NativeTable {
-  typedef IfOptions TableType;
-  int32_t then_subgraph_index = 0;
-  int32_t else_subgraph_index = 0;
+struct UnpackOptionsT : public ::flatbuffers::NativeTable {
+  typedef UnpackOptions TableType;
+  int32_t num = 0;
+  int32_t axis = 0;
 };
 
-struct IfOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef IfOptionsT NativeTableType;
-  typedef IfOptionsBuilder Builder;
+struct UnpackOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef UnpackOptionsT NativeTableType;
+  typedef UnpackOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_THEN_SUBGRAPH_INDEX = 4,
-    VT_ELSE_SUBGRAPH_INDEX = 6
+    VT_NUM = 4,
+    VT_AXIS = 6
   };
-  int32_t then_subgraph_index() const {
-    return GetField<int32_t>(VT_THEN_SUBGRAPH_INDEX, 0);
+  int32_t num() const {
+    return GetField<int32_t>(VT_NUM, 0);
   }
-  int32_t else_subgraph_index() const {
-    return GetField<int32_t>(VT_ELSE_SUBGRAPH_INDEX, 0);
+  int32_t axis() const {
+    return GetField<int32_t>(VT_AXIS, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_THEN_SUBGRAPH_INDEX, 4) &&
-           VerifyField<int32_t>(verifier, VT_ELSE_SUBGRAPH_INDEX, 4) &&
+           VerifyField<int32_t>(verifier, VT_NUM, 4) &&
+           VerifyField<int32_t>(verifier, VT_AXIS, 4) &&
            verifier.EndTable();
   }
-  IfOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(IfOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<IfOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  UnpackOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(UnpackOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<UnpackOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct IfOptionsBuilder {
-  typedef IfOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_then_subgraph_index(int32_t then_subgraph_index) {
-    fbb_.AddElement<int32_t>(IfOptions::VT_THEN_SUBGRAPH_INDEX, then_subgraph_index, 0);
+struct UnpackOptionsBuilder {
+  typedef UnpackOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_num(int32_t num) {
+    fbb_.AddElement<int32_t>(UnpackOptions::VT_NUM, num, 0);
   }
-  void add_else_subgraph_index(int32_t else_subgraph_index) {
-    fbb_.AddElement<int32_t>(IfOptions::VT_ELSE_SUBGRAPH_INDEX, else_subgraph_index, 0);
+  void add_axis(int32_t axis) {
+    fbb_.AddElement<int32_t>(UnpackOptions::VT_AXIS, axis, 0);
   }
-  explicit IfOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit UnpackOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<IfOptions> Finish() {
+  ::flatbuffers::Offset<UnpackOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<IfOptions>(end);
+    auto o = ::flatbuffers::Offset<UnpackOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<IfOptions> CreateIfOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t then_subgraph_index = 0,
-    int32_t else_subgraph_index = 0) {
-  IfOptionsBuilder builder_(_fbb);
-  builder_.add_else_subgraph_index(else_subgraph_index);
-  builder_.add_then_subgraph_index(then_subgraph_index);
+inline ::flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t num = 0,
+    int32_t axis = 0) {
+  UnpackOptionsBuilder builder_(_fbb);
+  builder_.add_axis(axis);
+  builder_.add_num(num);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<IfOptions> CreateIfOptions(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct CallOnceOptionsT : public flatbuffers::NativeTable {
-  typedef CallOnceOptions TableType;
-  int32_t init_subgraph_index = 0;
+struct FloorDivOptionsT : public ::flatbuffers::NativeTable {
+  typedef FloorDivOptions TableType;
 };
 
-struct CallOnceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef CallOnceOptionsT NativeTableType;
-  typedef CallOnceOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_INIT_SUBGRAPH_INDEX = 4
-  };
-  int32_t init_subgraph_index() const {
-    return GetField<int32_t>(VT_INIT_SUBGRAPH_INDEX, 0);
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct FloorDivOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef FloorDivOptionsT NativeTableType;
+  typedef FloorDivOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_INIT_SUBGRAPH_INDEX, 4) &&
           verifier.EndTable();
  }
-  CallOnceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(CallOnceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<CallOnceOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  FloorDivOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(FloorDivOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<FloorDivOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct CallOnceOptionsBuilder {
-  typedef CallOnceOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_init_subgraph_index(int32_t init_subgraph_index) {
-    fbb_.AddElement<int32_t>(CallOnceOptions::VT_INIT_SUBGRAPH_INDEX, init_subgraph_index, 0);
-  }
-  explicit CallOnceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct FloorDivOptionsBuilder {
+  typedef FloorDivOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit FloorDivOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<CallOnceOptions> Finish() {
+  ::flatbuffers::Offset<FloorDivOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<CallOnceOptions>(end);
+    auto o = ::flatbuffers::Offset<FloorDivOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<CallOnceOptions> CreateCallOnceOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t init_subgraph_index = 0) {
-  CallOnceOptionsBuilder builder_(_fbb);
-  builder_.add_init_subgraph_index(init_subgraph_index);
+inline ::flatbuffers::Offset<FloorDivOptions> CreateFloorDivOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  FloorDivOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<CallOnceOptions> CreateCallOnceOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<FloorDivOptions> CreateFloorDivOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct WhileOptionsT : public flatbuffers::NativeTable {
-  typedef WhileOptions TableType;
-  int32_t cond_subgraph_index = 0;
-  int32_t body_subgraph_index = 0;
+struct SquareOptionsT : public ::flatbuffers::NativeTable {
+  typedef SquareOptions TableType;
 };
 
-struct WhileOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef WhileOptionsT NativeTableType;
-  typedef WhileOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_COND_SUBGRAPH_INDEX = 4,
-    VT_BODY_SUBGRAPH_INDEX = 6
-  };
-  int32_t cond_subgraph_index() const {
-    return GetField<int32_t>(VT_COND_SUBGRAPH_INDEX, 0);
-  }
-  int32_t body_subgraph_index() const {
-    return GetField<int32_t>(VT_BODY_SUBGRAPH_INDEX, 0);
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct SquareOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef SquareOptionsT NativeTableType;
+  typedef SquareOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int32_t>(verifier, VT_COND_SUBGRAPH_INDEX, 4) &&
-           VerifyField<int32_t>(verifier, VT_BODY_SUBGRAPH_INDEX, 4) &&
           verifier.EndTable();
  }
-  WhileOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(WhileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<WhileOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  SquareOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(SquareOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<SquareOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct WhileOptionsBuilder {
-  typedef WhileOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_cond_subgraph_index(int32_t cond_subgraph_index) {
-    fbb_.AddElement<int32_t>(WhileOptions::VT_COND_SUBGRAPH_INDEX, cond_subgraph_index, 0);
-  }
-  void add_body_subgraph_index(int32_t body_subgraph_index) {
-    fbb_.AddElement<int32_t>(WhileOptions::VT_BODY_SUBGRAPH_INDEX, body_subgraph_index, 0);
-  }
-  explicit WhileOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct SquareOptionsBuilder {
+  typedef SquareOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit SquareOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<WhileOptions> Finish() {
+  ::flatbuffers::Offset<SquareOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<WhileOptions>(end);
+    auto o = ::flatbuffers::Offset<SquareOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<WhileOptions> CreateWhileOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int32_t cond_subgraph_index = 0,
-    int32_t body_subgraph_index = 0) {
-  WhileOptionsBuilder builder_(_fbb);
-  builder_.add_body_subgraph_index(body_subgraph_index);
-  builder_.add_cond_subgraph_index(cond_subgraph_index);
+inline ::flatbuffers::Offset<SquareOptions> CreateSquareOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  SquareOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<WhileOptions> CreateWhileOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<SquareOptions> CreateSquareOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct NonMaxSuppressionV4OptionsT : public flatbuffers::NativeTable {
-  typedef NonMaxSuppressionV4Options TableType;
+struct ZerosLikeOptionsT : public ::flatbuffers::NativeTable {
+  typedef ZerosLikeOptions TableType;
 };
 
-struct NonMaxSuppressionV4Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef NonMaxSuppressionV4OptionsT NativeTableType;
-  typedef NonMaxSuppressionV4OptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct ZerosLikeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ZerosLikeOptionsT NativeTableType;
+  typedef ZerosLikeOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  NonMaxSuppressionV4OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(NonMaxSuppressionV4OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<NonMaxSuppressionV4Options> Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ZerosLikeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ZerosLikeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ZerosLikeOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct NonMaxSuppressionV4OptionsBuilder {
-  typedef NonMaxSuppressionV4Options Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit NonMaxSuppressionV4OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct ZerosLikeOptionsBuilder {
+  typedef ZerosLikeOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit ZerosLikeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
        : fbb_(_fbb) {
    start_ = fbb_.StartTable();
  }
-  flatbuffers::Offset<NonMaxSuppressionV4Options> Finish() {
+  ::flatbuffers::Offset<ZerosLikeOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<NonMaxSuppressionV4Options>(end);
+    auto o = ::flatbuffers::Offset<ZerosLikeOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<NonMaxSuppressionV4Options> CreateNonMaxSuppressionV4Options(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  NonMaxSuppressionV4OptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<ZerosLikeOptions> CreateZerosLikeOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  ZerosLikeOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<NonMaxSuppressionV4Options> CreateNonMaxSuppressionV4Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<ZerosLikeOptions> CreateZerosLikeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct NonMaxSuppressionV5OptionsT : public flatbuffers::NativeTable {
-  typedef NonMaxSuppressionV5Options TableType;
+struct FillOptionsT : public ::flatbuffers::NativeTable {
+  typedef FillOptions TableType;
 };
 
-struct NonMaxSuppressionV5Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef NonMaxSuppressionV5OptionsT NativeTableType;
-  typedef NonMaxSuppressionV5OptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct FillOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef FillOptionsT NativeTableType;
+  typedef FillOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
- 
NonMaxSuppressionV5OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(NonMaxSuppressionV5OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + FillOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(FillOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct NonMaxSuppressionV5OptionsBuilder { - typedef NonMaxSuppressionV5Options Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit NonMaxSuppressionV5OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct FillOptionsBuilder { + typedef FillOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit FillOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateNonMaxSuppressionV5Options( - flatbuffers::FlatBufferBuilder &_fbb) { - NonMaxSuppressionV5OptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateFillOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + FillOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateNonMaxSuppressionV5Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateFillOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ScatterNdOptionsT : public flatbuffers::NativeTable { - typedef ScatterNdOptions TableType; +struct FloorModOptionsT : public ::flatbuffers::NativeTable { + typedef FloorModOptions TableType; }; -struct ScatterNdOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ScatterNdOptionsT NativeTableType; - typedef ScatterNdOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct FloorModOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef FloorModOptionsT NativeTableType; + typedef FloorModOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - ScatterNdOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ScatterNdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + FloorModOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(FloorModOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT* _o, const ::flatbuffers::rehasher_function_t 
*_rehasher = nullptr); }; -struct ScatterNdOptionsBuilder { - typedef ScatterNdOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit ScatterNdOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct FloorModOptionsBuilder { + typedef FloorModOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit FloorModOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateScatterNdOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - ScatterNdOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateFloorModOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + FloorModOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateScatterNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateFloorModOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct SelectV2OptionsT : public flatbuffers::NativeTable { - typedef SelectV2Options TableType; +struct RangeOptionsT : public ::flatbuffers::NativeTable { + typedef RangeOptions TableType; }; -struct SelectV2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SelectV2OptionsT NativeTableType; - typedef SelectV2OptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct RangeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef RangeOptionsT NativeTableType; + typedef RangeOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - SelectV2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SelectV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + RangeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(RangeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SelectV2OptionsBuilder { - typedef SelectV2Options Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit SelectV2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct RangeOptionsBuilder { + typedef RangeOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit RangeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSelectV2Options( - flatbuffers::FlatBufferBuilder &_fbb) { - SelectV2OptionsBuilder 
builder_(_fbb); +inline ::flatbuffers::Offset CreateRangeOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + RangeOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateSelectV2Options(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateRangeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct DensifyOptionsT : public flatbuffers::NativeTable { - typedef DensifyOptions TableType; +struct LeakyReluOptionsT : public ::flatbuffers::NativeTable { + typedef LeakyReluOptions TableType; + float alpha = 0.0f; }; -struct DensifyOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef DensifyOptionsT NativeTableType; - typedef DensifyOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct LeakyReluOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef LeakyReluOptionsT NativeTableType; + typedef LeakyReluOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_ALPHA = 4 + }; + float alpha() const { + return GetField(VT_ALPHA, 0.0f); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_ALPHA, 4) && verifier.EndTable(); } - DensifyOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(DensifyOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + LeakyReluOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(LeakyReluOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct DensifyOptionsBuilder { - typedef DensifyOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit DensifyOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct LeakyReluOptionsBuilder { + typedef LeakyReluOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_alpha(float alpha) { + fbb_.AddElement(LeakyReluOptions::VT_ALPHA, alpha, 0.0f); + } + explicit LeakyReluOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateDensifyOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - DensifyOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateLeakyReluOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + float alpha = 0.0f) { + LeakyReluOptionsBuilder builder_(_fbb); + builder_.add_alpha(alpha); return builder_.Finish(); } -flatbuffers::Offset CreateDensifyOptions(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateLeakyReluOptions(::flatbuffers::FlatBufferBuilder &_fbb, const 
LeakyReluOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct SegmentSumOptionsT : public flatbuffers::NativeTable { - typedef SegmentSumOptions TableType; +struct SquaredDifferenceOptionsT : public ::flatbuffers::NativeTable { + typedef SquaredDifferenceOptions TableType; }; -struct SegmentSumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SegmentSumOptionsT NativeTableType; - typedef SegmentSumOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct SquaredDifferenceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SquaredDifferenceOptionsT NativeTableType; + typedef SquaredDifferenceOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - SegmentSumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + SquaredDifferenceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SquaredDifferenceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SegmentSumOptionsBuilder { - typedef SegmentSumOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit SegmentSumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct SquaredDifferenceOptionsBuilder { + typedef SquaredDifferenceOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit SquaredDifferenceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSegmentSumOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - SegmentSumOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateSquaredDifferenceOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + SquaredDifferenceOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSquaredDifferenceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct BatchMatMulOptionsT : public flatbuffers::NativeTable { - typedef BatchMatMulOptions TableType; - bool adj_x = false; - bool adj_y = false; - bool asymmetric_quantize_inputs = false; +struct MirrorPadOptionsT : public ::flatbuffers::NativeTable { + typedef MirrorPadOptions TableType; + tflite::MirrorPadMode mode = tflite::MirrorPadMode_REFLECT; }; -struct BatchMatMulOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef BatchMatMulOptionsT NativeTableType; - typedef BatchMatMulOptionsBuilder Builder; 
+struct MirrorPadOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef MirrorPadOptionsT NativeTableType; + typedef MirrorPadOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_ADJ_X = 4, - VT_ADJ_Y = 6, - VT_ASYMMETRIC_QUANTIZE_INPUTS = 8 + VT_MODE = 4 }; - bool adj_x() const { - return GetField(VT_ADJ_X, 0) != 0; - } - bool adj_y() const { - return GetField(VT_ADJ_Y, 0) != 0; - } - bool asymmetric_quantize_inputs() const { - return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + tflite::MirrorPadMode mode() const { + return static_cast(GetField(VT_MODE, 0)); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_ADJ_X, 1) && - VerifyField(verifier, VT_ADJ_Y, 1) && - VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && + VerifyField(verifier, VT_MODE, 1) && verifier.EndTable(); } - BatchMatMulOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(BatchMatMulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + MirrorPadOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MirrorPadOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct BatchMatMulOptionsBuilder { - typedef BatchMatMulOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_adj_x(bool adj_x) { - fbb_.AddElement(BatchMatMulOptions::VT_ADJ_X, static_cast(adj_x), 0); - } - void add_adj_y(bool adj_y) { - fbb_.AddElement(BatchMatMulOptions::VT_ADJ_Y, static_cast(adj_y), 0); - } - void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { - fbb_.AddElement(BatchMatMulOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); +struct MirrorPadOptionsBuilder { + typedef MirrorPadOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_mode(tflite::MirrorPadMode mode) { + fbb_.AddElement(MirrorPadOptions::VT_MODE, static_cast(mode), 0); } - explicit BatchMatMulOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit MirrorPadOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateBatchMatMulOptions( - flatbuffers::FlatBufferBuilder &_fbb, - bool adj_x = false, - bool adj_y = false, - bool asymmetric_quantize_inputs = false) { - BatchMatMulOptionsBuilder builder_(_fbb); - builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); - builder_.add_adj_y(adj_y); - builder_.add_adj_x(adj_x); +inline ::flatbuffers::Offset CreateMirrorPadOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::MirrorPadMode mode = tflite::MirrorPadMode_REFLECT) { + MirrorPadOptionsBuilder builder_(_fbb); + builder_.add_mode(mode); return builder_.Finish(); } 
-flatbuffers::Offset CreateBatchMatMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateMirrorPadOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct CumsumOptionsT : public flatbuffers::NativeTable { - typedef CumsumOptions TableType; - bool exclusive = false; - bool reverse = false; +struct UniqueOptionsT : public ::flatbuffers::NativeTable { + typedef UniqueOptions TableType; + tflite::TensorType idx_out_type = tflite::TensorType_INT32; }; -struct CumsumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef CumsumOptionsT NativeTableType; - typedef CumsumOptionsBuilder Builder; +struct UniqueOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef UniqueOptionsT NativeTableType; + typedef UniqueOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_EXCLUSIVE = 4, - VT_REVERSE = 6 + VT_IDX_OUT_TYPE = 4 }; - bool exclusive() const { - return GetField(VT_EXCLUSIVE, 0) != 0; - } - bool reverse() const { - return GetField(VT_REVERSE, 0) != 0; + tflite::TensorType idx_out_type() const { + return static_cast(GetField(VT_IDX_OUT_TYPE, 2)); } - bool Verify(flatbuffers::Verifier &verifier) const { + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_EXCLUSIVE, 1) && - VerifyField(verifier, VT_REVERSE, 1) && + VerifyField(verifier, VT_IDX_OUT_TYPE, 1) && verifier.EndTable(); } - CumsumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(CumsumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + UniqueOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UniqueOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct CumsumOptionsBuilder { - typedef CumsumOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_exclusive(bool exclusive) { - fbb_.AddElement(CumsumOptions::VT_EXCLUSIVE, static_cast(exclusive), 0); - } - void add_reverse(bool reverse) { - fbb_.AddElement(CumsumOptions::VT_REVERSE, static_cast(reverse), 0); +struct UniqueOptionsBuilder { + typedef UniqueOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_idx_out_type(tflite::TensorType idx_out_type) { + fbb_.AddElement(UniqueOptions::VT_IDX_OUT_TYPE, static_cast(idx_out_type), 2); } - explicit CumsumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) + explicit UniqueOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateCumsumOptions( - flatbuffers::FlatBufferBuilder &_fbb, - bool exclusive = false, - bool reverse = false) { - 
CumsumOptionsBuilder builder_(_fbb); - builder_.add_reverse(reverse); - builder_.add_exclusive(exclusive); +inline ::flatbuffers::Offset CreateUniqueOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::TensorType idx_out_type = tflite::TensorType_INT32) { + UniqueOptionsBuilder builder_(_fbb); + builder_.add_idx_out_type(idx_out_type); return builder_.Finish(); } -flatbuffers::Offset CreateCumsumOptions(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateUniqueOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct BroadcastToOptionsT : public flatbuffers::NativeTable { - typedef BroadcastToOptions TableType; +struct ReverseV2OptionsT : public ::flatbuffers::NativeTable { + typedef ReverseV2Options TableType; }; -struct BroadcastToOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef BroadcastToOptionsT NativeTableType; - typedef BroadcastToOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct ReverseV2Options FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ReverseV2OptionsT NativeTableType; + typedef ReverseV2OptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - BroadcastToOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(BroadcastToOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ReverseV2OptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReverseV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct BroadcastToOptionsBuilder { - typedef BroadcastToOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit BroadcastToOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct ReverseV2OptionsBuilder { + typedef ReverseV2Options Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit ReverseV2OptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateBroadcastToOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - BroadcastToOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateReverseV2Options( + ::flatbuffers::FlatBufferBuilder &_fbb) { + ReverseV2OptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateBroadcastToOptions(flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateReverseV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct 
Rfft2dOptionsT : public flatbuffers::NativeTable { - typedef Rfft2dOptions TableType; +struct AddNOptionsT : public ::flatbuffers::NativeTable { + typedef AddNOptions TableType; }; -struct Rfft2dOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef Rfft2dOptionsT NativeTableType; - typedef Rfft2dOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct AddNOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef AddNOptionsT NativeTableType; + typedef AddNOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - Rfft2dOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(Rfft2dOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + AddNOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(AddNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct Rfft2dOptionsBuilder { - typedef Rfft2dOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit Rfft2dOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct AddNOptionsBuilder { + typedef AddNOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit AddNOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateRfft2dOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - Rfft2dOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateAddNOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + AddNOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateRfft2dOptions(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateAddNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct HashtableOptionsT : public flatbuffers::NativeTable { - typedef HashtableOptions TableType; - int32_t table_id = 0; - tflite::TensorType key_dtype = tflite::TensorType_FLOAT32; - tflite::TensorType value_dtype = tflite::TensorType_FLOAT32; +struct GatherNdOptionsT : public ::flatbuffers::NativeTable { + typedef GatherNdOptions TableType; }; -struct HashtableOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef HashtableOptionsT NativeTableType; - typedef HashtableOptionsBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_TABLE_ID = 4, - VT_KEY_DTYPE = 6, - VT_VALUE_DTYPE = 8 - }; - int32_t table_id() const { - return GetField(VT_TABLE_ID, 0); - } - tflite::TensorType key_dtype() const { - return static_cast(GetField(VT_KEY_DTYPE, 0)); - } - tflite::TensorType value_dtype() const { - return 
static_cast(GetField(VT_VALUE_DTYPE, 0)); - } - bool Verify(flatbuffers::Verifier &verifier) const { +struct GatherNdOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef GatherNdOptionsT NativeTableType; + typedef GatherNdOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_TABLE_ID, 4) && - VerifyField(verifier, VT_KEY_DTYPE, 1) && - VerifyField(verifier, VT_VALUE_DTYPE, 1) && verifier.EndTable(); } - HashtableOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(HashtableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + GatherNdOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(GatherNdOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct HashtableOptionsBuilder { - typedef HashtableOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_table_id(int32_t table_id) { - fbb_.AddElement(HashtableOptions::VT_TABLE_ID, table_id, 0); - } - void add_key_dtype(tflite::TensorType key_dtype) { - fbb_.AddElement(HashtableOptions::VT_KEY_DTYPE, static_cast(key_dtype), 0); - } - void add_value_dtype(tflite::TensorType value_dtype) { - fbb_.AddElement(HashtableOptions::VT_VALUE_DTYPE, static_cast(value_dtype), 0); - } - explicit HashtableOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct GatherNdOptionsBuilder { + typedef GatherNdOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit GatherNdOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateHashtableOptions( - flatbuffers::FlatBufferBuilder &_fbb, - int32_t table_id = 0, - tflite::TensorType key_dtype = tflite::TensorType_FLOAT32, - tflite::TensorType value_dtype = tflite::TensorType_FLOAT32) { - HashtableOptionsBuilder builder_(_fbb); - builder_.add_table_id(table_id); - builder_.add_value_dtype(value_dtype); - builder_.add_key_dtype(key_dtype); +inline ::flatbuffers::Offset CreateGatherNdOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + GatherNdOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateHashtableOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateGatherNdOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct HashtableFindOptionsT : public flatbuffers::NativeTable { - typedef HashtableFindOptions TableType; +struct WhereOptionsT : public ::flatbuffers::NativeTable { + typedef WhereOptions TableType; }; -struct HashtableFindOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef HashtableFindOptionsT 
NativeTableType; - typedef HashtableFindOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct WhereOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef WhereOptionsT NativeTableType; + typedef WhereOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - HashtableFindOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(HashtableFindOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + WhereOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(WhereOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct HashtableFindOptionsBuilder { - typedef HashtableFindOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit HashtableFindOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct WhereOptionsBuilder { + typedef WhereOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit WhereOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateHashtableFindOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - HashtableFindOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateWhereOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + WhereOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateHashtableFindOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateWhereOptions(::flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct HashtableImportOptionsT : public flatbuffers::NativeTable { - typedef HashtableImportOptions TableType; +struct ReverseSequenceOptionsT : public ::flatbuffers::NativeTable { + typedef ReverseSequenceOptions TableType; + int32_t seq_dim = 0; + int32_t batch_dim = 0; }; -struct HashtableImportOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef HashtableImportOptionsT NativeTableType; - typedef HashtableImportOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct ReverseSequenceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ReverseSequenceOptionsT NativeTableType; + typedef ReverseSequenceOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_SEQ_DIM = 4, + VT_BATCH_DIM = 6 + }; + int32_t seq_dim() const { + return GetField(VT_SEQ_DIM, 0); + } + int32_t batch_dim() const { + return GetField(VT_BATCH_DIM, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_SEQ_DIM, 
4) && + VerifyField(verifier, VT_BATCH_DIM, 4) && verifier.EndTable(); } - HashtableImportOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(HashtableImportOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + ReverseSequenceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReverseSequenceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct HashtableImportOptionsBuilder { - typedef HashtableImportOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit HashtableImportOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct ReverseSequenceOptionsBuilder { + typedef ReverseSequenceOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_seq_dim(int32_t seq_dim) { + fbb_.AddElement(ReverseSequenceOptions::VT_SEQ_DIM, seq_dim, 0); + } + void add_batch_dim(int32_t batch_dim) { + fbb_.AddElement(ReverseSequenceOptions::VT_BATCH_DIM, batch_dim, 0); + } + explicit ReverseSequenceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateHashtableImportOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - HashtableImportOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateReverseSequenceOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t seq_dim = 0, + int32_t batch_dim = 0) { + ReverseSequenceOptionsBuilder builder_(_fbb); + builder_.add_batch_dim(batch_dim); + builder_.add_seq_dim(seq_dim); return builder_.Finish(); } -flatbuffers::Offset CreateHashtableImportOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateReverseSequenceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct HashtableSizeOptionsT : public flatbuffers::NativeTable { - typedef HashtableSizeOptions TableType; +struct MatrixDiagOptionsT : public ::flatbuffers::NativeTable { + typedef MatrixDiagOptions TableType; }; -struct HashtableSizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef HashtableSizeOptionsT NativeTableType; - typedef HashtableSizeOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct MatrixDiagOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef MatrixDiagOptionsT NativeTableType; + typedef MatrixDiagOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - HashtableSizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(HashtableSizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = 
nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + MatrixDiagOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MatrixDiagOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct HashtableSizeOptionsBuilder { - typedef HashtableSizeOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit HashtableSizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct MatrixDiagOptionsBuilder { + typedef MatrixDiagOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit MatrixDiagOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateHashtableSizeOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - HashtableSizeOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateMatrixDiagOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + MatrixDiagOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateHashtableSizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateMatrixDiagOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct VarHandleOptionsT : public flatbuffers::NativeTable { - typedef VarHandleOptions TableType; - std::string container{}; - std::string shared_name{}; +struct QuantizeOptionsT : public ::flatbuffers::NativeTable { + typedef QuantizeOptions TableType; }; -struct VarHandleOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef VarHandleOptionsT NativeTableType; - typedef VarHandleOptionsBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_CONTAINER = 4, - VT_SHARED_NAME = 6 - }; - const flatbuffers::String *container() const { - return GetPointer(VT_CONTAINER); - } - const flatbuffers::String *shared_name() const { - return GetPointer(VT_SHARED_NAME); - } - bool Verify(flatbuffers::Verifier &verifier) const { +struct QuantizeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef QuantizeOptionsT NativeTableType; + typedef QuantizeOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyOffset(verifier, VT_CONTAINER) && - verifier.VerifyString(container()) && - VerifyOffset(verifier, VT_SHARED_NAME) && - verifier.VerifyString(shared_name()) && verifier.EndTable(); } - VarHandleOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(VarHandleOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + QuantizeOptionsT 
*UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(QuantizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct VarHandleOptionsBuilder { - typedef VarHandleOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_container(flatbuffers::Offset container) { - fbb_.AddOffset(VarHandleOptions::VT_CONTAINER, container); - } - void add_shared_name(flatbuffers::Offset shared_name) { - fbb_.AddOffset(VarHandleOptions::VT_SHARED_NAME, shared_name); - } - explicit VarHandleOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct QuantizeOptionsBuilder { + typedef QuantizeOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit QuantizeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateVarHandleOptions( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset container = 0, - flatbuffers::Offset shared_name = 0) { - VarHandleOptionsBuilder builder_(_fbb); - builder_.add_shared_name(shared_name); - builder_.add_container(container); +inline ::flatbuffers::Offset CreateQuantizeOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + QuantizeOptionsBuilder builder_(_fbb); return builder_.Finish(); } -inline flatbuffers::Offset CreateVarHandleOptionsDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const char *container = nullptr, - const char *shared_name = nullptr) { - auto container__ = container ? _fbb.CreateString(container) : 0; - auto shared_name__ = shared_name ? 
_fbb.CreateString(shared_name) : 0; - return tflite::CreateVarHandleOptions( - _fbb, - container__, - shared_name__); -} - -flatbuffers::Offset CreateVarHandleOptions(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateQuantizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ReadVariableOptionsT : public flatbuffers::NativeTable { - typedef ReadVariableOptions TableType; +struct MatrixSetDiagOptionsT : public ::flatbuffers::NativeTable { + typedef MatrixSetDiagOptions TableType; }; -struct ReadVariableOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ReadVariableOptionsT NativeTableType; - typedef ReadVariableOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct MatrixSetDiagOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef MatrixSetDiagOptionsT NativeTableType; + typedef MatrixSetDiagOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - ReadVariableOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ReadVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + MatrixSetDiagOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MatrixSetDiagOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct ReadVariableOptionsBuilder { - typedef ReadVariableOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit ReadVariableOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct MatrixSetDiagOptionsBuilder { + typedef MatrixSetDiagOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit MatrixSetDiagOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateReadVariableOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - ReadVariableOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateMatrixSetDiagOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + MatrixSetDiagOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateReadVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateMatrixSetDiagOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct AssignVariableOptionsT : public flatbuffers::NativeTable { - typedef AssignVariableOptions TableType; +struct IfOptionsT : public ::flatbuffers::NativeTable { + typedef 
IfOptions TableType; + int32_t then_subgraph_index = 0; + int32_t else_subgraph_index = 0; }; -struct AssignVariableOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef AssignVariableOptionsT NativeTableType; - typedef AssignVariableOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct IfOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef IfOptionsT NativeTableType; + typedef IfOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_THEN_SUBGRAPH_INDEX = 4, + VT_ELSE_SUBGRAPH_INDEX = 6 + }; + int32_t then_subgraph_index() const { + return GetField(VT_THEN_SUBGRAPH_INDEX, 0); + } + int32_t else_subgraph_index() const { + return GetField(VT_ELSE_SUBGRAPH_INDEX, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_THEN_SUBGRAPH_INDEX, 4) && + VerifyField(verifier, VT_ELSE_SUBGRAPH_INDEX, 4) && verifier.EndTable(); } - AssignVariableOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(AssignVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + IfOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(IfOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct AssignVariableOptionsBuilder { - typedef AssignVariableOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit AssignVariableOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct IfOptionsBuilder { + typedef IfOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_then_subgraph_index(int32_t then_subgraph_index) { + fbb_.AddElement(IfOptions::VT_THEN_SUBGRAPH_INDEX, then_subgraph_index, 0); + } + void add_else_subgraph_index(int32_t else_subgraph_index) { + fbb_.AddElement(IfOptions::VT_ELSE_SUBGRAPH_INDEX, else_subgraph_index, 0); + } + explicit IfOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateAssignVariableOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - AssignVariableOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateIfOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t then_subgraph_index = 0, + int32_t else_subgraph_index = 0) { + IfOptionsBuilder builder_(_fbb); + builder_.add_else_subgraph_index(else_subgraph_index); + builder_.add_then_subgraph_index(then_subgraph_index); return builder_.Finish(); } -flatbuffers::Offset CreateAssignVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateIfOptions(::flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); 
-struct RandomOptionsT : public flatbuffers::NativeTable {
-  typedef RandomOptions TableType;
-  int64_t seed = 0;
-  int64_t seed2 = 0;
+struct CallOnceOptionsT : public ::flatbuffers::NativeTable {
+  typedef CallOnceOptions TableType;
+  int32_t init_subgraph_index = 0;
 };
 
-struct RandomOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef RandomOptionsT NativeTableType;
-  typedef RandomOptionsBuilder Builder;
+struct CallOnceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef CallOnceOptionsT NativeTableType;
+  typedef CallOnceOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_SEED = 4,
-    VT_SEED2 = 6
+    VT_INIT_SUBGRAPH_INDEX = 4
   };
-  int64_t seed() const {
-    return GetField<int64_t>(VT_SEED, 0);
-  }
-  int64_t seed2() const {
-    return GetField<int64_t>(VT_SEED2, 0);
+  int32_t init_subgraph_index() const {
+    return GetField<int32_t>(VT_INIT_SUBGRAPH_INDEX, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<int64_t>(verifier, VT_SEED, 8) &&
-           VerifyField<int64_t>(verifier, VT_SEED2, 8) &&
+           VerifyField<int32_t>(verifier, VT_INIT_SUBGRAPH_INDEX, 4) &&
            verifier.EndTable();
   }
-  RandomOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(RandomOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<RandomOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  CallOnceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(CallOnceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<CallOnceOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct RandomOptionsBuilder {
-  typedef RandomOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_seed(int64_t seed) {
-    fbb_.AddElement<int64_t>(RandomOptions::VT_SEED, seed, 0);
-  }
-  void add_seed2(int64_t seed2) {
-    fbb_.AddElement<int64_t>(RandomOptions::VT_SEED2, seed2, 0);
+struct CallOnceOptionsBuilder {
+  typedef CallOnceOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_init_subgraph_index(int32_t init_subgraph_index) {
+    fbb_.AddElement<int32_t>(CallOnceOptions::VT_INIT_SUBGRAPH_INDEX, init_subgraph_index, 0);
   }
-  explicit RandomOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit CallOnceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<RandomOptions> Finish() {
+  ::flatbuffers::Offset<CallOnceOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<RandomOptions>(end);
+    auto o = ::flatbuffers::Offset<CallOnceOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<RandomOptions> CreateRandomOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    int64_t seed = 0,
-    int64_t seed2 = 0) {
-  RandomOptionsBuilder builder_(_fbb);
-  builder_.add_seed2(seed2);
-  builder_.add_seed(seed);
+inline ::flatbuffers::Offset<CallOnceOptions> CreateCallOnceOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t init_subgraph_index = 0) {
+  CallOnceOptionsBuilder builder_(_fbb);
+  builder_.add_init_subgraph_index(init_subgraph_index);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<RandomOptions> CreateRandomOptions(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<CallOnceOptions> CreateCallOnceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct BucketizeOptionsT : public flatbuffers::NativeTable {
-  typedef BucketizeOptions TableType;
-  std::vector<float> boundaries{};
+struct WhileOptionsT : public ::flatbuffers::NativeTable {
+  typedef WhileOptions TableType;
+  int32_t cond_subgraph_index = 0;
+  int32_t body_subgraph_index = 0;
 };
 
-struct BucketizeOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef BucketizeOptionsT NativeTableType;
-  typedef BucketizeOptionsBuilder Builder;
+struct WhileOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef WhileOptionsT NativeTableType;
+  typedef WhileOptionsBuilder Builder;
   enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_BOUNDARIES = 4
+    VT_COND_SUBGRAPH_INDEX = 4,
+    VT_BODY_SUBGRAPH_INDEX = 6
   };
-  const flatbuffers::Vector<float> *boundaries() const {
-    return GetPointer<const flatbuffers::Vector<float> *>(VT_BOUNDARIES);
+  int32_t cond_subgraph_index() const {
+    return GetField<int32_t>(VT_COND_SUBGRAPH_INDEX, 0);
+  }
+  int32_t body_subgraph_index() const {
+    return GetField<int32_t>(VT_BODY_SUBGRAPH_INDEX, 0);
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyOffset(verifier, VT_BOUNDARIES) &&
-           verifier.VerifyVector(boundaries()) &&
+           VerifyField<int32_t>(verifier, VT_COND_SUBGRAPH_INDEX, 4) &&
+           VerifyField<int32_t>(verifier, VT_BODY_SUBGRAPH_INDEX, 4) &&
            verifier.EndTable();
   }
-  BucketizeOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(BucketizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<BucketizeOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  WhileOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(WhileOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<WhileOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct BucketizeOptionsBuilder {
-  typedef BucketizeOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_boundaries(flatbuffers::Offset<flatbuffers::Vector<float>> boundaries) {
-    fbb_.AddOffset(BucketizeOptions::VT_BOUNDARIES, boundaries);
+struct WhileOptionsBuilder {
+  typedef WhileOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_cond_subgraph_index(int32_t cond_subgraph_index) {
+    fbb_.AddElement<int32_t>(WhileOptions::VT_COND_SUBGRAPH_INDEX, cond_subgraph_index, 0);
+  }
+  void add_body_subgraph_index(int32_t body_subgraph_index) {
+    fbb_.AddElement<int32_t>(WhileOptions::VT_BODY_SUBGRAPH_INDEX, body_subgraph_index, 0);
   }
-  explicit BucketizeOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+  explicit WhileOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<BucketizeOptions> Finish() {
+  ::flatbuffers::Offset<WhileOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<BucketizeOptions>(end);
+    auto o = ::flatbuffers::Offset<WhileOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    flatbuffers::Offset<flatbuffers::Vector<float>> boundaries = 0) {
-  BucketizeOptionsBuilder builder_(_fbb);
-  builder_.add_boundaries(boundaries);
+inline ::flatbuffers::Offset<WhileOptions> CreateWhileOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int32_t cond_subgraph_index = 0,
+    int32_t body_subgraph_index = 0) {
+  WhileOptionsBuilder builder_(_fbb);
+  builder_.add_body_subgraph_index(body_subgraph_index);
+  builder_.add_cond_subgraph_index(cond_subgraph_index);
   return builder_.Finish();
 }
 
-inline flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptionsDirect(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    const std::vector<float> *boundaries = nullptr) {
-  auto boundaries__ = boundaries ? _fbb.CreateVector<float>(*boundaries) : 0;
-  return tflite::CreateBucketizeOptions(
-      _fbb,
-      boundaries__);
-}
-
-flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<WhileOptions> CreateWhileOptions(::flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct GeluOptionsT : public flatbuffers::NativeTable {
-  typedef GeluOptions TableType;
-  bool approximate = false;
+struct NonMaxSuppressionV4OptionsT : public ::flatbuffers::NativeTable {
+  typedef NonMaxSuppressionV4Options TableType;
 };
 
-struct GeluOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef GeluOptionsT NativeTableType;
-  typedef GeluOptionsBuilder Builder;
-  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
-    VT_APPROXIMATE = 4
-  };
-  bool approximate() const {
-    return GetField<uint8_t>(VT_APPROXIMATE, 0) != 0;
-  }
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct NonMaxSuppressionV4Options FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef NonMaxSuppressionV4OptionsT NativeTableType;
+  typedef NonMaxSuppressionV4OptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
-           VerifyField<uint8_t>(verifier, VT_APPROXIMATE, 1) &&
           verifier.EndTable();
   }
-  GeluOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(GeluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<GeluOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  NonMaxSuppressionV4OptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(NonMaxSuppressionV4OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<NonMaxSuppressionV4Options> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct GeluOptionsBuilder {
-  typedef GeluOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  void add_approximate(bool approximate) {
-    fbb_.AddElement<uint8_t>(GeluOptions::VT_APPROXIMATE, static_cast<uint8_t>(approximate), 0);
-  }
-  explicit GeluOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct NonMaxSuppressionV4OptionsBuilder {
+  typedef NonMaxSuppressionV4Options Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit NonMaxSuppressionV4OptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<GeluOptions> Finish() {
+  ::flatbuffers::Offset<NonMaxSuppressionV4Options> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<GeluOptions>(end);
+    auto o = ::flatbuffers::Offset<NonMaxSuppressionV4Options>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<GeluOptions> CreateGeluOptions(
-    flatbuffers::FlatBufferBuilder &_fbb,
-    bool approximate = false) {
-  GeluOptionsBuilder builder_(_fbb);
-  builder_.add_approximate(approximate);
+inline ::flatbuffers::Offset<NonMaxSuppressionV4Options> CreateNonMaxSuppressionV4Options(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  NonMaxSuppressionV4OptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<GeluOptions> CreateGeluOptions(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<NonMaxSuppressionV4Options> CreateNonMaxSuppressionV4Options(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct DynamicUpdateSliceOptionsT : public flatbuffers::NativeTable {
-  typedef DynamicUpdateSliceOptions TableType;
+struct NonMaxSuppressionV5OptionsT : public ::flatbuffers::NativeTable {
+  typedef NonMaxSuppressionV5Options TableType;
 };
 
-struct DynamicUpdateSliceOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef DynamicUpdateSliceOptionsT NativeTableType;
-  typedef DynamicUpdateSliceOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct NonMaxSuppressionV5Options FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef NonMaxSuppressionV5OptionsT NativeTableType;
+  typedef NonMaxSuppressionV5OptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  DynamicUpdateSliceOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(DynamicUpdateSliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<DynamicUpdateSliceOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  NonMaxSuppressionV5OptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(NonMaxSuppressionV5OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<NonMaxSuppressionV5Options> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct DynamicUpdateSliceOptionsBuilder {
-  typedef DynamicUpdateSliceOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit DynamicUpdateSliceOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct NonMaxSuppressionV5OptionsBuilder {
+  typedef NonMaxSuppressionV5Options Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit NonMaxSuppressionV5OptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<DynamicUpdateSliceOptions> Finish() {
+  ::flatbuffers::Offset<NonMaxSuppressionV5Options> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<DynamicUpdateSliceOptions>(end);
+    auto o = ::flatbuffers::Offset<NonMaxSuppressionV5Options>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<DynamicUpdateSliceOptions> CreateDynamicUpdateSliceOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  DynamicUpdateSliceOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<NonMaxSuppressionV5Options> CreateNonMaxSuppressionV5Options(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  NonMaxSuppressionV5OptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<DynamicUpdateSliceOptions> CreateDynamicUpdateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<NonMaxSuppressionV5Options> CreateNonMaxSuppressionV5Options(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct UnsortedSegmentProdOptionsT : public flatbuffers::NativeTable {
-  typedef UnsortedSegmentProdOptions TableType;
+struct ScatterNdOptionsT : public ::flatbuffers::NativeTable {
+  typedef ScatterNdOptions TableType;
 };
 
-struct UnsortedSegmentProdOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table {
-  typedef UnsortedSegmentProdOptionsT NativeTableType;
-  typedef UnsortedSegmentProdOptionsBuilder Builder;
-  bool Verify(flatbuffers::Verifier &verifier) const {
+struct ScatterNdOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef ScatterNdOptionsT NativeTableType;
+  typedef ScatterNdOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
     return VerifyTableStart(verifier) &&
            verifier.EndTable();
   }
-  UnsortedSegmentProdOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(UnsortedSegmentProdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<UnsortedSegmentProdOptions> Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+  ScatterNdOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(ScatterNdOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<ScatterNdOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 };
 
-struct UnsortedSegmentProdOptionsBuilder {
-  typedef UnsortedSegmentProdOptions Table;
-  flatbuffers::FlatBufferBuilder &fbb_;
-  flatbuffers::uoffset_t start_;
-  explicit UnsortedSegmentProdOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb)
+struct ScatterNdOptionsBuilder {
+  typedef ScatterNdOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit ScatterNdOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
         : fbb_(_fbb) {
     start_ = fbb_.StartTable();
   }
-  flatbuffers::Offset<UnsortedSegmentProdOptions> Finish() {
+  ::flatbuffers::Offset<ScatterNdOptions> Finish() {
     const auto end = fbb_.EndTable(start_);
-    auto o = flatbuffers::Offset<UnsortedSegmentProdOptions>(end);
+    auto o = ::flatbuffers::Offset<ScatterNdOptions>(end);
     return o;
   }
 };
 
-inline flatbuffers::Offset<UnsortedSegmentProdOptions> CreateUnsortedSegmentProdOptions(
-    flatbuffers::FlatBufferBuilder &_fbb) {
-  UnsortedSegmentProdOptionsBuilder builder_(_fbb);
+inline ::flatbuffers::Offset<ScatterNdOptions> CreateScatterNdOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  ScatterNdOptionsBuilder builder_(_fbb);
   return builder_.Finish();
 }
 
-flatbuffers::Offset<UnsortedSegmentProdOptions> CreateUnsortedSegmentProdOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
+::flatbuffers::Offset<ScatterNdOptions> CreateScatterNdOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
 
-struct UnsortedSegmentMaxOptionsT : public flatbuffers::NativeTable {
-  typedef UnsortedSegmentMaxOptions TableType;
+struct SelectV2OptionsT : public ::flatbuffers::NativeTable {
+  typedef SelectV2Options TableType;
 };
 
-struct UnsortedSegmentMaxOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef UnsortedSegmentMaxOptionsT NativeTableType; - typedef UnsortedSegmentMaxOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct SelectV2Options FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SelectV2OptionsT NativeTableType; + typedef SelectV2OptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - UnsortedSegmentMaxOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(UnsortedSegmentMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + SelectV2OptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SelectV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct UnsortedSegmentMaxOptionsBuilder { - typedef UnsortedSegmentMaxOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit UnsortedSegmentMaxOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct SelectV2OptionsBuilder { + typedef SelectV2Options Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit SelectV2OptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateUnsortedSegmentMaxOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - UnsortedSegmentMaxOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateSelectV2Options( + ::flatbuffers::FlatBufferBuilder &_fbb) { + SelectV2OptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateUnsortedSegmentMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSelectV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct UnsortedSegmentSumOptionsT : public flatbuffers::NativeTable { - typedef UnsortedSegmentSumOptions TableType; +struct DensifyOptionsT : public ::flatbuffers::NativeTable { + typedef DensifyOptions TableType; }; -struct UnsortedSegmentSumOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef UnsortedSegmentSumOptionsT NativeTableType; - typedef UnsortedSegmentSumOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct DensifyOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef DensifyOptionsT NativeTableType; + typedef DensifyOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - UnsortedSegmentSumOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void 
UnPackTo(UnsortedSegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + DensifyOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DensifyOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct UnsortedSegmentSumOptionsBuilder { - typedef UnsortedSegmentSumOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit UnsortedSegmentSumOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct DensifyOptionsBuilder { + typedef DensifyOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit DensifyOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateUnsortedSegmentSumOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - UnsortedSegmentSumOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateDensifyOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + DensifyOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateUnsortedSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateDensifyOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct ATan2OptionsT : public flatbuffers::NativeTable { - typedef ATan2Options TableType; +struct SegmentSumOptionsT : public ::flatbuffers::NativeTable { + typedef SegmentSumOptions TableType; }; -struct ATan2Options FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ATan2OptionsT NativeTableType; - typedef ATan2OptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && +struct SegmentSumOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SegmentSumOptionsT NativeTableType; + typedef SegmentSumOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && verifier.EndTable(); } - ATan2OptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ATan2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + SegmentSumOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SegmentSumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct ATan2OptionsBuilder { - typedef 
ATan2Options Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit ATan2OptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct SegmentSumOptionsBuilder { + typedef SegmentSumOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit SegmentSumOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateATan2Options( - flatbuffers::FlatBufferBuilder &_fbb) { - ATan2OptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateSegmentSumOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + SegmentSumOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateATan2Options(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateSegmentSumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct UnsortedSegmentMinOptionsT : public flatbuffers::NativeTable { - typedef UnsortedSegmentMinOptions TableType; +struct BatchMatMulOptionsT : public ::flatbuffers::NativeTable { + typedef BatchMatMulOptions TableType; + bool adj_x = false; + bool adj_y = false; + bool asymmetric_quantize_inputs = false; }; -struct UnsortedSegmentMinOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef UnsortedSegmentMinOptionsT NativeTableType; - typedef UnsortedSegmentMinOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct BatchMatMulOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef BatchMatMulOptionsT NativeTableType; + typedef BatchMatMulOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_ADJ_X = 4, + VT_ADJ_Y = 6, + VT_ASYMMETRIC_QUANTIZE_INPUTS = 8 + }; + bool adj_x() const { + return GetField(VT_ADJ_X, 0) != 0; + } + bool adj_y() const { + return GetField(VT_ADJ_Y, 0) != 0; + } + bool asymmetric_quantize_inputs() const { + return GetField(VT_ASYMMETRIC_QUANTIZE_INPUTS, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_ADJ_X, 1) && + VerifyField(verifier, VT_ADJ_Y, 1) && + VerifyField(verifier, VT_ASYMMETRIC_QUANTIZE_INPUTS, 1) && verifier.EndTable(); } - UnsortedSegmentMinOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(UnsortedSegmentMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + BatchMatMulOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BatchMatMulOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct UnsortedSegmentMinOptionsBuilder { - typedef UnsortedSegmentMinOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - 
flatbuffers::uoffset_t start_; - explicit UnsortedSegmentMinOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct BatchMatMulOptionsBuilder { + typedef BatchMatMulOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_adj_x(bool adj_x) { + fbb_.AddElement(BatchMatMulOptions::VT_ADJ_X, static_cast(adj_x), 0); + } + void add_adj_y(bool adj_y) { + fbb_.AddElement(BatchMatMulOptions::VT_ADJ_Y, static_cast(adj_y), 0); + } + void add_asymmetric_quantize_inputs(bool asymmetric_quantize_inputs) { + fbb_.AddElement(BatchMatMulOptions::VT_ASYMMETRIC_QUANTIZE_INPUTS, static_cast(asymmetric_quantize_inputs), 0); + } + explicit BatchMatMulOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateUnsortedSegmentMinOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - UnsortedSegmentMinOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateBatchMatMulOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + bool adj_x = false, + bool adj_y = false, + bool asymmetric_quantize_inputs = false) { + BatchMatMulOptionsBuilder builder_(_fbb); + builder_.add_asymmetric_quantize_inputs(asymmetric_quantize_inputs); + builder_.add_adj_y(adj_y); + builder_.add_adj_x(adj_x); return builder_.Finish(); } -flatbuffers::Offset CreateUnsortedSegmentMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateBatchMatMulOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct SignOptionsT : public flatbuffers::NativeTable { - typedef SignOptions TableType; +struct CumsumOptionsT : public ::flatbuffers::NativeTable { + typedef CumsumOptions TableType; + bool exclusive = false; + bool reverse = false; }; -struct SignOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SignOptionsT NativeTableType; - typedef SignOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct CumsumOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef CumsumOptionsT NativeTableType; + typedef CumsumOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_EXCLUSIVE = 4, + VT_REVERSE = 6 + }; + bool exclusive() const { + return GetField(VT_EXCLUSIVE, 0) != 0; + } + bool reverse() const { + return GetField(VT_REVERSE, 0) != 0; + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_EXCLUSIVE, 1) && + VerifyField(verifier, VT_REVERSE, 1) && verifier.EndTable(); } - SignOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SignOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + CumsumOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(CumsumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset 
Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct SignOptionsBuilder { - typedef SignOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit SignOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct CumsumOptionsBuilder { + typedef CumsumOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_exclusive(bool exclusive) { + fbb_.AddElement(CumsumOptions::VT_EXCLUSIVE, static_cast(exclusive), 0); + } + void add_reverse(bool reverse) { + fbb_.AddElement(CumsumOptions::VT_REVERSE, static_cast(reverse), 0); + } + explicit CumsumOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateSignOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - SignOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateCumsumOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + bool exclusive = false, + bool reverse = false) { + CumsumOptionsBuilder builder_(_fbb); + builder_.add_reverse(reverse); + builder_.add_exclusive(exclusive); return builder_.Finish(); } -flatbuffers::Offset CreateSignOptions(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateCumsumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct BitcastOptionsT : public flatbuffers::NativeTable { - typedef BitcastOptions TableType; +struct BroadcastToOptionsT : public ::flatbuffers::NativeTable { + typedef BroadcastToOptions TableType; }; -struct BitcastOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef BitcastOptionsT NativeTableType; - typedef BitcastOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct BroadcastToOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef BroadcastToOptionsT NativeTableType; + typedef BroadcastToOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - BitcastOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(BitcastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + BroadcastToOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BroadcastToOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct BitcastOptionsBuilder { - typedef BitcastOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit BitcastOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct BroadcastToOptionsBuilder { + typedef BroadcastToOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + 
::flatbuffers::uoffset_t start_; + explicit BroadcastToOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateBitcastOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - BitcastOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateBroadcastToOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + BroadcastToOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateBitcastOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateBroadcastToOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct BitwiseXorOptionsT : public flatbuffers::NativeTable { - typedef BitwiseXorOptions TableType; +struct Rfft2dOptionsT : public ::flatbuffers::NativeTable { + typedef Rfft2dOptions TableType; }; -struct BitwiseXorOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef BitwiseXorOptionsT NativeTableType; - typedef BitwiseXorOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct Rfft2dOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef Rfft2dOptionsT NativeTableType; + typedef Rfft2dOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && verifier.EndTable(); } - BitwiseXorOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(BitwiseXorOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + Rfft2dOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(Rfft2dOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct BitwiseXorOptionsBuilder { - typedef BitwiseXorOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit BitwiseXorOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct Rfft2dOptionsBuilder { + typedef Rfft2dOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit Rfft2dOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateBitwiseXorOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - BitwiseXorOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateRfft2dOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + Rfft2dOptionsBuilder builder_(_fbb); return builder_.Finish(); } -flatbuffers::Offset CreateBitwiseXorOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT *_o, const 
flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateRfft2dOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct RightShiftOptionsT : public flatbuffers::NativeTable { - typedef RightShiftOptions TableType; +struct HashtableOptionsT : public ::flatbuffers::NativeTable { + typedef HashtableOptions TableType; + int32_t table_id = 0; + tflite::TensorType key_dtype = tflite::TensorType_FLOAT32; + tflite::TensorType value_dtype = tflite::TensorType_FLOAT32; }; -struct RightShiftOptions FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef RightShiftOptionsT NativeTableType; - typedef RightShiftOptionsBuilder Builder; - bool Verify(flatbuffers::Verifier &verifier) const { +struct HashtableOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef HashtableOptionsT NativeTableType; + typedef HashtableOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TABLE_ID = 4, + VT_KEY_DTYPE = 6, + VT_VALUE_DTYPE = 8 + }; + int32_t table_id() const { + return GetField(VT_TABLE_ID, 0); + } + tflite::TensorType key_dtype() const { + return static_cast(GetField(VT_KEY_DTYPE, 0)); + } + tflite::TensorType value_dtype() const { + return static_cast(GetField(VT_VALUE_DTYPE, 0)); + } + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && + VerifyField(verifier, VT_TABLE_ID, 4) && + VerifyField(verifier, VT_KEY_DTYPE, 1) && + VerifyField(verifier, VT_VALUE_DTYPE, 1) && verifier.EndTable(); } - RightShiftOptionsT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(RightShiftOptionsT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + HashtableOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct RightShiftOptionsBuilder { - typedef RightShiftOptions Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - explicit RightShiftOptionsBuilder(flatbuffers::FlatBufferBuilder &_fbb) +struct HashtableOptionsBuilder { + typedef HashtableOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_table_id(int32_t table_id) { + fbb_.AddElement(HashtableOptions::VT_TABLE_ID, table_id, 0); + } + void add_key_dtype(tflite::TensorType key_dtype) { + fbb_.AddElement(HashtableOptions::VT_KEY_DTYPE, static_cast(key_dtype), 0); + } + void add_value_dtype(tflite::TensorType value_dtype) { + fbb_.AddElement(HashtableOptions::VT_VALUE_DTYPE, static_cast(value_dtype), 0); + } + explicit HashtableOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateRightShiftOptions( - flatbuffers::FlatBufferBuilder &_fbb) { - 
RightShiftOptionsBuilder builder_(_fbb); +inline ::flatbuffers::Offset CreateHashtableOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + int32_t table_id = 0, + tflite::TensorType key_dtype = tflite::TensorType_FLOAT32, + tflite::TensorType value_dtype = tflite::TensorType_FLOAT32) { + HashtableOptionsBuilder builder_(_fbb); + builder_.add_table_id(table_id); + builder_.add_value_dtype(value_dtype); + builder_.add_key_dtype(key_dtype); return builder_.Finish(); } -flatbuffers::Offset CreateRightShiftOptions(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +::flatbuffers::Offset CreateHashtableOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -struct OperatorCodeT : public flatbuffers::NativeTable { - typedef OperatorCode TableType; - int8_t deprecated_builtin_code = 0; - std::string custom_code{}; - int32_t version = 1; - tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD; +struct HashtableFindOptionsT : public ::flatbuffers::NativeTable { + typedef HashtableFindOptions TableType; }; -struct OperatorCode FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef OperatorCodeT NativeTableType; - typedef OperatorCodeBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_DEPRECATED_BUILTIN_CODE = 4, - VT_CUSTOM_CODE = 6, - VT_VERSION = 8, - VT_BUILTIN_CODE = 10 - }; - int8_t deprecated_builtin_code() const { - return GetField(VT_DEPRECATED_BUILTIN_CODE, 0); - } - const flatbuffers::String *custom_code() const { - return GetPointer(VT_CUSTOM_CODE); +struct HashtableFindOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef HashtableFindOptionsT NativeTableType; + typedef HashtableFindOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - int32_t version() const { - return GetField(VT_VERSION, 1); + HashtableFindOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableFindOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct HashtableFindOptionsBuilder { + typedef HashtableFindOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit HashtableFindOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - tflite::BuiltinOperator builtin_code() const { - return static_cast(GetField(VT_BUILTIN_CODE, 0)); + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - bool Verify(flatbuffers::Verifier &verifier) const { +}; + +inline ::flatbuffers::Offset CreateHashtableFindOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + HashtableFindOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateHashtableFindOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct HashtableImportOptionsT : public ::flatbuffers::NativeTable { + typedef HashtableImportOptions TableType; +}; + +struct HashtableImportOptions 
FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef HashtableImportOptionsT NativeTableType; + typedef HashtableImportOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { return VerifyTableStart(verifier) && - VerifyField(verifier, VT_DEPRECATED_BUILTIN_CODE, 1) && - VerifyOffset(verifier, VT_CUSTOM_CODE) && - verifier.VerifyString(custom_code()) && - VerifyField(verifier, VT_VERSION, 4) && - VerifyField(verifier, VT_BUILTIN_CODE, 4) && verifier.EndTable(); } - OperatorCodeT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(OperatorCodeT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); + HashtableImportOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableImportOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); }; -struct OperatorCodeBuilder { - typedef OperatorCode Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_deprecated_builtin_code(int8_t deprecated_builtin_code) { - fbb_.AddElement(OperatorCode::VT_DEPRECATED_BUILTIN_CODE, deprecated_builtin_code, 0); - } - void add_custom_code(flatbuffers::Offset custom_code) { - fbb_.AddOffset(OperatorCode::VT_CUSTOM_CODE, custom_code); +struct HashtableImportOptionsBuilder { + typedef HashtableImportOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit HashtableImportOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - void add_version(int32_t version) { - fbb_.AddElement(OperatorCode::VT_VERSION, version, 1); + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - void add_builtin_code(tflite::BuiltinOperator builtin_code) { - fbb_.AddElement(OperatorCode::VT_BUILTIN_CODE, static_cast(builtin_code), 0); +}; + +inline ::flatbuffers::Offset CreateHashtableImportOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + HashtableImportOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateHashtableImportOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct HashtableSizeOptionsT : public ::flatbuffers::NativeTable { + typedef HashtableSizeOptions TableType; +}; + +struct HashtableSizeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef HashtableSizeOptionsT NativeTableType; + typedef HashtableSizeOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - explicit OperatorCodeBuilder(flatbuffers::FlatBufferBuilder &_fbb) + HashtableSizeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(HashtableSizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT* _o, const 
::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct HashtableSizeOptionsBuilder { + typedef HashtableSizeOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit HashtableSizeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) : fbb_(_fbb) { start_ = fbb_.StartTable(); } - flatbuffers::Offset Finish() { + ::flatbuffers::Offset Finish() { const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); + auto o = ::flatbuffers::Offset(end); return o; } }; -inline flatbuffers::Offset CreateOperatorCode( - flatbuffers::FlatBufferBuilder &_fbb, - int8_t deprecated_builtin_code = 0, - flatbuffers::Offset custom_code = 0, - int32_t version = 1, - tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD) { - OperatorCodeBuilder builder_(_fbb); - builder_.add_builtin_code(builtin_code); - builder_.add_version(version); - builder_.add_custom_code(custom_code); - builder_.add_deprecated_builtin_code(deprecated_builtin_code); +inline ::flatbuffers::Offset CreateHashtableSizeOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + HashtableSizeOptionsBuilder builder_(_fbb); return builder_.Finish(); } -inline flatbuffers::Offset CreateOperatorCodeDirect( - flatbuffers::FlatBufferBuilder &_fbb, - int8_t deprecated_builtin_code = 0, - const char *custom_code = nullptr, - int32_t version = 1, - tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD) { - auto custom_code__ = custom_code ? _fbb.CreateString(custom_code) : 0; - return tflite::CreateOperatorCode( - _fbb, - deprecated_builtin_code, - custom_code__, - version, - builtin_code); -} +::flatbuffers::Offset CreateHashtableSizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -flatbuffers::Offset CreateOperatorCode(flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); - -struct OperatorT : public flatbuffers::NativeTable { - typedef Operator TableType; - uint32_t opcode_index = 0; - std::vector inputs{}; - std::vector outputs{}; - tflite::BuiltinOptionsUnion builtin_options{}; - std::vector custom_options{}; - tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS; - std::vector mutating_variable_inputs{}; - std::vector intermediates{}; - uint64_t large_custom_options_offset = 0; - uint64_t large_custom_options_size = 0; +struct VarHandleOptionsT : public ::flatbuffers::NativeTable { + typedef VarHandleOptions TableType; + std::string container{}; + std::string shared_name{}; }; -struct Operator FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef OperatorT NativeTableType; - typedef OperatorBuilder Builder; +struct VarHandleOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef VarHandleOptionsT NativeTableType; + typedef VarHandleOptionsBuilder Builder; enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_OPCODE_INDEX = 4, - VT_INPUTS = 6, - VT_OUTPUTS = 8, - VT_BUILTIN_OPTIONS_TYPE = 10, - VT_BUILTIN_OPTIONS = 12, - VT_CUSTOM_OPTIONS = 14, - VT_CUSTOM_OPTIONS_FORMAT = 16, - VT_MUTATING_VARIABLE_INPUTS = 18, - VT_INTERMEDIATES = 20, - VT_LARGE_CUSTOM_OPTIONS_OFFSET = 22, - VT_LARGE_CUSTOM_OPTIONS_SIZE = 24 + VT_CONTAINER = 4, + VT_SHARED_NAME = 6 }; - uint32_t opcode_index() const { - return GetField(VT_OPCODE_INDEX, 0); - } - const flatbuffers::Vector *inputs() const { - return GetPointer 
*>(VT_INPUTS); - } - const flatbuffers::Vector *outputs() const { - return GetPointer *>(VT_OUTPUTS); - } - tflite::BuiltinOptions builtin_options_type() const { - return static_cast(GetField(VT_BUILTIN_OPTIONS_TYPE, 0)); - } - const void *builtin_options() const { - return GetPointer(VT_BUILTIN_OPTIONS); - } - template const T *builtin_options_as() const; - const tflite::Conv2DOptions *builtin_options_as_Conv2DOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_Conv2DOptions ? static_cast(builtin_options()) : nullptr; - } - const tflite::DepthwiseConv2DOptions *builtin_options_as_DepthwiseConv2DOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_DepthwiseConv2DOptions ? static_cast(builtin_options()) : nullptr; + const ::flatbuffers::String *container() const { + return GetPointer(VT_CONTAINER); } - const tflite::ConcatEmbeddingsOptions *builtin_options_as_ConcatEmbeddingsOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ConcatEmbeddingsOptions ? static_cast(builtin_options()) : nullptr; - } - const tflite::LSHProjectionOptions *builtin_options_as_LSHProjectionOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LSHProjectionOptions ? static_cast(builtin_options()) : nullptr; + const ::flatbuffers::String *shared_name() const { + return GetPointer(VT_SHARED_NAME); } - const tflite::Pool2DOptions *builtin_options_as_Pool2DOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_Pool2DOptions ? static_cast(builtin_options()) : nullptr; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_CONTAINER) && + verifier.VerifyString(container()) && + VerifyOffset(verifier, VT_SHARED_NAME) && + verifier.VerifyString(shared_name()) && + verifier.EndTable(); } - const tflite::SVDFOptions *builtin_options_as_SVDFOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SVDFOptions ? static_cast(builtin_options()) : nullptr; + VarHandleOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(VarHandleOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct VarHandleOptionsBuilder { + typedef VarHandleOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_container(::flatbuffers::Offset<::flatbuffers::String> container) { + fbb_.AddOffset(VarHandleOptions::VT_CONTAINER, container); } - const tflite::RNNOptions *builtin_options_as_RNNOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_RNNOptions ? static_cast(builtin_options()) : nullptr; + void add_shared_name(::flatbuffers::Offset<::flatbuffers::String> shared_name) { + fbb_.AddOffset(VarHandleOptions::VT_SHARED_NAME, shared_name); } - const tflite::FullyConnectedOptions *builtin_options_as_FullyConnectedOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_FullyConnectedOptions ? static_cast(builtin_options()) : nullptr; + explicit VarHandleOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::SoftmaxOptions *builtin_options_as_SoftmaxOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SoftmaxOptions ? 
static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::ConcatenationOptions *builtin_options_as_ConcatenationOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ConcatenationOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateVarHandleOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> container = 0, + ::flatbuffers::Offset<::flatbuffers::String> shared_name = 0) { + VarHandleOptionsBuilder builder_(_fbb); + builder_.add_shared_name(shared_name); + builder_.add_container(container); + return builder_.Finish(); +} + +inline ::flatbuffers::Offset CreateVarHandleOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const char *container = nullptr, + const char *shared_name = nullptr) { + auto container__ = container ? _fbb.CreateString(container) : 0; + auto shared_name__ = shared_name ? _fbb.CreateString(shared_name) : 0; + return tflite::CreateVarHandleOptions( + _fbb, + container__, + shared_name__); +} + +::flatbuffers::Offset CreateVarHandleOptions(::flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReadVariableOptionsT : public ::flatbuffers::NativeTable { + typedef ReadVariableOptions TableType; +}; + +struct ReadVariableOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ReadVariableOptionsT NativeTableType; + typedef ReadVariableOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::AddOptions *builtin_options_as_AddOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_AddOptions ? static_cast(builtin_options()) : nullptr; + ReadVariableOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReadVariableOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ReadVariableOptionsBuilder { + typedef ReadVariableOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit ReadVariableOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::L2NormOptions *builtin_options_as_L2NormOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_L2NormOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::LocalResponseNormalizationOptions *builtin_options_as_LocalResponseNormalizationOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LocalResponseNormalizationOptions ? 
+
+struct AssignVariableOptionsT : public ::flatbuffers::NativeTable {
+  typedef AssignVariableOptions TableType;
+};
+
+struct AssignVariableOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef AssignVariableOptionsT NativeTableType;
+  typedef AssignVariableOptionsBuilder Builder;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           verifier.EndTable();
   }
-  const tflite::LSTMOptions *builtin_options_as_LSTMOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_LSTMOptions ? static_cast<const tflite::LSTMOptions *>(builtin_options()) : nullptr;
+  AssignVariableOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(AssignVariableOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<AssignVariableOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct AssignVariableOptionsBuilder {
+  typedef AssignVariableOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  explicit AssignVariableOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
   }
-  const tflite::ResizeBilinearOptions *builtin_options_as_ResizeBilinearOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_ResizeBilinearOptions ? static_cast<const tflite::ResizeBilinearOptions *>(builtin_options()) : nullptr;
+  ::flatbuffers::Offset<AssignVariableOptions> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<AssignVariableOptions>(end);
+    return o;
   }
-  const tflite::CallOptions *builtin_options_as_CallOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_CallOptions ? static_cast<const tflite::CallOptions *>(builtin_options()) : nullptr;
+};
+
+inline ::flatbuffers::Offset<AssignVariableOptions> CreateAssignVariableOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb) {
+  AssignVariableOptionsBuilder builder_(_fbb);
+  return builder_.Finish();
+}
+
+::flatbuffers::Offset<AssignVariableOptions> CreateAssignVariableOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
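// [Editor's note] Every generated table pairs with a NativeTable ("object
// API") type -- here AssignVariableOptionsT -- convertible via Pack() and
// UnPack(). Illustrative sketch only; `fbb` is hypothetical:
//
//   AssignVariableOptionsT native;  // mutable object-API form
//   auto offset = tflite::AssignVariableOptions::Pack(fbb, &native);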
+
+struct RandomOptionsT : public ::flatbuffers::NativeTable {
+  typedef RandomOptions TableType;
+  int64_t seed = 0;
+  int64_t seed2 = 0;
+};
+
+struct RandomOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef RandomOptionsT NativeTableType;
+  typedef RandomOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_SEED = 4,
+    VT_SEED2 = 6
+  };
+  int64_t seed() const {
+    return GetField<int64_t>(VT_SEED, 0);
   }
-  const tflite::ReshapeOptions *builtin_options_as_ReshapeOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_ReshapeOptions ? static_cast<const tflite::ReshapeOptions *>(builtin_options()) : nullptr;
+  int64_t seed2() const {
+    return GetField<int64_t>(VT_SEED2, 0);
   }
-  const tflite::SkipGramOptions *builtin_options_as_SkipGramOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SkipGramOptions ? static_cast<const tflite::SkipGramOptions *>(builtin_options()) : nullptr;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyField<int64_t>(verifier, VT_SEED, 8) &&
+           VerifyField<int64_t>(verifier, VT_SEED2, 8) &&
+           verifier.EndTable();
   }
-  const tflite::SpaceToDepthOptions *builtin_options_as_SpaceToDepthOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SpaceToDepthOptions ? static_cast<const tflite::SpaceToDepthOptions *>(builtin_options()) : nullptr;
+  RandomOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(RandomOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<RandomOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct RandomOptionsBuilder {
+  typedef RandomOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_seed(int64_t seed) {
+    fbb_.AddElement<int64_t>(RandomOptions::VT_SEED, seed, 0);
   }
-  const tflite::EmbeddingLookupSparseOptions *builtin_options_as_EmbeddingLookupSparseOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_EmbeddingLookupSparseOptions ? static_cast<const tflite::EmbeddingLookupSparseOptions *>(builtin_options()) : nullptr;
+  void add_seed2(int64_t seed2) {
+    fbb_.AddElement<int64_t>(RandomOptions::VT_SEED2, seed2, 0);
   }
-  const tflite::MulOptions *builtin_options_as_MulOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_MulOptions ? static_cast<const tflite::MulOptions *>(builtin_options()) : nullptr;
+  explicit RandomOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
   }
-  const tflite::PadOptions *builtin_options_as_PadOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_PadOptions ? static_cast<const tflite::PadOptions *>(builtin_options()) : nullptr;
+  ::flatbuffers::Offset<RandomOptions> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<RandomOptions>(end);
+    return o;
   }
-  const tflite::GatherOptions *builtin_options_as_GatherOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_GatherOptions ? static_cast<const tflite::GatherOptions *>(builtin_options()) : nullptr;
+};
+
+inline ::flatbuffers::Offset<RandomOptions> CreateRandomOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    int64_t seed = 0,
+    int64_t seed2 = 0) {
+  RandomOptionsBuilder builder_(_fbb);
+  builder_.add_seed2(seed2);
+  builder_.add_seed(seed);
+  return builder_.Finish();
+}
+
+::flatbuffers::Offset<RandomOptions> CreateRandomOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
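// [Editor's note] Scalar fields are written with typed AddElement calls; note
// that CreateRandomOptions() adds seed2 before seed, mirroring the generated
// ordering above. Illustrative only, `fbb` hypothetical:
//
//   auto opts = tflite::CreateRandomOptions(fbb, /*seed=*/42, /*seed2=*/7);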
+
+struct BucketizeOptionsT : public ::flatbuffers::NativeTable {
+  typedef BucketizeOptions TableType;
+  std::vector<float> boundaries{};
+};
+
+struct BucketizeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef BucketizeOptionsT NativeTableType;
+  typedef BucketizeOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_BOUNDARIES = 4
+  };
+  const ::flatbuffers::Vector<float> *boundaries() const {
+    return GetPointer<const ::flatbuffers::Vector<float> *>(VT_BOUNDARIES);
   }
-  const tflite::BatchToSpaceNDOptions *builtin_options_as_BatchToSpaceNDOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_BatchToSpaceNDOptions ? static_cast<const tflite::BatchToSpaceNDOptions *>(builtin_options()) : nullptr;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyOffset(verifier, VT_BOUNDARIES) &&
+           verifier.VerifyVector(boundaries()) &&
+           verifier.EndTable();
   }
-  const tflite::SpaceToBatchNDOptions *builtin_options_as_SpaceToBatchNDOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SpaceToBatchNDOptions ? static_cast<const tflite::SpaceToBatchNDOptions *>(builtin_options()) : nullptr;
+  BucketizeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(BucketizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<BucketizeOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct BucketizeOptionsBuilder {
+  typedef BucketizeOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_boundaries(::flatbuffers::Offset<::flatbuffers::Vector<float>> boundaries) {
+    fbb_.AddOffset(BucketizeOptions::VT_BOUNDARIES, boundaries);
   }
-  const tflite::TransposeOptions *builtin_options_as_TransposeOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_TransposeOptions ? static_cast<const tflite::TransposeOptions *>(builtin_options()) : nullptr;
+  explicit BucketizeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
   }
-  const tflite::ReducerOptions *builtin_options_as_ReducerOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_ReducerOptions ? static_cast<const tflite::ReducerOptions *>(builtin_options()) : nullptr;
+  ::flatbuffers::Offset<BucketizeOptions> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<BucketizeOptions>(end);
+    return o;
   }
-  const tflite::SubOptions *builtin_options_as_SubOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SubOptions ? static_cast<const tflite::SubOptions *>(builtin_options()) : nullptr;
-  }
-  const tflite::DivOptions *builtin_options_as_DivOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_DivOptions ? static_cast<const tflite::DivOptions *>(builtin_options()) : nullptr;
-  }
-  const tflite::SqueezeOptions *builtin_options_as_SqueezeOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SqueezeOptions ? static_cast<const tflite::SqueezeOptions *>(builtin_options()) : nullptr;
-  }
-  const tflite::SequenceRNNOptions *builtin_options_as_SequenceRNNOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SequenceRNNOptions ? static_cast<const tflite::SequenceRNNOptions *>(builtin_options()) : nullptr;
+};
+
+inline ::flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptions(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    ::flatbuffers::Offset<::flatbuffers::Vector<float>> boundaries = 0) {
+  BucketizeOptionsBuilder builder_(_fbb);
+  builder_.add_boundaries(boundaries);
+  return builder_.Finish();
+}
+
+inline ::flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptionsDirect(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    const std::vector<float> *boundaries = nullptr) {
+  auto boundaries__ = boundaries ? _fbb.CreateVector<float>(*boundaries) : 0;
+  return tflite::CreateBucketizeOptions(
+      _fbb,
+      boundaries__);
+}
+
+::flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
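// [Editor's note] The *Direct helper wraps vector creation for callers that
// hold a std::vector. Illustrative sketch only, `fbb` hypothetical:
//
//   std::vector<float> bounds{0.f, 1.f, 10.f};
//   auto opts = tflite::CreateBucketizeOptionsDirect(fbb, &bounds);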
+
+struct GeluOptionsT : public ::flatbuffers::NativeTable {
+  typedef GeluOptions TableType;
+  bool approximate = false;
+};
+
+struct GeluOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table {
+  typedef GeluOptionsT NativeTableType;
+  typedef GeluOptionsBuilder Builder;
+  enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE {
+    VT_APPROXIMATE = 4
+  };
+  bool approximate() const {
+    return GetField<uint8_t>(VT_APPROXIMATE, 0) != 0;
   }
-  const tflite::StridedSliceOptions *builtin_options_as_StridedSliceOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_StridedSliceOptions ? static_cast<const tflite::StridedSliceOptions *>(builtin_options()) : nullptr;
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyField<uint8_t>(verifier, VT_APPROXIMATE, 1) &&
+           verifier.EndTable();
   }
-  const tflite::ExpOptions *builtin_options_as_ExpOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_ExpOptions ? static_cast<const tflite::ExpOptions *>(builtin_options()) : nullptr;
+  GeluOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(GeluOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<GeluOptions> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+struct GeluOptionsBuilder {
+  typedef GeluOptions Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_approximate(bool approximate) {
+    fbb_.AddElement<uint8_t>(GeluOptions::VT_APPROXIMATE, static_cast<uint8_t>(approximate), 0);
   }
-  const tflite::TopKV2Options *builtin_options_as_TopKV2Options() const {
-    return builtin_options_type() == tflite::BuiltinOptions_TopKV2Options ? static_cast<const tflite::TopKV2Options *>(builtin_options()) : nullptr;
+  explicit GeluOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
   }
-  const tflite::SplitOptions *builtin_options_as_SplitOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SplitOptions ? static_cast<const tflite::SplitOptions *>(builtin_options()) : nullptr;
+  ::flatbuffers::Offset<GeluOptions> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<GeluOptions>(end);
+    return o;
   }
-  const tflite::LogSoftmaxOptions *builtin_options_as_LogSoftmaxOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_LogSoftmaxOptions ?
static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateGeluOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + bool approximate = false) { + GeluOptionsBuilder builder_(_fbb); + builder_.add_approximate(approximate); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateGeluOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DynamicUpdateSliceOptionsT : public ::flatbuffers::NativeTable { + typedef DynamicUpdateSliceOptions TableType; +}; + +struct DynamicUpdateSliceOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef DynamicUpdateSliceOptionsT NativeTableType; + typedef DynamicUpdateSliceOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::CastOptions *builtin_options_as_CastOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_CastOptions ? static_cast(builtin_options()) : nullptr; + DynamicUpdateSliceOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DynamicUpdateSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DynamicUpdateSliceOptionsBuilder { + typedef DynamicUpdateSliceOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit DynamicUpdateSliceOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::DequantizeOptions *builtin_options_as_DequantizeOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_DequantizeOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::MaximumMinimumOptions *builtin_options_as_MaximumMinimumOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_MaximumMinimumOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateDynamicUpdateSliceOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + DynamicUpdateSliceOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateDynamicUpdateSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentProdOptionsT : public ::flatbuffers::NativeTable { + typedef UnsortedSegmentProdOptions TableType; +}; + +struct UnsortedSegmentProdOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef UnsortedSegmentProdOptionsT NativeTableType; + typedef UnsortedSegmentProdOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::ArgMaxOptions *builtin_options_as_ArgMaxOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ArgMaxOptions ? 
static_cast(builtin_options()) : nullptr; + UnsortedSegmentProdOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnsortedSegmentProdOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentProdOptionsBuilder { + typedef UnsortedSegmentProdOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit UnsortedSegmentProdOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::LessOptions *builtin_options_as_LessOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LessOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::NegOptions *builtin_options_as_NegOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_NegOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateUnsortedSegmentProdOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentProdOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateUnsortedSegmentProdOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentMaxOptionsT : public ::flatbuffers::NativeTable { + typedef UnsortedSegmentMaxOptions TableType; +}; + +struct UnsortedSegmentMaxOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef UnsortedSegmentMaxOptionsT NativeTableType; + typedef UnsortedSegmentMaxOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::PadV2Options *builtin_options_as_PadV2Options() const { - return builtin_options_type() == tflite::BuiltinOptions_PadV2Options ? static_cast(builtin_options()) : nullptr; + UnsortedSegmentMaxOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnsortedSegmentMaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentMaxOptionsBuilder { + typedef UnsortedSegmentMaxOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit UnsortedSegmentMaxOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::GreaterOptions *builtin_options_as_GreaterOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_GreaterOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::GreaterEqualOptions *builtin_options_as_GreaterEqualOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_GreaterEqualOptions ? 
static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateUnsortedSegmentMaxOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentMaxOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateUnsortedSegmentMaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentSumOptionsT : public ::flatbuffers::NativeTable { + typedef UnsortedSegmentSumOptions TableType; +}; + +struct UnsortedSegmentSumOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef UnsortedSegmentSumOptionsT NativeTableType; + typedef UnsortedSegmentSumOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::LessEqualOptions *builtin_options_as_LessEqualOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LessEqualOptions ? static_cast(builtin_options()) : nullptr; + UnsortedSegmentSumOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnsortedSegmentSumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentSumOptionsBuilder { + typedef UnsortedSegmentSumOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit UnsortedSegmentSumOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::SelectOptions *builtin_options_as_SelectOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SelectOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::SliceOptions *builtin_options_as_SliceOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SliceOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateUnsortedSegmentSumOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentSumOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateUnsortedSegmentSumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ATan2OptionsT : public ::flatbuffers::NativeTable { + typedef ATan2Options TableType; +}; + +struct ATan2Options FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ATan2OptionsT NativeTableType; + typedef ATan2OptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::TransposeConvOptions *builtin_options_as_TransposeConvOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_TransposeConvOptions ? 
static_cast(builtin_options()) : nullptr; + ATan2OptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ATan2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ATan2OptionsBuilder { + typedef ATan2Options Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit ATan2OptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::SparseToDenseOptions *builtin_options_as_SparseToDenseOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SparseToDenseOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::TileOptions *builtin_options_as_TileOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_TileOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateATan2Options( + ::flatbuffers::FlatBufferBuilder &_fbb) { + ATan2OptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateATan2Options(::flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct UnsortedSegmentMinOptionsT : public ::flatbuffers::NativeTable { + typedef UnsortedSegmentMinOptions TableType; +}; + +struct UnsortedSegmentMinOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef UnsortedSegmentMinOptionsT NativeTableType; + typedef UnsortedSegmentMinOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::ExpandDimsOptions *builtin_options_as_ExpandDimsOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ExpandDimsOptions ? static_cast(builtin_options()) : nullptr; + UnsortedSegmentMinOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(UnsortedSegmentMinOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct UnsortedSegmentMinOptionsBuilder { + typedef UnsortedSegmentMinOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit UnsortedSegmentMinOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::EqualOptions *builtin_options_as_EqualOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_EqualOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::NotEqualOptions *builtin_options_as_NotEqualOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_NotEqualOptions ? 
static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateUnsortedSegmentMinOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + UnsortedSegmentMinOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateUnsortedSegmentMinOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SignOptionsT : public ::flatbuffers::NativeTable { + typedef SignOptions TableType; +}; + +struct SignOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SignOptionsT NativeTableType; + typedef SignOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::ShapeOptions *builtin_options_as_ShapeOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ShapeOptions ? static_cast(builtin_options()) : nullptr; + SignOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SignOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SignOptionsBuilder { + typedef SignOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit SignOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::PowOptions *builtin_options_as_PowOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_PowOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::ArgMinOptions *builtin_options_as_ArgMinOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ArgMinOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateSignOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + SignOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateSignOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BitcastOptionsT : public ::flatbuffers::NativeTable { + typedef BitcastOptions TableType; +}; + +struct BitcastOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef BitcastOptionsT NativeTableType; + typedef BitcastOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::FakeQuantOptions *builtin_options_as_FakeQuantOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_FakeQuantOptions ? 
static_cast(builtin_options()) : nullptr; + BitcastOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BitcastOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BitcastOptionsBuilder { + typedef BitcastOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit BitcastOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::PackOptions *builtin_options_as_PackOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_PackOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::LogicalOrOptions *builtin_options_as_LogicalOrOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LogicalOrOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateBitcastOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + BitcastOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateBitcastOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct BitwiseXorOptionsT : public ::flatbuffers::NativeTable { + typedef BitwiseXorOptions TableType; +}; + +struct BitwiseXorOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef BitwiseXorOptionsT NativeTableType; + typedef BitwiseXorOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::OneHotOptions *builtin_options_as_OneHotOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_OneHotOptions ? static_cast(builtin_options()) : nullptr; + BitwiseXorOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BitwiseXorOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct BitwiseXorOptionsBuilder { + typedef BitwiseXorOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit BitwiseXorOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::LogicalAndOptions *builtin_options_as_LogicalAndOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LogicalAndOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::LogicalNotOptions *builtin_options_as_LogicalNotOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LogicalNotOptions ? 
static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateBitwiseXorOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + BitwiseXorOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateBitwiseXorOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct RightShiftOptionsT : public ::flatbuffers::NativeTable { + typedef RightShiftOptions TableType; +}; + +struct RightShiftOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef RightShiftOptionsT NativeTableType; + typedef RightShiftOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::UnpackOptions *builtin_options_as_UnpackOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_UnpackOptions ? static_cast(builtin_options()) : nullptr; + RightShiftOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(RightShiftOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct RightShiftOptionsBuilder { + typedef RightShiftOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit RightShiftOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::FloorDivOptions *builtin_options_as_FloorDivOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_FloorDivOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::SquareOptions *builtin_options_as_SquareOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SquareOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateRightShiftOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + RightShiftOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateRightShiftOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct DilateOptionsT : public ::flatbuffers::NativeTable { + typedef DilateOptions TableType; +}; + +struct DilateOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef DilateOptionsT NativeTableType; + typedef DilateOptionsBuilder Builder; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + verifier.EndTable(); } - const tflite::ZerosLikeOptions *builtin_options_as_ZerosLikeOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ZerosLikeOptions ? 
static_cast(builtin_options()) : nullptr; + DilateOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(DilateOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DilateOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct DilateOptionsBuilder { + typedef DilateOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + explicit DilateOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::FillOptions *builtin_options_as_FillOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_FillOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::BidirectionalSequenceLSTMOptions *builtin_options_as_BidirectionalSequenceLSTMOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_BidirectionalSequenceLSTMOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateDilateOptions( + ::flatbuffers::FlatBufferBuilder &_fbb) { + DilateOptionsBuilder builder_(_fbb); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateDilateOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DilateOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct ReduceWindowOptionsT : public ::flatbuffers::NativeTable { + typedef ReduceWindowOptions TableType; + tflite::ReduceWindowFunction reduce_function = tflite::ReduceWindowFunction_UNSUPPORTED; +}; + +struct ReduceWindowOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ReduceWindowOptionsT NativeTableType; + typedef ReduceWindowOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_REDUCE_FUNCTION = 4 + }; + tflite::ReduceWindowFunction reduce_function() const { + return static_cast(GetField(VT_REDUCE_FUNCTION, 0)); } - const tflite::BidirectionalSequenceRNNOptions *builtin_options_as_BidirectionalSequenceRNNOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_BidirectionalSequenceRNNOptions ? static_cast(builtin_options()) : nullptr; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_REDUCE_FUNCTION, 4) && + verifier.EndTable(); } - const tflite::UnidirectionalSequenceLSTMOptions *builtin_options_as_UnidirectionalSequenceLSTMOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_UnidirectionalSequenceLSTMOptions ? 
static_cast(builtin_options()) : nullptr; + ReduceWindowOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ReduceWindowOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReduceWindowOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct ReduceWindowOptionsBuilder { + typedef ReduceWindowOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_reduce_function(tflite::ReduceWindowFunction reduce_function) { + fbb_.AddElement(ReduceWindowOptions::VT_REDUCE_FUNCTION, static_cast(reduce_function), 0); } - const tflite::FloorModOptions *builtin_options_as_FloorModOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_FloorModOptions ? static_cast(builtin_options()) : nullptr; + explicit ReduceWindowOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::RangeOptions *builtin_options_as_RangeOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_RangeOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::ResizeNearestNeighborOptions *builtin_options_as_ResizeNearestNeighborOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ResizeNearestNeighborOptions ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateReduceWindowOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + tflite::ReduceWindowFunction reduce_function = tflite::ReduceWindowFunction_UNSUPPORTED) { + ReduceWindowOptionsBuilder builder_(_fbb); + builder_.add_reduce_function(reduce_function); + return builder_.Finish(); +} + +::flatbuffers::Offset CreateReduceWindowOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReduceWindowOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct OperatorCodeT : public ::flatbuffers::NativeTable { + typedef OperatorCode TableType; + int8_t deprecated_builtin_code = 0; + std::string custom_code{}; + int32_t version = 1; + tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD; +}; + +struct OperatorCode FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef OperatorCodeT NativeTableType; + typedef OperatorCodeBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_DEPRECATED_BUILTIN_CODE = 4, + VT_CUSTOM_CODE = 6, + VT_VERSION = 8, + VT_BUILTIN_CODE = 10 + }; + int8_t deprecated_builtin_code() const { + return GetField(VT_DEPRECATED_BUILTIN_CODE, 0); } - const tflite::LeakyReluOptions *builtin_options_as_LeakyReluOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_LeakyReluOptions ? static_cast(builtin_options()) : nullptr; + const ::flatbuffers::String *custom_code() const { + return GetPointer(VT_CUSTOM_CODE); } - const tflite::SquaredDifferenceOptions *builtin_options_as_SquaredDifferenceOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SquaredDifferenceOptions ? 
static_cast(builtin_options()) : nullptr; + int32_t version() const { + return GetField(VT_VERSION, 1); } - const tflite::MirrorPadOptions *builtin_options_as_MirrorPadOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_MirrorPadOptions ? static_cast(builtin_options()) : nullptr; + tflite::BuiltinOperator builtin_code() const { + return static_cast(GetField(VT_BUILTIN_CODE, 0)); } - const tflite::AbsOptions *builtin_options_as_AbsOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_AbsOptions ? static_cast(builtin_options()) : nullptr; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_DEPRECATED_BUILTIN_CODE, 1) && + VerifyOffset(verifier, VT_CUSTOM_CODE) && + verifier.VerifyString(custom_code()) && + VerifyField(verifier, VT_VERSION, 4) && + VerifyField(verifier, VT_BUILTIN_CODE, 4) && + verifier.EndTable(); } - const tflite::SplitVOptions *builtin_options_as_SplitVOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SplitVOptions ? static_cast(builtin_options()) : nullptr; + OperatorCodeT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(OperatorCodeT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct OperatorCodeBuilder { + typedef OperatorCode Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_deprecated_builtin_code(int8_t deprecated_builtin_code) { + fbb_.AddElement(OperatorCode::VT_DEPRECATED_BUILTIN_CODE, deprecated_builtin_code, 0); } - const tflite::UniqueOptions *builtin_options_as_UniqueOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_UniqueOptions ? static_cast(builtin_options()) : nullptr; + void add_custom_code(::flatbuffers::Offset<::flatbuffers::String> custom_code) { + fbb_.AddOffset(OperatorCode::VT_CUSTOM_CODE, custom_code); } - const tflite::ReverseV2Options *builtin_options_as_ReverseV2Options() const { - return builtin_options_type() == tflite::BuiltinOptions_ReverseV2Options ? static_cast(builtin_options()) : nullptr; + void add_version(int32_t version) { + fbb_.AddElement(OperatorCode::VT_VERSION, version, 1); } - const tflite::AddNOptions *builtin_options_as_AddNOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_AddNOptions ? static_cast(builtin_options()) : nullptr; + void add_builtin_code(tflite::BuiltinOperator builtin_code) { + fbb_.AddElement(OperatorCode::VT_BUILTIN_CODE, static_cast(builtin_code), 0); } - const tflite::GatherNdOptions *builtin_options_as_GatherNdOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_GatherNdOptions ? static_cast(builtin_options()) : nullptr; + explicit OperatorCodeBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::CosOptions *builtin_options_as_CosOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_CosOptions ? static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::WhereOptions *builtin_options_as_WhereOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_WhereOptions ? 
static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateOperatorCode( + ::flatbuffers::FlatBufferBuilder &_fbb, + int8_t deprecated_builtin_code = 0, + ::flatbuffers::Offset<::flatbuffers::String> custom_code = 0, + int32_t version = 1, + tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD) { + OperatorCodeBuilder builder_(_fbb); + builder_.add_builtin_code(builtin_code); + builder_.add_version(version); + builder_.add_custom_code(custom_code); + builder_.add_deprecated_builtin_code(deprecated_builtin_code); + return builder_.Finish(); +} + +inline ::flatbuffers::Offset CreateOperatorCodeDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + int8_t deprecated_builtin_code = 0, + const char *custom_code = nullptr, + int32_t version = 1, + tflite::BuiltinOperator builtin_code = tflite::BuiltinOperator_ADD) { + auto custom_code__ = custom_code ? _fbb.CreateString(custom_code) : 0; + return tflite::CreateOperatorCode( + _fbb, + deprecated_builtin_code, + custom_code__, + version, + builtin_code); +} + +::flatbuffers::Offset CreateOperatorCode(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct StableHLOCompositeOptionsT : public ::flatbuffers::NativeTable { + typedef StableHLOCompositeOptions TableType; + std::string name{}; + int32_t decomposition_subgraph_index = 0; + std::vector composite_attributes{}; + tflite::CustomOptionsFormat composite_attributes_format = tflite::CustomOptionsFormat_FLEXBUFFERS; + int32_t version = 0; +}; + +struct StableHLOCompositeOptions FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef StableHLOCompositeOptionsT NativeTableType; + typedef StableHLOCompositeOptionsBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NAME = 4, + VT_DECOMPOSITION_SUBGRAPH_INDEX = 6, + VT_COMPOSITE_ATTRIBUTES = 8, + VT_COMPOSITE_ATTRIBUTES_FORMAT = 10, + VT_VERSION = 12 + }; + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); } - const tflite::RankOptions *builtin_options_as_RankOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_RankOptions ? static_cast(builtin_options()) : nullptr; + int32_t decomposition_subgraph_index() const { + return GetField(VT_DECOMPOSITION_SUBGRAPH_INDEX, 0); } - const tflite::ReverseSequenceOptions *builtin_options_as_ReverseSequenceOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ReverseSequenceOptions ? static_cast(builtin_options()) : nullptr; + const ::flatbuffers::Vector *composite_attributes() const { + return GetPointer *>(VT_COMPOSITE_ATTRIBUTES); } - const tflite::MatrixDiagOptions *builtin_options_as_MatrixDiagOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_MatrixDiagOptions ? static_cast(builtin_options()) : nullptr; + tflite::CustomOptionsFormat composite_attributes_format() const { + return static_cast(GetField(VT_COMPOSITE_ATTRIBUTES_FORMAT, 0)); } - const tflite::QuantizeOptions *builtin_options_as_QuantizeOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_QuantizeOptions ? static_cast(builtin_options()) : nullptr; + int32_t version() const { + return GetField(VT_VERSION, 0); } - const tflite::MatrixSetDiagOptions *builtin_options_as_MatrixSetDiagOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_MatrixSetDiagOptions ? 
static_cast(builtin_options()) : nullptr; + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + VerifyField(verifier, VT_DECOMPOSITION_SUBGRAPH_INDEX, 4) && + VerifyOffset(verifier, VT_COMPOSITE_ATTRIBUTES) && + verifier.VerifyVector(composite_attributes()) && + VerifyField(verifier, VT_COMPOSITE_ATTRIBUTES_FORMAT, 1) && + VerifyField(verifier, VT_VERSION, 4) && + verifier.EndTable(); } - const tflite::HardSwishOptions *builtin_options_as_HardSwishOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_HardSwishOptions ? static_cast(builtin_options()) : nullptr; + StableHLOCompositeOptionsT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(StableHLOCompositeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StableHLOCompositeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct StableHLOCompositeOptionsBuilder { + typedef StableHLOCompositeOptions Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { + fbb_.AddOffset(StableHLOCompositeOptions::VT_NAME, name); } - const tflite::IfOptions *builtin_options_as_IfOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_IfOptions ? static_cast(builtin_options()) : nullptr; + void add_decomposition_subgraph_index(int32_t decomposition_subgraph_index) { + fbb_.AddElement(StableHLOCompositeOptions::VT_DECOMPOSITION_SUBGRAPH_INDEX, decomposition_subgraph_index, 0); } - const tflite::WhileOptions *builtin_options_as_WhileOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_WhileOptions ? static_cast(builtin_options()) : nullptr; + void add_composite_attributes(::flatbuffers::Offset<::flatbuffers::Vector> composite_attributes) { + fbb_.AddOffset(StableHLOCompositeOptions::VT_COMPOSITE_ATTRIBUTES, composite_attributes); } - const tflite::DepthToSpaceOptions *builtin_options_as_DepthToSpaceOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_DepthToSpaceOptions ? static_cast(builtin_options()) : nullptr; + void add_composite_attributes_format(tflite::CustomOptionsFormat composite_attributes_format) { + fbb_.AddElement(StableHLOCompositeOptions::VT_COMPOSITE_ATTRIBUTES_FORMAT, static_cast(composite_attributes_format), 0); } - const tflite::NonMaxSuppressionV4Options *builtin_options_as_NonMaxSuppressionV4Options() const { - return builtin_options_type() == tflite::BuiltinOptions_NonMaxSuppressionV4Options ? static_cast(builtin_options()) : nullptr; + void add_version(int32_t version) { + fbb_.AddElement(StableHLOCompositeOptions::VT_VERSION, version, 0); } - const tflite::NonMaxSuppressionV5Options *builtin_options_as_NonMaxSuppressionV5Options() const { - return builtin_options_type() == tflite::BuiltinOptions_NonMaxSuppressionV5Options ? static_cast(builtin_options()) : nullptr; + explicit StableHLOCompositeOptionsBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); } - const tflite::ScatterNdOptions *builtin_options_as_ScatterNdOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_ScatterNdOptions ? 
static_cast(builtin_options()) : nullptr; + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; } - const tflite::SelectV2Options *builtin_options_as_SelectV2Options() const { - return builtin_options_type() == tflite::BuiltinOptions_SelectV2Options ? static_cast(builtin_options()) : nullptr; +}; + +inline ::flatbuffers::Offset CreateStableHLOCompositeOptions( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + int32_t decomposition_subgraph_index = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> composite_attributes = 0, + tflite::CustomOptionsFormat composite_attributes_format = tflite::CustomOptionsFormat_FLEXBUFFERS, + int32_t version = 0) { + StableHLOCompositeOptionsBuilder builder_(_fbb); + builder_.add_version(version); + builder_.add_composite_attributes(composite_attributes); + builder_.add_decomposition_subgraph_index(decomposition_subgraph_index); + builder_.add_name(name); + builder_.add_composite_attributes_format(composite_attributes_format); + return builder_.Finish(); +} + +inline ::flatbuffers::Offset CreateStableHLOCompositeOptionsDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const char *name = nullptr, + int32_t decomposition_subgraph_index = 0, + const std::vector *composite_attributes = nullptr, + tflite::CustomOptionsFormat composite_attributes_format = tflite::CustomOptionsFormat_FLEXBUFFERS, + int32_t version = 0) { + auto name__ = name ? _fbb.CreateString(name) : 0; + auto composite_attributes__ = composite_attributes ? _fbb.CreateVector(*composite_attributes) : 0; + return tflite::CreateStableHLOCompositeOptions( + _fbb, + name__, + decomposition_subgraph_index, + composite_attributes__, + composite_attributes_format, + version); +} + +::flatbuffers::Offset CreateStableHLOCompositeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StableHLOCompositeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct OperatorT : public ::flatbuffers::NativeTable { + typedef Operator TableType; + uint32_t opcode_index = 0; + std::vector inputs{}; + std::vector outputs{}; + tflite::BuiltinOptionsUnion builtin_options{}; + std::vector custom_options{}; + tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS; + std::vector mutating_variable_inputs{}; + std::vector intermediates{}; + uint64_t large_custom_options_offset = 0; + uint64_t large_custom_options_size = 0; + tflite::BuiltinOptions2Union builtin_options_2{}; +}; + +struct Operator FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef OperatorT NativeTableType; + typedef OperatorBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_OPCODE_INDEX = 4, + VT_INPUTS = 6, + VT_OUTPUTS = 8, + VT_BUILTIN_OPTIONS_TYPE = 10, + VT_BUILTIN_OPTIONS = 12, + VT_CUSTOM_OPTIONS = 14, + VT_CUSTOM_OPTIONS_FORMAT = 16, + VT_MUTATING_VARIABLE_INPUTS = 18, + VT_INTERMEDIATES = 20, + VT_LARGE_CUSTOM_OPTIONS_OFFSET = 22, + VT_LARGE_CUSTOM_OPTIONS_SIZE = 24, + VT_BUILTIN_OPTIONS_2_TYPE = 26, + VT_BUILTIN_OPTIONS_2 = 28 + }; + uint32_t opcode_index() const { + return GetField(VT_OPCODE_INDEX, 0); } - const tflite::DensifyOptions *builtin_options_as_DensifyOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_DensifyOptions ? 
static_cast(builtin_options()) : nullptr; + const ::flatbuffers::Vector *inputs() const { + return GetPointer *>(VT_INPUTS); } - const tflite::SegmentSumOptions *builtin_options_as_SegmentSumOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_SegmentSumOptions ? static_cast(builtin_options()) : nullptr; + const ::flatbuffers::Vector *outputs() const { + return GetPointer *>(VT_OUTPUTS); } - const tflite::BatchMatMulOptions *builtin_options_as_BatchMatMulOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_BatchMatMulOptions ? static_cast(builtin_options()) : nullptr; + tflite::BuiltinOptions builtin_options_type() const { + return static_cast(GetField(VT_BUILTIN_OPTIONS_TYPE, 0)); } - const tflite::CumsumOptions *builtin_options_as_CumsumOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_CumsumOptions ? static_cast(builtin_options()) : nullptr; + const void *builtin_options() const { + return GetPointer(VT_BUILTIN_OPTIONS); } - const tflite::CallOnceOptions *builtin_options_as_CallOnceOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_CallOnceOptions ? static_cast(builtin_options()) : nullptr; + template const T *builtin_options_as() const; + const tflite::Conv2DOptions *builtin_options_as_Conv2DOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_Conv2DOptions ? static_cast(builtin_options()) : nullptr; } - const tflite::BroadcastToOptions *builtin_options_as_BroadcastToOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_BroadcastToOptions ? static_cast(builtin_options()) : nullptr; + const tflite::DepthwiseConv2DOptions *builtin_options_as_DepthwiseConv2DOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_DepthwiseConv2DOptions ? static_cast(builtin_options()) : nullptr; } - const tflite::Rfft2dOptions *builtin_options_as_Rfft2dOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_Rfft2dOptions ? static_cast(builtin_options()) : nullptr; + const tflite::ConcatEmbeddingsOptions *builtin_options_as_ConcatEmbeddingsOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_ConcatEmbeddingsOptions ? static_cast(builtin_options()) : nullptr; } - const tflite::Conv3DOptions *builtin_options_as_Conv3DOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_Conv3DOptions ? static_cast(builtin_options()) : nullptr; + const tflite::LSHProjectionOptions *builtin_options_as_LSHProjectionOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_LSHProjectionOptions ? static_cast(builtin_options()) : nullptr; } - const tflite::HashtableOptions *builtin_options_as_HashtableOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_HashtableOptions ? static_cast(builtin_options()) : nullptr; + const tflite::Pool2DOptions *builtin_options_as_Pool2DOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_Pool2DOptions ? static_cast(builtin_options()) : nullptr; } - const tflite::HashtableFindOptions *builtin_options_as_HashtableFindOptions() const { - return builtin_options_type() == tflite::BuiltinOptions_HashtableFindOptions ? static_cast(builtin_options()) : nullptr; + const tflite::SVDFOptions *builtin_options_as_SVDFOptions() const { + return builtin_options_type() == tflite::BuiltinOptions_SVDFOptions ? 
   }
-  const tflite::HashtableImportOptions *builtin_options_as_HashtableImportOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_HashtableImportOptions ? static_cast<const tflite::HashtableImportOptions *>(builtin_options()) : nullptr;
+  const tflite::RNNOptions *builtin_options_as_RNNOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_RNNOptions ? static_cast<const tflite::RNNOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::HashtableSizeOptions *builtin_options_as_HashtableSizeOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_HashtableSizeOptions ? static_cast<const tflite::HashtableSizeOptions *>(builtin_options()) : nullptr;
+  const tflite::FullyConnectedOptions *builtin_options_as_FullyConnectedOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_FullyConnectedOptions ? static_cast<const tflite::FullyConnectedOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::VarHandleOptions *builtin_options_as_VarHandleOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_VarHandleOptions ? static_cast<const tflite::VarHandleOptions *>(builtin_options()) : nullptr;
+  const tflite::SoftmaxOptions *builtin_options_as_SoftmaxOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SoftmaxOptions ? static_cast<const tflite::SoftmaxOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::ReadVariableOptions *builtin_options_as_ReadVariableOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_ReadVariableOptions ? static_cast<const tflite::ReadVariableOptions *>(builtin_options()) : nullptr;
+  const tflite::ConcatenationOptions *builtin_options_as_ConcatenationOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ConcatenationOptions ? static_cast<const tflite::ConcatenationOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::AssignVariableOptions *builtin_options_as_AssignVariableOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_AssignVariableOptions ? static_cast<const tflite::AssignVariableOptions *>(builtin_options()) : nullptr;
+  const tflite::AddOptions *builtin_options_as_AddOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_AddOptions ? static_cast<const tflite::AddOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::RandomOptions *builtin_options_as_RandomOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_RandomOptions ? static_cast<const tflite::RandomOptions *>(builtin_options()) : nullptr;
+  const tflite::L2NormOptions *builtin_options_as_L2NormOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_L2NormOptions ? static_cast<const tflite::L2NormOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::BucketizeOptions *builtin_options_as_BucketizeOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_BucketizeOptions ? static_cast<const tflite::BucketizeOptions *>(builtin_options()) : nullptr;
+  const tflite::LocalResponseNormalizationOptions *builtin_options_as_LocalResponseNormalizationOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LocalResponseNormalizationOptions ? static_cast<const tflite::LocalResponseNormalizationOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::GeluOptions *builtin_options_as_GeluOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_GeluOptions ? static_cast<const tflite::GeluOptions *>(builtin_options()) : nullptr;
+  const tflite::LSTMOptions *builtin_options_as_LSTMOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LSTMOptions ? static_cast<const tflite::LSTMOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::DynamicUpdateSliceOptions *builtin_options_as_DynamicUpdateSliceOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_DynamicUpdateSliceOptions ? static_cast<const tflite::DynamicUpdateSliceOptions *>(builtin_options()) : nullptr;
+  const tflite::ResizeBilinearOptions *builtin_options_as_ResizeBilinearOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ResizeBilinearOptions ? static_cast<const tflite::ResizeBilinearOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::UnsortedSegmentProdOptions *builtin_options_as_UnsortedSegmentProdOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentProdOptions ? static_cast<const tflite::UnsortedSegmentProdOptions *>(builtin_options()) : nullptr;
+  const tflite::CallOptions *builtin_options_as_CallOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_CallOptions ? static_cast<const tflite::CallOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::UnsortedSegmentMaxOptions *builtin_options_as_UnsortedSegmentMaxOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentMaxOptions ? static_cast<const tflite::UnsortedSegmentMaxOptions *>(builtin_options()) : nullptr;
+  const tflite::ReshapeOptions *builtin_options_as_ReshapeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ReshapeOptions ? static_cast<const tflite::ReshapeOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::UnsortedSegmentMinOptions *builtin_options_as_UnsortedSegmentMinOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentMinOptions ? static_cast<const tflite::UnsortedSegmentMinOptions *>(builtin_options()) : nullptr;
+  const tflite::SkipGramOptions *builtin_options_as_SkipGramOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SkipGramOptions ? static_cast<const tflite::SkipGramOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::UnsortedSegmentSumOptions *builtin_options_as_UnsortedSegmentSumOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentSumOptions ? static_cast<const tflite::UnsortedSegmentSumOptions *>(builtin_options()) : nullptr;
+  const tflite::SpaceToDepthOptions *builtin_options_as_SpaceToDepthOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SpaceToDepthOptions ? static_cast<const tflite::SpaceToDepthOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::ATan2Options *builtin_options_as_ATan2Options() const {
-    return builtin_options_type() == tflite::BuiltinOptions_ATan2Options ? static_cast<const tflite::ATan2Options *>(builtin_options()) : nullptr;
+  const tflite::EmbeddingLookupSparseOptions *builtin_options_as_EmbeddingLookupSparseOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_EmbeddingLookupSparseOptions ? static_cast<const tflite::EmbeddingLookupSparseOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::SignOptions *builtin_options_as_SignOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_SignOptions ? static_cast<const tflite::SignOptions *>(builtin_options()) : nullptr;
+  const tflite::MulOptions *builtin_options_as_MulOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_MulOptions ? static_cast<const tflite::MulOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::BitcastOptions *builtin_options_as_BitcastOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_BitcastOptions ? static_cast<const tflite::BitcastOptions *>(builtin_options()) : nullptr;
+  const tflite::PadOptions *builtin_options_as_PadOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_PadOptions ? static_cast<const tflite::PadOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::BitwiseXorOptions *builtin_options_as_BitwiseXorOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_BitwiseXorOptions ? static_cast<const tflite::BitwiseXorOptions *>(builtin_options()) : nullptr;
+  const tflite::GatherOptions *builtin_options_as_GatherOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_GatherOptions ? static_cast<const tflite::GatherOptions *>(builtin_options()) : nullptr;
   }
-  const tflite::RightShiftOptions *builtin_options_as_RightShiftOptions() const {
-    return builtin_options_type() == tflite::BuiltinOptions_RightShiftOptions ? static_cast<const tflite::RightShiftOptions *>(builtin_options()) : nullptr;
+  const tflite::BatchToSpaceNDOptions *builtin_options_as_BatchToSpaceNDOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BatchToSpaceNDOptions ? static_cast<const tflite::BatchToSpaceNDOptions *>(builtin_options()) : nullptr;
   }
-  const flatbuffers::Vector<uint8_t> *custom_options() const {
-    return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_CUSTOM_OPTIONS);
+  const tflite::SpaceToBatchNDOptions *builtin_options_as_SpaceToBatchNDOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SpaceToBatchNDOptions ? static_cast<const tflite::SpaceToBatchNDOptions *>(builtin_options()) : nullptr;
   }
-  tflite::CustomOptionsFormat custom_options_format() const {
-    return static_cast<tflite::CustomOptionsFormat>(GetField<int8_t>(VT_CUSTOM_OPTIONS_FORMAT, 0));
+  const tflite::TransposeOptions *builtin_options_as_TransposeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_TransposeOptions ? static_cast<const tflite::TransposeOptions *>(builtin_options()) : nullptr;
   }
-  const flatbuffers::Vector<uint8_t> *mutating_variable_inputs() const {
-    return GetPointer<const flatbuffers::Vector<uint8_t> *>(VT_MUTATING_VARIABLE_INPUTS);
+  const tflite::ReducerOptions *builtin_options_as_ReducerOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ReducerOptions ? static_cast<const tflite::ReducerOptions *>(builtin_options()) : nullptr;
   }
-  const flatbuffers::Vector<int32_t> *intermediates() const {
-    return GetPointer<const flatbuffers::Vector<int32_t> *>(VT_INTERMEDIATES);
+  const tflite::SubOptions *builtin_options_as_SubOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SubOptions ? static_cast<const tflite::SubOptions *>(builtin_options()) : nullptr;
   }
-  uint64_t large_custom_options_offset() const {
-    return GetField<uint64_t>(VT_LARGE_CUSTOM_OPTIONS_OFFSET, 0);
+  const tflite::DivOptions *builtin_options_as_DivOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_DivOptions ? static_cast<const tflite::DivOptions *>(builtin_options()) : nullptr;
   }
-  uint64_t large_custom_options_size() const {
-    return GetField<uint64_t>(VT_LARGE_CUSTOM_OPTIONS_SIZE, 0);
+  const tflite::SqueezeOptions *builtin_options_as_SqueezeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SqueezeOptions ? static_cast<const tflite::SqueezeOptions *>(builtin_options()) : nullptr;
   }
-  bool Verify(flatbuffers::Verifier &verifier) const {
-    return VerifyTableStart(verifier) &&
-           VerifyField<uint32_t>(verifier, VT_OPCODE_INDEX, 4) &&
-           VerifyOffset(verifier, VT_INPUTS) &&
-           verifier.VerifyVector(inputs()) &&
-           VerifyOffset(verifier, VT_OUTPUTS) &&
-           verifier.VerifyVector(outputs()) &&
-           VerifyField<uint8_t>(verifier, VT_BUILTIN_OPTIONS_TYPE, 1) &&
-           VerifyOffset(verifier, VT_BUILTIN_OPTIONS) &&
-           VerifyBuiltinOptions(verifier, builtin_options(), builtin_options_type()) &&
-           VerifyOffset(verifier, VT_CUSTOM_OPTIONS) &&
-           verifier.VerifyVector(custom_options()) &&
-           VerifyField<int8_t>(verifier, VT_CUSTOM_OPTIONS_FORMAT, 1) &&
-           VerifyOffset(verifier, VT_MUTATING_VARIABLE_INPUTS) &&
-           verifier.VerifyVector(mutating_variable_inputs()) &&
-           VerifyOffset(verifier, VT_INTERMEDIATES) &&
-           verifier.VerifyVector(intermediates()) &&
-           VerifyField<uint64_t>(verifier, VT_LARGE_CUSTOM_OPTIONS_OFFSET, 8) &&
-           VerifyField<uint64_t>(verifier, VT_LARGE_CUSTOM_OPTIONS_SIZE, 8) &&
-           verifier.EndTable();
+  const tflite::SequenceRNNOptions *builtin_options_as_SequenceRNNOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SequenceRNNOptions ? static_cast<const tflite::SequenceRNNOptions *>(builtin_options()) : nullptr;
   }
-  OperatorT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  void UnPackTo(OperatorT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const;
-  static flatbuffers::Offset<Operator> Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr);
-};
-
-template<> inline const tflite::Conv2DOptions *Operator::builtin_options_as<tflite::Conv2DOptions>() const {
-  return builtin_options_as_Conv2DOptions();
-}
-
-template<> inline const tflite::DepthwiseConv2DOptions *Operator::builtin_options_as<tflite::DepthwiseConv2DOptions>() const {
-  return builtin_options_as_DepthwiseConv2DOptions();
-}
-
-template<> inline const tflite::ConcatEmbeddingsOptions *Operator::builtin_options_as<tflite::ConcatEmbeddingsOptions>() const {
-  return builtin_options_as_ConcatEmbeddingsOptions();
-}
-
-template<> inline const tflite::LSHProjectionOptions *Operator::builtin_options_as<tflite::LSHProjectionOptions>() const {
-  return builtin_options_as_LSHProjectionOptions();
-}
-
-template<> inline const tflite::Pool2DOptions *Operator::builtin_options_as<tflite::Pool2DOptions>() const {
-  return builtin_options_as_Pool2DOptions();
-}
-
-template<> inline const tflite::SVDFOptions *Operator::builtin_options_as<tflite::SVDFOptions>() const {
-  return builtin_options_as_SVDFOptions();
-}
-
-template<> inline const tflite::RNNOptions *Operator::builtin_options_as<tflite::RNNOptions>() const {
-  return builtin_options_as_RNNOptions();
-}
-
-template<> inline const tflite::FullyConnectedOptions *Operator::builtin_options_as<tflite::FullyConnectedOptions>() const {
-  return builtin_options_as_FullyConnectedOptions();
-}
-
-template<> inline const tflite::SoftmaxOptions *Operator::builtin_options_as<tflite::SoftmaxOptions>() const {
-  return builtin_options_as_SoftmaxOptions();
-}
-
-template<> inline const tflite::ConcatenationOptions *Operator::builtin_options_as<tflite::ConcatenationOptions>() const {
-  return builtin_options_as_ConcatenationOptions();
-}
-
-template<> inline const tflite::AddOptions *Operator::builtin_options_as<tflite::AddOptions>() const {
+  const tflite::StridedSliceOptions *builtin_options_as_StridedSliceOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_StridedSliceOptions ? static_cast<const tflite::StridedSliceOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ExpOptions *builtin_options_as_ExpOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ExpOptions ? static_cast<const tflite::ExpOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::TopKV2Options *builtin_options_as_TopKV2Options() const {
+    return builtin_options_type() == tflite::BuiltinOptions_TopKV2Options ? static_cast<const tflite::TopKV2Options *>(builtin_options()) : nullptr;
+  }
+  const tflite::SplitOptions *builtin_options_as_SplitOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SplitOptions ? static_cast<const tflite::SplitOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::LogSoftmaxOptions *builtin_options_as_LogSoftmaxOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LogSoftmaxOptions ? static_cast<const tflite::LogSoftmaxOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::CastOptions *builtin_options_as_CastOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_CastOptions ? static_cast<const tflite::CastOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::DequantizeOptions *builtin_options_as_DequantizeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_DequantizeOptions ? static_cast<const tflite::DequantizeOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::MaximumMinimumOptions *builtin_options_as_MaximumMinimumOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_MaximumMinimumOptions ? static_cast<const tflite::MaximumMinimumOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ArgMaxOptions *builtin_options_as_ArgMaxOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ArgMaxOptions ? static_cast<const tflite::ArgMaxOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::LessOptions *builtin_options_as_LessOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LessOptions ? static_cast<const tflite::LessOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::NegOptions *builtin_options_as_NegOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_NegOptions ? static_cast<const tflite::NegOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::PadV2Options *builtin_options_as_PadV2Options() const {
+    return builtin_options_type() == tflite::BuiltinOptions_PadV2Options ? static_cast<const tflite::PadV2Options *>(builtin_options()) : nullptr;
+  }
+  const tflite::GreaterOptions *builtin_options_as_GreaterOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_GreaterOptions ? static_cast<const tflite::GreaterOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::GreaterEqualOptions *builtin_options_as_GreaterEqualOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_GreaterEqualOptions ? static_cast<const tflite::GreaterEqualOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::LessEqualOptions *builtin_options_as_LessEqualOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LessEqualOptions ? static_cast<const tflite::LessEqualOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SelectOptions *builtin_options_as_SelectOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SelectOptions ? static_cast<const tflite::SelectOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SliceOptions *builtin_options_as_SliceOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SliceOptions ? static_cast<const tflite::SliceOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::TransposeConvOptions *builtin_options_as_TransposeConvOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_TransposeConvOptions ? static_cast<const tflite::TransposeConvOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SparseToDenseOptions *builtin_options_as_SparseToDenseOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SparseToDenseOptions ? static_cast<const tflite::SparseToDenseOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::TileOptions *builtin_options_as_TileOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_TileOptions ? static_cast<const tflite::TileOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ExpandDimsOptions *builtin_options_as_ExpandDimsOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ExpandDimsOptions ? static_cast<const tflite::ExpandDimsOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::EqualOptions *builtin_options_as_EqualOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_EqualOptions ? static_cast<const tflite::EqualOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::NotEqualOptions *builtin_options_as_NotEqualOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_NotEqualOptions ? static_cast<const tflite::NotEqualOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ShapeOptions *builtin_options_as_ShapeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ShapeOptions ? static_cast<const tflite::ShapeOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::PowOptions *builtin_options_as_PowOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_PowOptions ? static_cast<const tflite::PowOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ArgMinOptions *builtin_options_as_ArgMinOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ArgMinOptions ? static_cast<const tflite::ArgMinOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::FakeQuantOptions *builtin_options_as_FakeQuantOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_FakeQuantOptions ? static_cast<const tflite::FakeQuantOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::PackOptions *builtin_options_as_PackOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_PackOptions ? static_cast<const tflite::PackOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::LogicalOrOptions *builtin_options_as_LogicalOrOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LogicalOrOptions ? static_cast<const tflite::LogicalOrOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::OneHotOptions *builtin_options_as_OneHotOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_OneHotOptions ? static_cast<const tflite::OneHotOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::LogicalAndOptions *builtin_options_as_LogicalAndOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LogicalAndOptions ? static_cast<const tflite::LogicalAndOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::LogicalNotOptions *builtin_options_as_LogicalNotOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LogicalNotOptions ? static_cast<const tflite::LogicalNotOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::UnpackOptions *builtin_options_as_UnpackOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_UnpackOptions ? static_cast<const tflite::UnpackOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::FloorDivOptions *builtin_options_as_FloorDivOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_FloorDivOptions ? static_cast<const tflite::FloorDivOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SquareOptions *builtin_options_as_SquareOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SquareOptions ? static_cast<const tflite::SquareOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ZerosLikeOptions *builtin_options_as_ZerosLikeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ZerosLikeOptions ? static_cast<const tflite::ZerosLikeOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::FillOptions *builtin_options_as_FillOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_FillOptions ? static_cast<const tflite::FillOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::BidirectionalSequenceLSTMOptions *builtin_options_as_BidirectionalSequenceLSTMOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BidirectionalSequenceLSTMOptions ? static_cast<const tflite::BidirectionalSequenceLSTMOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::BidirectionalSequenceRNNOptions *builtin_options_as_BidirectionalSequenceRNNOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BidirectionalSequenceRNNOptions ? static_cast<const tflite::BidirectionalSequenceRNNOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::UnidirectionalSequenceLSTMOptions *builtin_options_as_UnidirectionalSequenceLSTMOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_UnidirectionalSequenceLSTMOptions ? static_cast<const tflite::UnidirectionalSequenceLSTMOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::FloorModOptions *builtin_options_as_FloorModOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_FloorModOptions ? static_cast<const tflite::FloorModOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::RangeOptions *builtin_options_as_RangeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_RangeOptions ? static_cast<const tflite::RangeOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ResizeNearestNeighborOptions *builtin_options_as_ResizeNearestNeighborOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ResizeNearestNeighborOptions ? static_cast<const tflite::ResizeNearestNeighborOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::LeakyReluOptions *builtin_options_as_LeakyReluOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_LeakyReluOptions ? static_cast<const tflite::LeakyReluOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SquaredDifferenceOptions *builtin_options_as_SquaredDifferenceOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SquaredDifferenceOptions ? static_cast<const tflite::SquaredDifferenceOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::MirrorPadOptions *builtin_options_as_MirrorPadOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_MirrorPadOptions ? static_cast<const tflite::MirrorPadOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::AbsOptions *builtin_options_as_AbsOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_AbsOptions ? static_cast<const tflite::AbsOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SplitVOptions *builtin_options_as_SplitVOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SplitVOptions ? static_cast<const tflite::SplitVOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::UniqueOptions *builtin_options_as_UniqueOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_UniqueOptions ? static_cast<const tflite::UniqueOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ReverseV2Options *builtin_options_as_ReverseV2Options() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ReverseV2Options ? static_cast<const tflite::ReverseV2Options *>(builtin_options()) : nullptr;
+  }
+  const tflite::AddNOptions *builtin_options_as_AddNOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_AddNOptions ? static_cast<const tflite::AddNOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::GatherNdOptions *builtin_options_as_GatherNdOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_GatherNdOptions ? static_cast<const tflite::GatherNdOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::CosOptions *builtin_options_as_CosOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_CosOptions ? static_cast<const tflite::CosOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::WhereOptions *builtin_options_as_WhereOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_WhereOptions ? static_cast<const tflite::WhereOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::RankOptions *builtin_options_as_RankOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_RankOptions ? static_cast<const tflite::RankOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ReverseSequenceOptions *builtin_options_as_ReverseSequenceOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ReverseSequenceOptions ? static_cast<const tflite::ReverseSequenceOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::MatrixDiagOptions *builtin_options_as_MatrixDiagOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_MatrixDiagOptions ? static_cast<const tflite::MatrixDiagOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::QuantizeOptions *builtin_options_as_QuantizeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_QuantizeOptions ? static_cast<const tflite::QuantizeOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::MatrixSetDiagOptions *builtin_options_as_MatrixSetDiagOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_MatrixSetDiagOptions ? static_cast<const tflite::MatrixSetDiagOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::HardSwishOptions *builtin_options_as_HardSwishOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_HardSwishOptions ? static_cast<const tflite::HardSwishOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::IfOptions *builtin_options_as_IfOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_IfOptions ? static_cast<const tflite::IfOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::WhileOptions *builtin_options_as_WhileOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_WhileOptions ? static_cast<const tflite::WhileOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::DepthToSpaceOptions *builtin_options_as_DepthToSpaceOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_DepthToSpaceOptions ? static_cast<const tflite::DepthToSpaceOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::NonMaxSuppressionV4Options *builtin_options_as_NonMaxSuppressionV4Options() const {
+    return builtin_options_type() == tflite::BuiltinOptions_NonMaxSuppressionV4Options ? static_cast<const tflite::NonMaxSuppressionV4Options *>(builtin_options()) : nullptr;
+  }
+  const tflite::NonMaxSuppressionV5Options *builtin_options_as_NonMaxSuppressionV5Options() const {
+    return builtin_options_type() == tflite::BuiltinOptions_NonMaxSuppressionV5Options ? static_cast<const tflite::NonMaxSuppressionV5Options *>(builtin_options()) : nullptr;
+  }
+  const tflite::ScatterNdOptions *builtin_options_as_ScatterNdOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ScatterNdOptions ? static_cast<const tflite::ScatterNdOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SelectV2Options *builtin_options_as_SelectV2Options() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SelectV2Options ? static_cast<const tflite::SelectV2Options *>(builtin_options()) : nullptr;
+  }
+  const tflite::DensifyOptions *builtin_options_as_DensifyOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_DensifyOptions ? static_cast<const tflite::DensifyOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::SegmentSumOptions *builtin_options_as_SegmentSumOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SegmentSumOptions ? static_cast<const tflite::SegmentSumOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::BatchMatMulOptions *builtin_options_as_BatchMatMulOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BatchMatMulOptions ? static_cast<const tflite::BatchMatMulOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::CumsumOptions *builtin_options_as_CumsumOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_CumsumOptions ? static_cast<const tflite::CumsumOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::CallOnceOptions *builtin_options_as_CallOnceOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_CallOnceOptions ? static_cast<const tflite::CallOnceOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::BroadcastToOptions *builtin_options_as_BroadcastToOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BroadcastToOptions ? static_cast<const tflite::BroadcastToOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::Rfft2dOptions *builtin_options_as_Rfft2dOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_Rfft2dOptions ? static_cast<const tflite::Rfft2dOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::Conv3DOptions *builtin_options_as_Conv3DOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_Conv3DOptions ? static_cast<const tflite::Conv3DOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::HashtableOptions *builtin_options_as_HashtableOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_HashtableOptions ? static_cast<const tflite::HashtableOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::HashtableFindOptions *builtin_options_as_HashtableFindOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_HashtableFindOptions ? static_cast<const tflite::HashtableFindOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::HashtableImportOptions *builtin_options_as_HashtableImportOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_HashtableImportOptions ? static_cast<const tflite::HashtableImportOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::HashtableSizeOptions *builtin_options_as_HashtableSizeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_HashtableSizeOptions ? static_cast<const tflite::HashtableSizeOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::VarHandleOptions *builtin_options_as_VarHandleOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_VarHandleOptions ? static_cast<const tflite::VarHandleOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ReadVariableOptions *builtin_options_as_ReadVariableOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ReadVariableOptions ? static_cast<const tflite::ReadVariableOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::AssignVariableOptions *builtin_options_as_AssignVariableOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_AssignVariableOptions ? static_cast<const tflite::AssignVariableOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::RandomOptions *builtin_options_as_RandomOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_RandomOptions ? static_cast<const tflite::RandomOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::BucketizeOptions *builtin_options_as_BucketizeOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BucketizeOptions ? static_cast<const tflite::BucketizeOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::GeluOptions *builtin_options_as_GeluOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_GeluOptions ? static_cast<const tflite::GeluOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::DynamicUpdateSliceOptions *builtin_options_as_DynamicUpdateSliceOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_DynamicUpdateSliceOptions ? static_cast<const tflite::DynamicUpdateSliceOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::UnsortedSegmentProdOptions *builtin_options_as_UnsortedSegmentProdOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentProdOptions ? static_cast<const tflite::UnsortedSegmentProdOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::UnsortedSegmentMaxOptions *builtin_options_as_UnsortedSegmentMaxOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentMaxOptions ? static_cast<const tflite::UnsortedSegmentMaxOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::UnsortedSegmentMinOptions *builtin_options_as_UnsortedSegmentMinOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentMinOptions ? static_cast<const tflite::UnsortedSegmentMinOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::UnsortedSegmentSumOptions *builtin_options_as_UnsortedSegmentSumOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_UnsortedSegmentSumOptions ? static_cast<const tflite::UnsortedSegmentSumOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::ATan2Options *builtin_options_as_ATan2Options() const {
+    return builtin_options_type() == tflite::BuiltinOptions_ATan2Options ? static_cast<const tflite::ATan2Options *>(builtin_options()) : nullptr;
+  }
+  const tflite::SignOptions *builtin_options_as_SignOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_SignOptions ? static_cast<const tflite::SignOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::BitcastOptions *builtin_options_as_BitcastOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BitcastOptions ? static_cast<const tflite::BitcastOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::BitwiseXorOptions *builtin_options_as_BitwiseXorOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_BitwiseXorOptions ? static_cast<const tflite::BitwiseXorOptions *>(builtin_options()) : nullptr;
+  }
+  const tflite::RightShiftOptions *builtin_options_as_RightShiftOptions() const {
+    return builtin_options_type() == tflite::BuiltinOptions_RightShiftOptions ? static_cast<const tflite::RightShiftOptions *>(builtin_options()) : nullptr;
+  }
+  const ::flatbuffers::Vector<uint8_t> *custom_options() const {
+    return GetPointer<const ::flatbuffers::Vector<uint8_t> *>(VT_CUSTOM_OPTIONS);
+  }
+  tflite::CustomOptionsFormat custom_options_format() const {
+    return static_cast<tflite::CustomOptionsFormat>(GetField<int8_t>(VT_CUSTOM_OPTIONS_FORMAT, 0));
+  }
+  const ::flatbuffers::Vector<uint8_t> *mutating_variable_inputs() const {
+    return GetPointer<const ::flatbuffers::Vector<uint8_t> *>(VT_MUTATING_VARIABLE_INPUTS);
+  }
+  const ::flatbuffers::Vector<int32_t> *intermediates() const {
+    return GetPointer<const ::flatbuffers::Vector<int32_t> *>(VT_INTERMEDIATES);
+  }
+  uint64_t large_custom_options_offset() const {
+    return GetField<uint64_t>(VT_LARGE_CUSTOM_OPTIONS_OFFSET, 0);
+  }
+  uint64_t large_custom_options_size() const {
+    return GetField<uint64_t>(VT_LARGE_CUSTOM_OPTIONS_SIZE, 0);
+  }
+  tflite::BuiltinOptions2 builtin_options_2_type() const {
+    return static_cast<tflite::BuiltinOptions2>(GetField<uint8_t>(VT_BUILTIN_OPTIONS_2_TYPE, 0));
+  }
+  const void *builtin_options_2() const {
+    return GetPointer<const void *>(VT_BUILTIN_OPTIONS_2);
+  }
+  template<typename T> const T *builtin_options_2_as() const;
+  const tflite::StablehloConcatenateOptions *builtin_options_2_as_StablehloConcatenateOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloConcatenateOptions ? static_cast<const tflite::StablehloConcatenateOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloBroadcastInDimOptions *builtin_options_2_as_StablehloBroadcastInDimOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloBroadcastInDimOptions ? static_cast<const tflite::StablehloBroadcastInDimOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloSliceOptions *builtin_options_2_as_StablehloSliceOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloSliceOptions ? static_cast<const tflite::StablehloSliceOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloConvolutionOptions *builtin_options_2_as_StablehloConvolutionOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloConvolutionOptions ? static_cast<const tflite::StablehloConvolutionOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloCustomCallOptions *builtin_options_2_as_StablehloCustomCallOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloCustomCallOptions ? static_cast<const tflite::StablehloCustomCallOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloReduceOptions *builtin_options_2_as_StablehloReduceOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloReduceOptions ? static_cast<const tflite::StablehloReduceOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloScatterOptions *builtin_options_2_as_StablehloScatterOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloScatterOptions ? static_cast<const tflite::StablehloScatterOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloCompareOptions *builtin_options_2_as_StablehloCompareOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloCompareOptions ? static_cast<const tflite::StablehloCompareOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloDynamicSliceOptions *builtin_options_2_as_StablehloDynamicSliceOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloDynamicSliceOptions ? static_cast<const tflite::StablehloDynamicSliceOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloPadOptions *builtin_options_2_as_StablehloPadOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloPadOptions ? static_cast<const tflite::StablehloPadOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloIotaOptions *builtin_options_2_as_StablehloIotaOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloIotaOptions ? static_cast<const tflite::StablehloIotaOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloDotGeneralOptions *builtin_options_2_as_StablehloDotGeneralOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloDotGeneralOptions ? static_cast<const tflite::StablehloDotGeneralOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloReduceWindowOptions *builtin_options_2_as_StablehloReduceWindowOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloReduceWindowOptions ? static_cast<const tflite::StablehloReduceWindowOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloSortOptions *builtin_options_2_as_StablehloSortOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloSortOptions ? static_cast<const tflite::StablehloSortOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloWhileOptions *builtin_options_2_as_StablehloWhileOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloWhileOptions ? static_cast<const tflite::StablehloWhileOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloGatherOptions *builtin_options_2_as_StablehloGatherOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloGatherOptions ? static_cast<const tflite::StablehloGatherOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloTransposeOptions *builtin_options_2_as_StablehloTransposeOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloTransposeOptions ? static_cast<const tflite::StablehloTransposeOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::DilateOptions *builtin_options_2_as_DilateOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_DilateOptions ? static_cast<const tflite::DilateOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StablehloRngBitGeneratorOptions *builtin_options_2_as_StablehloRngBitGeneratorOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StablehloRngBitGeneratorOptions ? static_cast<const tflite::StablehloRngBitGeneratorOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::ReduceWindowOptions *builtin_options_2_as_ReduceWindowOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_ReduceWindowOptions ? static_cast<const tflite::ReduceWindowOptions *>(builtin_options_2()) : nullptr;
+  }
+  const tflite::StableHLOCompositeOptions *builtin_options_2_as_StableHLOCompositeOptions() const {
+    return builtin_options_2_type() == tflite::BuiltinOptions2_StableHLOCompositeOptions ? static_cast<const tflite::StableHLOCompositeOptions *>(builtin_options_2()) : nullptr;
+  }
+  bool Verify(::flatbuffers::Verifier &verifier) const {
+    return VerifyTableStart(verifier) &&
+           VerifyField<uint32_t>(verifier, VT_OPCODE_INDEX, 4) &&
+           VerifyOffset(verifier, VT_INPUTS) &&
+           verifier.VerifyVector(inputs()) &&
+           VerifyOffset(verifier, VT_OUTPUTS) &&
+           verifier.VerifyVector(outputs()) &&
+           VerifyField<uint8_t>(verifier, VT_BUILTIN_OPTIONS_TYPE, 1) &&
+           VerifyOffset(verifier, VT_BUILTIN_OPTIONS) &&
+           VerifyBuiltinOptions(verifier, builtin_options(), builtin_options_type()) &&
+           VerifyOffset(verifier, VT_CUSTOM_OPTIONS) &&
+           verifier.VerifyVector(custom_options()) &&
+           VerifyField<int8_t>(verifier, VT_CUSTOM_OPTIONS_FORMAT, 1) &&
+           VerifyOffset(verifier, VT_MUTATING_VARIABLE_INPUTS) &&
+           verifier.VerifyVector(mutating_variable_inputs()) &&
+           VerifyOffset(verifier, VT_INTERMEDIATES) &&
+           verifier.VerifyVector(intermediates()) &&
+           VerifyField<uint64_t>(verifier, VT_LARGE_CUSTOM_OPTIONS_OFFSET, 8) &&
+           VerifyField<uint64_t>(verifier, VT_LARGE_CUSTOM_OPTIONS_SIZE, 8) &&
+           VerifyField<uint8_t>(verifier, VT_BUILTIN_OPTIONS_2_TYPE, 1) &&
+           VerifyOffset(verifier, VT_BUILTIN_OPTIONS_2) &&
+           VerifyBuiltinOptions2(verifier, builtin_options_2(), builtin_options_2_type()) &&
+           verifier.EndTable();
+  }
+  OperatorT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  void UnPackTo(OperatorT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const;
+  static ::flatbuffers::Offset<Operator> Pack(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);
+};
+
+template<> inline const tflite::Conv2DOptions *Operator::builtin_options_as<tflite::Conv2DOptions>() const {
+  return builtin_options_as_Conv2DOptions();
+}
+
+template<> inline const tflite::DepthwiseConv2DOptions *Operator::builtin_options_as<tflite::DepthwiseConv2DOptions>() const {
+  return builtin_options_as_DepthwiseConv2DOptions();
+}
+
+template<> inline const tflite::ConcatEmbeddingsOptions *Operator::builtin_options_as<tflite::ConcatEmbeddingsOptions>() const {
+  return builtin_options_as_ConcatEmbeddingsOptions();
+}
+
+template<> inline const tflite::LSHProjectionOptions *Operator::builtin_options_as<tflite::LSHProjectionOptions>() const {
+  return builtin_options_as_LSHProjectionOptions();
+}
+
+template<> inline const tflite::Pool2DOptions *Operator::builtin_options_as<tflite::Pool2DOptions>() const {
+  return builtin_options_as_Pool2DOptions();
+}
+
+template<> inline const tflite::SVDFOptions *Operator::builtin_options_as<tflite::SVDFOptions>() const {
+  return builtin_options_as_SVDFOptions();
+}
+
+template<> inline const tflite::RNNOptions *Operator::builtin_options_as<tflite::RNNOptions>() const {
+  return builtin_options_as_RNNOptions();
+}
+
+template<> inline const tflite::FullyConnectedOptions *Operator::builtin_options_as<tflite::FullyConnectedOptions>() const {
+  return builtin_options_as_FullyConnectedOptions();
+}
+
+template<> inline const tflite::SoftmaxOptions *Operator::builtin_options_as<tflite::SoftmaxOptions>() const {
+  return builtin_options_as_SoftmaxOptions();
+}
+
+template<> inline const tflite::ConcatenationOptions *Operator::builtin_options_as<tflite::ConcatenationOptions>() const {
+  return builtin_options_as_ConcatenationOptions();
+}
+
+template<> inline const tflite::AddOptions *Operator::builtin_options_as<tflite::AddOptions>() const {
   return builtin_options_as_AddOptions();
 }
 
-template<> inline const tflite::L2NormOptions *Operator::builtin_options_as<tflite::L2NormOptions>() const {
-  return builtin_options_as_L2NormOptions();
+template<> inline const tflite::L2NormOptions *Operator::builtin_options_as<tflite::L2NormOptions>() const {
+  return builtin_options_as_L2NormOptions();
+}
+
+template<> inline const tflite::LocalResponseNormalizationOptions *Operator::builtin_options_as<tflite::LocalResponseNormalizationOptions>() const {
+  return builtin_options_as_LocalResponseNormalizationOptions();
+}
+
+template<> inline const tflite::LSTMOptions *Operator::builtin_options_as<tflite::LSTMOptions>() const {
+  return builtin_options_as_LSTMOptions();
+}
+
+template<> inline const tflite::ResizeBilinearOptions *Operator::builtin_options_as<tflite::ResizeBilinearOptions>() const {
+  return builtin_options_as_ResizeBilinearOptions();
+}
+
+template<> inline const tflite::CallOptions *Operator::builtin_options_as<tflite::CallOptions>() const {
+  return builtin_options_as_CallOptions();
+}
+
+template<> inline const tflite::ReshapeOptions *Operator::builtin_options_as<tflite::ReshapeOptions>() const {
+  return builtin_options_as_ReshapeOptions();
+}
+
+template<> inline const tflite::SkipGramOptions *Operator::builtin_options_as<tflite::SkipGramOptions>() const {
+  return builtin_options_as_SkipGramOptions();
+}
+
+template<> inline const tflite::SpaceToDepthOptions *Operator::builtin_options_as<tflite::SpaceToDepthOptions>() const {
+  return builtin_options_as_SpaceToDepthOptions();
+}
+
+template<> inline const tflite::EmbeddingLookupSparseOptions *Operator::builtin_options_as<tflite::EmbeddingLookupSparseOptions>() const {
+  return builtin_options_as_EmbeddingLookupSparseOptions();
+}
+
+template<> inline const tflite::MulOptions *Operator::builtin_options_as<tflite::MulOptions>() const {
+  return builtin_options_as_MulOptions();
+}
+
+template<> inline const tflite::PadOptions *Operator::builtin_options_as<tflite::PadOptions>() const {
+  return builtin_options_as_PadOptions();
+}
+
+template<> inline const tflite::GatherOptions *Operator::builtin_options_as<tflite::GatherOptions>() const {
+  return builtin_options_as_GatherOptions();
+}
+
+template<> inline const tflite::BatchToSpaceNDOptions *Operator::builtin_options_as<tflite::BatchToSpaceNDOptions>() const {
+  return builtin_options_as_BatchToSpaceNDOptions();
+}
+
+template<> inline const tflite::SpaceToBatchNDOptions *Operator::builtin_options_as<tflite::SpaceToBatchNDOptions>() const {
+  return builtin_options_as_SpaceToBatchNDOptions();
+}
+
+template<> inline const tflite::TransposeOptions *Operator::builtin_options_as<tflite::TransposeOptions>() const {
+  return builtin_options_as_TransposeOptions();
+}
+
+template<> inline const tflite::ReducerOptions *Operator::builtin_options_as<tflite::ReducerOptions>() const {
+  return builtin_options_as_ReducerOptions();
+}
+
+template<> inline const tflite::SubOptions *Operator::builtin_options_as<tflite::SubOptions>() const {
+  return builtin_options_as_SubOptions();
+}
+
+template<> inline const tflite::DivOptions *Operator::builtin_options_as<tflite::DivOptions>() const {
+  return builtin_options_as_DivOptions();
+}
+
+template<> inline const tflite::SqueezeOptions *Operator::builtin_options_as<tflite::SqueezeOptions>() const {
+  return builtin_options_as_SqueezeOptions();
+}
+
+template<> inline const tflite::SequenceRNNOptions *Operator::builtin_options_as<tflite::SequenceRNNOptions>() const {
+  return builtin_options_as_SequenceRNNOptions();
+}
+
+template<> inline const tflite::StridedSliceOptions *Operator::builtin_options_as<tflite::StridedSliceOptions>() const {
+  return builtin_options_as_StridedSliceOptions();
+}
+
+template<> inline const tflite::ExpOptions *Operator::builtin_options_as<tflite::ExpOptions>() const {
+  return builtin_options_as_ExpOptions();
+}
+
+template<> inline const tflite::TopKV2Options *Operator::builtin_options_as<tflite::TopKV2Options>() const {
+  return builtin_options_as_TopKV2Options();
+}
+
+template<> inline const tflite::SplitOptions *Operator::builtin_options_as<tflite::SplitOptions>() const {
+  return builtin_options_as_SplitOptions();
+}
+
+template<> inline const tflite::LogSoftmaxOptions *Operator::builtin_options_as<tflite::LogSoftmaxOptions>() const {
+  return builtin_options_as_LogSoftmaxOptions();
+}
+
+template<> inline const tflite::CastOptions *Operator::builtin_options_as<tflite::CastOptions>() const {
+  return builtin_options_as_CastOptions();
+}
+
+template<> inline const tflite::DequantizeOptions *Operator::builtin_options_as<tflite::DequantizeOptions>() const {
+  return builtin_options_as_DequantizeOptions();
+}
+
+template<> inline const tflite::MaximumMinimumOptions *Operator::builtin_options_as<tflite::MaximumMinimumOptions>() const {
+  return builtin_options_as_MaximumMinimumOptions();
+}
+
+template<> inline const tflite::ArgMaxOptions *Operator::builtin_options_as<tflite::ArgMaxOptions>() const {
+  return builtin_options_as_ArgMaxOptions();
+}
+
+template<> inline const tflite::LessOptions *Operator::builtin_options_as<tflite::LessOptions>() const {
+  return builtin_options_as_LessOptions();
+}
+
+template<> inline const tflite::NegOptions *Operator::builtin_options_as<tflite::NegOptions>() const {
+  return builtin_options_as_NegOptions();
+}
+
+template<> inline const tflite::PadV2Options *Operator::builtin_options_as<tflite::PadV2Options>() const {
+  return builtin_options_as_PadV2Options();
 }
 
-template<> inline const tflite::LocalResponseNormalizationOptions *Operator::builtin_options_as<tflite::LocalResponseNormalizationOptions>() const {
-  return builtin_options_as_LocalResponseNormalizationOptions();
+template<> inline const tflite::GreaterOptions *Operator::builtin_options_as<tflite::GreaterOptions>() const {
+  return builtin_options_as_GreaterOptions();
 }
 
-template<> inline const tflite::LSTMOptions *Operator::builtin_options_as<tflite::LSTMOptions>() const {
-  return builtin_options_as_LSTMOptions();
+template<> inline const tflite::GreaterEqualOptions *Operator::builtin_options_as<tflite::GreaterEqualOptions>() const {
+  return builtin_options_as_GreaterEqualOptions();
 }
 
-template<> inline const tflite::ResizeBilinearOptions *Operator::builtin_options_as<tflite::ResizeBilinearOptions>() const {
-  return builtin_options_as_ResizeBilinearOptions();
+template<> inline const tflite::LessEqualOptions *Operator::builtin_options_as<tflite::LessEqualOptions>() const {
+  return builtin_options_as_LessEqualOptions();
 }
 
-template<> inline const tflite::CallOptions *Operator::builtin_options_as<tflite::CallOptions>() const {
-  return builtin_options_as_CallOptions();
+template<> inline const tflite::SelectOptions *Operator::builtin_options_as<tflite::SelectOptions>() const {
+  return builtin_options_as_SelectOptions();
 }
 
-template<> inline const tflite::ReshapeOptions *Operator::builtin_options_as<tflite::ReshapeOptions>() const {
-  return builtin_options_as_ReshapeOptions();
+template<> inline const tflite::SliceOptions *Operator::builtin_options_as<tflite::SliceOptions>() const {
+  return builtin_options_as_SliceOptions();
 }
 
-template<> inline const tflite::SkipGramOptions *Operator::builtin_options_as<tflite::SkipGramOptions>() const {
-  return builtin_options_as_SkipGramOptions();
+template<> inline const tflite::TransposeConvOptions *Operator::builtin_options_as<tflite::TransposeConvOptions>() const {
+  return builtin_options_as_TransposeConvOptions();
 }
 
-template<> inline const tflite::SpaceToDepthOptions *Operator::builtin_options_as<tflite::SpaceToDepthOptions>() const {
-  return builtin_options_as_SpaceToDepthOptions();
+template<> inline const tflite::SparseToDenseOptions *Operator::builtin_options_as<tflite::SparseToDenseOptions>() const {
+  return builtin_options_as_SparseToDenseOptions();
 }
 
-template<> inline const tflite::EmbeddingLookupSparseOptions *Operator::builtin_options_as<tflite::EmbeddingLookupSparseOptions>() const {
-  return builtin_options_as_EmbeddingLookupSparseOptions();
+template<> inline const tflite::TileOptions *Operator::builtin_options_as<tflite::TileOptions>() const {
+  return builtin_options_as_TileOptions();
 }
 
-template<> inline const tflite::MulOptions *Operator::builtin_options_as<tflite::MulOptions>() const {
-  return builtin_options_as_MulOptions();
+template<> inline const tflite::ExpandDimsOptions *Operator::builtin_options_as<tflite::ExpandDimsOptions>() const {
+  return builtin_options_as_ExpandDimsOptions();
 }
 
-template<> inline const tflite::PadOptions *Operator::builtin_options_as<tflite::PadOptions>() const {
-  return builtin_options_as_PadOptions();
+template<> inline const tflite::EqualOptions *Operator::builtin_options_as<tflite::EqualOptions>() const {
+  return builtin_options_as_EqualOptions();
 }
 
-template<> inline const tflite::GatherOptions *Operator::builtin_options_as<tflite::GatherOptions>() const {
-  return builtin_options_as_GatherOptions();
+template<> inline const tflite::NotEqualOptions *Operator::builtin_options_as<tflite::NotEqualOptions>() const {
+  return builtin_options_as_NotEqualOptions();
 }
 
-template<> inline const tflite::BatchToSpaceNDOptions *Operator::builtin_options_as<tflite::BatchToSpaceNDOptions>() const {
-  return builtin_options_as_BatchToSpaceNDOptions();
+template<> inline const tflite::ShapeOptions *Operator::builtin_options_as<tflite::ShapeOptions>() const {
+  return builtin_options_as_ShapeOptions();
 }
 
-template<> inline const tflite::SpaceToBatchNDOptions *Operator::builtin_options_as<tflite::SpaceToBatchNDOptions>() const {
-  return builtin_options_as_SpaceToBatchNDOptions();
+template<> inline const tflite::PowOptions *Operator::builtin_options_as<tflite::PowOptions>() const {
+  return builtin_options_as_PowOptions();
 }
 
-template<> inline const tflite::TransposeOptions *Operator::builtin_options_as<tflite::TransposeOptions>() const {
-  return builtin_options_as_TransposeOptions();
+template<> inline const tflite::ArgMinOptions *Operator::builtin_options_as<tflite::ArgMinOptions>() const {
+  return builtin_options_as_ArgMinOptions();
 }
 
-template<> inline const tflite::ReducerOptions *Operator::builtin_options_as<tflite::ReducerOptions>() const {
-  return builtin_options_as_ReducerOptions();
+template<> inline const tflite::FakeQuantOptions *Operator::builtin_options_as<tflite::FakeQuantOptions>() const {
+  return builtin_options_as_FakeQuantOptions();
 }
 
-template<> inline const tflite::SubOptions *Operator::builtin_options_as<tflite::SubOptions>() const {
-  return builtin_options_as_SubOptions();
+template<> inline const tflite::PackOptions *Operator::builtin_options_as<tflite::PackOptions>() const {
+  return builtin_options_as_PackOptions();
 }
 
-template<> inline const tflite::DivOptions *Operator::builtin_options_as<tflite::DivOptions>() const {
-  return builtin_options_as_DivOptions();
+template<> inline const tflite::LogicalOrOptions *Operator::builtin_options_as<tflite::LogicalOrOptions>() const {
+  return builtin_options_as_LogicalOrOptions();
+}
+
+template<> inline const tflite::OneHotOptions *Operator::builtin_options_as<tflite::OneHotOptions>() const {
+  return builtin_options_as_OneHotOptions();
+}
+
+template<> inline const tflite::LogicalAndOptions *Operator::builtin_options_as<tflite::LogicalAndOptions>() const {
+  return builtin_options_as_LogicalAndOptions();
+}
+
+template<> inline const tflite::LogicalNotOptions *Operator::builtin_options_as<tflite::LogicalNotOptions>() const {
+  return builtin_options_as_LogicalNotOptions();
+}
+
+template<> inline const tflite::UnpackOptions *Operator::builtin_options_as<tflite::UnpackOptions>() const {
+  return builtin_options_as_UnpackOptions();
+}
+
+template<> inline const tflite::FloorDivOptions *Operator::builtin_options_as<tflite::FloorDivOptions>() const {
+  return builtin_options_as_FloorDivOptions();
+}
+
+template<> inline const tflite::SquareOptions *Operator::builtin_options_as<tflite::SquareOptions>() const {
+  return builtin_options_as_SquareOptions();
+}
+
+template<> inline const tflite::ZerosLikeOptions *Operator::builtin_options_as<tflite::ZerosLikeOptions>() const {
+  return builtin_options_as_ZerosLikeOptions();
+}
+
+template<> inline const tflite::FillOptions *Operator::builtin_options_as<tflite::FillOptions>() const {
+  return builtin_options_as_FillOptions();
+}
+
+template<> inline const tflite::BidirectionalSequenceLSTMOptions *Operator::builtin_options_as<tflite::BidirectionalSequenceLSTMOptions>() const {
+  return builtin_options_as_BidirectionalSequenceLSTMOptions();
+}
+
+template<> inline const tflite::BidirectionalSequenceRNNOptions *Operator::builtin_options_as<tflite::BidirectionalSequenceRNNOptions>() const {
+  return builtin_options_as_BidirectionalSequenceRNNOptions();
+}
+
+template<> inline const tflite::UnidirectionalSequenceLSTMOptions *Operator::builtin_options_as<tflite::UnidirectionalSequenceLSTMOptions>() const {
+  return builtin_options_as_UnidirectionalSequenceLSTMOptions();
+}
+
+template<> inline const tflite::FloorModOptions *Operator::builtin_options_as<tflite::FloorModOptions>() const {
+  return builtin_options_as_FloorModOptions();
+}
+
+template<> inline const tflite::RangeOptions *Operator::builtin_options_as<tflite::RangeOptions>() const {
+  return builtin_options_as_RangeOptions();
+}
+
+template<> inline const tflite::ResizeNearestNeighborOptions *Operator::builtin_options_as<tflite::ResizeNearestNeighborOptions>() const {
+  return builtin_options_as_ResizeNearestNeighborOptions();
+}
+
+template<> inline const tflite::LeakyReluOptions *Operator::builtin_options_as<tflite::LeakyReluOptions>() const {
+  return builtin_options_as_LeakyReluOptions();
+}
+
+template<> inline const tflite::SquaredDifferenceOptions *Operator::builtin_options_as<tflite::SquaredDifferenceOptions>() const {
+  return builtin_options_as_SquaredDifferenceOptions();
+}
+
+template<> inline const tflite::MirrorPadOptions *Operator::builtin_options_as<tflite::MirrorPadOptions>() const {
+  return builtin_options_as_MirrorPadOptions();
+}
+
+template<> inline const tflite::AbsOptions *Operator::builtin_options_as<tflite::AbsOptions>() const {
+  return builtin_options_as_AbsOptions();
+}
+
+template<> inline const tflite::SplitVOptions *Operator::builtin_options_as<tflite::SplitVOptions>() const {
+  return builtin_options_as_SplitVOptions();
+}
+
+template<> inline const tflite::UniqueOptions *Operator::builtin_options_as<tflite::UniqueOptions>() const {
+  return builtin_options_as_UniqueOptions();
+}
+
+template<> inline const tflite::ReverseV2Options *Operator::builtin_options_as<tflite::ReverseV2Options>() const {
+  return builtin_options_as_ReverseV2Options();
+}
+
+template<> inline const tflite::AddNOptions *Operator::builtin_options_as<tflite::AddNOptions>() const {
+  return builtin_options_as_AddNOptions();
+}
+
+template<> inline const tflite::GatherNdOptions *Operator::builtin_options_as<tflite::GatherNdOptions>() const {
+  return builtin_options_as_GatherNdOptions();
+}
+
+template<> inline const tflite::CosOptions *Operator::builtin_options_as<tflite::CosOptions>() const {
+  return builtin_options_as_CosOptions();
+}
+
+template<> inline const tflite::WhereOptions *Operator::builtin_options_as<tflite::WhereOptions>() const {
+  return builtin_options_as_WhereOptions();
+}
+
+template<> inline const tflite::RankOptions *Operator::builtin_options_as<tflite::RankOptions>() const {
+  return builtin_options_as_RankOptions();
+}
+
+template<> inline const tflite::ReverseSequenceOptions *Operator::builtin_options_as<tflite::ReverseSequenceOptions>() const {
+  return builtin_options_as_ReverseSequenceOptions();
+}
+
+template<> inline const tflite::MatrixDiagOptions *Operator::builtin_options_as<tflite::MatrixDiagOptions>() const {
+  return builtin_options_as_MatrixDiagOptions();
+}
+
+template<> inline const tflite::QuantizeOptions *Operator::builtin_options_as<tflite::QuantizeOptions>() const {
+  return builtin_options_as_QuantizeOptions();
+}
+
+template<> inline const tflite::MatrixSetDiagOptions *Operator::builtin_options_as<tflite::MatrixSetDiagOptions>() const {
+  return builtin_options_as_MatrixSetDiagOptions();
+}
+
+template<> inline const tflite::HardSwishOptions *Operator::builtin_options_as<tflite::HardSwishOptions>() const {
+  return builtin_options_as_HardSwishOptions();
+}
+
+template<> inline const tflite::IfOptions *Operator::builtin_options_as<tflite::IfOptions>() const {
+  return builtin_options_as_IfOptions();
+}
+
+template<> inline const tflite::WhileOptions *Operator::builtin_options_as<tflite::WhileOptions>() const {
+  return builtin_options_as_WhileOptions();
+}
+
+template<> inline const tflite::DepthToSpaceOptions *Operator::builtin_options_as<tflite::DepthToSpaceOptions>() const {
+  return builtin_options_as_DepthToSpaceOptions();
+}
+
+template<> inline const tflite::NonMaxSuppressionV4Options *Operator::builtin_options_as<tflite::NonMaxSuppressionV4Options>() const {
+  return builtin_options_as_NonMaxSuppressionV4Options();
+}
+
+template<> inline const tflite::NonMaxSuppressionV5Options *Operator::builtin_options_as<tflite::NonMaxSuppressionV5Options>() const {
+  return builtin_options_as_NonMaxSuppressionV5Options();
+}
+
+template<> inline const tflite::ScatterNdOptions *Operator::builtin_options_as<tflite::ScatterNdOptions>() const {
+  return builtin_options_as_ScatterNdOptions();
+}
+
+template<> inline const tflite::SelectV2Options *Operator::builtin_options_as<tflite::SelectV2Options>() const {
+  return builtin_options_as_SelectV2Options();
+}
+
+template<> inline const tflite::DensifyOptions *Operator::builtin_options_as<tflite::DensifyOptions>() const {
+  return builtin_options_as_DensifyOptions();
+}
+
+template<> inline const tflite::SegmentSumOptions *Operator::builtin_options_as<tflite::SegmentSumOptions>() const {
+  return builtin_options_as_SegmentSumOptions();
+}
+
+template<> inline const tflite::BatchMatMulOptions *Operator::builtin_options_as<tflite::BatchMatMulOptions>() const {
+  return builtin_options_as_BatchMatMulOptions();
+}
+
+template<> inline const tflite::CumsumOptions *Operator::builtin_options_as<tflite::CumsumOptions>() const {
+  return builtin_options_as_CumsumOptions();
+}
+
+template<> inline const tflite::CallOnceOptions *Operator::builtin_options_as<tflite::CallOnceOptions>() const {
+  return builtin_options_as_CallOnceOptions();
+}
+
+template<> inline const tflite::BroadcastToOptions *Operator::builtin_options_as<tflite::BroadcastToOptions>() const {
+  return builtin_options_as_BroadcastToOptions();
+}
+
+template<> inline const tflite::Rfft2dOptions *Operator::builtin_options_as<tflite::Rfft2dOptions>() const {
+  return builtin_options_as_Rfft2dOptions();
+}
+
+template<> inline const tflite::Conv3DOptions *Operator::builtin_options_as<tflite::Conv3DOptions>() const {
+  return builtin_options_as_Conv3DOptions();
 }
 
-template<> inline const tflite::SqueezeOptions *Operator::builtin_options_as<tflite::SqueezeOptions>() const {
-  return builtin_options_as_SqueezeOptions();
+template<> inline const tflite::HashtableOptions *Operator::builtin_options_as<tflite::HashtableOptions>() const {
+  return builtin_options_as_HashtableOptions();
 }
 
-template<> inline const tflite::SequenceRNNOptions *Operator::builtin_options_as<tflite::SequenceRNNOptions>() const {
-  return builtin_options_as_SequenceRNNOptions();
+template<> inline const tflite::HashtableFindOptions *Operator::builtin_options_as<tflite::HashtableFindOptions>() const {
+  return builtin_options_as_HashtableFindOptions();
 }
 
-template<> inline const tflite::StridedSliceOptions *Operator::builtin_options_as<tflite::StridedSliceOptions>() const {
-  return builtin_options_as_StridedSliceOptions();
+template<> inline const tflite::HashtableImportOptions *Operator::builtin_options_as<tflite::HashtableImportOptions>() const {
+  return builtin_options_as_HashtableImportOptions();
 }
 
-template<> inline const tflite::ExpOptions *Operator::builtin_options_as<tflite::ExpOptions>() const {
-  return builtin_options_as_ExpOptions();
+template<> inline const tflite::HashtableSizeOptions *Operator::builtin_options_as<tflite::HashtableSizeOptions>() const {
+  return builtin_options_as_HashtableSizeOptions();
 }
 
-template<> inline const tflite::TopKV2Options *Operator::builtin_options_as<tflite::TopKV2Options>() const {
-  return builtin_options_as_TopKV2Options();
+template<> inline const tflite::VarHandleOptions *Operator::builtin_options_as<tflite::VarHandleOptions>() const {
+  return builtin_options_as_VarHandleOptions();
 }
 
-template<> inline const tflite::SplitOptions *Operator::builtin_options_as<tflite::SplitOptions>() const {
-  return builtin_options_as_SplitOptions();
+template<> inline const tflite::ReadVariableOptions *Operator::builtin_options_as<tflite::ReadVariableOptions>() const {
+  return builtin_options_as_ReadVariableOptions();
 }
 
-template<> inline const tflite::LogSoftmaxOptions *Operator::builtin_options_as<tflite::LogSoftmaxOptions>() const {
-  return builtin_options_as_LogSoftmaxOptions();
+template<> inline const tflite::AssignVariableOptions *Operator::builtin_options_as<tflite::AssignVariableOptions>() const {
+  return builtin_options_as_AssignVariableOptions();
 }
 
-template<> inline const tflite::CastOptions *Operator::builtin_options_as<tflite::CastOptions>() const {
-  return builtin_options_as_CastOptions();
+template<> inline const tflite::RandomOptions *Operator::builtin_options_as<tflite::RandomOptions>() const {
tflite::RandomOptions *Operator::builtin_options_as() const { + return builtin_options_as_RandomOptions(); } -template<> inline const tflite::DequantizeOptions *Operator::builtin_options_as() const { - return builtin_options_as_DequantizeOptions(); +template<> inline const tflite::BucketizeOptions *Operator::builtin_options_as() const { + return builtin_options_as_BucketizeOptions(); } -template<> inline const tflite::MaximumMinimumOptions *Operator::builtin_options_as() const { - return builtin_options_as_MaximumMinimumOptions(); +template<> inline const tflite::GeluOptions *Operator::builtin_options_as() const { + return builtin_options_as_GeluOptions(); } -template<> inline const tflite::ArgMaxOptions *Operator::builtin_options_as() const { - return builtin_options_as_ArgMaxOptions(); +template<> inline const tflite::DynamicUpdateSliceOptions *Operator::builtin_options_as() const { + return builtin_options_as_DynamicUpdateSliceOptions(); } -template<> inline const tflite::LessOptions *Operator::builtin_options_as() const { - return builtin_options_as_LessOptions(); +template<> inline const tflite::UnsortedSegmentProdOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentProdOptions(); } -template<> inline const tflite::NegOptions *Operator::builtin_options_as() const { - return builtin_options_as_NegOptions(); +template<> inline const tflite::UnsortedSegmentMaxOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentMaxOptions(); } -template<> inline const tflite::PadV2Options *Operator::builtin_options_as() const { - return builtin_options_as_PadV2Options(); +template<> inline const tflite::UnsortedSegmentMinOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentMinOptions(); } -template<> inline const tflite::GreaterOptions *Operator::builtin_options_as() const { - return builtin_options_as_GreaterOptions(); +template<> inline const tflite::UnsortedSegmentSumOptions *Operator::builtin_options_as() const { + return builtin_options_as_UnsortedSegmentSumOptions(); } -template<> inline const tflite::GreaterEqualOptions *Operator::builtin_options_as() const { - return builtin_options_as_GreaterEqualOptions(); +template<> inline const tflite::ATan2Options *Operator::builtin_options_as() const { + return builtin_options_as_ATan2Options(); } -template<> inline const tflite::LessEqualOptions *Operator::builtin_options_as() const { - return builtin_options_as_LessEqualOptions(); +template<> inline const tflite::SignOptions *Operator::builtin_options_as() const { + return builtin_options_as_SignOptions(); } -template<> inline const tflite::SelectOptions *Operator::builtin_options_as() const { - return builtin_options_as_SelectOptions(); +template<> inline const tflite::BitcastOptions *Operator::builtin_options_as() const { + return builtin_options_as_BitcastOptions(); } -template<> inline const tflite::SliceOptions *Operator::builtin_options_as() const { - return builtin_options_as_SliceOptions(); +template<> inline const tflite::BitwiseXorOptions *Operator::builtin_options_as() const { + return builtin_options_as_BitwiseXorOptions(); } -template<> inline const tflite::TransposeConvOptions *Operator::builtin_options_as() const { - return builtin_options_as_TransposeConvOptions(); +template<> inline const tflite::RightShiftOptions *Operator::builtin_options_as() const { + return builtin_options_as_RightShiftOptions(); } -template<> inline const tflite::SparseToDenseOptions 
*Operator::builtin_options_as() const { - return builtin_options_as_SparseToDenseOptions(); +template<> inline const tflite::StablehloConcatenateOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloConcatenateOptions(); } -template<> inline const tflite::TileOptions *Operator::builtin_options_as() const { - return builtin_options_as_TileOptions(); +template<> inline const tflite::StablehloBroadcastInDimOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloBroadcastInDimOptions(); } -template<> inline const tflite::ExpandDimsOptions *Operator::builtin_options_as() const { - return builtin_options_as_ExpandDimsOptions(); +template<> inline const tflite::StablehloSliceOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloSliceOptions(); } -template<> inline const tflite::EqualOptions *Operator::builtin_options_as() const { - return builtin_options_as_EqualOptions(); +template<> inline const tflite::StablehloConvolutionOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloConvolutionOptions(); } -template<> inline const tflite::NotEqualOptions *Operator::builtin_options_as() const { - return builtin_options_as_NotEqualOptions(); +template<> inline const tflite::StablehloCustomCallOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloCustomCallOptions(); } -template<> inline const tflite::ShapeOptions *Operator::builtin_options_as() const { - return builtin_options_as_ShapeOptions(); +template<> inline const tflite::StablehloReduceOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloReduceOptions(); } -template<> inline const tflite::PowOptions *Operator::builtin_options_as() const { - return builtin_options_as_PowOptions(); +template<> inline const tflite::StablehloScatterOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloScatterOptions(); } -template<> inline const tflite::ArgMinOptions *Operator::builtin_options_as() const { - return builtin_options_as_ArgMinOptions(); +template<> inline const tflite::StablehloCompareOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloCompareOptions(); } -template<> inline const tflite::FakeQuantOptions *Operator::builtin_options_as() const { - return builtin_options_as_FakeQuantOptions(); +template<> inline const tflite::StablehloDynamicSliceOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloDynamicSliceOptions(); } -template<> inline const tflite::PackOptions *Operator::builtin_options_as() const { - return builtin_options_as_PackOptions(); +template<> inline const tflite::StablehloPadOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloPadOptions(); } -template<> inline const tflite::LogicalOrOptions *Operator::builtin_options_as() const { - return builtin_options_as_LogicalOrOptions(); +template<> inline const tflite::StablehloIotaOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloIotaOptions(); } -template<> inline const tflite::OneHotOptions *Operator::builtin_options_as() const { - return builtin_options_as_OneHotOptions(); +template<> inline const tflite::StablehloDotGeneralOptions *Operator::builtin_options_2_as() const { + return builtin_options_2_as_StablehloDotGeneralOptions(); } -template<> inline const tflite::LogicalAndOptions 
*Operator::builtin_options_as<tflite::LogicalAndOptions>() const {
-  return builtin_options_as_LogicalAndOptions();
+template<> inline const tflite::StablehloReduceWindowOptions *Operator::builtin_options_2_as<tflite::StablehloReduceWindowOptions>() const {
+  return builtin_options_2_as_StablehloReduceWindowOptions();
 }

-template<> inline const tflite::LogicalNotOptions *Operator::builtin_options_as<tflite::LogicalNotOptions>() const {
-  return builtin_options_as_LogicalNotOptions();
+template<> inline const tflite::StablehloSortOptions *Operator::builtin_options_2_as<tflite::StablehloSortOptions>() const {
+  return builtin_options_2_as_StablehloSortOptions();
 }

-template<> inline const tflite::UnpackOptions *Operator::builtin_options_as<tflite::UnpackOptions>() const {
-  return builtin_options_as_UnpackOptions();
+template<> inline const tflite::StablehloWhileOptions *Operator::builtin_options_2_as<tflite::StablehloWhileOptions>() const {
+  return builtin_options_2_as_StablehloWhileOptions();
 }

-template<> inline const tflite::FloorDivOptions *Operator::builtin_options_as<tflite::FloorDivOptions>() const {
-  return builtin_options_as_FloorDivOptions();
+template<> inline const tflite::StablehloGatherOptions *Operator::builtin_options_2_as<tflite::StablehloGatherOptions>() const {
+  return builtin_options_2_as_StablehloGatherOptions();
 }

-template<> inline const tflite::SquareOptions *Operator::builtin_options_as<tflite::SquareOptions>() const {
-  return builtin_options_as_SquareOptions();
+template<> inline const tflite::StablehloTransposeOptions *Operator::builtin_options_2_as<tflite::StablehloTransposeOptions>() const {
+  return builtin_options_2_as_StablehloTransposeOptions();
 }

-template<> inline const tflite::ZerosLikeOptions *Operator::builtin_options_as<tflite::ZerosLikeOptions>() const {
-  return builtin_options_as_ZerosLikeOptions();
+template<> inline const tflite::DilateOptions *Operator::builtin_options_2_as<tflite::DilateOptions>() const {
+  return builtin_options_2_as_DilateOptions();
 }

-template<> inline const tflite::FillOptions *Operator::builtin_options_as<tflite::FillOptions>() const {
-  return builtin_options_as_FillOptions();
+template<> inline const tflite::StablehloRngBitGeneratorOptions *Operator::builtin_options_2_as<tflite::StablehloRngBitGeneratorOptions>() const {
+  return builtin_options_2_as_StablehloRngBitGeneratorOptions();
 }

-template<> inline const tflite::BidirectionalSequenceLSTMOptions *Operator::builtin_options_as<tflite::BidirectionalSequenceLSTMOptions>() const {
-  return builtin_options_as_BidirectionalSequenceLSTMOptions();
+template<> inline const tflite::ReduceWindowOptions *Operator::builtin_options_2_as<tflite::ReduceWindowOptions>() const {
+  return builtin_options_2_as_ReduceWindowOptions();
 }

-template<> inline const tflite::BidirectionalSequenceRNNOptions *Operator::builtin_options_as<tflite::BidirectionalSequenceRNNOptions>() const {
-  return builtin_options_as_BidirectionalSequenceRNNOptions();
+template<> inline const tflite::StableHLOCompositeOptions *Operator::builtin_options_2_as<tflite::StableHLOCompositeOptions>() const {
+  return builtin_options_2_as_StableHLOCompositeOptions();
 }

-template<> inline const tflite::UnidirectionalSequenceLSTMOptions *Operator::builtin_options_as<tflite::UnidirectionalSequenceLSTMOptions>() const {
-  return builtin_options_as_UnidirectionalSequenceLSTMOptions();
-}
+struct OperatorBuilder {
+  typedef Operator Table;
+  ::flatbuffers::FlatBufferBuilder &fbb_;
+  ::flatbuffers::uoffset_t start_;
+  void add_opcode_index(uint32_t opcode_index) {
+    fbb_.AddElement<uint32_t>(Operator::VT_OPCODE_INDEX, opcode_index, 0);
+  }
+  void add_inputs(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> inputs) {
+    fbb_.AddOffset(Operator::VT_INPUTS, inputs);
+  }
+  void add_outputs(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> outputs) {
+    fbb_.AddOffset(Operator::VT_OUTPUTS, outputs);
+  }
+  void add_builtin_options_type(tflite::BuiltinOptions builtin_options_type) {
+    fbb_.AddElement<uint8_t>(Operator::VT_BUILTIN_OPTIONS_TYPE, static_cast<uint8_t>(builtin_options_type), 0);
+  }
+  void add_builtin_options(::flatbuffers::Offset<void> builtin_options) {
+    fbb_.AddOffset(Operator::VT_BUILTIN_OPTIONS, builtin_options);
+  }
+  void add_custom_options(::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> custom_options) {
+    fbb_.AddOffset(Operator::VT_CUSTOM_OPTIONS, custom_options);
+  }
+  void add_custom_options_format(tflite::CustomOptionsFormat custom_options_format) {
+    fbb_.AddElement<int8_t>(Operator::VT_CUSTOM_OPTIONS_FORMAT, static_cast<int8_t>(custom_options_format), 0);
+  }
+  void add_mutating_variable_inputs(::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> mutating_variable_inputs) {
+    fbb_.AddOffset(Operator::VT_MUTATING_VARIABLE_INPUTS, mutating_variable_inputs);
+  }
+  void add_intermediates(::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> intermediates) {
+    fbb_.AddOffset(Operator::VT_INTERMEDIATES, intermediates);
+  }
+  void add_large_custom_options_offset(uint64_t large_custom_options_offset) {
+    fbb_.AddElement<uint64_t>(Operator::VT_LARGE_CUSTOM_OPTIONS_OFFSET, large_custom_options_offset, 0);
+  }
+  void add_large_custom_options_size(uint64_t large_custom_options_size) {
+    fbb_.AddElement<uint64_t>(Operator::VT_LARGE_CUSTOM_OPTIONS_SIZE, large_custom_options_size, 0);
+  }
+  void add_builtin_options_2_type(tflite::BuiltinOptions2 builtin_options_2_type) {
+    fbb_.AddElement<uint8_t>(Operator::VT_BUILTIN_OPTIONS_2_TYPE, static_cast<uint8_t>(builtin_options_2_type), 0);
+  }
+  void add_builtin_options_2(::flatbuffers::Offset<void> builtin_options_2) {
+    fbb_.AddOffset(Operator::VT_BUILTIN_OPTIONS_2, builtin_options_2);
+  }
+  explicit OperatorBuilder(::flatbuffers::FlatBufferBuilder &_fbb)
+        : fbb_(_fbb) {
+    start_ = fbb_.StartTable();
+  }
+  ::flatbuffers::Offset<Operator> Finish() {
+    const auto end = fbb_.EndTable(start_);
+    auto o = ::flatbuffers::Offset<Operator>(end);
+    return o;
+  }
+};

-template<> inline const tflite::FloorModOptions *Operator::builtin_options_as<tflite::FloorModOptions>() const {
-  return builtin_options_as_FloorModOptions();
+inline ::flatbuffers::Offset<Operator> CreateOperator(
+    ::flatbuffers::FlatBufferBuilder &_fbb,
+    uint32_t opcode_index = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> inputs = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> outputs = 0,
+    tflite::BuiltinOptions builtin_options_type = tflite::BuiltinOptions_NONE,
+    ::flatbuffers::Offset<void> builtin_options = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> custom_options = 0,
+    tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS,
+    ::flatbuffers::Offset<::flatbuffers::Vector<uint8_t>> mutating_variable_inputs = 0,
+    ::flatbuffers::Offset<::flatbuffers::Vector<int32_t>> intermediates = 0,
+    uint64_t large_custom_options_offset = 0,
+    uint64_t large_custom_options_size = 0,
+    tflite::BuiltinOptions2 builtin_options_2_type = tflite::BuiltinOptions2_NONE,
+    ::flatbuffers::Offset<void> builtin_options_2 = 0) {
+  OperatorBuilder builder_(_fbb);
+  builder_.add_large_custom_options_size(large_custom_options_size);
+  builder_.add_large_custom_options_offset(large_custom_options_offset);
+  builder_.add_builtin_options_2(builtin_options_2);
+  builder_.add_intermediates(intermediates);
+  builder_.add_mutating_variable_inputs(mutating_variable_inputs);
+  builder_.add_custom_options(custom_options);
+  builder_.add_builtin_options(builtin_options);
+  builder_.add_outputs(outputs);
+  builder_.add_inputs(inputs);
+  builder_.add_opcode_index(opcode_index);
+  builder_.add_builtin_options_2_type(builtin_options_2_type);
+  builder_.add_custom_options_format(custom_options_format);
+  builder_.add_builtin_options_type(builtin_options_type);
+  return builder_.Finish();
 }

-template<> inline const tflite::RangeOptions *Operator::builtin_options_as<tflite::RangeOptions>()
const { - return builtin_options_as_RangeOptions(); +inline ::flatbuffers::Offset CreateOperatorDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + uint32_t opcode_index = 0, + const std::vector *inputs = nullptr, + const std::vector *outputs = nullptr, + tflite::BuiltinOptions builtin_options_type = tflite::BuiltinOptions_NONE, + ::flatbuffers::Offset builtin_options = 0, + const std::vector *custom_options = nullptr, + tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS, + const std::vector *mutating_variable_inputs = nullptr, + const std::vector *intermediates = nullptr, + uint64_t large_custom_options_offset = 0, + uint64_t large_custom_options_size = 0, + tflite::BuiltinOptions2 builtin_options_2_type = tflite::BuiltinOptions2_NONE, + ::flatbuffers::Offset builtin_options_2 = 0) { + auto inputs__ = inputs ? _fbb.CreateVector(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector(*outputs) : 0; + auto custom_options__ = custom_options ? _fbb.CreateVector(*custom_options) : 0; + auto mutating_variable_inputs__ = mutating_variable_inputs ? _fbb.CreateVector(*mutating_variable_inputs) : 0; + auto intermediates__ = intermediates ? _fbb.CreateVector(*intermediates) : 0; + return tflite::CreateOperator( + _fbb, + opcode_index, + inputs__, + outputs__, + builtin_options_type, + builtin_options, + custom_options__, + custom_options_format, + mutating_variable_inputs__, + intermediates__, + large_custom_options_offset, + large_custom_options_size, + builtin_options_2_type, + builtin_options_2); } -template<> inline const tflite::ResizeNearestNeighborOptions *Operator::builtin_options_as() const { - return builtin_options_as_ResizeNearestNeighborOptions(); +::flatbuffers::Offset CreateOperator(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); + +struct SubGraphT : public ::flatbuffers::NativeTable { + typedef SubGraph TableType; + std::vector> tensors{}; + std::vector inputs{}; + std::vector outputs{}; + std::vector> operators{}; + std::string name{}; + SubGraphT() = default; + SubGraphT(const SubGraphT &o); + SubGraphT(SubGraphT&&) FLATBUFFERS_NOEXCEPT = default; + SubGraphT &operator=(SubGraphT o) FLATBUFFERS_NOEXCEPT; +}; + +struct SubGraph FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SubGraphT NativeTableType; + typedef SubGraphBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_TENSORS = 4, + VT_INPUTS = 6, + VT_OUTPUTS = 8, + VT_OPERATORS = 10, + VT_NAME = 12 + }; + const ::flatbuffers::Vector<::flatbuffers::Offset> *tensors() const { + return GetPointer> *>(VT_TENSORS); + } + const ::flatbuffers::Vector *inputs() const { + return GetPointer *>(VT_INPUTS); + } + const ::flatbuffers::Vector *outputs() const { + return GetPointer *>(VT_OUTPUTS); + } + const ::flatbuffers::Vector<::flatbuffers::Offset> *operators() const { + return GetPointer> *>(VT_OPERATORS); + } + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); + } + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_TENSORS) && + verifier.VerifyVector(tensors()) && + verifier.VerifyVectorOfTables(tensors()) && + VerifyOffset(verifier, VT_INPUTS) && + verifier.VerifyVector(inputs()) && + VerifyOffset(verifier, VT_OUTPUTS) && + verifier.VerifyVector(outputs()) && + VerifyOffset(verifier, VT_OPERATORS) && + verifier.VerifyVector(operators()) && + 
verifier.VerifyVectorOfTables(operators()) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + verifier.EndTable(); + } + SubGraphT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SubGraphT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; + +struct SubGraphBuilder { + typedef SubGraph Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_tensors(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> tensors) { + fbb_.AddOffset(SubGraph::VT_TENSORS, tensors); + } + void add_inputs(::flatbuffers::Offset<::flatbuffers::Vector> inputs) { + fbb_.AddOffset(SubGraph::VT_INPUTS, inputs); + } + void add_outputs(::flatbuffers::Offset<::flatbuffers::Vector> outputs) { + fbb_.AddOffset(SubGraph::VT_OUTPUTS, outputs); + } + void add_operators(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> operators) { + fbb_.AddOffset(SubGraph::VT_OPERATORS, operators); + } + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { + fbb_.AddOffset(SubGraph::VT_NAME, name); + } + explicit SubGraphBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; + } +}; + +inline ::flatbuffers::Offset CreateSubGraph( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> tensors = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> inputs = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> outputs = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> operators = 0, + ::flatbuffers::Offset<::flatbuffers::String> name = 0) { + SubGraphBuilder builder_(_fbb); + builder_.add_name(name); + builder_.add_operators(operators); + builder_.add_outputs(outputs); + builder_.add_inputs(inputs); + builder_.add_tensors(tensors); + return builder_.Finish(); } -template<> inline const tflite::LeakyReluOptions *Operator::builtin_options_as() const { - return builtin_options_as_LeakyReluOptions(); +inline ::flatbuffers::Offset CreateSubGraphDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector<::flatbuffers::Offset> *tensors = nullptr, + const std::vector *inputs = nullptr, + const std::vector *outputs = nullptr, + const std::vector<::flatbuffers::Offset> *operators = nullptr, + const char *name = nullptr) { + auto tensors__ = tensors ? _fbb.CreateVector<::flatbuffers::Offset>(*tensors) : 0; + auto inputs__ = inputs ? _fbb.CreateVector(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector(*outputs) : 0; + auto operators__ = operators ? _fbb.CreateVector<::flatbuffers::Offset>(*operators) : 0; + auto name__ = name ? 
_fbb.CreateString(name) : 0; + return tflite::CreateSubGraph( + _fbb, + tensors__, + inputs__, + outputs__, + operators__, + name__); } -template<> inline const tflite::SquaredDifferenceOptions *Operator::builtin_options_as() const { - return builtin_options_as_SquaredDifferenceOptions(); -} +::flatbuffers::Offset CreateSubGraph(::flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -template<> inline const tflite::MirrorPadOptions *Operator::builtin_options_as() const { - return builtin_options_as_MirrorPadOptions(); -} +struct BufferT : public ::flatbuffers::NativeTable { + typedef Buffer TableType; + std::vector data{}; + uint64_t offset = 0; + uint64_t size = 0; +}; -template<> inline const tflite::AbsOptions *Operator::builtin_options_as() const { - return builtin_options_as_AbsOptions(); -} +struct Buffer FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef BufferT NativeTableType; + typedef BufferBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_DATA = 4, + VT_OFFSET = 6, + VT_SIZE = 8 + }; + const ::flatbuffers::Vector *data() const { + return GetPointer *>(VT_DATA); + } + uint64_t offset() const { + return GetField(VT_OFFSET, 0); + } + uint64_t size() const { + return GetField(VT_SIZE, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_DATA) && + verifier.VerifyVector(data()) && + VerifyField(verifier, VT_OFFSET, 8) && + VerifyField(verifier, VT_SIZE, 8) && + verifier.EndTable(); + } + BufferT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(BufferT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BufferT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; -template<> inline const tflite::SplitVOptions *Operator::builtin_options_as() const { - return builtin_options_as_SplitVOptions(); -} +struct BufferBuilder { + typedef Buffer Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_data(::flatbuffers::Offset<::flatbuffers::Vector> data) { + fbb_.AddOffset(Buffer::VT_DATA, data); + } + void add_offset(uint64_t offset) { + fbb_.AddElement(Buffer::VT_OFFSET, offset, 0); + } + void add_size(uint64_t size) { + fbb_.AddElement(Buffer::VT_SIZE, size, 0); + } + explicit BufferBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; + } +}; -template<> inline const tflite::UniqueOptions *Operator::builtin_options_as() const { - return builtin_options_as_UniqueOptions(); +inline ::flatbuffers::Offset CreateBuffer( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector> data = 0, + uint64_t offset = 0, + uint64_t size = 0) { + BufferBuilder builder_(_fbb); + builder_.add_size(size); + builder_.add_offset(offset); + builder_.add_data(data); + return builder_.Finish(); } -template<> inline const tflite::ReverseV2Options *Operator::builtin_options_as() const { - return builtin_options_as_ReverseV2Options(); +inline ::flatbuffers::Offset CreateBufferDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector *data = nullptr, + uint64_t offset = 0, + uint64_t size = 0) { + if 
(data) { _fbb.ForceVectorAlignment(data->size(), sizeof(uint8_t), 16); } + auto data__ = data ? _fbb.CreateVector(*data) : 0; + return tflite::CreateBuffer( + _fbb, + data__, + offset, + size); } -template<> inline const tflite::AddNOptions *Operator::builtin_options_as() const { - return builtin_options_as_AddNOptions(); -} +::flatbuffers::Offset CreateBuffer(::flatbuffers::FlatBufferBuilder &_fbb, const BufferT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -template<> inline const tflite::GatherNdOptions *Operator::builtin_options_as() const { - return builtin_options_as_GatherNdOptions(); -} +struct MetadataT : public ::flatbuffers::NativeTable { + typedef Metadata TableType; + std::string name{}; + uint32_t buffer = 0; +}; -template<> inline const tflite::CosOptions *Operator::builtin_options_as() const { - return builtin_options_as_CosOptions(); -} +struct Metadata FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef MetadataT NativeTableType; + typedef MetadataBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NAME = 4, + VT_BUFFER = 6 + }; + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); + } + uint32_t buffer() const { + return GetField(VT_BUFFER, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + VerifyField(verifier, VT_BUFFER, 4) && + verifier.EndTable(); + } + MetadataT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(MetadataT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MetadataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; -template<> inline const tflite::WhereOptions *Operator::builtin_options_as() const { - return builtin_options_as_WhereOptions(); -} +struct MetadataBuilder { + typedef Metadata Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { + fbb_.AddOffset(Metadata::VT_NAME, name); + } + void add_buffer(uint32_t buffer) { + fbb_.AddElement(Metadata::VT_BUFFER, buffer, 0); + } + explicit MetadataBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; + } +}; -template<> inline const tflite::RankOptions *Operator::builtin_options_as() const { - return builtin_options_as_RankOptions(); +inline ::flatbuffers::Offset CreateMetadata( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + uint32_t buffer = 0) { + MetadataBuilder builder_(_fbb); + builder_.add_buffer(buffer); + builder_.add_name(name); + return builder_.Finish(); } -template<> inline const tflite::ReverseSequenceOptions *Operator::builtin_options_as() const { - return builtin_options_as_ReverseSequenceOptions(); +inline ::flatbuffers::Offset CreateMetadataDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const char *name = nullptr, + uint32_t buffer = 0) { + auto name__ = name ? 
_fbb.CreateString(name) : 0; + return tflite::CreateMetadata( + _fbb, + name__, + buffer); } -template<> inline const tflite::MatrixDiagOptions *Operator::builtin_options_as() const { - return builtin_options_as_MatrixDiagOptions(); -} +::flatbuffers::Offset CreateMetadata(::flatbuffers::FlatBufferBuilder &_fbb, const MetadataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -template<> inline const tflite::QuantizeOptions *Operator::builtin_options_as() const { - return builtin_options_as_QuantizeOptions(); -} +struct TensorMapT : public ::flatbuffers::NativeTable { + typedef TensorMap TableType; + std::string name{}; + uint32_t tensor_index = 0; +}; -template<> inline const tflite::MatrixSetDiagOptions *Operator::builtin_options_as() const { - return builtin_options_as_MatrixSetDiagOptions(); -} +struct TensorMap FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef TensorMapT NativeTableType; + typedef TensorMapBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_NAME = 4, + VT_TENSOR_INDEX = 6 + }; + const ::flatbuffers::String *name() const { + return GetPointer(VT_NAME); + } + uint32_t tensor_index() const { + return GetField(VT_TENSOR_INDEX, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_NAME) && + verifier.VerifyString(name()) && + VerifyField(verifier, VT_TENSOR_INDEX, 4) && + verifier.EndTable(); + } + TensorMapT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(TensorMapT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; -template<> inline const tflite::HardSwishOptions *Operator::builtin_options_as() const { - return builtin_options_as_HardSwishOptions(); -} +struct TensorMapBuilder { + typedef TensorMap Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_name(::flatbuffers::Offset<::flatbuffers::String> name) { + fbb_.AddOffset(TensorMap::VT_NAME, name); + } + void add_tensor_index(uint32_t tensor_index) { + fbb_.AddElement(TensorMap::VT_TENSOR_INDEX, tensor_index, 0); + } + explicit TensorMapBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; + } +}; -template<> inline const tflite::IfOptions *Operator::builtin_options_as() const { - return builtin_options_as_IfOptions(); +inline ::flatbuffers::Offset CreateTensorMap( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::String> name = 0, + uint32_t tensor_index = 0) { + TensorMapBuilder builder_(_fbb); + builder_.add_tensor_index(tensor_index); + builder_.add_name(name); + return builder_.Finish(); } -template<> inline const tflite::WhileOptions *Operator::builtin_options_as() const { - return builtin_options_as_WhileOptions(); +inline ::flatbuffers::Offset CreateTensorMapDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const char *name = nullptr, + uint32_t tensor_index = 0) { + auto name__ = name ? 
_fbb.CreateString(name) : 0; + return tflite::CreateTensorMap( + _fbb, + name__, + tensor_index); } -template<> inline const tflite::DepthToSpaceOptions *Operator::builtin_options_as() const { - return builtin_options_as_DepthToSpaceOptions(); -} +::flatbuffers::Offset CreateTensorMap(::flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -template<> inline const tflite::NonMaxSuppressionV4Options *Operator::builtin_options_as() const { - return builtin_options_as_NonMaxSuppressionV4Options(); -} +struct SignatureDefT : public ::flatbuffers::NativeTable { + typedef SignatureDef TableType; + std::vector> inputs{}; + std::vector> outputs{}; + std::string signature_key{}; + uint32_t subgraph_index = 0; + SignatureDefT() = default; + SignatureDefT(const SignatureDefT &o); + SignatureDefT(SignatureDefT&&) FLATBUFFERS_NOEXCEPT = default; + SignatureDefT &operator=(SignatureDefT o) FLATBUFFERS_NOEXCEPT; +}; -template<> inline const tflite::NonMaxSuppressionV5Options *Operator::builtin_options_as() const { - return builtin_options_as_NonMaxSuppressionV5Options(); -} +struct SignatureDef FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef SignatureDefT NativeTableType; + typedef SignatureDefBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_INPUTS = 4, + VT_OUTPUTS = 6, + VT_SIGNATURE_KEY = 8, + VT_SUBGRAPH_INDEX = 12 + }; + const ::flatbuffers::Vector<::flatbuffers::Offset> *inputs() const { + return GetPointer> *>(VT_INPUTS); + } + const ::flatbuffers::Vector<::flatbuffers::Offset> *outputs() const { + return GetPointer> *>(VT_OUTPUTS); + } + const ::flatbuffers::String *signature_key() const { + return GetPointer(VT_SIGNATURE_KEY); + } + uint32_t subgraph_index() const { + return GetField(VT_SUBGRAPH_INDEX, 0); + } + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyOffset(verifier, VT_INPUTS) && + verifier.VerifyVector(inputs()) && + verifier.VerifyVectorOfTables(inputs()) && + VerifyOffset(verifier, VT_OUTPUTS) && + verifier.VerifyVector(outputs()) && + verifier.VerifyVectorOfTables(outputs()) && + VerifyOffset(verifier, VT_SIGNATURE_KEY) && + verifier.VerifyString(signature_key()) && + VerifyField(verifier, VT_SUBGRAPH_INDEX, 4) && + verifier.EndTable(); + } + SignatureDefT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(SignatureDefT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; -template<> inline const tflite::ScatterNdOptions *Operator::builtin_options_as() const { - return builtin_options_as_ScatterNdOptions(); -} +struct SignatureDefBuilder { + typedef SignatureDef Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_inputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> inputs) { + fbb_.AddOffset(SignatureDef::VT_INPUTS, inputs); + } + void add_outputs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> outputs) { + fbb_.AddOffset(SignatureDef::VT_OUTPUTS, outputs); + } + void add_signature_key(::flatbuffers::Offset<::flatbuffers::String> signature_key) { + fbb_.AddOffset(SignatureDef::VT_SIGNATURE_KEY, signature_key); + } + void add_subgraph_index(uint32_t subgraph_index) { + 
fbb_.AddElement(SignatureDef::VT_SUBGRAPH_INDEX, subgraph_index, 0); + } + explicit SignatureDefBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + ::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; + } +}; -template<> inline const tflite::SelectV2Options *Operator::builtin_options_as() const { - return builtin_options_as_SelectV2Options(); +inline ::flatbuffers::Offset CreateSignatureDef( + ::flatbuffers::FlatBufferBuilder &_fbb, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> inputs = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> outputs = 0, + ::flatbuffers::Offset<::flatbuffers::String> signature_key = 0, + uint32_t subgraph_index = 0) { + SignatureDefBuilder builder_(_fbb); + builder_.add_subgraph_index(subgraph_index); + builder_.add_signature_key(signature_key); + builder_.add_outputs(outputs); + builder_.add_inputs(inputs); + return builder_.Finish(); } -template<> inline const tflite::DensifyOptions *Operator::builtin_options_as() const { - return builtin_options_as_DensifyOptions(); +inline ::flatbuffers::Offset CreateSignatureDefDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + const std::vector<::flatbuffers::Offset> *inputs = nullptr, + const std::vector<::flatbuffers::Offset> *outputs = nullptr, + const char *signature_key = nullptr, + uint32_t subgraph_index = 0) { + auto inputs__ = inputs ? _fbb.CreateVector<::flatbuffers::Offset>(*inputs) : 0; + auto outputs__ = outputs ? _fbb.CreateVector<::flatbuffers::Offset>(*outputs) : 0; + auto signature_key__ = signature_key ? _fbb.CreateString(signature_key) : 0; + return tflite::CreateSignatureDef( + _fbb, + inputs__, + outputs__, + signature_key__, + subgraph_index); } -template<> inline const tflite::SegmentSumOptions *Operator::builtin_options_as() const { - return builtin_options_as_SegmentSumOptions(); -} +::flatbuffers::Offset CreateSignatureDef(::flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); -template<> inline const tflite::BatchMatMulOptions *Operator::builtin_options_as() const { - return builtin_options_as_BatchMatMulOptions(); -} +struct ModelT : public ::flatbuffers::NativeTable { + typedef Model TableType; + uint32_t version = 0; + std::vector> operator_codes{}; + std::vector> subgraphs{}; + std::string description{}; + std::vector> buffers{}; + std::vector metadata_buffer{}; + std::vector> metadata{}; + std::vector> signature_defs{}; + ModelT() = default; + ModelT(const ModelT &o); + ModelT(ModelT&&) FLATBUFFERS_NOEXCEPT = default; + ModelT &operator=(ModelT o) FLATBUFFERS_NOEXCEPT; +}; -template<> inline const tflite::CumsumOptions *Operator::builtin_options_as() const { - return builtin_options_as_CumsumOptions(); -} +struct Model FLATBUFFERS_FINAL_CLASS : private ::flatbuffers::Table { + typedef ModelT NativeTableType; + typedef ModelBuilder Builder; + enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { + VT_VERSION = 4, + VT_OPERATOR_CODES = 6, + VT_SUBGRAPHS = 8, + VT_DESCRIPTION = 10, + VT_BUFFERS = 12, + VT_METADATA_BUFFER = 14, + VT_METADATA = 16, + VT_SIGNATURE_DEFS = 18 + }; + uint32_t version() const { + return GetField(VT_VERSION, 0); + } + const ::flatbuffers::Vector<::flatbuffers::Offset> *operator_codes() const { + return GetPointer> *>(VT_OPERATOR_CODES); + } + const ::flatbuffers::Vector<::flatbuffers::Offset> *subgraphs() const { + 
return GetPointer> *>(VT_SUBGRAPHS); + } + const ::flatbuffers::String *description() const { + return GetPointer(VT_DESCRIPTION); + } + const ::flatbuffers::Vector<::flatbuffers::Offset> *buffers() const { + return GetPointer> *>(VT_BUFFERS); + } + const ::flatbuffers::Vector *metadata_buffer() const { + return GetPointer *>(VT_METADATA_BUFFER); + } + const ::flatbuffers::Vector<::flatbuffers::Offset> *metadata() const { + return GetPointer> *>(VT_METADATA); + } + const ::flatbuffers::Vector<::flatbuffers::Offset> *signature_defs() const { + return GetPointer> *>(VT_SIGNATURE_DEFS); + } + bool Verify(::flatbuffers::Verifier &verifier) const { + return VerifyTableStart(verifier) && + VerifyField(verifier, VT_VERSION, 4) && + VerifyOffset(verifier, VT_OPERATOR_CODES) && + verifier.VerifyVector(operator_codes()) && + verifier.VerifyVectorOfTables(operator_codes()) && + VerifyOffset(verifier, VT_SUBGRAPHS) && + verifier.VerifyVector(subgraphs()) && + verifier.VerifyVectorOfTables(subgraphs()) && + VerifyOffset(verifier, VT_DESCRIPTION) && + verifier.VerifyString(description()) && + VerifyOffset(verifier, VT_BUFFERS) && + verifier.VerifyVector(buffers()) && + verifier.VerifyVectorOfTables(buffers()) && + VerifyOffset(verifier, VT_METADATA_BUFFER) && + verifier.VerifyVector(metadata_buffer()) && + VerifyOffset(verifier, VT_METADATA) && + verifier.VerifyVector(metadata()) && + verifier.VerifyVectorOfTables(metadata()) && + VerifyOffset(verifier, VT_SIGNATURE_DEFS) && + verifier.VerifyVector(signature_defs()) && + verifier.VerifyVectorOfTables(signature_defs()) && + verifier.EndTable(); + } + ModelT *UnPack(const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + void UnPackTo(ModelT *_o, const ::flatbuffers::resolver_function_t *_resolver = nullptr) const; + static ::flatbuffers::Offset Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ModelT* _o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr); +}; -template<> inline const tflite::CallOnceOptions *Operator::builtin_options_as() const { - return builtin_options_as_CallOnceOptions(); -} +struct ModelBuilder { + typedef Model Table; + ::flatbuffers::FlatBufferBuilder &fbb_; + ::flatbuffers::uoffset_t start_; + void add_version(uint32_t version) { + fbb_.AddElement(Model::VT_VERSION, version, 0); + } + void add_operator_codes(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> operator_codes) { + fbb_.AddOffset(Model::VT_OPERATOR_CODES, operator_codes); + } + void add_subgraphs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> subgraphs) { + fbb_.AddOffset(Model::VT_SUBGRAPHS, subgraphs); + } + void add_description(::flatbuffers::Offset<::flatbuffers::String> description) { + fbb_.AddOffset(Model::VT_DESCRIPTION, description); + } + void add_buffers(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> buffers) { + fbb_.AddOffset(Model::VT_BUFFERS, buffers); + } + void add_metadata_buffer(::flatbuffers::Offset<::flatbuffers::Vector> metadata_buffer) { + fbb_.AddOffset(Model::VT_METADATA_BUFFER, metadata_buffer); + } + void add_metadata(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> metadata) { + fbb_.AddOffset(Model::VT_METADATA, metadata); + } + void add_signature_defs(::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> signature_defs) { + fbb_.AddOffset(Model::VT_SIGNATURE_DEFS, signature_defs); + } + explicit ModelBuilder(::flatbuffers::FlatBufferBuilder &_fbb) + : fbb_(_fbb) { + start_ = fbb_.StartTable(); + } + 
::flatbuffers::Offset Finish() { + const auto end = fbb_.EndTable(start_); + auto o = ::flatbuffers::Offset(end); + return o; + } +}; -template<> inline const tflite::BroadcastToOptions *Operator::builtin_options_as() const { - return builtin_options_as_BroadcastToOptions(); +inline ::flatbuffers::Offset CreateModel( + ::flatbuffers::FlatBufferBuilder &_fbb, + uint32_t version = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> operator_codes = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> subgraphs = 0, + ::flatbuffers::Offset<::flatbuffers::String> description = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> buffers = 0, + ::flatbuffers::Offset<::flatbuffers::Vector> metadata_buffer = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> metadata = 0, + ::flatbuffers::Offset<::flatbuffers::Vector<::flatbuffers::Offset>> signature_defs = 0) { + ModelBuilder builder_(_fbb); + builder_.add_signature_defs(signature_defs); + builder_.add_metadata(metadata); + builder_.add_metadata_buffer(metadata_buffer); + builder_.add_buffers(buffers); + builder_.add_description(description); + builder_.add_subgraphs(subgraphs); + builder_.add_operator_codes(operator_codes); + builder_.add_version(version); + return builder_.Finish(); } -template<> inline const tflite::Rfft2dOptions *Operator::builtin_options_as() const { - return builtin_options_as_Rfft2dOptions(); +inline ::flatbuffers::Offset CreateModelDirect( + ::flatbuffers::FlatBufferBuilder &_fbb, + uint32_t version = 0, + const std::vector<::flatbuffers::Offset> *operator_codes = nullptr, + const std::vector<::flatbuffers::Offset> *subgraphs = nullptr, + const char *description = nullptr, + const std::vector<::flatbuffers::Offset> *buffers = nullptr, + const std::vector *metadata_buffer = nullptr, + const std::vector<::flatbuffers::Offset> *metadata = nullptr, + const std::vector<::flatbuffers::Offset> *signature_defs = nullptr) { + auto operator_codes__ = operator_codes ? _fbb.CreateVector<::flatbuffers::Offset>(*operator_codes) : 0; + auto subgraphs__ = subgraphs ? _fbb.CreateVector<::flatbuffers::Offset>(*subgraphs) : 0; + auto description__ = description ? _fbb.CreateString(description) : 0; + auto buffers__ = buffers ? _fbb.CreateVector<::flatbuffers::Offset>(*buffers) : 0; + auto metadata_buffer__ = metadata_buffer ? _fbb.CreateVector(*metadata_buffer) : 0; + auto metadata__ = metadata ? _fbb.CreateVector<::flatbuffers::Offset>(*metadata) : 0; + auto signature_defs__ = signature_defs ? 
_fbb.CreateVector<::flatbuffers::Offset<tflite::SignatureDef>>(*signature_defs) : 0;
+  return tflite::CreateModel(
+      _fbb,
+      version,
+      operator_codes__,
+      subgraphs__,
+      description__,
+      buffers__,
+      metadata_buffer__,
+      metadata__,
+      signature_defs__);
 }

-template<> inline const tflite::Conv3DOptions *Operator::builtin_options_as<tflite::Conv3DOptions>() const {
-  return builtin_options_as_Conv3DOptions();
-}
+::flatbuffers::Offset<Model> CreateModel(::flatbuffers::FlatBufferBuilder &_fbb, const ModelT *_o, const ::flatbuffers::rehasher_function_t *_rehasher = nullptr);

-template<> inline const tflite::HashtableOptions *Operator::builtin_options_as<tflite::HashtableOptions>() const {
-  return builtin_options_as_HashtableOptions();
+inline CustomQuantizationT *CustomQuantization::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<CustomQuantizationT>(new CustomQuantizationT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
 }

-template<> inline const tflite::HashtableFindOptions *Operator::builtin_options_as<tflite::HashtableFindOptions>() const {
-  return builtin_options_as_HashtableFindOptions();
+inline void CustomQuantization::UnPackTo(CustomQuantizationT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = custom(); if (_e) { _o->custom.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->custom.begin()); } }
 }

-template<> inline const tflite::HashtableImportOptions *Operator::builtin_options_as<tflite::HashtableImportOptions>() const {
-  return builtin_options_as_HashtableImportOptions();
+inline ::flatbuffers::Offset<CustomQuantization> CustomQuantization::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateCustomQuantization(_fbb, _o, _rehasher);
 }

-template<> inline const tflite::HashtableSizeOptions *Operator::builtin_options_as<tflite::HashtableSizeOptions>() const {
-  return builtin_options_as_HashtableSizeOptions();
+inline ::flatbuffers::Offset<CustomQuantization> CreateCustomQuantization(::flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const CustomQuantizationT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  _fbb.ForceVectorAlignment(_o->custom.size(), sizeof(uint8_t), 16);
+  auto _custom = _o->custom.size() ? _fbb.CreateVector(_o->custom) : 0;
+  return tflite::CreateCustomQuantization(
+      _fbb,
+      _custom);
 }

-template<> inline const tflite::VarHandleOptions *Operator::builtin_options_as<tflite::VarHandleOptions>() const {
-  return builtin_options_as_VarHandleOptions();
+inline QuantizationParametersT *QuantizationParameters::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<QuantizationParametersT>(new QuantizationParametersT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
 }

-template<> inline const tflite::ReadVariableOptions *Operator::builtin_options_as<tflite::ReadVariableOptions>() const {
-  return builtin_options_as_ReadVariableOptions();
+inline void QuantizationParameters::UnPackTo(QuantizationParametersT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = min(); if (_e) { _o->min.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->min[_i] = _e->Get(_i); } } else { _o->min.resize(0); } }
+  { auto _e = max(); if (_e) { _o->max.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->max[_i] = _e->Get(_i); } } else { _o->max.resize(0); } }
+  { auto _e = scale(); if (_e) { _o->scale.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->scale[_i] = _e->Get(_i); } } else { _o->scale.resize(0); } }
+  { auto _e = zero_point(); if (_e) { _o->zero_point.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->zero_point[_i] = _e->Get(_i); } } else { _o->zero_point.resize(0); } }
+  { auto _e = details_type(); _o->details.type = _e; }
+  { auto _e = details(); if (_e) _o->details.value = tflite::QuantizationDetailsUnion::UnPack(_e, details_type(), _resolver); }
+  { auto _e = quantized_dimension(); _o->quantized_dimension = _e; }
 }

-template<> inline const tflite::AssignVariableOptions *Operator::builtin_options_as<tflite::AssignVariableOptions>() const {
-  return builtin_options_as_AssignVariableOptions();
+inline ::flatbuffers::Offset<QuantizationParameters> QuantizationParameters::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateQuantizationParameters(_fbb, _o, _rehasher);
 }

-template<> inline const tflite::RandomOptions *Operator::builtin_options_as<tflite::RandomOptions>() const {
-  return builtin_options_as_RandomOptions();
+inline ::flatbuffers::Offset<QuantizationParameters> CreateQuantizationParameters(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const QuantizationParametersT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _min = _o->min.size() ? _fbb.CreateVector(_o->min) : 0;
+  auto _max = _o->max.size() ? _fbb.CreateVector(_o->max) : 0;
+  auto _scale = _o->scale.size() ? _fbb.CreateVector(_o->scale) : 0;
+  auto _zero_point = _o->zero_point.size() ?
_fbb.CreateVector(_o->zero_point) : 0; + auto _details_type = _o->details.type; + auto _details = _o->details.Pack(_fbb); + auto _quantized_dimension = _o->quantized_dimension; + return tflite::CreateQuantizationParameters( + _fbb, + _min, + _max, + _scale, + _zero_point, + _details_type, + _details, + _quantized_dimension); } -template<> inline const tflite::BucketizeOptions *Operator::builtin_options_as() const { - return builtin_options_as_BucketizeOptions(); +inline Int32VectorT *Int32Vector::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Int32VectorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); } -template<> inline const tflite::GeluOptions *Operator::builtin_options_as() const { - return builtin_options_as_GeluOptions(); +inline void Int32Vector::UnPackTo(Int32VectorT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } else { _o->values.resize(0); } } } -template<> inline const tflite::DynamicUpdateSliceOptions *Operator::builtin_options_as() const { - return builtin_options_as_DynamicUpdateSliceOptions(); +inline ::flatbuffers::Offset Int32Vector::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateInt32Vector(_fbb, _o, _rehasher); } -template<> inline const tflite::UnsortedSegmentProdOptions *Operator::builtin_options_as() const { - return builtin_options_as_UnsortedSegmentProdOptions(); +inline ::flatbuffers::Offset CreateInt32Vector(::flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const Int32VectorT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _values = _o->values.size() ? 
_fbb.CreateVector(_o->values) : 0; + return tflite::CreateInt32Vector( + _fbb, + _values); } -template<> inline const tflite::UnsortedSegmentMaxOptions *Operator::builtin_options_as() const { - return builtin_options_as_UnsortedSegmentMaxOptions(); +inline Uint16VectorT *Uint16Vector::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Uint16VectorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); } -template<> inline const tflite::UnsortedSegmentMinOptions *Operator::builtin_options_as() const { - return builtin_options_as_UnsortedSegmentMinOptions(); +inline void Uint16Vector::UnPackTo(Uint16VectorT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } else { _o->values.resize(0); } } } -template<> inline const tflite::UnsortedSegmentSumOptions *Operator::builtin_options_as() const { - return builtin_options_as_UnsortedSegmentSumOptions(); +inline ::flatbuffers::Offset Uint16Vector::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateUint16Vector(_fbb, _o, _rehasher); } -template<> inline const tflite::ATan2Options *Operator::builtin_options_as() const { - return builtin_options_as_ATan2Options(); +inline ::flatbuffers::Offset CreateUint16Vector(::flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const Uint16VectorT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + _fbb.ForceVectorAlignment(_o->values.size(), sizeof(uint16_t), 4); + auto _values = _o->values.size() ? 
_fbb.CreateVector(_o->values) : 0; + return tflite::CreateUint16Vector( + _fbb, + _values); } -template<> inline const tflite::SignOptions *Operator::builtin_options_as() const { - return builtin_options_as_SignOptions(); +inline Uint8VectorT *Uint8Vector::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new Uint8VectorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); } -template<> inline const tflite::BitcastOptions *Operator::builtin_options_as() const { - return builtin_options_as_BitcastOptions(); +inline void Uint8Vector::UnPackTo(Uint8VectorT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = values(); if (_e) { _o->values.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->values.begin()); } } } -template<> inline const tflite::BitwiseXorOptions *Operator::builtin_options_as() const { - return builtin_options_as_BitwiseXorOptions(); +inline ::flatbuffers::Offset Uint8Vector::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateUint8Vector(_fbb, _o, _rehasher); } -template<> inline const tflite::RightShiftOptions *Operator::builtin_options_as() const { - return builtin_options_as_RightShiftOptions(); +inline ::flatbuffers::Offset CreateUint8Vector(::flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const Uint8VectorT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + _fbb.ForceVectorAlignment(_o->values.size(), sizeof(uint8_t), 4); + auto _values = _o->values.size() ? 
_fbb.CreateVector(_o->values) : 0; + return tflite::CreateUint8Vector( + _fbb, + _values); } -struct OperatorBuilder { - typedef Operator Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_opcode_index(uint32_t opcode_index) { - fbb_.AddElement(Operator::VT_OPCODE_INDEX, opcode_index, 0); - } - void add_inputs(flatbuffers::Offset> inputs) { - fbb_.AddOffset(Operator::VT_INPUTS, inputs); - } - void add_outputs(flatbuffers::Offset> outputs) { - fbb_.AddOffset(Operator::VT_OUTPUTS, outputs); - } - void add_builtin_options_type(tflite::BuiltinOptions builtin_options_type) { - fbb_.AddElement(Operator::VT_BUILTIN_OPTIONS_TYPE, static_cast(builtin_options_type), 0); - } - void add_builtin_options(flatbuffers::Offset builtin_options) { - fbb_.AddOffset(Operator::VT_BUILTIN_OPTIONS, builtin_options); - } - void add_custom_options(flatbuffers::Offset> custom_options) { - fbb_.AddOffset(Operator::VT_CUSTOM_OPTIONS, custom_options); - } - void add_custom_options_format(tflite::CustomOptionsFormat custom_options_format) { - fbb_.AddElement(Operator::VT_CUSTOM_OPTIONS_FORMAT, static_cast(custom_options_format), 0); - } - void add_mutating_variable_inputs(flatbuffers::Offset> mutating_variable_inputs) { - fbb_.AddOffset(Operator::VT_MUTATING_VARIABLE_INPUTS, mutating_variable_inputs); - } - void add_intermediates(flatbuffers::Offset> intermediates) { - fbb_.AddOffset(Operator::VT_INTERMEDIATES, intermediates); - } - void add_large_custom_options_offset(uint64_t large_custom_options_offset) { - fbb_.AddElement(Operator::VT_LARGE_CUSTOM_OPTIONS_OFFSET, large_custom_options_offset, 0); - } - void add_large_custom_options_size(uint64_t large_custom_options_size) { - fbb_.AddElement(Operator::VT_LARGE_CUSTOM_OPTIONS_SIZE, large_custom_options_size, 0); - } - explicit OperatorBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); - } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; - } -}; - -inline flatbuffers::Offset CreateOperator( - flatbuffers::FlatBufferBuilder &_fbb, - uint32_t opcode_index = 0, - flatbuffers::Offset> inputs = 0, - flatbuffers::Offset> outputs = 0, - tflite::BuiltinOptions builtin_options_type = tflite::BuiltinOptions_NONE, - flatbuffers::Offset builtin_options = 0, - flatbuffers::Offset> custom_options = 0, - tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS, - flatbuffers::Offset> mutating_variable_inputs = 0, - flatbuffers::Offset> intermediates = 0, - uint64_t large_custom_options_offset = 0, - uint64_t large_custom_options_size = 0) { - OperatorBuilder builder_(_fbb); - builder_.add_large_custom_options_size(large_custom_options_size); - builder_.add_large_custom_options_offset(large_custom_options_offset); - builder_.add_intermediates(intermediates); - builder_.add_mutating_variable_inputs(mutating_variable_inputs); - builder_.add_custom_options(custom_options); - builder_.add_builtin_options(builtin_options); - builder_.add_outputs(outputs); - builder_.add_inputs(inputs); - builder_.add_opcode_index(opcode_index); - builder_.add_custom_options_format(custom_options_format); - builder_.add_builtin_options_type(builtin_options_type); - return builder_.Finish(); +inline DimensionMetadataT *DimensionMetadata::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new DimensionMetadataT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); } 
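[Editor's note, not part of the generated diff] For orientation, a minimal usage sketch of the regenerated builder and object APIs shown in the hunks above. It builds the smallest well-formed model (one subgraph plus the conventional empty buffer 0), finishes it with the schema's "TFL3" file identifier, and reads it back. `BuildEmptyModel`, `RoundTrip`, and `builder` are illustrative names; the helpers used (CreateBuffer, CreateSubGraphDirect, CreateModelDirect, FinishModelBuffer, GetModel, Model::UnPack, Model::Pack) are the generated ones defined or declared in this file.

#include <memory>
#include <vector>
#include "flatbuffers/flatbuffers.h"
#include "tensorflow/lite/schema/schema_generated.h"  // the header diffed here

// Illustrative only: assemble an empty model and return a read-only view of it.
inline const tflite::Model *BuildEmptyModel(::flatbuffers::FlatBufferBuilder &builder) {
  // Buffer 0 is conventionally the empty sentinel buffer in TFLite models.
  std::vector<::flatbuffers::Offset<tflite::Buffer>> buffers{tflite::CreateBuffer(builder)};
  std::vector<::flatbuffers::Offset<tflite::SubGraph>> subgraphs{
      tflite::CreateSubGraphDirect(builder, /*tensors=*/nullptr, /*inputs=*/nullptr,
                                   /*outputs=*/nullptr, /*operators=*/nullptr,
                                   /*name=*/"main")};
  auto model = tflite::CreateModelDirect(builder, /*version=*/3,
                                         /*operator_codes=*/nullptr, &subgraphs,
                                         /*description=*/"empty model", &buffers);
  tflite::FinishModelBuffer(builder, model);  // appends the "TFL3" identifier
  return tflite::GetModel(builder.GetBufferPointer());
}

// Illustrative object-API round trip via the UnPack/Pack implementations above.
inline void RoundTrip(const tflite::Model *model, ::flatbuffers::FlatBufferBuilder &out) {
  std::unique_ptr<tflite::ModelT> unpacked(model->UnPack());  // mutable native copy
  tflite::FinishModelBuffer(out, tflite::Model::Pack(out, unpacked.get()));
}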
-inline flatbuffers::Offset CreateOperatorDirect( - flatbuffers::FlatBufferBuilder &_fbb, - uint32_t opcode_index = 0, - const std::vector *inputs = nullptr, - const std::vector *outputs = nullptr, - tflite::BuiltinOptions builtin_options_type = tflite::BuiltinOptions_NONE, - flatbuffers::Offset builtin_options = 0, - const std::vector *custom_options = nullptr, - tflite::CustomOptionsFormat custom_options_format = tflite::CustomOptionsFormat_FLEXBUFFERS, - const std::vector *mutating_variable_inputs = nullptr, - const std::vector *intermediates = nullptr, - uint64_t large_custom_options_offset = 0, - uint64_t large_custom_options_size = 0) { - auto inputs__ = inputs ? _fbb.CreateVector(*inputs) : 0; - auto outputs__ = outputs ? _fbb.CreateVector(*outputs) : 0; - auto custom_options__ = custom_options ? _fbb.CreateVector(*custom_options) : 0; - auto mutating_variable_inputs__ = mutating_variable_inputs ? _fbb.CreateVector(*mutating_variable_inputs) : 0; - auto intermediates__ = intermediates ? _fbb.CreateVector(*intermediates) : 0; - return tflite::CreateOperator( - _fbb, - opcode_index, - inputs__, - outputs__, - builtin_options_type, - builtin_options, - custom_options__, - custom_options_format, - mutating_variable_inputs__, - intermediates__, - large_custom_options_offset, - large_custom_options_size); +inline void DimensionMetadata::UnPackTo(DimensionMetadataT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = format(); _o->format = _e; } + { auto _e = dense_size(); _o->dense_size = _e; } + { auto _e = array_segments_type(); _o->array_segments.type = _e; } + { auto _e = array_segments(); if (_e) _o->array_segments.value = tflite::SparseIndexVectorUnion::UnPack(_e, array_segments_type(), _resolver); } + { auto _e = array_indices_type(); _o->array_indices.type = _e; } + { auto _e = array_indices(); if (_e) _o->array_indices.value = tflite::SparseIndexVectorUnion::UnPack(_e, array_indices_type(), _resolver); } } -flatbuffers::Offset CreateOperator(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); - -struct SubGraphT : public flatbuffers::NativeTable { - typedef SubGraph TableType; - std::vector> tensors{}; - std::vector inputs{}; - std::vector outputs{}; - std::vector> operators{}; - std::string name{}; - SubGraphT() = default; - SubGraphT(const SubGraphT &o); - SubGraphT(SubGraphT&&) FLATBUFFERS_NOEXCEPT = default; - SubGraphT &operator=(SubGraphT o) FLATBUFFERS_NOEXCEPT; -}; - -struct SubGraph FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SubGraphT NativeTableType; - typedef SubGraphBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_TENSORS = 4, - VT_INPUTS = 6, - VT_OUTPUTS = 8, - VT_OPERATORS = 10, - VT_NAME = 12 - }; - const flatbuffers::Vector> *tensors() const { - return GetPointer> *>(VT_TENSORS); - } - const flatbuffers::Vector *inputs() const { - return GetPointer *>(VT_INPUTS); - } - const flatbuffers::Vector *outputs() const { - return GetPointer *>(VT_OUTPUTS); - } - const flatbuffers::Vector> *operators() const { - return GetPointer> *>(VT_OPERATORS); - } - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); - } - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && - VerifyOffset(verifier, VT_TENSORS) && - verifier.VerifyVector(tensors()) && - verifier.VerifyVectorOfTables(tensors()) && - VerifyOffset(verifier, VT_INPUTS) && - 
verifier.VerifyVector(inputs()) && - VerifyOffset(verifier, VT_OUTPUTS) && - verifier.VerifyVector(outputs()) && - VerifyOffset(verifier, VT_OPERATORS) && - verifier.VerifyVector(operators()) && - verifier.VerifyVectorOfTables(operators()) && - VerifyOffset(verifier, VT_NAME) && - verifier.VerifyString(name()) && - verifier.EndTable(); - } - SubGraphT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SubGraphT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); -}; - -struct SubGraphBuilder { - typedef SubGraph Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_tensors(flatbuffers::Offset>> tensors) { - fbb_.AddOffset(SubGraph::VT_TENSORS, tensors); - } - void add_inputs(flatbuffers::Offset> inputs) { - fbb_.AddOffset(SubGraph::VT_INPUTS, inputs); - } - void add_outputs(flatbuffers::Offset> outputs) { - fbb_.AddOffset(SubGraph::VT_OUTPUTS, outputs); - } - void add_operators(flatbuffers::Offset>> operators) { - fbb_.AddOffset(SubGraph::VT_OPERATORS, operators); - } - void add_name(flatbuffers::Offset name) { - fbb_.AddOffset(SubGraph::VT_NAME, name); - } - explicit SubGraphBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); - } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; - } -}; - -inline flatbuffers::Offset CreateSubGraph( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> tensors = 0, - flatbuffers::Offset> inputs = 0, - flatbuffers::Offset> outputs = 0, - flatbuffers::Offset>> operators = 0, - flatbuffers::Offset name = 0) { - SubGraphBuilder builder_(_fbb); - builder_.add_name(name); - builder_.add_operators(operators); - builder_.add_outputs(outputs); - builder_.add_inputs(inputs); - builder_.add_tensors(tensors); - return builder_.Finish(); +inline ::flatbuffers::Offset DimensionMetadata::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateDimensionMetadata(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateSubGraphDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector> *tensors = nullptr, - const std::vector *inputs = nullptr, - const std::vector *outputs = nullptr, - const std::vector> *operators = nullptr, - const char *name = nullptr) { - auto tensors__ = tensors ? _fbb.CreateVector>(*tensors) : 0; - auto inputs__ = inputs ? _fbb.CreateVector(*inputs) : 0; - auto outputs__ = outputs ? _fbb.CreateVector(*outputs) : 0; - auto operators__ = operators ? _fbb.CreateVector>(*operators) : 0; - auto name__ = name ? 
_fbb.CreateString(name) : 0; - return tflite::CreateSubGraph( +inline ::flatbuffers::Offset CreateDimensionMetadata(::flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DimensionMetadataT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _format = _o->format; + auto _dense_size = _o->dense_size; + auto _array_segments_type = _o->array_segments.type; + auto _array_segments = _o->array_segments.Pack(_fbb); + auto _array_indices_type = _o->array_indices.type; + auto _array_indices = _o->array_indices.Pack(_fbb); + return tflite::CreateDimensionMetadata( _fbb, - tensors__, - inputs__, - outputs__, - operators__, - name__); + _format, + _dense_size, + _array_segments_type, + _array_segments, + _array_indices_type, + _array_indices); } -flatbuffers::Offset CreateSubGraph(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline SparsityParametersT::SparsityParametersT(const SparsityParametersT &o) + : traversal_order(o.traversal_order), + block_map(o.block_map) { + dim_metadata.reserve(o.dim_metadata.size()); + for (const auto &dim_metadata_ : o.dim_metadata) { dim_metadata.emplace_back((dim_metadata_) ? new tflite::DimensionMetadataT(*dim_metadata_) : nullptr); } +} -struct BufferT : public flatbuffers::NativeTable { - typedef Buffer TableType; - std::vector data{}; - uint64_t offset = 0; - uint64_t size = 0; -}; +inline SparsityParametersT &SparsityParametersT::operator=(SparsityParametersT o) FLATBUFFERS_NOEXCEPT { + std::swap(traversal_order, o.traversal_order); + std::swap(block_map, o.block_map); + std::swap(dim_metadata, o.dim_metadata); + return *this; +} -struct Buffer FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef BufferT NativeTableType; - typedef BufferBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_DATA = 4, - VT_OFFSET = 6, - VT_SIZE = 8 - }; - const flatbuffers::Vector *data() const { - return GetPointer *>(VT_DATA); - } - uint64_t offset() const { - return GetField(VT_OFFSET, 0); - } - uint64_t size() const { - return GetField(VT_SIZE, 0); - } - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && - VerifyOffset(verifier, VT_DATA) && - verifier.VerifyVector(data()) && - VerifyField(verifier, VT_OFFSET, 8) && - VerifyField(verifier, VT_SIZE, 8) && - verifier.EndTable(); - } - BufferT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(BufferT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const BufferT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); -}; +inline SparsityParametersT *SparsityParameters::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new SparsityParametersT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} -struct BufferBuilder { - typedef Buffer Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_data(flatbuffers::Offset> data) { - fbb_.AddOffset(Buffer::VT_DATA, data); - } - void add_offset(uint64_t offset) { - fbb_.AddElement(Buffer::VT_OFFSET, offset, 0); - } - void add_size(uint64_t size) { - 
fbb_.AddElement(Buffer::VT_SIZE, size, 0); - } - explicit BufferBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); - } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; - } -}; +inline void SparsityParameters::UnPackTo(SparsityParametersT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = traversal_order(); if (_e) { _o->traversal_order.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->traversal_order[_i] = _e->Get(_i); } } else { _o->traversal_order.resize(0); } } + { auto _e = block_map(); if (_e) { _o->block_map.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->block_map[_i] = _e->Get(_i); } } else { _o->block_map.resize(0); } } + { auto _e = dim_metadata(); if (_e) { _o->dim_metadata.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->dim_metadata[_i]) { _e->Get(_i)->UnPackTo(_o->dim_metadata[_i].get(), _resolver); } else { _o->dim_metadata[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->dim_metadata.resize(0); } } +} -inline flatbuffers::Offset CreateBuffer( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset> data = 0, - uint64_t offset = 0, - uint64_t size = 0) { - BufferBuilder builder_(_fbb); - builder_.add_size(size); - builder_.add_offset(offset); - builder_.add_data(data); - return builder_.Finish(); +inline ::flatbuffers::Offset SparsityParameters::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateSparsityParameters(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateBufferDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector *data = nullptr, - uint64_t offset = 0, - uint64_t size = 0) { - if (data) { _fbb.ForceVectorAlignment(data->size(), sizeof(uint8_t), 16); } - auto data__ = data ? _fbb.CreateVector(*data) : 0; - return tflite::CreateBuffer( +inline ::flatbuffers::Offset CreateSparsityParameters(::flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SparsityParametersT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _traversal_order = _o->traversal_order.size() ? _fbb.CreateVector(_o->traversal_order) : 0; + auto _block_map = _o->block_map.size() ? _fbb.CreateVector(_o->block_map) : 0; + auto _dim_metadata = _o->dim_metadata.size() ? 
_fbb.CreateVector<::flatbuffers::Offset> (_o->dim_metadata.size(), [](size_t i, _VectorArgs *__va) { return CreateDimensionMetadata(*__va->__fbb, __va->__o->dim_metadata[i].get(), __va->__rehasher); }, &_va ) : 0; + return tflite::CreateSparsityParameters( _fbb, - data__, - offset, - size); + _traversal_order, + _block_map, + _dim_metadata); } -flatbuffers::Offset CreateBuffer(flatbuffers::FlatBufferBuilder &_fbb, const BufferT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline VariantSubTypeT *VariantSubType::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new VariantSubTypeT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} -struct MetadataT : public flatbuffers::NativeTable { - typedef Metadata TableType; - std::string name{}; - uint32_t buffer = 0; -}; +inline void VariantSubType::UnPackTo(VariantSubTypeT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } else { _o->shape.resize(0); } } + { auto _e = type(); _o->type = _e; } + { auto _e = has_rank(); _o->has_rank = _e; } +} -struct Metadata FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef MetadataT NativeTableType; - typedef MetadataBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_NAME = 4, - VT_BUFFER = 6 - }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); - } - uint32_t buffer() const { - return GetField(VT_BUFFER, 0); - } - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && - VerifyOffset(verifier, VT_NAME) && - verifier.VerifyString(name()) && - VerifyField(verifier, VT_BUFFER, 4) && - verifier.EndTable(); - } - MetadataT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(MetadataT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); -}; +inline ::flatbuffers::Offset VariantSubType::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateVariantSubType(_fbb, _o, _rehasher); +} -struct MetadataBuilder { - typedef Metadata Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { - fbb_.AddOffset(Metadata::VT_NAME, name); - } - void add_buffer(uint32_t buffer) { - fbb_.AddElement(Metadata::VT_BUFFER, buffer, 0); - } - explicit MetadataBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); - } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; - } -}; +inline ::flatbuffers::Offset CreateVariantSubType(::flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const VariantSubTypeT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _shape = _o->shape.size() ? 
_fbb.CreateVector(_o->shape) : 0; + auto _type = _o->type; + auto _has_rank = _o->has_rank; + return tflite::CreateVariantSubType( + _fbb, + _shape, + _type, + _has_rank); +} -inline flatbuffers::Offset CreateMetadata( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, - uint32_t buffer = 0) { - MetadataBuilder builder_(_fbb); - builder_.add_buffer(buffer); - builder_.add_name(name); - return builder_.Finish(); +inline TensorT::TensorT(const TensorT &o) + : shape(o.shape), + type(o.type), + buffer(o.buffer), + name(o.name), + quantization((o.quantization) ? new tflite::QuantizationParametersT(*o.quantization) : nullptr), + is_variable(o.is_variable), + sparsity((o.sparsity) ? new tflite::SparsityParametersT(*o.sparsity) : nullptr), + shape_signature(o.shape_signature), + has_rank(o.has_rank) { + variant_tensors.reserve(o.variant_tensors.size()); + for (const auto &variant_tensors_ : o.variant_tensors) { variant_tensors.emplace_back((variant_tensors_) ? new tflite::VariantSubTypeT(*variant_tensors_) : nullptr); } } -inline flatbuffers::Offset CreateMetadataDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const char *name = nullptr, - uint32_t buffer = 0) { - auto name__ = name ? _fbb.CreateString(name) : 0; - return tflite::CreateMetadata( - _fbb, - name__, - buffer); +inline TensorT &TensorT::operator=(TensorT o) FLATBUFFERS_NOEXCEPT { + std::swap(shape, o.shape); + std::swap(type, o.type); + std::swap(buffer, o.buffer); + std::swap(name, o.name); + std::swap(quantization, o.quantization); + std::swap(is_variable, o.is_variable); + std::swap(sparsity, o.sparsity); + std::swap(shape_signature, o.shape_signature); + std::swap(has_rank, o.has_rank); + std::swap(variant_tensors, o.variant_tensors); + return *this; } -flatbuffers::Offset CreateMetadata(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline TensorT *Tensor::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new TensorT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} -struct TensorMapT : public flatbuffers::NativeTable { - typedef TensorMap TableType; - std::string name{}; - uint32_t tensor_index = 0; -}; +inline void Tensor::UnPackTo(TensorT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } else { _o->shape.resize(0); } } + { auto _e = type(); _o->type = _e; } + { auto _e = buffer(); _o->buffer = _e; } + { auto _e = name(); if (_e) _o->name = _e->str(); } + { auto _e = quantization(); if (_e) { if(_o->quantization) { _e->UnPackTo(_o->quantization.get(), _resolver); } else { _o->quantization = std::unique_ptr(_e->UnPack(_resolver)); } } else if (_o->quantization) { _o->quantization.reset(); } } + { auto _e = is_variable(); _o->is_variable = _e; } + { auto _e = sparsity(); if (_e) { if(_o->sparsity) { _e->UnPackTo(_o->sparsity.get(), _resolver); } else { _o->sparsity = std::unique_ptr(_e->UnPack(_resolver)); } } else if (_o->sparsity) { _o->sparsity.reset(); } } + { auto _e = shape_signature(); if (_e) { _o->shape_signature.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape_signature[_i] = _e->Get(_i); } } else { _o->shape_signature.resize(0); } } + { auto _e = has_rank(); _o->has_rank = _e; } + { auto _e = variant_tensors(); if (_e) 
{ _o->variant_tensors.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->variant_tensors[_i]) { _e->Get(_i)->UnPackTo(_o->variant_tensors[_i].get(), _resolver); } else { _o->variant_tensors[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->variant_tensors.resize(0); } } +} -struct TensorMap FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef TensorMapT NativeTableType; - typedef TensorMapBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_NAME = 4, - VT_TENSOR_INDEX = 6 - }; - const flatbuffers::String *name() const { - return GetPointer(VT_NAME); - } - uint32_t tensor_index() const { - return GetField(VT_TENSOR_INDEX, 0); - } - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && - VerifyOffset(verifier, VT_NAME) && - verifier.VerifyString(name()) && - VerifyField(verifier, VT_TENSOR_INDEX, 4) && - verifier.EndTable(); - } - TensorMapT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(TensorMapT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); -}; +inline ::flatbuffers::Offset Tensor::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TensorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateTensor(_fbb, _o, _rehasher); +} -struct TensorMapBuilder { - typedef TensorMap Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_name(flatbuffers::Offset name) { - fbb_.AddOffset(TensorMap::VT_NAME, name); - } - void add_tensor_index(uint32_t tensor_index) { - fbb_.AddElement(TensorMap::VT_TENSOR_INDEX, tensor_index, 0); - } - explicit TensorMapBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); - } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; - } -}; +inline ::flatbuffers::Offset CreateTensor(::flatbuffers::FlatBufferBuilder &_fbb, const TensorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const TensorT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _shape = _o->shape.size() ? _fbb.CreateVector(_o->shape) : 0; + auto _type = _o->type; + auto _buffer = _o->buffer; + auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name); + auto _quantization = _o->quantization ? CreateQuantizationParameters(_fbb, _o->quantization.get(), _rehasher) : 0; + auto _is_variable = _o->is_variable; + auto _sparsity = _o->sparsity ? CreateSparsityParameters(_fbb, _o->sparsity.get(), _rehasher) : 0; + auto _shape_signature = _o->shape_signature.size() ? _fbb.CreateVector(_o->shape_signature) : 0; + auto _has_rank = _o->has_rank; + auto _variant_tensors = _o->variant_tensors.size() ? 
_fbb.CreateVector<::flatbuffers::Offset> (_o->variant_tensors.size(), [](size_t i, _VectorArgs *__va) { return CreateVariantSubType(*__va->__fbb, __va->__o->variant_tensors[i].get(), __va->__rehasher); }, &_va ) : 0; + return tflite::CreateTensor( + _fbb, + _shape, + _type, + _buffer, + _name, + _quantization, + _is_variable, + _sparsity, + _shape_signature, + _has_rank, + _variant_tensors); +} -inline flatbuffers::Offset CreateTensorMap( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset name = 0, - uint32_t tensor_index = 0) { - TensorMapBuilder builder_(_fbb); - builder_.add_tensor_index(tensor_index); - builder_.add_name(name); - return builder_.Finish(); +inline StablehloGatherOptionsT *StablehloGatherOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloGatherOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); } -inline flatbuffers::Offset CreateTensorMapDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const char *name = nullptr, - uint32_t tensor_index = 0) { - auto name__ = name ? _fbb.CreateString(name) : 0; - return tflite::CreateTensorMap( +inline void StablehloGatherOptions::UnPackTo(StablehloGatherOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = offset_dims(); if (_e) { _o->offset_dims.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->offset_dims[_i] = _e->Get(_i); } } else { _o->offset_dims.resize(0); } } + { auto _e = collapsed_slice_dims(); if (_e) { _o->collapsed_slice_dims.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->collapsed_slice_dims[_i] = _e->Get(_i); } } else { _o->collapsed_slice_dims.resize(0); } } + { auto _e = start_index_map(); if (_e) { _o->start_index_map.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->start_index_map[_i] = _e->Get(_i); } } else { _o->start_index_map.resize(0); } } + { auto _e = index_vector_dim(); _o->index_vector_dim = _e; } + { auto _e = slice_sizes(); if (_e) { _o->slice_sizes.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->slice_sizes[_i] = _e->Get(_i); } } else { _o->slice_sizes.resize(0); } } + { auto _e = indices_are_sorted(); _o->indices_are_sorted = _e; } +} + +inline ::flatbuffers::Offset StablehloGatherOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloGatherOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloGatherOptions(_fbb, _o, _rehasher); +} + +inline ::flatbuffers::Offset CreateStablehloGatherOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloGatherOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloGatherOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _offset_dims = _o->offset_dims.size() ? _fbb.CreateVector(_o->offset_dims) : 0; + auto _collapsed_slice_dims = _o->collapsed_slice_dims.size() ? _fbb.CreateVector(_o->collapsed_slice_dims) : 0; + auto _start_index_map = _o->start_index_map.size() ? _fbb.CreateVector(_o->start_index_map) : 0; + auto _index_vector_dim = _o->index_vector_dim; + auto _slice_sizes = _o->slice_sizes.size() ? 
_fbb.CreateVector(_o->slice_sizes) : 0; + auto _indices_are_sorted = _o->indices_are_sorted; + return tflite::CreateStablehloGatherOptions( _fbb, - name__, - tensor_index); + _offset_dims, + _collapsed_slice_dims, + _start_index_map, + _index_vector_dim, + _slice_sizes, + _indices_are_sorted); } -flatbuffers::Offset CreateTensorMap(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline StablehloTransposeOptionsT *StablehloTransposeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloTransposeOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} -struct SignatureDefT : public flatbuffers::NativeTable { - typedef SignatureDef TableType; - std::vector> inputs{}; - std::vector> outputs{}; - std::string signature_key{}; - uint32_t subgraph_index = 0; - SignatureDefT() = default; - SignatureDefT(const SignatureDefT &o); - SignatureDefT(SignatureDefT&&) FLATBUFFERS_NOEXCEPT = default; - SignatureDefT &operator=(SignatureDefT o) FLATBUFFERS_NOEXCEPT; -}; +inline void StablehloTransposeOptions::UnPackTo(StablehloTransposeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = permutation(); if (_e) { _o->permutation.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->permutation[_i] = _e->Get(_i); } } else { _o->permutation.resize(0); } } +} -struct SignatureDef FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef SignatureDefT NativeTableType; - typedef SignatureDefBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_INPUTS = 4, - VT_OUTPUTS = 6, - VT_SIGNATURE_KEY = 8, - VT_SUBGRAPH_INDEX = 12 - }; - const flatbuffers::Vector> *inputs() const { - return GetPointer> *>(VT_INPUTS); - } - const flatbuffers::Vector> *outputs() const { - return GetPointer> *>(VT_OUTPUTS); - } - const flatbuffers::String *signature_key() const { - return GetPointer(VT_SIGNATURE_KEY); - } - uint32_t subgraph_index() const { - return GetField(VT_SUBGRAPH_INDEX, 0); - } - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && - VerifyOffset(verifier, VT_INPUTS) && - verifier.VerifyVector(inputs()) && - verifier.VerifyVectorOfTables(inputs()) && - VerifyOffset(verifier, VT_OUTPUTS) && - verifier.VerifyVector(outputs()) && - verifier.VerifyVectorOfTables(outputs()) && - VerifyOffset(verifier, VT_SIGNATURE_KEY) && - verifier.VerifyString(signature_key()) && - VerifyField(verifier, VT_SUBGRAPH_INDEX, 4) && - verifier.EndTable(); - } - SignatureDefT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(SignatureDefT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); -}; +inline ::flatbuffers::Offset StablehloTransposeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloTransposeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloTransposeOptions(_fbb, _o, _rehasher); +} -struct SignatureDefBuilder { - typedef SignatureDef Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_inputs(flatbuffers::Offset>> inputs) { - fbb_.AddOffset(SignatureDef::VT_INPUTS, inputs); - } - void 
add_outputs(flatbuffers::Offset>> outputs) { - fbb_.AddOffset(SignatureDef::VT_OUTPUTS, outputs); - } - void add_signature_key(flatbuffers::Offset signature_key) { - fbb_.AddOffset(SignatureDef::VT_SIGNATURE_KEY, signature_key); - } - void add_subgraph_index(uint32_t subgraph_index) { - fbb_.AddElement(SignatureDef::VT_SUBGRAPH_INDEX, subgraph_index, 0); - } - explicit SignatureDefBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); - } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; - } -}; +inline ::flatbuffers::Offset CreateStablehloTransposeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloTransposeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloTransposeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _permutation = _o->permutation.size() ? _fbb.CreateVector(_o->permutation) : 0; + return tflite::CreateStablehloTransposeOptions( + _fbb, + _permutation); +} -inline flatbuffers::Offset CreateSignatureDef( - flatbuffers::FlatBufferBuilder &_fbb, - flatbuffers::Offset>> inputs = 0, - flatbuffers::Offset>> outputs = 0, - flatbuffers::Offset signature_key = 0, - uint32_t subgraph_index = 0) { - SignatureDefBuilder builder_(_fbb); - builder_.add_subgraph_index(subgraph_index); - builder_.add_signature_key(signature_key); - builder_.add_outputs(outputs); - builder_.add_inputs(inputs); - return builder_.Finish(); +inline StablehloDotGeneralOptionsT *StablehloDotGeneralOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloDotGeneralOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); } -inline flatbuffers::Offset CreateSignatureDefDirect( - flatbuffers::FlatBufferBuilder &_fbb, - const std::vector> *inputs = nullptr, - const std::vector> *outputs = nullptr, - const char *signature_key = nullptr, - uint32_t subgraph_index = 0) { - auto inputs__ = inputs ? _fbb.CreateVector>(*inputs) : 0; - auto outputs__ = outputs ? _fbb.CreateVector>(*outputs) : 0; - auto signature_key__ = signature_key ? 
_fbb.CreateString(signature_key) : 0; - return tflite::CreateSignatureDef( +inline void StablehloDotGeneralOptions::UnPackTo(StablehloDotGeneralOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = lhs_batching_dimensions(); if (_e) { _o->lhs_batching_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->lhs_batching_dimensions[_i] = _e->Get(_i); } } else { _o->lhs_batching_dimensions.resize(0); } } + { auto _e = rhs_batching_dimensions(); if (_e) { _o->rhs_batching_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->rhs_batching_dimensions[_i] = _e->Get(_i); } } else { _o->rhs_batching_dimensions.resize(0); } } + { auto _e = lhs_contracting_dimensions(); if (_e) { _o->lhs_contracting_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->lhs_contracting_dimensions[_i] = _e->Get(_i); } } else { _o->lhs_contracting_dimensions.resize(0); } } + { auto _e = rhs_contracting_dimensions(); if (_e) { _o->rhs_contracting_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->rhs_contracting_dimensions[_i] = _e->Get(_i); } } else { _o->rhs_contracting_dimensions.resize(0); } } + { auto _e = precision_config(); if (_e) { _o->precision_config.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->precision_config[_i] = static_cast(_e->Get(_i)); } } else { _o->precision_config.resize(0); } } +} + +inline ::flatbuffers::Offset StablehloDotGeneralOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDotGeneralOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloDotGeneralOptions(_fbb, _o, _rehasher); +} + +inline ::flatbuffers::Offset CreateStablehloDotGeneralOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDotGeneralOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloDotGeneralOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _lhs_batching_dimensions = _o->lhs_batching_dimensions.size() ? _fbb.CreateVector(_o->lhs_batching_dimensions) : 0; + auto _rhs_batching_dimensions = _o->rhs_batching_dimensions.size() ? _fbb.CreateVector(_o->rhs_batching_dimensions) : 0; + auto _lhs_contracting_dimensions = _o->lhs_contracting_dimensions.size() ? _fbb.CreateVector(_o->lhs_contracting_dimensions) : 0; + auto _rhs_contracting_dimensions = _o->rhs_contracting_dimensions.size() ? _fbb.CreateVector(_o->rhs_contracting_dimensions) : 0; + auto _precision_config = _o->precision_config.size() ? 
_fbb.CreateVectorScalarCast(::flatbuffers::data(_o->precision_config), _o->precision_config.size()) : 0; + return tflite::CreateStablehloDotGeneralOptions( _fbb, - inputs__, - outputs__, - signature_key__, - subgraph_index); + _lhs_batching_dimensions, + _rhs_batching_dimensions, + _lhs_contracting_dimensions, + _rhs_contracting_dimensions, + _precision_config); } -flatbuffers::Offset CreateSignatureDef(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline StablehloReduceWindowOptionsT *StablehloReduceWindowOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloReduceWindowOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} -struct ModelT : public flatbuffers::NativeTable { - typedef Model TableType; - uint32_t version = 0; - std::vector> operator_codes{}; - std::vector> subgraphs{}; - std::string description{}; - std::vector> buffers{}; - std::vector metadata_buffer{}; - std::vector> metadata{}; - std::vector> signature_defs{}; - ModelT() = default; - ModelT(const ModelT &o); - ModelT(ModelT&&) FLATBUFFERS_NOEXCEPT = default; - ModelT &operator=(ModelT o) FLATBUFFERS_NOEXCEPT; -}; +inline void StablehloReduceWindowOptions::UnPackTo(StablehloReduceWindowOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = window_dimensions(); if (_e) { _o->window_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->window_dimensions[_i] = _e->Get(_i); } } else { _o->window_dimensions.resize(0); } } + { auto _e = window_strides(); if (_e) { _o->window_strides.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->window_strides[_i] = _e->Get(_i); } } else { _o->window_strides.resize(0); } } + { auto _e = base_dilations(); if (_e) { _o->base_dilations.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->base_dilations[_i] = _e->Get(_i); } } else { _o->base_dilations.resize(0); } } + { auto _e = window_dilations(); if (_e) { _o->window_dilations.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->window_dilations[_i] = _e->Get(_i); } } else { _o->window_dilations.resize(0); } } + { auto _e = padding(); if (_e) { _o->padding.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->padding[_i] = _e->Get(_i); } } else { _o->padding.resize(0); } } + { auto _e = body_subgraph_index(); _o->body_subgraph_index = _e; } +} -struct Model FLATBUFFERS_FINAL_CLASS : private flatbuffers::Table { - typedef ModelT NativeTableType; - typedef ModelBuilder Builder; - enum FlatBuffersVTableOffset FLATBUFFERS_VTABLE_UNDERLYING_TYPE { - VT_VERSION = 4, - VT_OPERATOR_CODES = 6, - VT_SUBGRAPHS = 8, - VT_DESCRIPTION = 10, - VT_BUFFERS = 12, - VT_METADATA_BUFFER = 14, - VT_METADATA = 16, - VT_SIGNATURE_DEFS = 18 - }; - uint32_t version() const { - return GetField(VT_VERSION, 0); - } - const flatbuffers::Vector> *operator_codes() const { - return GetPointer> *>(VT_OPERATOR_CODES); - } - const flatbuffers::Vector> *subgraphs() const { - return GetPointer> *>(VT_SUBGRAPHS); - } - const flatbuffers::String *description() const { - return GetPointer(VT_DESCRIPTION); - } - const flatbuffers::Vector> *buffers() const { - return GetPointer> *>(VT_BUFFERS); - } - const flatbuffers::Vector *metadata_buffer() const { - return GetPointer 
*>(VT_METADATA_BUFFER); - } - const flatbuffers::Vector> *metadata() const { - return GetPointer> *>(VT_METADATA); - } - const flatbuffers::Vector> *signature_defs() const { - return GetPointer> *>(VT_SIGNATURE_DEFS); - } - bool Verify(flatbuffers::Verifier &verifier) const { - return VerifyTableStart(verifier) && - VerifyField(verifier, VT_VERSION, 4) && - VerifyOffset(verifier, VT_OPERATOR_CODES) && - verifier.VerifyVector(operator_codes()) && - verifier.VerifyVectorOfTables(operator_codes()) && - VerifyOffset(verifier, VT_SUBGRAPHS) && - verifier.VerifyVector(subgraphs()) && - verifier.VerifyVectorOfTables(subgraphs()) && - VerifyOffset(verifier, VT_DESCRIPTION) && - verifier.VerifyString(description()) && - VerifyOffset(verifier, VT_BUFFERS) && - verifier.VerifyVector(buffers()) && - verifier.VerifyVectorOfTables(buffers()) && - VerifyOffset(verifier, VT_METADATA_BUFFER) && - verifier.VerifyVector(metadata_buffer()) && - VerifyOffset(verifier, VT_METADATA) && - verifier.VerifyVector(metadata()) && - verifier.VerifyVectorOfTables(metadata()) && - VerifyOffset(verifier, VT_SIGNATURE_DEFS) && - verifier.VerifyVector(signature_defs()) && - verifier.VerifyVectorOfTables(signature_defs()) && - verifier.EndTable(); - } - ModelT *UnPack(const flatbuffers::resolver_function_t *_resolver = nullptr) const; - void UnPackTo(ModelT *_o, const flatbuffers::resolver_function_t *_resolver = nullptr) const; - static flatbuffers::Offset Pack(flatbuffers::FlatBufferBuilder &_fbb, const ModelT* _o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); -}; +inline ::flatbuffers::Offset StablehloReduceWindowOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceWindowOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloReduceWindowOptions(_fbb, _o, _rehasher); +} -struct ModelBuilder { - typedef Model Table; - flatbuffers::FlatBufferBuilder &fbb_; - flatbuffers::uoffset_t start_; - void add_version(uint32_t version) { - fbb_.AddElement(Model::VT_VERSION, version, 0); - } - void add_operator_codes(flatbuffers::Offset>> operator_codes) { - fbb_.AddOffset(Model::VT_OPERATOR_CODES, operator_codes); - } - void add_subgraphs(flatbuffers::Offset>> subgraphs) { - fbb_.AddOffset(Model::VT_SUBGRAPHS, subgraphs); - } - void add_description(flatbuffers::Offset description) { - fbb_.AddOffset(Model::VT_DESCRIPTION, description); - } - void add_buffers(flatbuffers::Offset>> buffers) { - fbb_.AddOffset(Model::VT_BUFFERS, buffers); - } - void add_metadata_buffer(flatbuffers::Offset> metadata_buffer) { - fbb_.AddOffset(Model::VT_METADATA_BUFFER, metadata_buffer); - } - void add_metadata(flatbuffers::Offset>> metadata) { - fbb_.AddOffset(Model::VT_METADATA, metadata); - } - void add_signature_defs(flatbuffers::Offset>> signature_defs) { - fbb_.AddOffset(Model::VT_SIGNATURE_DEFS, signature_defs); - } - explicit ModelBuilder(flatbuffers::FlatBufferBuilder &_fbb) - : fbb_(_fbb) { - start_ = fbb_.StartTable(); - } - flatbuffers::Offset Finish() { - const auto end = fbb_.EndTable(start_); - auto o = flatbuffers::Offset(end); - return o; - } -}; +inline ::flatbuffers::Offset CreateStablehloReduceWindowOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceWindowOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloReduceWindowOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; 
(void)_va; + auto _window_dimensions = _o->window_dimensions.size() ? _fbb.CreateVector(_o->window_dimensions) : 0; + auto _window_strides = _o->window_strides.size() ? _fbb.CreateVector(_o->window_strides) : 0; + auto _base_dilations = _o->base_dilations.size() ? _fbb.CreateVector(_o->base_dilations) : 0; + auto _window_dilations = _o->window_dilations.size() ? _fbb.CreateVector(_o->window_dilations) : 0; + auto _padding = _o->padding.size() ? _fbb.CreateVector(_o->padding) : 0; + auto _body_subgraph_index = _o->body_subgraph_index; + return tflite::CreateStablehloReduceWindowOptions( + _fbb, + _window_dimensions, + _window_strides, + _base_dilations, + _window_dilations, + _padding, + _body_subgraph_index); +} -inline flatbuffers::Offset CreateModel( - flatbuffers::FlatBufferBuilder &_fbb, - uint32_t version = 0, - flatbuffers::Offset>> operator_codes = 0, - flatbuffers::Offset>> subgraphs = 0, - flatbuffers::Offset description = 0, - flatbuffers::Offset>> buffers = 0, - flatbuffers::Offset> metadata_buffer = 0, - flatbuffers::Offset>> metadata = 0, - flatbuffers::Offset>> signature_defs = 0) { - ModelBuilder builder_(_fbb); - builder_.add_signature_defs(signature_defs); - builder_.add_metadata(metadata); - builder_.add_metadata_buffer(metadata_buffer); - builder_.add_buffers(buffers); - builder_.add_description(description); - builder_.add_subgraphs(subgraphs); - builder_.add_operator_codes(operator_codes); - builder_.add_version(version); - return builder_.Finish(); +inline StablehloWhileOptionsT *StablehloWhileOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloWhileOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); } -inline flatbuffers::Offset CreateModelDirect( - flatbuffers::FlatBufferBuilder &_fbb, - uint32_t version = 0, - const std::vector> *operator_codes = nullptr, - const std::vector> *subgraphs = nullptr, - const char *description = nullptr, - const std::vector> *buffers = nullptr, - const std::vector *metadata_buffer = nullptr, - const std::vector> *metadata = nullptr, - const std::vector> *signature_defs = nullptr) { - auto operator_codes__ = operator_codes ? _fbb.CreateVector>(*operator_codes) : 0; - auto subgraphs__ = subgraphs ? _fbb.CreateVector>(*subgraphs) : 0; - auto description__ = description ? _fbb.CreateString(description) : 0; - auto buffers__ = buffers ? _fbb.CreateVector>(*buffers) : 0; - auto metadata_buffer__ = metadata_buffer ? _fbb.CreateVector(*metadata_buffer) : 0; - auto metadata__ = metadata ? _fbb.CreateVector>(*metadata) : 0; - auto signature_defs__ = signature_defs ? 
_fbb.CreateVector>(*signature_defs) : 0; - return tflite::CreateModel( +inline void StablehloWhileOptions::UnPackTo(StablehloWhileOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = cond_subgraph_index(); _o->cond_subgraph_index = _e; } + { auto _e = body_subgraph_index(); _o->body_subgraph_index = _e; } +} + +inline ::flatbuffers::Offset StablehloWhileOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloWhileOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloWhileOptions(_fbb, _o, _rehasher); +} + +inline ::flatbuffers::Offset CreateStablehloWhileOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloWhileOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloWhileOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _cond_subgraph_index = _o->cond_subgraph_index; + auto _body_subgraph_index = _o->body_subgraph_index; + return tflite::CreateStablehloWhileOptions( _fbb, - version, - operator_codes__, - subgraphs__, - description__, - buffers__, - metadata_buffer__, - metadata__, - signature_defs__); + _cond_subgraph_index, + _body_subgraph_index); } -flatbuffers::Offset CreateModel(flatbuffers::FlatBufferBuilder &_fbb, const ModelT *_o, const flatbuffers::rehasher_function_t *_rehasher = nullptr); +inline StablehloSortOptionsT *StablehloSortOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloSortOptionsT()); + UnPackTo(_o.get(), _resolver); + return _o.release(); +} -inline CustomQuantizationT *CustomQuantization::UnPack(const flatbuffers::resolver_function_t *_resolver) const { - auto _o = std::unique_ptr(new CustomQuantizationT()); +inline void StablehloSortOptions::UnPackTo(StablehloSortOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { + (void)_o; + (void)_resolver; + { auto _e = dimension(); _o->dimension = _e; } + { auto _e = is_stable(); _o->is_stable = _e; } + { auto _e = comparator_subgraph_index(); _o->comparator_subgraph_index = _e; } +} + +inline ::flatbuffers::Offset StablehloSortOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSortOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloSortOptions(_fbb, _o, _rehasher); +} + +inline ::flatbuffers::Offset CreateStablehloSortOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSortOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { + (void)_rehasher; + (void)_o; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloSortOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _dimension = _o->dimension; + auto _is_stable = _o->is_stable; + auto _comparator_subgraph_index = _o->comparator_subgraph_index; + return tflite::CreateStablehloSortOptions( + _fbb, + _dimension, + _is_stable, + _comparator_subgraph_index); +} + +inline StablehloConcatenateOptionsT *StablehloConcatenateOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloConcatenateOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void CustomQuantization::UnPackTo(CustomQuantizationT *_o, const 
flatbuffers::resolver_function_t *_resolver) const { +inline void StablehloConcatenateOptions::UnPackTo(StablehloConcatenateOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; - { auto _e = custom(); if (_e) { _o->custom.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->custom.begin()); } } + { auto _e = dimension(); _o->dimension = _e; } } -inline flatbuffers::Offset CustomQuantization::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT* _o, const flatbuffers::rehasher_function_t *_rehasher) { - return CreateCustomQuantization(_fbb, _o, _rehasher); +inline ::flatbuffers::Offset StablehloConcatenateOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConcatenateOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloConcatenateOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateCustomQuantization(flatbuffers::FlatBufferBuilder &_fbb, const CustomQuantizationT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateStablehloConcatenateOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConcatenateOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CustomQuantizationT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; - _fbb.ForceVectorAlignment(_o->custom.size(), sizeof(uint8_t), 16); - auto _custom = _o->custom.size() ? _fbb.CreateVector(_o->custom) : 0; - return tflite::CreateCustomQuantization( + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloConcatenateOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _dimension = _o->dimension; + return tflite::CreateStablehloConcatenateOptions( _fbb, - _custom); + _dimension); } -inline QuantizationParametersT *QuantizationParameters::UnPack(const flatbuffers::resolver_function_t *_resolver) const { - auto _o = std::unique_ptr(new QuantizationParametersT()); +inline StablehloBroadcastInDimOptionsT *StablehloBroadcastInDimOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { + auto _o = std::unique_ptr(new StablehloBroadcastInDimOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void QuantizationParameters::UnPackTo(QuantizationParametersT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void StablehloBroadcastInDimOptions::UnPackTo(StablehloBroadcastInDimOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; - { auto _e = min(); if (_e) { _o->min.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->min[_i] = _e->Get(_i); } } } - { auto _e = max(); if (_e) { _o->max.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->max[_i] = _e->Get(_i); } } } - { auto _e = scale(); if (_e) { _o->scale.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->scale[_i] = _e->Get(_i); } } } - { auto _e = zero_point(); if (_e) { _o->zero_point.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->zero_point[_i] = _e->Get(_i); } } } - { auto _e = details_type(); _o->details.type = _e; } - { auto _e = details(); if (_e) _o->details.value = tflite::QuantizationDetailsUnion::UnPack(_e, 
details_type(), _resolver); } - { auto _e = quantized_dimension(); _o->quantized_dimension = _e; } + { auto _e = broadcast_dimensions(); if (_e) { _o->broadcast_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->broadcast_dimensions[_i] = _e->Get(_i); } } else { _o->broadcast_dimensions.resize(0); } } } -inline flatbuffers::Offset QuantizationParameters::Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher) { - return CreateQuantizationParameters(_fbb, _o, _rehasher); +inline ::flatbuffers::Offset StablehloBroadcastInDimOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloBroadcastInDimOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { + return CreateStablehloBroadcastInDimOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateQuantizationParameters(flatbuffers::FlatBufferBuilder &_fbb, const QuantizationParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateStablehloBroadcastInDimOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloBroadcastInDimOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const QuantizationParametersT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; - auto _min = _o->min.size() ? _fbb.CreateVector(_o->min) : 0; - auto _max = _o->max.size() ? _fbb.CreateVector(_o->max) : 0; - auto _scale = _o->scale.size() ? _fbb.CreateVector(_o->scale) : 0; - auto _zero_point = _o->zero_point.size() ? _fbb.CreateVector(_o->zero_point) : 0; - auto _details_type = _o->details.type; - auto _details = _o->details.Pack(_fbb); - auto _quantized_dimension = _o->quantized_dimension; - return tflite::CreateQuantizationParameters( + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloBroadcastInDimOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _broadcast_dimensions = _o->broadcast_dimensions.size() ? 
_fbb.CreateVector(_o->broadcast_dimensions) : 0;
+  return tflite::CreateStablehloBroadcastInDimOptions(
       _fbb,
-      _min,
-      _max,
-      _scale,
-      _zero_point,
-      _details_type,
-      _details,
-      _quantized_dimension);
+      _broadcast_dimensions);
 }
 
-inline Int32VectorT *Int32Vector::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
-  auto _o = std::unique_ptr<Int32VectorT>(new Int32VectorT());
+inline StablehloCompareOptionsT *StablehloCompareOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloCompareOptionsT>(new StablehloCompareOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Int32Vector::UnPackTo(Int32VectorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StablehloCompareOptions::UnPackTo(StablehloCompareOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } }
+  { auto _e = comparison_direction(); _o->comparison_direction = _e; }
+  { auto _e = compare_type(); _o->compare_type = _e; }
 }
 
-inline flatbuffers::Offset<Int32Vector> Int32Vector::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
-  return CreateInt32Vector(_fbb, _o, _rehasher);
+inline ::flatbuffers::Offset<StablehloCompareOptions> StablehloCompareOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCompareOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloCompareOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Int32Vector> CreateInt32Vector(flatbuffers::FlatBufferBuilder &_fbb, const Int32VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StablehloCompareOptions> CreateStablehloCompareOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCompareOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Int32VectorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  auto _values = _o->values.size() ? _fbb.CreateVector(_o->values) : 0;
-  return tflite::CreateInt32Vector(
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloCompareOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _comparison_direction = _o->comparison_direction;
+  auto _compare_type = _o->compare_type;
+  return tflite::CreateStablehloCompareOptions(
       _fbb,
-      _values);
+      _comparison_direction,
+      _compare_type);
 }
 
-inline Uint16VectorT *Uint16Vector::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
-  auto _o = std::unique_ptr<Uint16VectorT>(new Uint16VectorT());
+inline StablehloDynamicSliceOptionsT *StablehloDynamicSliceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloDynamicSliceOptionsT>(new StablehloDynamicSliceOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Uint16Vector::UnPackTo(Uint16VectorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StablehloDynamicSliceOptions::UnPackTo(StablehloDynamicSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = values(); if (_e) { _o->values.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->values[_i] = _e->Get(_i); } } }
+  { auto _e = slice_sizes(); if (_e) { _o->slice_sizes.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->slice_sizes[_i] = _e->Get(_i); } } else { _o->slice_sizes.resize(0); } }
 }
 
-inline flatbuffers::Offset<Uint16Vector> Uint16Vector::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
-  return CreateUint16Vector(_fbb, _o, _rehasher);
+inline ::flatbuffers::Offset<StablehloDynamicSliceOptions> StablehloDynamicSliceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDynamicSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloDynamicSliceOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Uint16Vector> CreateUint16Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint16VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StablehloDynamicSliceOptions> CreateStablehloDynamicSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloDynamicSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Uint16VectorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  _fbb.ForceVectorAlignment(_o->values.size(), sizeof(uint16_t), 4);
-  auto _values = _o->values.size() ? _fbb.CreateVector(_o->values) : 0;
-  return tflite::CreateUint16Vector(
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloDynamicSliceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _slice_sizes = _o->slice_sizes.size() ? _fbb.CreateVector(_o->slice_sizes) : 0;
+  return tflite::CreateStablehloDynamicSliceOptions(
       _fbb,
-      _values);
+      _slice_sizes);
 }
 
-inline Uint8VectorT *Uint8Vector::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
-  auto _o = std::unique_ptr<Uint8VectorT>(new Uint8VectorT());
+inline StablehloPadOptionsT *StablehloPadOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloPadOptionsT>(new StablehloPadOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Uint8Vector::UnPackTo(Uint8VectorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StablehloPadOptions::UnPackTo(StablehloPadOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = values(); if (_e) { _o->values.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->values.begin()); } }
+  { auto _e = edge_padding_low(); if (_e) { _o->edge_padding_low.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->edge_padding_low[_i] = _e->Get(_i); } } else { _o->edge_padding_low.resize(0); } }
+  { auto _e = edge_padding_high(); if (_e) { _o->edge_padding_high.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->edge_padding_high[_i] = _e->Get(_i); } } else { _o->edge_padding_high.resize(0); } }
+  { auto _e = interior_padding(); if (_e) { _o->interior_padding.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->interior_padding[_i] = _e->Get(_i); } } else { _o->interior_padding.resize(0); } }
 }
 
-inline flatbuffers::Offset<Uint8Vector> Uint8Vector::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
-  return CreateUint8Vector(_fbb, _o, _rehasher);
+inline ::flatbuffers::Offset<StablehloPadOptions> StablehloPadOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloPadOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloPadOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Uint8Vector> CreateUint8Vector(flatbuffers::FlatBufferBuilder &_fbb, const Uint8VectorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StablehloPadOptions> CreateStablehloPadOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloPadOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Uint8VectorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  _fbb.ForceVectorAlignment(_o->values.size(), sizeof(uint8_t), 4);
-  auto _values = _o->values.size() ? _fbb.CreateVector(_o->values) : 0;
-  return tflite::CreateUint8Vector(
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloPadOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _edge_padding_low = _o->edge_padding_low.size() ? _fbb.CreateVector(_o->edge_padding_low) : 0;
+  auto _edge_padding_high = _o->edge_padding_high.size() ? _fbb.CreateVector(_o->edge_padding_high) : 0;
+  auto _interior_padding = _o->interior_padding.size() ? _fbb.CreateVector(_o->interior_padding) : 0;
+  return tflite::CreateStablehloPadOptions(
       _fbb,
-      _values);
+      _edge_padding_low,
+      _edge_padding_high,
+      _interior_padding);
 }
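
The regenerated StableHLO option tables above all follow the standard FlatBuffers object API: `UnPack()` copies a table into a plain C++ `...T` struct, and the static `Pack()` serializes that struct back through a `FlatBufferBuilder`. A minimal round-trip sketch, not part of the patch itself; the include path is the usual generated header and the names mirror the table above:

```c++
// Hedged usage sketch: round-trip the new StablehloPadOptionsT object type.
#include <cassert>
#include <memory>

#include "flatbuffers/flatbuffers.h"
#include "tensorflow/lite/schema/schema_generated.h"  // assumed include path

void RoundTripPadOptions() {
  tflite::StablehloPadOptionsT pad;     // plain C++ struct (object API)
  pad.edge_padding_low = {0, 1};
  pad.edge_padding_high = {0, 1};
  pad.interior_padding = {0, 0};

  ::flatbuffers::FlatBufferBuilder fbb;
  // Pack() serializes the struct into the builder's buffer.
  fbb.Finish(tflite::StablehloPadOptions::Pack(fbb, &pad));

  // UnPack() copies the finished table back into a freshly allocated struct.
  const auto* table =
      ::flatbuffers::GetRoot<tflite::StablehloPadOptions>(fbb.GetBufferPointer());
  std::unique_ptr<tflite::StablehloPadOptionsT> unpacked(table->UnPack());
  assert(unpacked->edge_padding_low == pad.edge_padding_low);
}
```

Note how empty vectors are serialized as a null offset (`size() ? CreateVector(...) : 0`), which the new `else { ...resize(0); }` branches in `UnPackTo` mirror on the read side.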
 
-inline DimensionMetadataT *DimensionMetadata::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
-  auto _o = std::unique_ptr<DimensionMetadataT>(new DimensionMetadataT());
+inline StablehloIotaOptionsT *StablehloIotaOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloIotaOptionsT>(new StablehloIotaOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void DimensionMetadata::UnPackTo(DimensionMetadataT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StablehloIotaOptions::UnPackTo(StablehloIotaOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = format(); _o->format = _e; }
-  { auto _e = dense_size(); _o->dense_size = _e; }
-  { auto _e = array_segments_type(); _o->array_segments.type = _e; }
-  { auto _e = array_segments(); if (_e) _o->array_segments.value = tflite::SparseIndexVectorUnion::UnPack(_e, array_segments_type(), _resolver); }
-  { auto _e = array_indices_type(); _o->array_indices.type = _e; }
-  { auto _e = array_indices(); if (_e) _o->array_indices.value = tflite::SparseIndexVectorUnion::UnPack(_e, array_indices_type(), _resolver); }
+  { auto _e = iota_dimension(); _o->iota_dimension = _e; }
 }
 
-inline flatbuffers::Offset<DimensionMetadata> DimensionMetadata::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
-  return CreateDimensionMetadata(_fbb, _o, _rehasher);
+inline ::flatbuffers::Offset<StablehloIotaOptions> StablehloIotaOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloIotaOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloIotaOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<DimensionMetadata> CreateDimensionMetadata(flatbuffers::FlatBufferBuilder &_fbb, const DimensionMetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StablehloIotaOptions> CreateStablehloIotaOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloIotaOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DimensionMetadataT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  auto _format = _o->format;
-  auto _dense_size = _o->dense_size;
-  auto _array_segments_type = _o->array_segments.type;
-  auto _array_segments = _o->array_segments.Pack(_fbb);
-  auto _array_indices_type = _o->array_indices.type;
-  auto _array_indices = _o->array_indices.Pack(_fbb);
-  return tflite::CreateDimensionMetadata(
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloIotaOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _iota_dimension = _o->iota_dimension;
+  return tflite::CreateStablehloIotaOptions(
       _fbb,
-      _format,
-      _dense_size,
-      _array_segments_type,
-      _array_segments,
-      _array_indices_type,
-      _array_indices);
+      _iota_dimension);
 }
 
-inline SparsityParametersT::SparsityParametersT(const SparsityParametersT &o)
-      : traversal_order(o.traversal_order),
-        block_map(o.block_map) {
-  dim_metadata.reserve(o.dim_metadata.size());
-  for (const auto &dim_metadata_ : o.dim_metadata) { dim_metadata.emplace_back((dim_metadata_) ? new tflite::DimensionMetadataT(*dim_metadata_) : nullptr); }
+inline StablehloCustomCallOptionsT *StablehloCustomCallOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloCustomCallOptionsT>(new StablehloCustomCallOptionsT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
 }
 
-inline SparsityParametersT &SparsityParametersT::operator=(SparsityParametersT o) FLATBUFFERS_NOEXCEPT {
-  std::swap(traversal_order, o.traversal_order);
-  std::swap(block_map, o.block_map);
-  std::swap(dim_metadata, o.dim_metadata);
-  return *this;
+inline void StablehloCustomCallOptions::UnPackTo(StablehloCustomCallOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = call_target_name(); if (_e) _o->call_target_name = _e->str(); }
+  { auto _e = has_side_effect(); _o->has_side_effect = _e; }
+  { auto _e = backend_config(); if (_e) _o->backend_config = _e->str(); }
+  { auto _e = api_version(); _o->api_version = _e; }
+  { auto _e = called_computations(); if (_e) { _o->called_computations.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->called_computations[_i] = _e->Get(_i); } } else { _o->called_computations.resize(0); } }
+  { auto _e = custom_attributes(); if (_e) { _o->custom_attributes.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->custom_attributes.begin()); } }
 }
 
-inline SparsityParametersT *SparsityParameters::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
-  auto _o = std::unique_ptr<SparsityParametersT>(new SparsityParametersT());
+inline ::flatbuffers::Offset<StablehloCustomCallOptions> StablehloCustomCallOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCustomCallOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloCustomCallOptions(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<StablehloCustomCallOptions> CreateStablehloCustomCallOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloCustomCallOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloCustomCallOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _call_target_name = _o->call_target_name.empty() ? 0 : _fbb.CreateString(_o->call_target_name);
+  auto _has_side_effect = _o->has_side_effect;
+  auto _backend_config = _o->backend_config.empty() ? 0 : _fbb.CreateString(_o->backend_config);
+  auto _api_version = _o->api_version;
+  auto _called_computations = _o->called_computations.size() ? _fbb.CreateVector(_o->called_computations) : 0;
+  auto _custom_attributes = _o->custom_attributes.size() ? _fbb.CreateVector(_o->custom_attributes) : 0;
+  return tflite::CreateStablehloCustomCallOptions(
+      _fbb,
+      _call_target_name,
+      _has_side_effect,
+      _backend_config,
+      _api_version,
+      _called_computations,
+      _custom_attributes);
+}
+
+inline StablehloReduceOptionsT *StablehloReduceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloReduceOptionsT>(new StablehloReduceOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void SparsityParameters::UnPackTo(SparsityParametersT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StablehloReduceOptions::UnPackTo(StablehloReduceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = traversal_order(); if (_e) { _o->traversal_order.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->traversal_order[_i] = _e->Get(_i); } } }
-  { auto _e = block_map(); if (_e) { _o->block_map.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->block_map[_i] = _e->Get(_i); } } }
-  { auto _e = dim_metadata(); if (_e) { _o->dim_metadata.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->dim_metadata[_i]) { _e->Get(_i)->UnPackTo(_o->dim_metadata[_i].get(), _resolver); } else { _o->dim_metadata[_i] = std::unique_ptr<tflite::DimensionMetadataT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
+  { auto _e = dimensions(); if (_e) { _o->dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->dimensions[_i] = _e->Get(_i); } } else { _o->dimensions.resize(0); } }
+  { auto _e = body_subgraph_index(); _o->body_subgraph_index = _e; }
 }
 
-inline flatbuffers::Offset<SparsityParameters> SparsityParameters::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
-  return CreateSparsityParameters(_fbb, _o, _rehasher);
+inline ::flatbuffers::Offset<StablehloReduceOptions> StablehloReduceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloReduceOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<SparsityParameters> CreateSparsityParameters(flatbuffers::FlatBufferBuilder &_fbb, const SparsityParametersT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StablehloReduceOptions> CreateStablehloReduceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloReduceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SparsityParametersT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  auto _traversal_order = _o->traversal_order.size() ? _fbb.CreateVector(_o->traversal_order) : 0;
-  auto _block_map = _o->block_map.size() ? _fbb.CreateVector(_o->block_map) : 0;
-  auto _dim_metadata = _o->dim_metadata.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::DimensionMetadata>>(_o->dim_metadata.size(), [](size_t i, _VectorArgs *__va) { return CreateDimensionMetadata(*__va->__fbb, __va->__o->dim_metadata[i].get(), __va->__rehasher); }, &_va ) : 0;
-  return tflite::CreateSparsityParameters(
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloReduceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _dimensions = _o->dimensions.size() ? _fbb.CreateVector(_o->dimensions) : 0;
+  auto _body_subgraph_index = _o->body_subgraph_index;
+  return tflite::CreateStablehloReduceOptions(
       _fbb,
-      _traversal_order,
-      _block_map,
-      _dim_metadata);
+      _dimensions,
+      _body_subgraph_index);
 }
 
-inline VariantSubTypeT *VariantSubType::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
-  auto _o = std::unique_ptr<VariantSubTypeT>(new VariantSubTypeT());
+inline StablehloSliceOptionsT *StablehloSliceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloSliceOptionsT>(new StablehloSliceOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void VariantSubType::UnPackTo(VariantSubTypeT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StablehloSliceOptions::UnPackTo(StablehloSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } }
-  { auto _e = type(); _o->type = _e; }
-  { auto _e = has_rank(); _o->has_rank = _e; }
+  { auto _e = start_indices(); if (_e) { _o->start_indices.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->start_indices[_i] = _e->Get(_i); } } else { _o->start_indices.resize(0); } }
+  { auto _e = limit_indices(); if (_e) { _o->limit_indices.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->limit_indices[_i] = _e->Get(_i); } } else { _o->limit_indices.resize(0); } }
+  { auto _e = strides(); if (_e) { _o->strides.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->strides[_i] = _e->Get(_i); } } else { _o->strides.resize(0); } }
 }
 
-inline flatbuffers::Offset<VariantSubType> VariantSubType::Pack(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
-  return CreateVariantSubType(_fbb, _o, _rehasher);
+inline ::flatbuffers::Offset<StablehloSliceOptions> StablehloSliceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloSliceOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<VariantSubType> CreateVariantSubType(flatbuffers::FlatBufferBuilder &_fbb, const VariantSubTypeT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StablehloSliceOptions> CreateStablehloSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const VariantSubTypeT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  auto _shape = _o->shape.size() ? _fbb.CreateVector(_o->shape) : 0;
-  auto _type = _o->type;
-  auto _has_rank = _o->has_rank;
-  return tflite::CreateVariantSubType(
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloSliceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _start_indices = _o->start_indices.size() ? _fbb.CreateVector(_o->start_indices) : 0;
+  auto _limit_indices = _o->limit_indices.size() ? _fbb.CreateVector(_o->limit_indices) : 0;
+  auto _strides = _o->strides.size() ? _fbb.CreateVector(_o->strides) : 0;
+  return tflite::CreateStablehloSliceOptions(
       _fbb,
-      _shape,
-      _type,
-      _has_rank);
+      _start_indices,
+      _limit_indices,
+      _strides);
 }
 
-inline TensorT::TensorT(const TensorT &o)
-      : shape(o.shape),
-        type(o.type),
-        buffer(o.buffer),
-        name(o.name),
-        quantization((o.quantization) ? new tflite::QuantizationParametersT(*o.quantization) : nullptr),
-        is_variable(o.is_variable),
-        sparsity((o.sparsity) ? new tflite::SparsityParametersT(*o.sparsity) : nullptr),
-        shape_signature(o.shape_signature),
-        has_rank(o.has_rank) {
-  variant_tensors.reserve(o.variant_tensors.size());
-  for (const auto &variant_tensors_ : o.variant_tensors) { variant_tensors.emplace_back((variant_tensors_) ? new tflite::VariantSubTypeT(*variant_tensors_) : nullptr); }
+inline StablehloConvolutionOptionsT *StablehloConvolutionOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloConvolutionOptionsT>(new StablehloConvolutionOptionsT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
 }
 
-inline TensorT &TensorT::operator=(TensorT o) FLATBUFFERS_NOEXCEPT {
-  std::swap(shape, o.shape);
-  std::swap(type, o.type);
-  std::swap(buffer, o.buffer);
-  std::swap(name, o.name);
-  std::swap(quantization, o.quantization);
-  std::swap(is_variable, o.is_variable);
-  std::swap(sparsity, o.sparsity);
-  std::swap(shape_signature, o.shape_signature);
-  std::swap(has_rank, o.has_rank);
-  std::swap(variant_tensors, o.variant_tensors);
-  return *this;
+inline void StablehloConvolutionOptions::UnPackTo(StablehloConvolutionOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = window_strides(); if (_e) { _o->window_strides.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->window_strides[_i] = _e->Get(_i); } } else { _o->window_strides.resize(0); } }
+  { auto _e = padding(); if (_e) { _o->padding.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->padding[_i] = _e->Get(_i); } } else { _o->padding.resize(0); } }
+  { auto _e = lhs_dilation(); if (_e) { _o->lhs_dilation.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->lhs_dilation[_i] = _e->Get(_i); } } else { _o->lhs_dilation.resize(0); } }
+  { auto _e = rhs_dilation(); if (_e) { _o->rhs_dilation.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->rhs_dilation[_i] = _e->Get(_i); } } else { _o->rhs_dilation.resize(0); } }
+  { auto _e = window_reversal(); if (_e) { _o->window_reversal.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->window_reversal[_i] = _e->Get(_i) != 0; } } else { _o->window_reversal.resize(0); } }
+  { auto _e = input_batch_dimension(); _o->input_batch_dimension = _e; }
+  { auto _e = input_feature_dimension(); _o->input_feature_dimension = _e; }
+  { auto _e = input_spatial_dimensions(); if (_e) { _o->input_spatial_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->input_spatial_dimensions[_i] = _e->Get(_i); } } else { _o->input_spatial_dimensions.resize(0); } }
+  { auto _e = kernel_input_feature_dimension(); _o->kernel_input_feature_dimension = _e; }
+  { auto _e = kernel_output_feature_dimension(); _o->kernel_output_feature_dimension = _e; }
+  { auto _e = kernel_spatial_dimensions(); if (_e) { _o->kernel_spatial_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->kernel_spatial_dimensions[_i] = _e->Get(_i); } } else { _o->kernel_spatial_dimensions.resize(0); } }
+  { auto _e = output_batch_dimension(); _o->output_batch_dimension = _e; }
+  { auto _e = output_feature_dimension(); _o->output_feature_dimension = _e; }
+  { auto _e = output_spatial_dimensions(); if (_e) { _o->output_spatial_dimensions.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->output_spatial_dimensions[_i] = _e->Get(_i); } } else { _o->output_spatial_dimensions.resize(0); } }
+  { auto _e = feature_group_count(); _o->feature_group_count = _e; }
+  { auto _e = batch_group_count(); _o->batch_group_count = _e; }
+  { auto _e = precision_config(); if (_e) { _o->precision_config.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->precision_config[_i] = static_cast<tflite::StablehloPrecisionConfig>(_e->Get(_i)); } } else { _o->precision_config.resize(0); } }
+}
+
+inline ::flatbuffers::Offset<StablehloConvolutionOptions> StablehloConvolutionOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConvolutionOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloConvolutionOptions(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<StablehloConvolutionOptions> CreateStablehloConvolutionOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloConvolutionOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloConvolutionOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _window_strides = _o->window_strides.size() ? _fbb.CreateVector(_o->window_strides) : 0;
+  auto _padding = _o->padding.size() ? _fbb.CreateVector(_o->padding) : 0;
+  auto _lhs_dilation = _o->lhs_dilation.size() ? _fbb.CreateVector(_o->lhs_dilation) : 0;
+  auto _rhs_dilation = _o->rhs_dilation.size() ? _fbb.CreateVector(_o->rhs_dilation) : 0;
+  auto _window_reversal = _o->window_reversal.size() ? _fbb.CreateVector(_o->window_reversal) : 0;
+  auto _input_batch_dimension = _o->input_batch_dimension;
+  auto _input_feature_dimension = _o->input_feature_dimension;
+  auto _input_spatial_dimensions = _o->input_spatial_dimensions.size() ? _fbb.CreateVector(_o->input_spatial_dimensions) : 0;
+  auto _kernel_input_feature_dimension = _o->kernel_input_feature_dimension;
+  auto _kernel_output_feature_dimension = _o->kernel_output_feature_dimension;
+  auto _kernel_spatial_dimensions = _o->kernel_spatial_dimensions.size() ? _fbb.CreateVector(_o->kernel_spatial_dimensions) : 0;
+  auto _output_batch_dimension = _o->output_batch_dimension;
+  auto _output_feature_dimension = _o->output_feature_dimension;
+  auto _output_spatial_dimensions = _o->output_spatial_dimensions.size() ? _fbb.CreateVector(_o->output_spatial_dimensions) : 0;
+  auto _feature_group_count = _o->feature_group_count;
+  auto _batch_group_count = _o->batch_group_count;
+  auto _precision_config = _o->precision_config.size() ? _fbb.CreateVectorScalarCast<int32_t>(::flatbuffers::data(_o->precision_config), _o->precision_config.size()) : 0;
+  return tflite::CreateStablehloConvolutionOptions(
+      _fbb,
+      _window_strides,
+      _padding,
+      _lhs_dilation,
+      _rhs_dilation,
+      _window_reversal,
+      _input_batch_dimension,
+      _input_feature_dimension,
+      _input_spatial_dimensions,
+      _kernel_input_feature_dimension,
+      _kernel_output_feature_dimension,
+      _kernel_spatial_dimensions,
+      _output_batch_dimension,
+      _output_feature_dimension,
+      _output_spatial_dimensions,
+      _feature_group_count,
+      _batch_group_count,
+      _precision_config);
+}
+
+inline StablehloScatterOptionsT *StablehloScatterOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloScatterOptionsT>(new StablehloScatterOptionsT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
 }
 
-inline TensorT *Tensor::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
-  auto _o = std::unique_ptr<TensorT>(new TensorT());
+inline void StablehloScatterOptions::UnPackTo(StablehloScatterOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = indices_are_sorted(); _o->indices_are_sorted = _e; }
+  { auto _e = update_window_dims(); if (_e) { _o->update_window_dims.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->update_window_dims[_i] = _e->Get(_i); } } else { _o->update_window_dims.resize(0); } }
+  { auto _e = inserted_window_dims(); if (_e) { _o->inserted_window_dims.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inserted_window_dims[_i] = _e->Get(_i); } } else { _o->inserted_window_dims.resize(0); } }
+  { auto _e = scatter_dims_to_operand_dims(); if (_e) { _o->scatter_dims_to_operand_dims.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->scatter_dims_to_operand_dims[_i] = _e->Get(_i); } } else { _o->scatter_dims_to_operand_dims.resize(0); } }
+  { auto _e = index_vector_dim(); _o->index_vector_dim = _e; }
+  { auto _e = unique_indices(); _o->unique_indices = _e; }
+  { auto _e = update_computation_subgraph_index(); _o->update_computation_subgraph_index = _e; }
+}
+
+inline ::flatbuffers::Offset<StablehloScatterOptions> StablehloScatterOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloScatterOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloScatterOptions(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<StablehloScatterOptions> CreateStablehloScatterOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloScatterOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloScatterOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _indices_are_sorted = _o->indices_are_sorted;
+  auto _update_window_dims = _o->update_window_dims.size() ? _fbb.CreateVector(_o->update_window_dims) : 0;
+  auto _inserted_window_dims = _o->inserted_window_dims.size() ? _fbb.CreateVector(_o->inserted_window_dims) : 0;
+  auto _scatter_dims_to_operand_dims = _o->scatter_dims_to_operand_dims.size() ? _fbb.CreateVector(_o->scatter_dims_to_operand_dims) : 0;
+  auto _index_vector_dim = _o->index_vector_dim;
+  auto _unique_indices = _o->unique_indices;
+  auto _update_computation_subgraph_index = _o->update_computation_subgraph_index;
+  return tflite::CreateStablehloScatterOptions(
+      _fbb,
+      _indices_are_sorted,
+      _update_window_dims,
+      _inserted_window_dims,
+      _scatter_dims_to_operand_dims,
+      _index_vector_dim,
+      _unique_indices,
+      _update_computation_subgraph_index);
+}
+
+inline StablehloRngBitGeneratorOptionsT *StablehloRngBitGeneratorOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StablehloRngBitGeneratorOptionsT>(new StablehloRngBitGeneratorOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Tensor::UnPackTo(TensorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StablehloRngBitGeneratorOptions::UnPackTo(StablehloRngBitGeneratorOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = shape(); if (_e) { _o->shape.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape[_i] = _e->Get(_i); } } }
-  { auto _e = type(); _o->type = _e; }
-  { auto _e = buffer(); _o->buffer = _e; }
-  { auto _e = name(); if (_e) _o->name = _e->str(); }
-  { auto _e = quantization(); if (_e) { if(_o->quantization) { _e->UnPackTo(_o->quantization.get(), _resolver); } else { _o->quantization = std::unique_ptr<tflite::QuantizationParametersT>(_e->UnPack(_resolver)); } } }
-  { auto _e = is_variable(); _o->is_variable = _e; }
-  { auto _e = sparsity(); if (_e) { if(_o->sparsity) { _e->UnPackTo(_o->sparsity.get(), _resolver); } else { _o->sparsity = std::unique_ptr<tflite::SparsityParametersT>(_e->UnPack(_resolver)); } } }
-  { auto _e = shape_signature(); if (_e) { _o->shape_signature.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->shape_signature[_i] = _e->Get(_i); } } }
-  { auto _e = has_rank(); _o->has_rank = _e; }
-  { auto _e = variant_tensors(); if (_e) { _o->variant_tensors.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->variant_tensors[_i]) { _e->Get(_i)->UnPackTo(_o->variant_tensors[_i].get(), _resolver); } else { _o->variant_tensors[_i] = std::unique_ptr<tflite::VariantSubTypeT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
+  { auto _e = algorithm(); _o->algorithm = _e; }
 }
 
-inline flatbuffers::Offset<Tensor> Tensor::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
-  return CreateTensor(_fbb, _o, _rehasher);
+inline ::flatbuffers::Offset<StablehloRngBitGeneratorOptions> StablehloRngBitGeneratorOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloRngBitGeneratorOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStablehloRngBitGeneratorOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Tensor> CreateTensor(flatbuffers::FlatBufferBuilder &_fbb, const TensorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StablehloRngBitGeneratorOptions> CreateStablehloRngBitGeneratorOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StablehloRngBitGeneratorOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TensorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  auto _shape = _o->shape.size() ? _fbb.CreateVector(_o->shape) : 0;
-  auto _type = _o->type;
-  auto _buffer = _o->buffer;
-  auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name);
-  auto _quantization = _o->quantization ? CreateQuantizationParameters(_fbb, _o->quantization.get(), _rehasher) : 0;
-  auto _is_variable = _o->is_variable;
-  auto _sparsity = _o->sparsity ? CreateSparsityParameters(_fbb, _o->sparsity.get(), _rehasher) : 0;
-  auto _shape_signature = _o->shape_signature.size() ? _fbb.CreateVector(_o->shape_signature) : 0;
-  auto _has_rank = _o->has_rank;
-  auto _variant_tensors = _o->variant_tensors.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::VariantSubType>>(_o->variant_tensors.size(), [](size_t i, _VectorArgs *__va) { return CreateVariantSubType(*__va->__fbb, __va->__o->variant_tensors[i].get(), __va->__rehasher); }, &_va ) : 0;
-  return tflite::CreateTensor(
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StablehloRngBitGeneratorOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _algorithm = _o->algorithm;
+  return tflite::CreateStablehloRngBitGeneratorOptions(
       _fbb,
-      _shape,
-      _type,
-      _buffer,
-      _name,
-      _quantization,
-      _is_variable,
-      _sparsity,
-      _shape_signature,
-      _has_rank,
-      _variant_tensors);
+      _algorithm);
 }
 
-inline Conv2DOptionsT *Conv2DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline Conv2DOptionsT *Conv2DOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<Conv2DOptionsT>(new Conv2DOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Conv2DOptions::UnPackTo(Conv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void Conv2DOptions::UnPackTo(Conv2DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = padding(); _o->padding = _e; }
@@ -13964,22 +17633,24 @@ inline void Conv2DOptions::UnPackTo(Conv2DOptionsT *_o, const flatbuffers::resol
   { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
   { auto _e = dilation_w_factor(); _o->dilation_w_factor = _e; }
   { auto _e = dilation_h_factor(); _o->dilation_h_factor = _e; }
+  { auto _e = quantized_bias_type(); _o->quantized_bias_type = _e; }
 }
 
-inline flatbuffers::Offset<Conv2DOptions> Conv2DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Conv2DOptions> Conv2DOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateConv2DOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Conv2DOptions> CreateConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Conv2DOptions> CreateConv2DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Conv2DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Conv2DOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const Conv2DOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _padding = _o->padding;
   auto _stride_w = _o->stride_w;
   auto _stride_h = _o->stride_h;
   auto _fused_activation_function = _o->fused_activation_function;
   auto _dilation_w_factor = _o->dilation_w_factor;
   auto _dilation_h_factor = _o->dilation_h_factor;
+  auto _quantized_bias_type = _o->quantized_bias_type;
   return tflite::CreateConv2DOptions(
       _fbb,
       _padding,
@@ -13987,16 +17658,17 @@ inline flatbuffers::Offset<Conv2DOptions> CreateConv2DOptions(flatbuffers::FlatB
       _stride_h,
       _fused_activation_function,
      _dilation_w_factor,
-      _dilation_h_factor);
+      _dilation_h_factor,
+      _quantized_bias_type);
 }
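
The `quantized_bias_type` field threaded through `Conv2DOptions` above is an ordinary appended table field, so buffers written before this schema revision simply yield its default when read. A hedged sketch of setting it through the object API; the include path, the `Padding_SAME` and `TensorType_INT32` enum values are taken from the TFLite schema as assumptions:

```c++
// Illustrative only: building Conv2DOptions with the new quantized_bias_type.
#include "flatbuffers/flatbuffers.h"
#include "tensorflow/lite/schema/schema_generated.h"  // assumed include path

::flatbuffers::Offset<tflite::Conv2DOptions> BuildConv2DOptions(
    ::flatbuffers::FlatBufferBuilder& fbb) {
  tflite::Conv2DOptionsT opts;
  opts.padding = tflite::Padding_SAME;
  opts.stride_w = 1;
  opts.stride_h = 1;
  // New field in this revision: the accumulator/bias element type used by
  // quantized kernels (e.g. 32-bit bias for int8 convolutions).
  opts.quantized_bias_type = tflite::TensorType_INT32;
  return tflite::Conv2DOptions::Pack(fbb, &opts);
}
```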
-inline Conv3DOptionsT *Conv3DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline Conv3DOptionsT *Conv3DOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<Conv3DOptionsT>(new Conv3DOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Conv3DOptions::UnPackTo(Conv3DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void Conv3DOptions::UnPackTo(Conv3DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = padding(); _o->padding = _e; }
@@ -14009,14 +17681,14 @@ inline void Conv3DOptions::UnPackTo(Conv3DOptionsT *_o, const flatbuffers::resol
   { auto _e = dilation_h_factor(); _o->dilation_h_factor = _e; }
 }
 
-inline flatbuffers::Offset<Conv3DOptions> Conv3DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Conv3DOptions> Conv3DOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateConv3DOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Conv3DOptions> CreateConv3DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Conv3DOptions> CreateConv3DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Conv3DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Conv3DOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const Conv3DOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _padding = _o->padding;
   auto _stride_d = _o->stride_d;
   auto _stride_w = _o->stride_w;
@@ -14037,13 +17709,13 @@ inline flatbuffers::Offset<Conv3DOptions> CreateConv3DOptions(flatbuffers::FlatB
       _dilation_h_factor);
 }
 
-inline Pool2DOptionsT *Pool2DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline Pool2DOptionsT *Pool2DOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<Pool2DOptionsT>(new Pool2DOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Pool2DOptions::UnPackTo(Pool2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void Pool2DOptions::UnPackTo(Pool2DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = padding(); _o->padding = _e; }
@@ -14054,14 +17726,14 @@ inline void Pool2DOptions::UnPackTo(Pool2DOptionsT *_o, const flatbuffers::resol
   { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
 }
 
-inline flatbuffers::Offset<Pool2DOptions> Pool2DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Pool2DOptions> Pool2DOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreatePool2DOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Pool2DOptions> CreatePool2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Pool2DOptions> CreatePool2DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Pool2DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Pool2DOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const Pool2DOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _padding = _o->padding;
   auto _stride_w = _o->stride_w;
   auto _stride_h = _o->stride_h;
@@ -14078,13 +17750,13 @@ inline flatbuffers::Offset<Pool2DOptions> CreatePool2DOptions(flatbuffers::FlatB
       _fused_activation_function);
 }
 
-inline DepthwiseConv2DOptionsT *DepthwiseConv2DOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline DepthwiseConv2DOptionsT *DepthwiseConv2DOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<DepthwiseConv2DOptionsT>(new DepthwiseConv2DOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void DepthwiseConv2DOptions::UnPackTo(DepthwiseConv2DOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void DepthwiseConv2DOptions::UnPackTo(DepthwiseConv2DOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = padding(); _o->padding = _e; }
@@ -14096,14 +17768,14 @@ inline void DepthwiseConv2DOptions::UnPackTo(DepthwiseConv2DOptionsT *_o, const
   { auto _e = dilation_h_factor(); _o->dilation_h_factor = _e; }
 }
 
-inline flatbuffers::Offset<DepthwiseConv2DOptions> DepthwiseConv2DOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DepthwiseConv2DOptions> DepthwiseConv2DOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateDepthwiseConv2DOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DepthwiseConv2DOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DepthwiseConv2DOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DepthwiseConv2DOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _padding = _o->padding;
   auto _stride_w = _o->stride_w;
   auto _stride_h = _o->stride_h;
@@ -14122,28 +17794,28 @@ inline flatbuffers::Offset<DepthwiseConv2DOptions> CreateDepthwiseConv2DOptions(
       _dilation_h_factor);
 }
 
-inline ConcatEmbeddingsOptionsT *ConcatEmbeddingsOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ConcatEmbeddingsOptionsT *ConcatEmbeddingsOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ConcatEmbeddingsOptionsT>(new ConcatEmbeddingsOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void ConcatEmbeddingsOptions::UnPackTo(ConcatEmbeddingsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ConcatEmbeddingsOptions::UnPackTo(ConcatEmbeddingsOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = num_channels(); _o->num_channels = _e; }
-  { auto _e = num_columns_per_channel(); if (_e) { _o->num_columns_per_channel.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->num_columns_per_channel[_i] = _e->Get(_i); } } }
-  { auto _e = embedding_dim_per_channel(); if (_e) { _o->embedding_dim_per_channel.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->embedding_dim_per_channel[_i] = _e->Get(_i); } } }
+  { auto _e = num_columns_per_channel(); if (_e) { _o->num_columns_per_channel.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->num_columns_per_channel[_i] = _e->Get(_i); } } else { _o->num_columns_per_channel.resize(0); } }
+  { auto _e = embedding_dim_per_channel(); if (_e) { _o->embedding_dim_per_channel.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->embedding_dim_per_channel[_i] = _e->Get(_i); } } else { _o->embedding_dim_per_channel.resize(0); } }
 }
 
-inline flatbuffers::Offset<ConcatEmbeddingsOptions> ConcatEmbeddingsOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ConcatEmbeddingsOptions> ConcatEmbeddingsOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateConcatEmbeddingsOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatEmbeddingsOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ConcatEmbeddingsOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ConcatEmbeddingsOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _num_channels = _o->num_channels;
   auto _num_columns_per_channel = _o->num_columns_per_channel.size() ? _fbb.CreateVector(_o->num_columns_per_channel) : 0;
   auto _embedding_dim_per_channel = _o->embedding_dim_per_channel.size() ? _fbb.CreateVector(_o->embedding_dim_per_channel) : 0;
@@ -14154,39 +17826,39 @@ inline flatbuffers::Offset<ConcatEmbeddingsOptions> CreateConcatEmbeddingsOption
       _embedding_dim_per_channel);
 }
 
-inline LSHProjectionOptionsT *LSHProjectionOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LSHProjectionOptionsT *LSHProjectionOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<LSHProjectionOptionsT>(new LSHProjectionOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void LSHProjectionOptions::UnPackTo(LSHProjectionOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LSHProjectionOptions::UnPackTo(LSHProjectionOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = type(); _o->type = _e; }
 }
 
-inline flatbuffers::Offset<LSHProjectionOptions> LSHProjectionOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LSHProjectionOptions> LSHProjectionOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateLSHProjectionOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<LSHProjectionOptions> CreateLSHProjectionOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LSHProjectionOptions> CreateLSHProjectionOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LSHProjectionOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LSHProjectionOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LSHProjectionOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _type = _o->type;
   return tflite::CreateLSHProjectionOptions(
       _fbb,
       _type);
 }
 
-inline SVDFOptionsT *SVDFOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SVDFOptionsT *SVDFOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SVDFOptionsT>(new SVDFOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void SVDFOptions::UnPackTo(SVDFOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SVDFOptions::UnPackTo(SVDFOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = rank(); _o->rank = _e; }
@@ -14194,14 +17866,14 @@ inline void SVDFOptions::UnPackTo(SVDFOptionsT *_o, const flatbuffers::resolver_
   { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; }
 }
 
-inline flatbuffers::Offset<SVDFOptions> SVDFOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SVDFOptions> SVDFOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSVDFOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<SVDFOptions> CreateSVDFOptions(flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SVDFOptions> CreateSVDFOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SVDFOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SVDFOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SVDFOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _rank = _o->rank;
   auto _fused_activation_function = _o->fused_activation_function;
   auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs;
@@ -14212,27 +17884,27 @@ inline flatbuffers::Offset<SVDFOptions> CreateSVDFOptions(flatbuffers::FlatBuffe
       _asymmetric_quantize_inputs);
 }
 
-inline RNNOptionsT *RNNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline RNNOptionsT *RNNOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<RNNOptionsT>(new RNNOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void RNNOptions::UnPackTo(RNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void RNNOptions::UnPackTo(RNNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
   { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; }
 }
 
-inline flatbuffers::Offset<RNNOptions> RNNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RNNOptions> RNNOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateRNNOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<RNNOptions> CreateRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RNNOptions> CreateRNNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RNNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RNNOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const RNNOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _fused_activation_function = _o->fused_activation_function;
   auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs;
   return tflite::CreateRNNOptions(
@@ -14241,13 +17913,13 @@ inline flatbuffers::Offset<RNNOptions> CreateRNNOptions(flatbuffers::FlatBufferB
       _asymmetric_quantize_inputs);
 }
 
-inline SequenceRNNOptionsT *SequenceRNNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SequenceRNNOptionsT *SequenceRNNOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<SequenceRNNOptionsT>(new SequenceRNNOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void SequenceRNNOptions::UnPackTo(SequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SequenceRNNOptions::UnPackTo(SequenceRNNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = time_major(); _o->time_major = _e; }
@@ -14255,14 +17927,14 @@ inline void SequenceRNNOptions::UnPackTo(SequenceRNNOptionsT *_o, const flatbuff
   { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; }
 }
 
-inline flatbuffers::Offset<SequenceRNNOptions> SequenceRNNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SequenceRNNOptions> SequenceRNNOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSequenceRNNOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SequenceRNNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SequenceRNNOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SequenceRNNOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _time_major = _o->time_major;
   auto _fused_activation_function = _o->fused_activation_function;
   auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs;
@@ -14273,13 +17945,13 @@ inline flatbuffers::Offset<SequenceRNNOptions> CreateSequenceRNNOptions(flatbuff
       _asymmetric_quantize_inputs);
 }
 
-inline BidirectionalSequenceRNNOptionsT *BidirectionalSequenceRNNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline BidirectionalSequenceRNNOptionsT *BidirectionalSequenceRNNOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<BidirectionalSequenceRNNOptionsT>(new BidirectionalSequenceRNNOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void BidirectionalSequenceRNNOptions::UnPackTo(BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void BidirectionalSequenceRNNOptions::UnPackTo(BidirectionalSequenceRNNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = time_major(); _o->time_major = _e; }
@@ -14288,14 +17960,14 @@ inline void BidirectionalSequenceRNNOptions::UnPackTo(BidirectionalSequenceRNNOp
   { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; }
 }
 
-inline flatbuffers::Offset<BidirectionalSequenceRNNOptions> BidirectionalSequenceRNNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BidirectionalSequenceRNNOptions> BidirectionalSequenceRNNOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateBidirectionalSequenceRNNOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalSequenceRNNOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalSequenceRNNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceRNNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BidirectionalSequenceRNNOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BidirectionalSequenceRNNOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _time_major = _o->time_major;
   auto _fused_activation_function = _o->fused_activation_function;
   auto _merge_outputs = _o->merge_outputs;
@@ -14308,88 +17980,91 @@ inline flatbuffers::Offset<BidirectionalSequenceRNNOptions> CreateBidirectionalS
       _asymmetric_quantize_inputs);
 }
 
-inline FullyConnectedOptionsT *FullyConnectedOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline FullyConnectedOptionsT *FullyConnectedOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<FullyConnectedOptionsT>(new FullyConnectedOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void FullyConnectedOptions::UnPackTo(FullyConnectedOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void FullyConnectedOptions::UnPackTo(FullyConnectedOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
   { auto _e = weights_format(); _o->weights_format = _e; }
   { auto _e = keep_num_dims(); _o->keep_num_dims = _e; }
   { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; }
+  { auto _e = quantized_bias_type(); _o->quantized_bias_type = _e; }
 }
 
-inline flatbuffers::Offset<FullyConnectedOptions> FullyConnectedOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FullyConnectedOptions> FullyConnectedOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateFullyConnectedOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<FullyConnectedOptions> CreateFullyConnectedOptions(flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FullyConnectedOptions> CreateFullyConnectedOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FullyConnectedOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FullyConnectedOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const FullyConnectedOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _fused_activation_function = _o->fused_activation_function;
   auto _weights_format = _o->weights_format;
   auto _keep_num_dims = _o->keep_num_dims;
   auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs;
+  auto _quantized_bias_type = _o->quantized_bias_type;
   return tflite::CreateFullyConnectedOptions(
       _fbb,
       _fused_activation_function,
      _weights_format,
       _keep_num_dims,
-      _asymmetric_quantize_inputs);
+      _asymmetric_quantize_inputs,
+      _quantized_bias_type);
 }
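
`FullyConnectedOptions` gains the same appended `quantized_bias_type` field as `Conv2DOptions`, so a reader sees the field's declared default whenever a pre-existing model omits it. A small illustrative accessor, under the same assumptions as the earlier sketches (generated header path and accessor names taken from the schema):

```c++
// Sketch: older buffers never wrote quantized_bias_type; the generated
// accessor transparently returns the schema default in that case.
#include "tensorflow/lite/schema/schema_generated.h"  // assumed include path

tflite::TensorType GetFullyConnectedBiasType(
    const tflite::FullyConnectedOptions& options) {
  return options.quantized_bias_type();
}
```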
SoftmaxOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SoftmaxOptions::UnPackTo(SoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SoftmaxOptions::UnPackTo(SoftmaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = beta(); _o->beta = _e; } }
-inline flatbuffers::Offset<SoftmaxOptions> SoftmaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SoftmaxOptions> SoftmaxOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateSoftmaxOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SoftmaxOptions> CreateSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SoftmaxOptions> CreateSoftmaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SoftmaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SoftmaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SoftmaxOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _beta = _o->beta; return tflite::CreateSoftmaxOptions( _fbb, _beta); }
-inline ConcatenationOptionsT *ConcatenationOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ConcatenationOptionsT *ConcatenationOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ConcatenationOptionsT>(new ConcatenationOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ConcatenationOptions::UnPackTo(ConcatenationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ConcatenationOptions::UnPackTo(ConcatenationOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = axis(); _o->axis = _e; } { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } }
-inline flatbuffers::Offset<ConcatenationOptions> ConcatenationOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ConcatenationOptions> ConcatenationOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateConcatenationOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ConcatenationOptions> CreateConcatenationOptions(flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ConcatenationOptions> CreateConcatenationOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ConcatenationOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ConcatenationOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ConcatenationOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
 auto
_axis = _o->axis; auto _fused_activation_function = _o->fused_activation_function; return tflite::CreateConcatenationOptions(
@@ -14398,27 +18073,27 @@ inline flatbuffers::Offset CreateConcatenationOptions(flat
  _fused_activation_function); }
-inline AddOptionsT *AddOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline AddOptionsT *AddOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<AddOptionsT>(new AddOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void AddOptions::UnPackTo(AddOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void AddOptions::UnPackTo(AddOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } { auto _e = pot_scale_int16(); _o->pot_scale_int16 = _e; } }
-inline flatbuffers::Offset<AddOptions> AddOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AddOptions> AddOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateAddOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<AddOptions> CreateAddOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AddOptions> CreateAddOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AddOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AddOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const AddOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; auto _pot_scale_int16 = _o->pot_scale_int16; return tflite::CreateAddOptions(
@@ -14427,65 +18102,65 @@ inline flatbuffers::Offset CreateAddOptions(flatbuffers::FlatBufferB
  _pot_scale_int16); }
-inline MulOptionsT *MulOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline MulOptionsT *MulOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<MulOptionsT>(new MulOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void MulOptions::UnPackTo(MulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void MulOptions::UnPackTo(MulOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } }
-inline flatbuffers::Offset<MulOptions> MulOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MulOptions> MulOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateMulOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<MulOptions> CreateMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MulOptions> CreateMulOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MulOptionsT *_o,
const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MulOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const MulOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; return tflite::CreateMulOptions( _fbb, _fused_activation_function); }
-inline L2NormOptionsT *L2NormOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline L2NormOptionsT *L2NormOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<L2NormOptionsT>(new L2NormOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void L2NormOptions::UnPackTo(L2NormOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void L2NormOptions::UnPackTo(L2NormOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } }
-inline flatbuffers::Offset<L2NormOptions> L2NormOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<L2NormOptions> L2NormOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateL2NormOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<L2NormOptions> CreateL2NormOptions(flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<L2NormOptions> CreateL2NormOptions(::flatbuffers::FlatBufferBuilder &_fbb, const L2NormOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const L2NormOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const L2NormOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; return tflite::CreateL2NormOptions( _fbb, _fused_activation_function); }
-inline LocalResponseNormalizationOptionsT *LocalResponseNormalizationOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LocalResponseNormalizationOptionsT *LocalResponseNormalizationOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<LocalResponseNormalizationOptionsT>(new LocalResponseNormalizationOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void LocalResponseNormalizationOptions::UnPackTo(LocalResponseNormalizationOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LocalResponseNormalizationOptions::UnPackTo(LocalResponseNormalizationOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = radius(); _o->radius = _e; }
@@ -14494,14 +18169,14 @@ inline void LocalResponseNormalizationOptions::UnPackTo(LocalResponseNormalizati
  { auto _e = beta(); _o->beta = _e; } }
-inline flatbuffers::Offset<LocalResponseNormalizationOptions> LocalResponseNormalizationOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb,
const LocalResponseNormalizationOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LocalResponseNormalizationOptions> LocalResponseNormalizationOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateLocalResponseNormalizationOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<LocalResponseNormalizationOptions> CreateLocalResponseNormalizationOptions(flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LocalResponseNormalizationOptions> CreateLocalResponseNormalizationOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LocalResponseNormalizationOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LocalResponseNormalizationOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LocalResponseNormalizationOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _radius = _o->radius; auto _bias = _o->bias; auto _alpha = _o->alpha;
@@ -14514,13 +18189,13 @@ inline flatbuffers::Offset CreateLocalRespons
  _beta); }
-inline LSTMOptionsT *LSTMOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LSTMOptionsT *LSTMOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<LSTMOptionsT>(new LSTMOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void LSTMOptions::UnPackTo(LSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LSTMOptions::UnPackTo(LSTMOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
@@ -14530,14 +18205,14 @@ inline void LSTMOptions::UnPackTo(LSTMOptionsT *_o, const flatbuffers::resolver_
  { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } }
-inline flatbuffers::Offset<LSTMOptions> LSTMOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LSTMOptions> LSTMOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateLSTMOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<LSTMOptions> CreateLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LSTMOptions> CreateLSTMOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LSTMOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LSTMOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LSTMOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; auto _cell_clip = _o->cell_clip; auto _proj_clip = _o->proj_clip;
@@ -14552,13 +18227,13 @@ inline flatbuffers::Offset CreateLSTMOptions(flatbuffers::FlatBuffe
_asymmetric_quantize_inputs); }
-inline UnidirectionalSequenceLSTMOptionsT *UnidirectionalSequenceLSTMOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline UnidirectionalSequenceLSTMOptionsT *UnidirectionalSequenceLSTMOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<UnidirectionalSequenceLSTMOptionsT>(new UnidirectionalSequenceLSTMOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void UnidirectionalSequenceLSTMOptions::UnPackTo(UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void UnidirectionalSequenceLSTMOptions::UnPackTo(UnidirectionalSequenceLSTMOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
@@ -14569,14 +18244,14 @@ inline void UnidirectionalSequenceLSTMOptions::UnPackTo(UnidirectionalSequenceLS
  { auto _e = diagonal_recurrent_tensors(); _o->diagonal_recurrent_tensors = _e; } }
-inline flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> UnidirectionalSequenceLSTMOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> UnidirectionalSequenceLSTMOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateUnidirectionalSequenceLSTMOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> CreateUnidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnidirectionalSequenceLSTMOptions> CreateUnidirectionalSequenceLSTMOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnidirectionalSequenceLSTMOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnidirectionalSequenceLSTMOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const UnidirectionalSequenceLSTMOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; auto _cell_clip = _o->cell_clip; auto _proj_clip = _o->proj_clip;
@@ -14593,13 +18268,13 @@ inline flatbuffers::Offset CreateUnidirection
  _diagonal_recurrent_tensors); }
-inline BidirectionalSequenceLSTMOptionsT *BidirectionalSequenceLSTMOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline BidirectionalSequenceLSTMOptionsT *BidirectionalSequenceLSTMOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BidirectionalSequenceLSTMOptionsT>(new BidirectionalSequenceLSTMOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void BidirectionalSequenceLSTMOptions::UnPackTo(BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void BidirectionalSequenceLSTMOptions::UnPackTo(BidirectionalSequenceLSTMOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
@@ -14610,14 +18285,14 @@ inline void
BidirectionalSequenceLSTMOptions::UnPackTo(BidirectionalSequenceLSTM
  { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } }
-inline flatbuffers::Offset<BidirectionalSequenceLSTMOptions> BidirectionalSequenceLSTMOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BidirectionalSequenceLSTMOptions> BidirectionalSequenceLSTMOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBidirectionalSequenceLSTMOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<BidirectionalSequenceLSTMOptions> CreateBidirectionalSequenceLSTMOptions(flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BidirectionalSequenceLSTMOptions> CreateBidirectionalSequenceLSTMOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BidirectionalSequenceLSTMOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BidirectionalSequenceLSTMOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BidirectionalSequenceLSTMOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; auto _cell_clip = _o->cell_clip; auto _proj_clip = _o->proj_clip;
@@ -14634,27 +18309,27 @@ inline flatbuffers::Offset CreateBidirectional
  _asymmetric_quantize_inputs); }
-inline ResizeBilinearOptionsT *ResizeBilinearOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ResizeBilinearOptionsT *ResizeBilinearOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ResizeBilinearOptionsT>(new ResizeBilinearOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ResizeBilinearOptions::UnPackTo(ResizeBilinearOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ResizeBilinearOptions::UnPackTo(ResizeBilinearOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = align_corners(); _o->align_corners = _e; } { auto _e = half_pixel_centers(); _o->half_pixel_centers = _e; } }
-inline flatbuffers::Offset<ResizeBilinearOptions> ResizeBilinearOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ResizeBilinearOptions> ResizeBilinearOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateResizeBilinearOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ResizeBilinearOptions> CreateResizeBilinearOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ResizeBilinearOptions> CreateResizeBilinearOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeBilinearOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ResizeBilinearOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs {
::flatbuffers::FlatBufferBuilder *__fbb; const ResizeBilinearOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _align_corners = _o->align_corners; auto _half_pixel_centers = _o->half_pixel_centers; return tflite::CreateResizeBilinearOptions(
@@ -14663,27 +18338,27 @@ inline flatbuffers::Offset CreateResizeBilinearOptions(fl
  _half_pixel_centers); }
-inline ResizeNearestNeighborOptionsT *ResizeNearestNeighborOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ResizeNearestNeighborOptionsT *ResizeNearestNeighborOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ResizeNearestNeighborOptionsT>(new ResizeNearestNeighborOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ResizeNearestNeighborOptions::UnPackTo(ResizeNearestNeighborOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ResizeNearestNeighborOptions::UnPackTo(ResizeNearestNeighborOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = align_corners(); _o->align_corners = _e; } { auto _e = half_pixel_centers(); _o->half_pixel_centers = _e; } }
-inline flatbuffers::Offset<ResizeNearestNeighborOptions> ResizeNearestNeighborOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ResizeNearestNeighborOptions> ResizeNearestNeighborOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateResizeNearestNeighborOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ResizeNearestNeighborOptions> CreateResizeNearestNeighborOptions(flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ResizeNearestNeighborOptions> CreateResizeNearestNeighborOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ResizeNearestNeighborOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ResizeNearestNeighborOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ResizeNearestNeighborOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _align_corners = _o->align_corners; auto _half_pixel_centers = _o->half_pixel_centers; return tflite::CreateResizeNearestNeighborOptions(
@@ -14692,157 +18367,157 @@ inline flatbuffers::Offset CreateResizeNearestNeig
  _half_pixel_centers); }
-inline CallOptionsT *CallOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline CallOptionsT *CallOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<CallOptionsT>(new CallOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void CallOptions::UnPackTo(CallOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void CallOptions::UnPackTo(CallOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = subgraph(); _o->subgraph = _e; } }
-inline flatbuffers::Offset<CallOptions> CallOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CallOptions> CallOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCallOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<CallOptions> CreateCallOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CallOptions> CreateCallOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CallOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CallOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const CallOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _subgraph = _o->subgraph; return tflite::CreateCallOptions( _fbb, _subgraph); }
-inline PadOptionsT *PadOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline PadOptionsT *PadOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<PadOptionsT>(new PadOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void PadOptions::UnPackTo(PadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void PadOptions::UnPackTo(PadOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<PadOptions> PadOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PadOptions> PadOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreatePadOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<PadOptions> CreatePadOptions(flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PadOptions> CreatePadOptions(::flatbuffers::FlatBufferBuilder &_fbb, const PadOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const PadOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const PadOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreatePadOptions( _fbb); }
-inline PadV2OptionsT *PadV2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline PadV2OptionsT *PadV2Options::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<PadV2OptionsT>(new PadV2OptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void PadV2Options::UnPackTo(PadV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void PadV2Options::UnPackTo(PadV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<PadV2Options> PadV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PadV2Options> PadV2Options::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT*
_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreatePadV2Options(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<PadV2Options> CreatePadV2Options(flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PadV2Options> CreatePadV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const PadV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const PadV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const PadV2OptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreatePadV2Options( _fbb); }
-inline ReshapeOptionsT *ReshapeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ReshapeOptionsT *ReshapeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ReshapeOptionsT>(new ReshapeOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ReshapeOptions::UnPackTo(ReshapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ReshapeOptions::UnPackTo(ReshapeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver;
-  { auto _e = new_shape(); if (_e) { _o->new_shape.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->new_shape[_i] = _e->Get(_i); } } }
+  { auto _e = new_shape(); if (_e) { _o->new_shape.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->new_shape[_i] = _e->Get(_i); } } else { _o->new_shape.resize(0); } }
 }
-inline flatbuffers::Offset<ReshapeOptions> ReshapeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReshapeOptions> ReshapeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateReshapeOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ReshapeOptions> CreateReshapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReshapeOptions> CreateReshapeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReshapeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReshapeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ReshapeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _new_shape = _o->new_shape.size() ?
_fbb.CreateVector(_o->new_shape) : 0; return tflite::CreateReshapeOptions( _fbb, _new_shape); }
-inline SpaceToBatchNDOptionsT *SpaceToBatchNDOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SpaceToBatchNDOptionsT *SpaceToBatchNDOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<SpaceToBatchNDOptionsT>(new SpaceToBatchNDOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SpaceToBatchNDOptions::UnPackTo(SpaceToBatchNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SpaceToBatchNDOptions::UnPackTo(SpaceToBatchNDOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<SpaceToBatchNDOptions> SpaceToBatchNDOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SpaceToBatchNDOptions> SpaceToBatchNDOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateSpaceToBatchNDOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SpaceToBatchNDOptions> CreateSpaceToBatchNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SpaceToBatchNDOptions> CreateSpaceToBatchNDOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SpaceToBatchNDOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SpaceToBatchNDOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SpaceToBatchNDOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreateSpaceToBatchNDOptions( _fbb); }
-inline BatchToSpaceNDOptionsT *BatchToSpaceNDOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline BatchToSpaceNDOptionsT *BatchToSpaceNDOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<BatchToSpaceNDOptionsT>(new BatchToSpaceNDOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void BatchToSpaceNDOptions::UnPackTo(BatchToSpaceNDOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void BatchToSpaceNDOptions::UnPackTo(BatchToSpaceNDOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<BatchToSpaceNDOptions> BatchToSpaceNDOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BatchToSpaceNDOptions> BatchToSpaceNDOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateBatchToSpaceNDOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<BatchToSpaceNDOptions> CreateBatchToSpaceNDOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BatchToSpaceNDOptions> CreateBatchToSpaceNDOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BatchToSpaceNDOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const
BatchToSpaceNDOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BatchToSpaceNDOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreateBatchToSpaceNDOptions( _fbb); }
-inline SkipGramOptionsT *SkipGramOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SkipGramOptionsT *SkipGramOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<SkipGramOptionsT>(new SkipGramOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SkipGramOptions::UnPackTo(SkipGramOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SkipGramOptions::UnPackTo(SkipGramOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = ngram_size(); _o->ngram_size = _e; }
@@ -14850,14 +18525,14 @@ inline void SkipGramOptions::UnPackTo(SkipGramOptionsT *_o, const flatbuffers::r
  { auto _e = include_all_ngrams(); _o->include_all_ngrams = _e; } }
-inline flatbuffers::Offset<SkipGramOptions> SkipGramOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SkipGramOptions> SkipGramOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateSkipGramOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SkipGramOptions> CreateSkipGramOptions(flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SkipGramOptions> CreateSkipGramOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SkipGramOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SkipGramOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SkipGramOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _ngram_size = _o->ngram_size; auto _max_skip_size = _o->max_skip_size; auto _include_all_ngrams = _o->include_all_ngrams;
@@ -14868,79 +18543,79 @@ inline flatbuffers::Offset CreateSkipGramOptions(flatbuffers::F
  _include_all_ngrams); }
-inline SpaceToDepthOptionsT *SpaceToDepthOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SpaceToDepthOptionsT *SpaceToDepthOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<SpaceToDepthOptionsT>(new SpaceToDepthOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SpaceToDepthOptions::UnPackTo(SpaceToDepthOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SpaceToDepthOptions::UnPackTo(SpaceToDepthOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = block_size(); _o->block_size = _e; } }
-inline flatbuffers::Offset<SpaceToDepthOptions> SpaceToDepthOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SpaceToDepthOptions> SpaceToDepthOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const
SpaceToDepthOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateSpaceToDepthOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SpaceToDepthOptions> CreateSpaceToDepthOptions(flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SpaceToDepthOptions> CreateSpaceToDepthOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SpaceToDepthOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SpaceToDepthOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SpaceToDepthOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _block_size = _o->block_size; return tflite::CreateSpaceToDepthOptions( _fbb, _block_size); }
-inline DepthToSpaceOptionsT *DepthToSpaceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline DepthToSpaceOptionsT *DepthToSpaceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<DepthToSpaceOptionsT>(new DepthToSpaceOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void DepthToSpaceOptions::UnPackTo(DepthToSpaceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void DepthToSpaceOptions::UnPackTo(DepthToSpaceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = block_size(); _o->block_size = _e; } }
-inline flatbuffers::Offset<DepthToSpaceOptions> DepthToSpaceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DepthToSpaceOptions> DepthToSpaceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateDepthToSpaceOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<DepthToSpaceOptions> CreateDepthToSpaceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DepthToSpaceOptions> CreateDepthToSpaceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DepthToSpaceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DepthToSpaceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DepthToSpaceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _block_size = _o->block_size; return tflite::CreateDepthToSpaceOptions( _fbb, _block_size); }
-inline SubOptionsT *SubOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SubOptionsT *SubOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<SubOptionsT>(new SubOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SubOptions::UnPackTo(SubOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SubOptions::UnPackTo(SubOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e =
fused_activation_function(); _o->fused_activation_function = _e; } { auto _e = pot_scale_int16(); _o->pot_scale_int16 = _e; } }
-inline flatbuffers::Offset<SubOptions> SubOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SubOptions> SubOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateSubOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SubOptions> CreateSubOptions(flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SubOptions> CreateSubOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SubOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SubOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SubOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; auto _pot_scale_int16 = _o->pot_scale_int16; return tflite::CreateSubOptions(
@@ -14949,102 +18624,102 @@ inline flatbuffers::Offset CreateSubOptions(flatbuffers::FlatBufferB
  _pot_scale_int16); }
-inline DivOptionsT *DivOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline DivOptionsT *DivOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<DivOptionsT>(new DivOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void DivOptions::UnPackTo(DivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void DivOptions::UnPackTo(DivOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = fused_activation_function(); _o->fused_activation_function = _e; } }
-inline flatbuffers::Offset<DivOptions> DivOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DivOptions> DivOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateDivOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<DivOptions> CreateDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DivOptions> CreateDivOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DivOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DivOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DivOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _fused_activation_function = _o->fused_activation_function; return tflite::CreateDivOptions( _fbb, _fused_activation_function); }
-inline TopKV2OptionsT *TopKV2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline TopKV2OptionsT *TopKV2Options::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto
_o = std::unique_ptr<TopKV2OptionsT>(new TopKV2OptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void TopKV2Options::UnPackTo(TopKV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void TopKV2Options::UnPackTo(TopKV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<TopKV2Options> TopKV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TopKV2Options> TopKV2Options::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateTopKV2Options(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<TopKV2Options> CreateTopKV2Options(flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TopKV2Options> CreateTopKV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const TopKV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TopKV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const TopKV2OptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreateTopKV2Options( _fbb); }
-inline EmbeddingLookupSparseOptionsT *EmbeddingLookupSparseOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline EmbeddingLookupSparseOptionsT *EmbeddingLookupSparseOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<EmbeddingLookupSparseOptionsT>(new EmbeddingLookupSparseOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void EmbeddingLookupSparseOptions::UnPackTo(EmbeddingLookupSparseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void EmbeddingLookupSparseOptions::UnPackTo(EmbeddingLookupSparseOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = combiner(); _o->combiner = _e; } }
-inline flatbuffers::Offset<EmbeddingLookupSparseOptions> EmbeddingLookupSparseOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<EmbeddingLookupSparseOptions> EmbeddingLookupSparseOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateEmbeddingLookupSparseOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<EmbeddingLookupSparseOptions> CreateEmbeddingLookupSparseOptions(flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<EmbeddingLookupSparseOptions> CreateEmbeddingLookupSparseOptions(::flatbuffers::FlatBufferBuilder &_fbb, const EmbeddingLookupSparseOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EmbeddingLookupSparseOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const EmbeddingLookupSparseOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o,
_rehasher}; (void)_va;
  auto _combiner = _o->combiner; return tflite::CreateEmbeddingLookupSparseOptions( _fbb, _combiner); }
-inline GatherOptionsT *GatherOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline GatherOptionsT *GatherOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<GatherOptionsT>(new GatherOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void GatherOptions::UnPackTo(GatherOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void GatherOptions::UnPackTo(GatherOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = axis(); _o->axis = _e; } { auto _e = batch_dims(); _o->batch_dims = _e; } }
-inline flatbuffers::Offset<GatherOptions> GatherOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GatherOptions> GatherOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateGatherOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<GatherOptions> CreateGatherOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GatherOptions> CreateGatherOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GatherOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GatherOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const GatherOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _axis = _o->axis; auto _batch_dims = _o->batch_dims; return tflite::CreateGatherOptions(
@@ -15053,186 +18728,186 @@ inline flatbuffers::Offset CreateGatherOptions(flatbuffers::FlatB
  _batch_dims); }
-inline TransposeOptionsT *TransposeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline TransposeOptionsT *TransposeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<TransposeOptionsT>(new TransposeOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void TransposeOptions::UnPackTo(TransposeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void TransposeOptions::UnPackTo(TransposeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<TransposeOptions> TransposeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TransposeOptions> TransposeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateTransposeOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<TransposeOptions> CreateTransposeOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TransposeOptions> CreateTransposeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder
*__fbb; const TransposeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const TransposeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreateTransposeOptions( _fbb); }
-inline ExpOptionsT *ExpOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ExpOptionsT *ExpOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ExpOptionsT>(new ExpOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ExpOptions::UnPackTo(ExpOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ExpOptions::UnPackTo(ExpOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<ExpOptions> ExpOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ExpOptions> ExpOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateExpOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ExpOptions> CreateExpOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ExpOptions> CreateExpOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ExpOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ExpOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ExpOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreateExpOptions( _fbb); }
-inline CosOptionsT *CosOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline CosOptionsT *CosOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<CosOptionsT>(new CosOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void CosOptions::UnPackTo(CosOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void CosOptions::UnPackTo(CosOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<CosOptions> CosOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CosOptions> CosOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateCosOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<CosOptions> CreateCosOptions(flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CosOptions> CreateCosOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CosOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CosOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs {
::flatbuffers::FlatBufferBuilder *__fbb; const CosOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  return tflite::CreateCosOptions( _fbb); }
-inline ReducerOptionsT *ReducerOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ReducerOptionsT *ReducerOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<ReducerOptionsT>(new ReducerOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ReducerOptions::UnPackTo(ReducerOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ReducerOptions::UnPackTo(ReducerOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver; { auto _e = keep_dims(); _o->keep_dims = _e; } }
-inline flatbuffers::Offset<ReducerOptions> ReducerOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReducerOptions> ReducerOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateReducerOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ReducerOptions> CreateReducerOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReducerOptions> CreateReducerOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReducerOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReducerOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ReducerOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
  auto _keep_dims = _o->keep_dims; return tflite::CreateReducerOptions( _fbb, _keep_dims); }
-inline SqueezeOptionsT *SqueezeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SqueezeOptionsT *SqueezeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
  auto _o = std::unique_ptr<SqueezeOptionsT>(new SqueezeOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SqueezeOptions::UnPackTo(SqueezeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SqueezeOptions::UnPackTo(SqueezeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
  (void)_o; (void)_resolver;
-  { auto _e = squeeze_dims(); if (_e) { _o->squeeze_dims.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->squeeze_dims[_i] = _e->Get(_i); } } }
+  { auto _e = squeeze_dims(); if (_e) { _o->squeeze_dims.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->squeeze_dims[_i] = _e->Get(_i); } } else { _o->squeeze_dims.resize(0); } }
 }
-inline flatbuffers::Offset<SqueezeOptions> SqueezeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SqueezeOptions> SqueezeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
  return CreateSqueezeOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SqueezeOptions> CreateSqueezeOptions(flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT
*_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateSqueezeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SqueezeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SqueezeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SqueezeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; auto _squeeze_dims = _o->squeeze_dims.size() ? _fbb.CreateVector(_o->squeeze_dims) : 0; return tflite::CreateSqueezeOptions( _fbb, _squeeze_dims); } -inline SplitOptionsT *SplitOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline SplitOptionsT *SplitOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new SplitOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void SplitOptions::UnPackTo(SplitOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void SplitOptions::UnPackTo(SplitOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = num_splits(); _o->num_splits = _e; } } -inline flatbuffers::Offset SplitOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset SplitOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateSplitOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateSplitOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateSplitOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SplitOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SplitOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SplitOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; auto _num_splits = _o->num_splits; return tflite::CreateSplitOptions( _fbb, _num_splits); } -inline SplitVOptionsT *SplitVOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline SplitVOptionsT *SplitVOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new SplitVOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void SplitVOptions::UnPackTo(SplitVOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void SplitVOptions::UnPackTo(SplitVOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = num_splits(); _o->num_splits = _e; } } -inline flatbuffers::Offset SplitVOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset SplitVOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT* _o, const ::flatbuffers::rehasher_function_t 
   return CreateSplitVOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SplitVOptions> CreateSplitVOptions(flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SplitVOptions> CreateSplitVOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SplitVOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SplitVOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SplitVOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _num_splits = _o->num_splits; return tflite::CreateSplitVOptions( _fbb, _num_splits); }
-inline StridedSliceOptionsT *StridedSliceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline StridedSliceOptionsT *StridedSliceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<StridedSliceOptionsT>(new StridedSliceOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void StridedSliceOptions::UnPackTo(StridedSliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void StridedSliceOptions::UnPackTo(StridedSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver;
   { auto _e = begin_mask(); _o->begin_mask = _e; }
@@ -15243,14 +18918,14 @@ inline void StridedSliceOptions::UnPackTo(StridedSliceOptionsT *_o, const flatbu
   { auto _e = offset(); _o->offset = _e; }
 }
-inline flatbuffers::Offset<StridedSliceOptions> StridedSliceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StridedSliceOptions> StridedSliceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateStridedSliceOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<StridedSliceOptions> CreateStridedSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<StridedSliceOptions> CreateStridedSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StridedSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const StridedSliceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StridedSliceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _begin_mask = _o->begin_mask; auto _end_mask = _o->end_mask; auto _ellipsis_mask = _o->ellipsis_mask;
@@ -15267,50 +18942,50 @@ inline flatbuffers::Offset CreateStridedSliceOptions(flatbu
       _offset);
 }
-inline LogSoftmaxOptionsT *LogSoftmaxOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LogSoftmaxOptionsT *LogSoftmaxOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<LogSoftmaxOptionsT>(new LogSoftmaxOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void LogSoftmaxOptions::UnPackTo(LogSoftmaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LogSoftmaxOptions::UnPackTo(LogSoftmaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<LogSoftmaxOptions> LogSoftmaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LogSoftmaxOptions> LogSoftmaxOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateLogSoftmaxOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<LogSoftmaxOptions> CreateLogSoftmaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LogSoftmaxOptions> CreateLogSoftmaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogSoftmaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogSoftmaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LogSoftmaxOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateLogSoftmaxOptions( _fbb); }
-inline CastOptionsT *CastOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline CastOptionsT *CastOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<CastOptionsT>(new CastOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void CastOptions::UnPackTo(CastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void CastOptions::UnPackTo(CastOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = in_data_type(); _o->in_data_type = _e; } { auto _e = out_data_type(); _o->out_data_type = _e; } }
-inline flatbuffers::Offset<CastOptions> CastOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CastOptions> CastOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateCastOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<CastOptions> CreateCastOptions(flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CastOptions> CreateCastOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CastOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CastOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const CastOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _in_data_type = _o->in_data_type; auto _out_data_type = _o->out_data_type;
   return tflite::CreateCastOptions(
@@ -15319,497 +18994,500 @@ inline flatbuffers::Offset CreateCastOptions(flatbuffers::FlatBuffe
       _out_data_type);
 }
-inline DequantizeOptionsT *DequantizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline DequantizeOptionsT *DequantizeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<DequantizeOptionsT>(new DequantizeOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void DequantizeOptions::UnPackTo(DequantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void DequantizeOptions::UnPackTo(DequantizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<DequantizeOptions> DequantizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DequantizeOptions> DequantizeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateDequantizeOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<DequantizeOptions> CreateDequantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DequantizeOptions> CreateDequantizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DequantizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DequantizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DequantizeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateDequantizeOptions( _fbb); }
-inline MaximumMinimumOptionsT *MaximumMinimumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline MaximumMinimumOptionsT *MaximumMinimumOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<MaximumMinimumOptionsT>(new MaximumMinimumOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void MaximumMinimumOptions::UnPackTo(MaximumMinimumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void MaximumMinimumOptions::UnPackTo(MaximumMinimumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<MaximumMinimumOptions> MaximumMinimumOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MaximumMinimumOptions> MaximumMinimumOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateMaximumMinimumOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<MaximumMinimumOptions> CreateMaximumMinimumOptions(flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MaximumMinimumOptions> CreateMaximumMinimumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MaximumMinimumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MaximumMinimumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const MaximumMinimumOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateMaximumMinimumOptions( _fbb); }
-inline TileOptionsT *TileOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline TileOptionsT *TileOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<TileOptionsT>(new TileOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void TileOptions::UnPackTo(TileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void TileOptions::UnPackTo(TileOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<TileOptions> TileOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TileOptions> TileOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateTileOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<TileOptions> CreateTileOptions(flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TileOptions> CreateTileOptions(::flatbuffers::FlatBufferBuilder &_fbb, const TileOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TileOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const TileOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateTileOptions( _fbb); }
-inline ArgMaxOptionsT *ArgMaxOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ArgMaxOptionsT *ArgMaxOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ArgMaxOptionsT>(new ArgMaxOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ArgMaxOptions::UnPackTo(ArgMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ArgMaxOptions::UnPackTo(ArgMaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = output_type(); _o->output_type = _e; } }
-inline flatbuffers::Offset<ArgMaxOptions> ArgMaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ArgMaxOptions> ArgMaxOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateArgMaxOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ArgMaxOptions> CreateArgMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ArgMaxOptions> CreateArgMaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ArgMaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ArgMaxOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _output_type = _o->output_type; return tflite::CreateArgMaxOptions( _fbb, _output_type); }
-inline ArgMinOptionsT *ArgMinOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ArgMinOptionsT *ArgMinOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ArgMinOptionsT>(new ArgMinOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ArgMinOptions::UnPackTo(ArgMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ArgMinOptions::UnPackTo(ArgMinOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = output_type(); _o->output_type = _e; } }
-inline flatbuffers::Offset<ArgMinOptions> ArgMinOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ArgMinOptions> ArgMinOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateArgMinOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ArgMinOptions> CreateArgMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ArgMinOptions> CreateArgMinOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ArgMinOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ArgMinOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ArgMinOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _output_type = _o->output_type; return tflite::CreateArgMinOptions( _fbb, _output_type); }
-inline GreaterOptionsT *GreaterOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline GreaterOptionsT *GreaterOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<GreaterOptionsT>(new GreaterOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void GreaterOptions::UnPackTo(GreaterOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void GreaterOptions::UnPackTo(GreaterOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<GreaterOptions> GreaterOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GreaterOptions> GreaterOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateGreaterOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<GreaterOptions> CreateGreaterOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GreaterOptions> CreateGreaterOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GreaterOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const GreaterOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateGreaterOptions( _fbb); }
-inline GreaterEqualOptionsT *GreaterEqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline GreaterEqualOptionsT *GreaterEqualOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<GreaterEqualOptionsT>(new GreaterEqualOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void GreaterEqualOptions::UnPackTo(GreaterEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void GreaterEqualOptions::UnPackTo(GreaterEqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<GreaterEqualOptions> GreaterEqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GreaterEqualOptions> GreaterEqualOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateGreaterEqualOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<GreaterEqualOptions> CreateGreaterEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GreaterEqualOptions> CreateGreaterEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GreaterEqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GreaterEqualOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const GreaterEqualOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateGreaterEqualOptions( _fbb); }
-inline LessOptionsT *LessOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LessOptionsT *LessOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<LessOptionsT>(new LessOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void LessOptions::UnPackTo(LessOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LessOptions::UnPackTo(LessOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<LessOptions> LessOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LessOptions> LessOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateLessOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<LessOptions> CreateLessOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LessOptions> CreateLessOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LessOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LessOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LessOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateLessOptions( _fbb); }
-inline LessEqualOptionsT *LessEqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LessEqualOptionsT *LessEqualOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<LessEqualOptionsT>(new LessEqualOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void LessEqualOptions::UnPackTo(LessEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LessEqualOptions::UnPackTo(LessEqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<LessEqualOptions> LessEqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LessEqualOptions> LessEqualOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateLessEqualOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<LessEqualOptions> CreateLessEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LessEqualOptions> CreateLessEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LessEqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LessEqualOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LessEqualOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateLessEqualOptions( _fbb); }
-inline NegOptionsT *NegOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline NegOptionsT *NegOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<NegOptionsT>(new NegOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void NegOptions::UnPackTo(NegOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void NegOptions::UnPackTo(NegOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<NegOptions> NegOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NegOptions> NegOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateNegOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<NegOptions> CreateNegOptions(flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NegOptions> CreateNegOptions(::flatbuffers::FlatBufferBuilder &_fbb, const NegOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NegOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const NegOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateNegOptions( _fbb); }
-inline SelectOptionsT *SelectOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SelectOptionsT *SelectOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SelectOptionsT>(new SelectOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SelectOptions::UnPackTo(SelectOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SelectOptions::UnPackTo(SelectOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<SelectOptions> SelectOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SelectOptions> SelectOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSelectOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SelectOptions> CreateSelectOptions(flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SelectOptions> CreateSelectOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SelectOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SelectOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SelectOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateSelectOptions( _fbb); }
-inline SliceOptionsT *SliceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SliceOptionsT *SliceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SliceOptionsT>(new SliceOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SliceOptions::UnPackTo(SliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SliceOptions::UnPackTo(SliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<SliceOptions> SliceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SliceOptions> SliceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSliceOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SliceOptions> CreateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SliceOptions> CreateSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SliceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SliceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateSliceOptions( _fbb); }
-inline TransposeConvOptionsT *TransposeConvOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline TransposeConvOptionsT *TransposeConvOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<TransposeConvOptionsT>(new TransposeConvOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void TransposeConvOptions::UnPackTo(TransposeConvOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void TransposeConvOptions::UnPackTo(TransposeConvOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = padding(); _o->padding = _e; } { auto _e = stride_w(); _o->stride_w = _e; } { auto _e = stride_h(); _o->stride_h = _e; } { auto _e = fused_activation_function(); _o->fused_activation_function = _e; }
+  { auto _e = quantized_bias_type(); _o->quantized_bias_type = _e; }
 }
-inline flatbuffers::Offset<TransposeConvOptions> TransposeConvOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TransposeConvOptions> TransposeConvOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateTransposeConvOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<TransposeConvOptions> CreateTransposeConvOptions(flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<TransposeConvOptions> CreateTransposeConvOptions(::flatbuffers::FlatBufferBuilder &_fbb, const TransposeConvOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TransposeConvOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const TransposeConvOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _padding = _o->padding; auto _stride_w = _o->stride_w; auto _stride_h = _o->stride_h; auto _fused_activation_function = _o->fused_activation_function;
+  auto _quantized_bias_type = _o->quantized_bias_type;
   return tflite::CreateTransposeConvOptions( _fbb, _padding, _stride_w, _stride_h,
-      _fused_activation_function);
+      _fused_activation_function,
+      _quantized_bias_type);
 }
-inline ExpandDimsOptionsT *ExpandDimsOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ExpandDimsOptionsT *ExpandDimsOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ExpandDimsOptionsT>(new ExpandDimsOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ExpandDimsOptions::UnPackTo(ExpandDimsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ExpandDimsOptions::UnPackTo(ExpandDimsOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<ExpandDimsOptions> ExpandDimsOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ExpandDimsOptions> ExpandDimsOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateExpandDimsOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ExpandDimsOptions> CreateExpandDimsOptions(flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ExpandDimsOptions> CreateExpandDimsOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ExpandDimsOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ExpandDimsOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ExpandDimsOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateExpandDimsOptions( _fbb); }
-inline SparseToDenseOptionsT *SparseToDenseOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SparseToDenseOptionsT *SparseToDenseOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SparseToDenseOptionsT>(new SparseToDenseOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void SparseToDenseOptions::UnPackTo(SparseToDenseOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SparseToDenseOptions::UnPackTo(SparseToDenseOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = validate_indices(); _o->validate_indices = _e; } }
-inline flatbuffers::Offset<SparseToDenseOptions> SparseToDenseOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SparseToDenseOptions> SparseToDenseOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSparseToDenseOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<SparseToDenseOptions> CreateSparseToDenseOptions(flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SparseToDenseOptions> CreateSparseToDenseOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SparseToDenseOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SparseToDenseOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SparseToDenseOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _validate_indices = _o->validate_indices; return tflite::CreateSparseToDenseOptions( _fbb, _validate_indices); }
-inline EqualOptionsT *EqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline EqualOptionsT *EqualOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<EqualOptionsT>(new EqualOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void EqualOptions::UnPackTo(EqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void EqualOptions::UnPackTo(EqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<EqualOptions> EqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<EqualOptions> EqualOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateEqualOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<EqualOptions> CreateEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<EqualOptions> CreateEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const EqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const EqualOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const EqualOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateEqualOptions( _fbb); }
-inline NotEqualOptionsT *NotEqualOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline NotEqualOptionsT *NotEqualOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<NotEqualOptionsT>(new NotEqualOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void NotEqualOptions::UnPackTo(NotEqualOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void NotEqualOptions::UnPackTo(NotEqualOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<NotEqualOptions> NotEqualOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NotEqualOptions> NotEqualOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateNotEqualOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<NotEqualOptions> CreateNotEqualOptions(flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NotEqualOptions> CreateNotEqualOptions(::flatbuffers::FlatBufferBuilder &_fbb, const NotEqualOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NotEqualOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const NotEqualOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateNotEqualOptions( _fbb); }
-inline ShapeOptionsT *ShapeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ShapeOptionsT *ShapeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ShapeOptionsT>(new ShapeOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void ShapeOptions::UnPackTo(ShapeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ShapeOptions::UnPackTo(ShapeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = out_type(); _o->out_type = _e; } }
-inline flatbuffers::Offset<ShapeOptions> ShapeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ShapeOptions> ShapeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateShapeOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<ShapeOptions> CreateShapeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ShapeOptions> CreateShapeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ShapeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ShapeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ShapeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _out_type = _o->out_type; return tflite::CreateShapeOptions( _fbb, _out_type); }
-inline RankOptionsT *RankOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline RankOptionsT *RankOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<RankOptionsT>(new RankOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void RankOptions::UnPackTo(RankOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void RankOptions::UnPackTo(RankOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<RankOptions> RankOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RankOptions> RankOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateRankOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<RankOptions> CreateRankOptions(flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RankOptions> CreateRankOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RankOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RankOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const RankOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateRankOptions( _fbb); }
-inline PowOptionsT *PowOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline PowOptionsT *PowOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<PowOptionsT>(new PowOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void PowOptions::UnPackTo(PowOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void PowOptions::UnPackTo(PowOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<PowOptions> PowOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PowOptions> PowOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreatePowOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<PowOptions> CreatePowOptions(flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PowOptions> CreatePowOptions(::flatbuffers::FlatBufferBuilder &_fbb, const PowOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const PowOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const PowOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreatePowOptions( _fbb); }
-inline FakeQuantOptionsT *FakeQuantOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline FakeQuantOptionsT *FakeQuantOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<FakeQuantOptionsT>(new FakeQuantOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void FakeQuantOptions::UnPackTo(FakeQuantOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void FakeQuantOptions::UnPackTo(FakeQuantOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver;
   { auto _e = min(); _o->min = _e; }
@@ -15818,14 +19496,14 @@ inline void FakeQuantOptions::UnPackTo(FakeQuantOptionsT *_o, const flatbuffers:
   { auto _e = narrow_range(); _o->narrow_range = _e; }
 }
-inline flatbuffers::Offset<FakeQuantOptions> FakeQuantOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FakeQuantOptions> FakeQuantOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateFakeQuantOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<FakeQuantOptions> CreateFakeQuantOptions(flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FakeQuantOptions> CreateFakeQuantOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FakeQuantOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FakeQuantOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const FakeQuantOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _min = _o->min; auto _max = _o->max; auto _num_bits = _o->num_bits;
@@ -15838,27 +19516,27 @@ inline flatbuffers::Offset CreateFakeQuantOptions(flatbuffers:
       _narrow_range);
 }
-inline PackOptionsT *PackOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline PackOptionsT *PackOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<PackOptionsT>(new PackOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void PackOptions::UnPackTo(PackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void PackOptions::UnPackTo(PackOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = values_count(); _o->values_count = _e; } { auto _e = axis(); _o->axis = _e; } }
-inline flatbuffers::Offset<PackOptions> PackOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PackOptions> PackOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreatePackOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<PackOptions> CreatePackOptions(flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<PackOptions> CreatePackOptions(::flatbuffers::FlatBufferBuilder &_fbb, const PackOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const PackOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const PackOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _values_count = _o->values_count; auto _axis = _o->axis;
   return tflite::CreatePackOptions(
@@ -15867,168 +19545,168 @@ inline flatbuffers::Offset CreatePackOptions(flatbuffers::FlatBuffe
       _axis);
 }
-inline LogicalOrOptionsT *LogicalOrOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LogicalOrOptionsT *LogicalOrOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<LogicalOrOptionsT>(new LogicalOrOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void LogicalOrOptions::UnPackTo(LogicalOrOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LogicalOrOptions::UnPackTo(LogicalOrOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<LogicalOrOptions> LogicalOrOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LogicalOrOptions> LogicalOrOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateLogicalOrOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<LogicalOrOptions> CreateLogicalOrOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LogicalOrOptions> CreateLogicalOrOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalOrOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogicalOrOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LogicalOrOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateLogicalOrOptions( _fbb); }
-inline OneHotOptionsT *OneHotOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline OneHotOptionsT *OneHotOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<OneHotOptionsT>(new OneHotOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void OneHotOptions::UnPackTo(OneHotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void OneHotOptions::UnPackTo(OneHotOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; { auto _e = axis(); _o->axis = _e; } }
-inline flatbuffers::Offset<OneHotOptions> OneHotOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<OneHotOptions> OneHotOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateOneHotOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<OneHotOptions> CreateOneHotOptions(flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<OneHotOptions> CreateOneHotOptions(::flatbuffers::FlatBufferBuilder &_fbb, const OneHotOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const OneHotOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const OneHotOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _axis = _o->axis; return tflite::CreateOneHotOptions( _fbb, _axis); }
-inline AbsOptionsT *AbsOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline AbsOptionsT *AbsOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<AbsOptionsT>(new AbsOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); }
-inline void AbsOptions::UnPackTo(AbsOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void AbsOptions::UnPackTo(AbsOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o; (void)_resolver; }
-inline flatbuffers::Offset<AbsOptions> AbsOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AbsOptions> AbsOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateAbsOptions(_fbb, _o, _rehasher); }
-inline flatbuffers::Offset<AbsOptions> CreateAbsOptions(flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AbsOptions> CreateAbsOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AbsOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AbsOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const AbsOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateAbsOptions( _fbb); }
-inline HardSwishOptionsT *HardSwishOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline HardSwishOptionsT *HardSwishOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
_o.release(); } -inline void HardSwishOptions::UnPackTo(HardSwishOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void HardSwishOptions::UnPackTo(HardSwishOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; } -inline flatbuffers::Offset HardSwishOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset HardSwishOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateHardSwishOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateHardSwishOptions(flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateHardSwishOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HardSwishOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HardSwishOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const HardSwishOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateHardSwishOptions( _fbb); } -inline LogicalAndOptionsT *LogicalAndOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline LogicalAndOptionsT *LogicalAndOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new LogicalAndOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void LogicalAndOptions::UnPackTo(LogicalAndOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void LogicalAndOptions::UnPackTo(LogicalAndOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; } -inline flatbuffers::Offset LogicalAndOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset LogicalAndOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateLogicalAndOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateLogicalAndOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateLogicalAndOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalAndOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogicalAndOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LogicalAndOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateLogicalAndOptions( _fbb); } -inline LogicalNotOptionsT *LogicalNotOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline LogicalNotOptionsT *LogicalNotOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { 
   auto _o = std::unique_ptr<LogicalNotOptionsT>(new LogicalNotOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void LogicalNotOptions::UnPackTo(LogicalNotOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LogicalNotOptions::UnPackTo(LogicalNotOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<LogicalNotOptions> LogicalNotOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LogicalNotOptions> LogicalNotOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateLogicalNotOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<LogicalNotOptions> CreateLogicalNotOptions(flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LogicalNotOptions> CreateLogicalNotOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LogicalNotOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LogicalNotOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LogicalNotOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateLogicalNotOptions(
       _fbb);
 }

-inline UnpackOptionsT *UnpackOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline UnpackOptionsT *UnpackOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<UnpackOptionsT>(new UnpackOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void UnpackOptions::UnPackTo(UnpackOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void UnpackOptions::UnPackTo(UnpackOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = num(); _o->num = _e; }
   { auto _e = axis(); _o->axis = _e; }
 }

-inline flatbuffers::Offset<UnpackOptions> UnpackOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnpackOptions> UnpackOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateUnpackOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnpackOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnpackOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const UnpackOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _num = _o->num;
   auto _axis = _o->axis;
   return tflite::CreateUnpackOptions(
@@ -16037,358 +19715,358 @@ inline flatbuffers::Offset<UnpackOptions> CreateUnpackOptions(flatbuffers::FlatB
       _axis);
 }

-inline FloorDivOptionsT *FloorDivOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline FloorDivOptionsT *FloorDivOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<FloorDivOptionsT>(new FloorDivOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void FloorDivOptions::UnPackTo(FloorDivOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void FloorDivOptions::UnPackTo(FloorDivOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<FloorDivOptions> FloorDivOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FloorDivOptions> FloorDivOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateFloorDivOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<FloorDivOptions> CreateFloorDivOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FloorDivOptions> CreateFloorDivOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FloorDivOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FloorDivOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const FloorDivOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateFloorDivOptions(
       _fbb);
 }

-inline SquareOptionsT *SquareOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SquareOptionsT *SquareOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SquareOptionsT>(new SquareOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void SquareOptions::UnPackTo(SquareOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SquareOptions::UnPackTo(SquareOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<SquareOptions> SquareOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SquareOptions> SquareOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSquareOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<SquareOptions> CreateSquareOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SquareOptions> CreateSquareOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SquareOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SquareOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SquareOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateSquareOptions(
       _fbb);
 }

-inline ZerosLikeOptionsT *ZerosLikeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ZerosLikeOptionsT *ZerosLikeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ZerosLikeOptionsT>(new ZerosLikeOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void ZerosLikeOptions::UnPackTo(ZerosLikeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ZerosLikeOptions::UnPackTo(ZerosLikeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<ZerosLikeOptions> ZerosLikeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ZerosLikeOptions> ZerosLikeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateZerosLikeOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<ZerosLikeOptions> CreateZerosLikeOptions(flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ZerosLikeOptions> CreateZerosLikeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ZerosLikeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ZerosLikeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ZerosLikeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateZerosLikeOptions(
       _fbb);
 }

-inline FillOptionsT *FillOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline FillOptionsT *FillOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<FillOptionsT>(new FillOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void FillOptions::UnPackTo(FillOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void FillOptions::UnPackTo(FillOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<FillOptions> FillOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FillOptions> FillOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateFillOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<FillOptions> CreateFillOptions(flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FillOptions> CreateFillOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FillOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FillOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const FillOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateFillOptions(
       _fbb);
 }

-inline FloorModOptionsT *FloorModOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline FloorModOptionsT *FloorModOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<FloorModOptionsT>(new FloorModOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void FloorModOptions::UnPackTo(FloorModOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void FloorModOptions::UnPackTo(FloorModOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<FloorModOptions> FloorModOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FloorModOptions> FloorModOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateFloorModOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<FloorModOptions> CreateFloorModOptions(flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<FloorModOptions> CreateFloorModOptions(::flatbuffers::FlatBufferBuilder &_fbb, const FloorModOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const FloorModOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const FloorModOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateFloorModOptions(
       _fbb);
 }

-inline RangeOptionsT *RangeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline RangeOptionsT *RangeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<RangeOptionsT>(new RangeOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void RangeOptions::UnPackTo(RangeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void RangeOptions::UnPackTo(RangeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<RangeOptions> RangeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RangeOptions> RangeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateRangeOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<RangeOptions> CreateRangeOptions(flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RangeOptions> CreateRangeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RangeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RangeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const RangeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateRangeOptions(
       _fbb);
 }

-inline LeakyReluOptionsT *LeakyReluOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline LeakyReluOptionsT *LeakyReluOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<LeakyReluOptionsT>(new LeakyReluOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void LeakyReluOptions::UnPackTo(LeakyReluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void LeakyReluOptions::UnPackTo(LeakyReluOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = alpha(); _o->alpha = _e; }
 }

-inline flatbuffers::Offset<LeakyReluOptions> LeakyReluOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LeakyReluOptions> LeakyReluOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateLeakyReluOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<LeakyReluOptions> CreateLeakyReluOptions(flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<LeakyReluOptions> CreateLeakyReluOptions(::flatbuffers::FlatBufferBuilder &_fbb, const LeakyReluOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const LeakyReluOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const LeakyReluOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _alpha = _o->alpha;
   return tflite::CreateLeakyReluOptions(
       _fbb,
       _alpha);
 }

-inline SquaredDifferenceOptionsT *SquaredDifferenceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SquaredDifferenceOptionsT *SquaredDifferenceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SquaredDifferenceOptionsT>(new SquaredDifferenceOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void SquaredDifferenceOptions::UnPackTo(SquaredDifferenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SquaredDifferenceOptions::UnPackTo(SquaredDifferenceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<SquaredDifferenceOptions> SquaredDifferenceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SquaredDifferenceOptions> SquaredDifferenceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSquaredDifferenceOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<SquaredDifferenceOptions> CreateSquaredDifferenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SquaredDifferenceOptions> CreateSquaredDifferenceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SquaredDifferenceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SquaredDifferenceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SquaredDifferenceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateSquaredDifferenceOptions(
       _fbb);
 }

-inline MirrorPadOptionsT *MirrorPadOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline MirrorPadOptionsT *MirrorPadOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<MirrorPadOptionsT>(new MirrorPadOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void MirrorPadOptions::UnPackTo(MirrorPadOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void MirrorPadOptions::UnPackTo(MirrorPadOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = mode(); _o->mode = _e; }
 }

-inline flatbuffers::Offset<MirrorPadOptions> MirrorPadOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MirrorPadOptions> MirrorPadOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateMirrorPadOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<MirrorPadOptions> CreateMirrorPadOptions(flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MirrorPadOptions> CreateMirrorPadOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MirrorPadOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MirrorPadOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const MirrorPadOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _mode = _o->mode;
   return tflite::CreateMirrorPadOptions(
       _fbb,
       _mode);
 }

-inline UniqueOptionsT *UniqueOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline UniqueOptionsT *UniqueOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<UniqueOptionsT>(new UniqueOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void UniqueOptions::UnPackTo(UniqueOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void UniqueOptions::UnPackTo(UniqueOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = idx_out_type(); _o->idx_out_type = _e; }
 }

-inline flatbuffers::Offset<UniqueOptions> UniqueOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UniqueOptions> UniqueOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateUniqueOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<UniqueOptions> CreateUniqueOptions(flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UniqueOptions> CreateUniqueOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UniqueOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UniqueOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const UniqueOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _idx_out_type = _o->idx_out_type;
   return tflite::CreateUniqueOptions(
       _fbb,
       _idx_out_type);
 }

-inline ReverseV2OptionsT *ReverseV2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ReverseV2OptionsT *ReverseV2Options::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ReverseV2OptionsT>(new ReverseV2OptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void ReverseV2Options::UnPackTo(ReverseV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ReverseV2Options::UnPackTo(ReverseV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<ReverseV2Options> ReverseV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReverseV2Options> ReverseV2Options::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateReverseV2Options(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<ReverseV2Options> CreateReverseV2Options(flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReverseV2Options> CreateReverseV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReverseV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ReverseV2OptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateReverseV2Options(
       _fbb);
 }

-inline AddNOptionsT *AddNOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline AddNOptionsT *AddNOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<AddNOptionsT>(new AddNOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void AddNOptions::UnPackTo(AddNOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void AddNOptions::UnPackTo(AddNOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<AddNOptions> AddNOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AddNOptions> AddNOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateAddNOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<AddNOptions> CreateAddNOptions(flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AddNOptions> CreateAddNOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AddNOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AddNOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const AddNOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateAddNOptions(
       _fbb);
 }

-inline GatherNdOptionsT *GatherNdOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline GatherNdOptionsT *GatherNdOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<GatherNdOptionsT>(new GatherNdOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void GatherNdOptions::UnPackTo(GatherNdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void GatherNdOptions::UnPackTo(GatherNdOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<GatherNdOptions> GatherNdOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GatherNdOptions> GatherNdOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateGatherNdOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<GatherNdOptions> CreateGatherNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GatherNdOptions> CreateGatherNdOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GatherNdOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GatherNdOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const GatherNdOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateGatherNdOptions(
       _fbb);
 }

-inline WhereOptionsT *WhereOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline WhereOptionsT *WhereOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<WhereOptionsT>(new WhereOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void WhereOptions::UnPackTo(WhereOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void WhereOptions::UnPackTo(WhereOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<WhereOptions> WhereOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<WhereOptions> WhereOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateWhereOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<WhereOptions> CreateWhereOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<WhereOptions> CreateWhereOptions(::flatbuffers::FlatBufferBuilder &_fbb, const WhereOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const WhereOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const WhereOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateWhereOptions(
       _fbb);
 }

-inline ReverseSequenceOptionsT *ReverseSequenceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ReverseSequenceOptionsT *ReverseSequenceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ReverseSequenceOptionsT>(new ReverseSequenceOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void ReverseSequenceOptions::UnPackTo(ReverseSequenceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ReverseSequenceOptions::UnPackTo(ReverseSequenceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = seq_dim(); _o->seq_dim = _e; }
   { auto _e = batch_dim(); _o->batch_dim = _e; }
 }

-inline flatbuffers::Offset<ReverseSequenceOptions> ReverseSequenceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReverseSequenceOptions> ReverseSequenceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateReverseSequenceOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<ReverseSequenceOptions> CreateReverseSequenceOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReverseSequenceOptions> CreateReverseSequenceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReverseSequenceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReverseSequenceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ReverseSequenceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _seq_dim = _o->seq_dim;
   auto _batch_dim = _o->batch_dim;
   return tflite::CreateReverseSequenceOptions(
@@ -16397,96 +20075,96 @@ inline flatbuffers::Offset<ReverseSequenceOptions> CreateReverseSequenceOptions(
       _batch_dim);
 }

-inline MatrixDiagOptionsT *MatrixDiagOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline MatrixDiagOptionsT *MatrixDiagOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<MatrixDiagOptionsT>(new MatrixDiagOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void MatrixDiagOptions::UnPackTo(MatrixDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void MatrixDiagOptions::UnPackTo(MatrixDiagOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<MatrixDiagOptions> MatrixDiagOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MatrixDiagOptions> MatrixDiagOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateMatrixDiagOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<MatrixDiagOptions> CreateMatrixDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MatrixDiagOptions> CreateMatrixDiagOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixDiagOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MatrixDiagOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const MatrixDiagOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateMatrixDiagOptions(
       _fbb);
 }

-inline QuantizeOptionsT *QuantizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline QuantizeOptionsT *QuantizeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<QuantizeOptionsT>(new QuantizeOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void QuantizeOptions::UnPackTo(QuantizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void QuantizeOptions::UnPackTo(QuantizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<QuantizeOptions> QuantizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<QuantizeOptions> QuantizeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateQuantizeOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<QuantizeOptions> CreateQuantizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<QuantizeOptions> CreateQuantizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const QuantizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const QuantizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const QuantizeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateQuantizeOptions(
       _fbb);
 }

-inline MatrixSetDiagOptionsT *MatrixSetDiagOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline MatrixSetDiagOptionsT *MatrixSetDiagOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<MatrixSetDiagOptionsT>(new MatrixSetDiagOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void MatrixSetDiagOptions::UnPackTo(MatrixSetDiagOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void MatrixSetDiagOptions::UnPackTo(MatrixSetDiagOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<MatrixSetDiagOptions> MatrixSetDiagOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MatrixSetDiagOptions> MatrixSetDiagOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateMatrixSetDiagOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<MatrixSetDiagOptions> CreateMatrixSetDiagOptions(flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<MatrixSetDiagOptions> CreateMatrixSetDiagOptions(::flatbuffers::FlatBufferBuilder &_fbb, const MatrixSetDiagOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MatrixSetDiagOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const MatrixSetDiagOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateMatrixSetDiagOptions(
       _fbb);
 }

-inline IfOptionsT *IfOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline IfOptionsT *IfOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<IfOptionsT>(new IfOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void IfOptions::UnPackTo(IfOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void IfOptions::UnPackTo(IfOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = then_subgraph_index(); _o->then_subgraph_index = _e; }
   { auto _e = else_subgraph_index(); _o->else_subgraph_index = _e; }
 }

-inline flatbuffers::Offset<IfOptions> IfOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<IfOptions> IfOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateIfOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<IfOptions> CreateIfOptions(flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<IfOptions> CreateIfOptions(::flatbuffers::FlatBufferBuilder &_fbb, const IfOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const IfOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const IfOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _then_subgraph_index = _o->then_subgraph_index;
   auto _else_subgraph_index = _o->else_subgraph_index;
   return tflite::CreateIfOptions(
@@ -16495,53 +20173,53 @@ inline flatbuffers::Offset<IfOptions> CreateIfOptions(flatbuffers::FlatBufferBui
       _else_subgraph_index);
 }

-inline CallOnceOptionsT *CallOnceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline CallOnceOptionsT *CallOnceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<CallOnceOptionsT>(new CallOnceOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void CallOnceOptions::UnPackTo(CallOnceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void CallOnceOptions::UnPackTo(CallOnceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = init_subgraph_index(); _o->init_subgraph_index = _e; }
 }

-inline flatbuffers::Offset<CallOnceOptions> CallOnceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CallOnceOptions> CallOnceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateCallOnceOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<CallOnceOptions> CreateCallOnceOptions(flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<CallOnceOptions> CreateCallOnceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CallOnceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CallOnceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const CallOnceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _init_subgraph_index = _o->init_subgraph_index;
   return tflite::CreateCallOnceOptions(
       _fbb,
       _init_subgraph_index);
 }

-inline WhileOptionsT *WhileOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline WhileOptionsT *WhileOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<WhileOptionsT>(new WhileOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void WhileOptions::UnPackTo(WhileOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void WhileOptions::UnPackTo(WhileOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = cond_subgraph_index(); _o->cond_subgraph_index = _e; }
   { auto _e = body_subgraph_index(); _o->body_subgraph_index = _e; }
 }

-inline flatbuffers::Offset<WhileOptions> WhileOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<WhileOptions> WhileOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateWhileOptions(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<WhileOptions> CreateWhileOptions(flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<WhileOptions> CreateWhileOptions(::flatbuffers::FlatBufferBuilder &_fbb, const WhileOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const WhileOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const WhileOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _cond_subgraph_index = _o->cond_subgraph_index;
   auto _body_subgraph_index = _o->body_subgraph_index;
   return tflite::CreateWhileOptions(
@@ -16550,151 +20228,151 @@ inline flatbuffers::Offset<WhileOptions> CreateWhileOptions(flatbuffers::FlatBuf
       _body_subgraph_index);
 }

-inline NonMaxSuppressionV4OptionsT *NonMaxSuppressionV4Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline NonMaxSuppressionV4OptionsT *NonMaxSuppressionV4Options::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<NonMaxSuppressionV4OptionsT>(new NonMaxSuppressionV4OptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void NonMaxSuppressionV4Options::UnPackTo(NonMaxSuppressionV4OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void NonMaxSuppressionV4Options::UnPackTo(NonMaxSuppressionV4OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<NonMaxSuppressionV4Options> NonMaxSuppressionV4Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NonMaxSuppressionV4Options> NonMaxSuppressionV4Options::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateNonMaxSuppressionV4Options(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<NonMaxSuppressionV4Options> CreateNonMaxSuppressionV4Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NonMaxSuppressionV4Options> CreateNonMaxSuppressionV4Options(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV4OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher; (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NonMaxSuppressionV4OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const NonMaxSuppressionV4OptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateNonMaxSuppressionV4Options(
       _fbb);
 }

-inline NonMaxSuppressionV5OptionsT *NonMaxSuppressionV5Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline NonMaxSuppressionV5OptionsT *NonMaxSuppressionV5Options::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<NonMaxSuppressionV5OptionsT>(new NonMaxSuppressionV5OptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }

-inline void NonMaxSuppressionV5Options::UnPackTo(NonMaxSuppressionV5OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void NonMaxSuppressionV5Options::UnPackTo(NonMaxSuppressionV5OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }

-inline flatbuffers::Offset<NonMaxSuppressionV5Options> NonMaxSuppressionV5Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NonMaxSuppressionV5Options> NonMaxSuppressionV5Options::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateNonMaxSuppressionV5Options(_fbb, _o, _rehasher);
 }

-inline flatbuffers::Offset<NonMaxSuppressionV5Options> CreateNonMaxSuppressionV5Options(flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<NonMaxSuppressionV5Options> CreateNonMaxSuppressionV5Options(::flatbuffers::FlatBufferBuilder &_fbb, const NonMaxSuppressionV5OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
(void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const NonMaxSuppressionV5OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const NonMaxSuppressionV5OptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateNonMaxSuppressionV5Options( _fbb); } -inline ScatterNdOptionsT *ScatterNdOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline ScatterNdOptionsT *ScatterNdOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new ScatterNdOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void ScatterNdOptions::UnPackTo(ScatterNdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void ScatterNdOptions::UnPackTo(ScatterNdOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; } -inline flatbuffers::Offset ScatterNdOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset ScatterNdOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateScatterNdOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateScatterNdOptions(flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateScatterNdOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ScatterNdOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ScatterNdOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ScatterNdOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateScatterNdOptions( _fbb); } -inline SelectV2OptionsT *SelectV2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline SelectV2OptionsT *SelectV2Options::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new SelectV2OptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void SelectV2Options::UnPackTo(SelectV2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void SelectV2Options::UnPackTo(SelectV2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; } -inline flatbuffers::Offset SelectV2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset SelectV2Options::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateSelectV2Options(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateSelectV2Options(flatbuffers::FlatBufferBuilder &_fbb, const SelectV2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateSelectV2Options(::flatbuffers::FlatBufferBuilder &_fbb, const 
SelectV2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SelectV2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SelectV2OptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateSelectV2Options( _fbb); } -inline DensifyOptionsT *DensifyOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline DensifyOptionsT *DensifyOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new DensifyOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void DensifyOptions::UnPackTo(DensifyOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void DensifyOptions::UnPackTo(DensifyOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; } -inline flatbuffers::Offset DensifyOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset DensifyOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateDensifyOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateDensifyOptions(flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateDensifyOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DensifyOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DensifyOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DensifyOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateDensifyOptions( _fbb); } -inline SegmentSumOptionsT *SegmentSumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline SegmentSumOptionsT *SegmentSumOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new SegmentSumOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void SegmentSumOptions::UnPackTo(SegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void SegmentSumOptions::UnPackTo(SegmentSumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; } -inline flatbuffers::Offset SegmentSumOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset SegmentSumOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateSegmentSumOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset 
CreateSegmentSumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SegmentSumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SegmentSumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SegmentSumOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateSegmentSumOptions( _fbb); } -inline BatchMatMulOptionsT *BatchMatMulOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline BatchMatMulOptionsT *BatchMatMulOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new BatchMatMulOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void BatchMatMulOptions::UnPackTo(BatchMatMulOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void BatchMatMulOptions::UnPackTo(BatchMatMulOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = adj_x(); _o->adj_x = _e; } @@ -16702,14 +20380,14 @@ inline void BatchMatMulOptions::UnPackTo(BatchMatMulOptionsT *_o, const flatbuff { auto _e = asymmetric_quantize_inputs(); _o->asymmetric_quantize_inputs = _e; } } -inline flatbuffers::Offset BatchMatMulOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset BatchMatMulOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateBatchMatMulOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateBatchMatMulOptions(flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateBatchMatMulOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BatchMatMulOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BatchMatMulOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BatchMatMulOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; auto _adj_x = _o->adj_x; auto _adj_y = _o->adj_y; auto _asymmetric_quantize_inputs = _o->asymmetric_quantize_inputs; @@ -16720,27 +20398,27 @@ inline flatbuffers::Offset CreateBatchMatMulOptions(flatbuff _asymmetric_quantize_inputs); } -inline CumsumOptionsT *CumsumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline CumsumOptionsT *CumsumOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new CumsumOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void CumsumOptions::UnPackTo(CumsumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void CumsumOptions::UnPackTo(CumsumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = exclusive(); _o->exclusive = _e; } { auto _e = reverse(); _o->reverse = _e; } } -inline 
flatbuffers::Offset CumsumOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CumsumOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateCumsumOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateCumsumOptions(flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateCumsumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const CumsumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const CumsumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const CumsumOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; auto _exclusive = _o->exclusive; auto _reverse = _o->reverse; return tflite::CreateCumsumOptions( @@ -16749,59 +20427,59 @@ inline flatbuffers::Offset CreateCumsumOptions(flatbuffers::FlatB _reverse); } -inline BroadcastToOptionsT *BroadcastToOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline BroadcastToOptionsT *BroadcastToOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new BroadcastToOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void BroadcastToOptions::UnPackTo(BroadcastToOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void BroadcastToOptions::UnPackTo(BroadcastToOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; } -inline flatbuffers::Offset BroadcastToOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset BroadcastToOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateBroadcastToOptions(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateBroadcastToOptions(flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateBroadcastToOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BroadcastToOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BroadcastToOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BroadcastToOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; return tflite::CreateBroadcastToOptions( _fbb); } -inline Rfft2dOptionsT *Rfft2dOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline Rfft2dOptionsT *Rfft2dOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new Rfft2dOptionsT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void Rfft2dOptions::UnPackTo(Rfft2dOptionsT *_o, const 
-inline void Rfft2dOptions::UnPackTo(Rfft2dOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void Rfft2dOptions::UnPackTo(Rfft2dOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<Rfft2dOptions> Rfft2dOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Rfft2dOptions> Rfft2dOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateRfft2dOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Rfft2dOptions> CreateRfft2dOptions(flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Rfft2dOptions> CreateRfft2dOptions(::flatbuffers::FlatBufferBuilder &_fbb, const Rfft2dOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const Rfft2dOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const Rfft2dOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateRfft2dOptions(
       _fbb);
 }
 
-inline HashtableOptionsT *HashtableOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline HashtableOptionsT *HashtableOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<HashtableOptionsT>(new HashtableOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void HashtableOptions::UnPackTo(HashtableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void HashtableOptions::UnPackTo(HashtableOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = table_id(); _o->table_id = _e; }
@@ -16809,14 +20487,14 @@ inline void HashtableOptions::UnPackTo(HashtableOptionsT *_o, const flatbuffers:
   { auto _e = value_dtype(); _o->value_dtype = _e; }
 }
 
-inline flatbuffers::Offset<HashtableOptions> HashtableOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableOptions> HashtableOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateHashtableOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<HashtableOptions> CreateHashtableOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableOptions> CreateHashtableOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const HashtableOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _table_id = _o->table_id;
   auto _key_dtype = _o->key_dtype;
   auto _value_dtype = _o->value_dtype;
@@ -16827,96 +20505,96 @@ inline flatbuffers::Offset<HashtableOptions> CreateHashtableOptions(flatbuffers:
       _value_dtype);
 }
 
-inline HashtableFindOptionsT *HashtableFindOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline HashtableFindOptionsT *HashtableFindOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<HashtableFindOptionsT>(new HashtableFindOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void HashtableFindOptions::UnPackTo(HashtableFindOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void HashtableFindOptions::UnPackTo(HashtableFindOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<HashtableFindOptions> HashtableFindOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableFindOptions> HashtableFindOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateHashtableFindOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<HashtableFindOptions> CreateHashtableFindOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableFindOptions> CreateHashtableFindOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableFindOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableFindOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const HashtableFindOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateHashtableFindOptions(
       _fbb);
 }
 
-inline HashtableImportOptionsT *HashtableImportOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline HashtableImportOptionsT *HashtableImportOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<HashtableImportOptionsT>(new HashtableImportOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void HashtableImportOptions::UnPackTo(HashtableImportOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void HashtableImportOptions::UnPackTo(HashtableImportOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<HashtableImportOptions> HashtableImportOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableImportOptions> HashtableImportOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateHashtableImportOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<HashtableImportOptions> CreateHashtableImportOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableImportOptions> CreateHashtableImportOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableImportOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableImportOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const HashtableImportOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateHashtableImportOptions(
       _fbb);
 }
 
-inline HashtableSizeOptionsT *HashtableSizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline HashtableSizeOptionsT *HashtableSizeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<HashtableSizeOptionsT>(new HashtableSizeOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void HashtableSizeOptions::UnPackTo(HashtableSizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void HashtableSizeOptions::UnPackTo(HashtableSizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<HashtableSizeOptions> HashtableSizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableSizeOptions> HashtableSizeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateHashtableSizeOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<HashtableSizeOptions> CreateHashtableSizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<HashtableSizeOptions> CreateHashtableSizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const HashtableSizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const HashtableSizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const HashtableSizeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateHashtableSizeOptions(
       _fbb);
 }
 
-inline VarHandleOptionsT *VarHandleOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline VarHandleOptionsT *VarHandleOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<VarHandleOptionsT>(new VarHandleOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void VarHandleOptions::UnPackTo(VarHandleOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void VarHandleOptions::UnPackTo(VarHandleOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = container(); if (_e) _o->container = _e->str(); }
   { auto _e = shared_name(); if (_e) _o->shared_name = _e->str(); }
 }
 
-inline flatbuffers::Offset<VarHandleOptions> VarHandleOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<VarHandleOptions> VarHandleOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateVarHandleOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<VarHandleOptions> CreateVarHandleOptions(flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<VarHandleOptions> CreateVarHandleOptions(::flatbuffers::FlatBufferBuilder &_fbb, const VarHandleOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const VarHandleOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const VarHandleOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _container = _o->container.empty() ? 0 : _fbb.CreateString(_o->container);
   auto _shared_name = _o->shared_name.empty() ? 0 : _fbb.CreateString(_o->shared_name);
   return tflite::CreateVarHandleOptions(
@@ -16925,73 +20603,73 @@ inline flatbuffers::Offset<VarHandleOptions> CreateVarHandleOptions(flatbuffers:
       _shared_name);
 }
 
-inline ReadVariableOptionsT *ReadVariableOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ReadVariableOptionsT *ReadVariableOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ReadVariableOptionsT>(new ReadVariableOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void ReadVariableOptions::UnPackTo(ReadVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ReadVariableOptions::UnPackTo(ReadVariableOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<ReadVariableOptions> ReadVariableOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReadVariableOptions> ReadVariableOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateReadVariableOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<ReadVariableOptions> CreateReadVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ReadVariableOptions> CreateReadVariableOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReadVariableOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ReadVariableOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ReadVariableOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateReadVariableOptions(
       _fbb);
 }
 
-inline AssignVariableOptionsT *AssignVariableOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline AssignVariableOptionsT *AssignVariableOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<AssignVariableOptionsT>(new AssignVariableOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void AssignVariableOptions::UnPackTo(AssignVariableOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void AssignVariableOptions::UnPackTo(AssignVariableOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<AssignVariableOptions> AssignVariableOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AssignVariableOptions> AssignVariableOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateAssignVariableOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<AssignVariableOptions> CreateAssignVariableOptions(flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<AssignVariableOptions> CreateAssignVariableOptions(::flatbuffers::FlatBufferBuilder &_fbb, const AssignVariableOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const AssignVariableOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const AssignVariableOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateAssignVariableOptions(
       _fbb);
 }
 
-inline RandomOptionsT *RandomOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline RandomOptionsT *RandomOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<RandomOptionsT>(new RandomOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void RandomOptions::UnPackTo(RandomOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void RandomOptions::UnPackTo(RandomOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = seed(); _o->seed = _e; }
   { auto _e = seed2(); _o->seed2 = _e; }
 }
 
-inline flatbuffers::Offset<RandomOptions> RandomOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RandomOptions> RandomOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateRandomOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<RandomOptions> CreateRandomOptions(flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RandomOptions> CreateRandomOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RandomOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RandomOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const RandomOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _seed = _o->seed;
   auto _seed2 = _o->seed2;
   return tflite::CreateRandomOptions(
@@ -17000,295 +20678,344 @@ inline flatbuffers::Offset<RandomOptions> CreateRandomOptions(flatbuffers::FlatB
       _seed2);
 }
 
-inline BucketizeOptionsT *BucketizeOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline BucketizeOptionsT *BucketizeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<BucketizeOptionsT>(new BucketizeOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void BucketizeOptions::UnPackTo(BucketizeOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void BucketizeOptions::UnPackTo(BucketizeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = boundaries(); if (_e) { _o->boundaries.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->boundaries[_i] = _e->Get(_i); } } }
+  { auto _e = boundaries(); if (_e) { _o->boundaries.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->boundaries[_i] = _e->Get(_i); } } else { _o->boundaries.resize(0); } }
 }
 
-inline flatbuffers::Offset<BucketizeOptions> BucketizeOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BucketizeOptions> BucketizeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateBucketizeOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptions(flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BucketizeOptions> CreateBucketizeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BucketizeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BucketizeOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BucketizeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _boundaries = _o->boundaries.size() ? _fbb.CreateVector(_o->boundaries) : 0;
   return tflite::CreateBucketizeOptions(
       _fbb,
       _boundaries);
 }
 
-inline GeluOptionsT *GeluOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline GeluOptionsT *GeluOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<GeluOptionsT>(new GeluOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void GeluOptions::UnPackTo(GeluOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void GeluOptions::UnPackTo(GeluOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = approximate(); _o->approximate = _e; }
 }
 
-inline flatbuffers::Offset<GeluOptions> GeluOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GeluOptions> GeluOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateGeluOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<GeluOptions> CreateGeluOptions(flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<GeluOptions> CreateGeluOptions(::flatbuffers::FlatBufferBuilder &_fbb, const GeluOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const GeluOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const GeluOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _approximate = _o->approximate;
   return tflite::CreateGeluOptions(
       _fbb,
       _approximate);
 }
 
-inline DynamicUpdateSliceOptionsT *DynamicUpdateSliceOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline DynamicUpdateSliceOptionsT *DynamicUpdateSliceOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<DynamicUpdateSliceOptionsT>(new DynamicUpdateSliceOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void DynamicUpdateSliceOptions::UnPackTo(DynamicUpdateSliceOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void DynamicUpdateSliceOptions::UnPackTo(DynamicUpdateSliceOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<DynamicUpdateSliceOptions> DynamicUpdateSliceOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DynamicUpdateSliceOptions> DynamicUpdateSliceOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateDynamicUpdateSliceOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<DynamicUpdateSliceOptions> CreateDynamicUpdateSliceOptions(flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<DynamicUpdateSliceOptions> CreateDynamicUpdateSliceOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DynamicUpdateSliceOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const DynamicUpdateSliceOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DynamicUpdateSliceOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateDynamicUpdateSliceOptions(
       _fbb);
 }
 
-inline UnsortedSegmentProdOptionsT *UnsortedSegmentProdOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline UnsortedSegmentProdOptionsT *UnsortedSegmentProdOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<UnsortedSegmentProdOptionsT>(new UnsortedSegmentProdOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void UnsortedSegmentProdOptions::UnPackTo(UnsortedSegmentProdOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void UnsortedSegmentProdOptions::UnPackTo(UnsortedSegmentProdOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<UnsortedSegmentProdOptions> UnsortedSegmentProdOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentProdOptions> UnsortedSegmentProdOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateUnsortedSegmentProdOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<UnsortedSegmentProdOptions> CreateUnsortedSegmentProdOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentProdOptions> CreateUnsortedSegmentProdOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentProdOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentProdOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentProdOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateUnsortedSegmentProdOptions(
       _fbb);
 }
 
-inline UnsortedSegmentMaxOptionsT *UnsortedSegmentMaxOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline UnsortedSegmentMaxOptionsT *UnsortedSegmentMaxOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<UnsortedSegmentMaxOptionsT>(new UnsortedSegmentMaxOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void UnsortedSegmentMaxOptions::UnPackTo(UnsortedSegmentMaxOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void UnsortedSegmentMaxOptions::UnPackTo(UnsortedSegmentMaxOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<UnsortedSegmentMaxOptions> UnsortedSegmentMaxOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentMaxOptions> UnsortedSegmentMaxOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateUnsortedSegmentMaxOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<UnsortedSegmentMaxOptions> CreateUnsortedSegmentMaxOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentMaxOptions> CreateUnsortedSegmentMaxOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMaxOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentMaxOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentMaxOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateUnsortedSegmentMaxOptions(
       _fbb);
 }
 
-inline UnsortedSegmentSumOptionsT *UnsortedSegmentSumOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline UnsortedSegmentSumOptionsT *UnsortedSegmentSumOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<UnsortedSegmentSumOptionsT>(new UnsortedSegmentSumOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void UnsortedSegmentSumOptions::UnPackTo(UnsortedSegmentSumOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void UnsortedSegmentSumOptions::UnPackTo(UnsortedSegmentSumOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<UnsortedSegmentSumOptions> UnsortedSegmentSumOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentSumOptions> UnsortedSegmentSumOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateUnsortedSegmentSumOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<UnsortedSegmentSumOptions> CreateUnsortedSegmentSumOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentSumOptions> CreateUnsortedSegmentSumOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentSumOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentSumOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentSumOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateUnsortedSegmentSumOptions(
       _fbb);
 }
 
-inline ATan2OptionsT *ATan2Options::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline ATan2OptionsT *ATan2Options::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<ATan2OptionsT>(new ATan2OptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void ATan2Options::UnPackTo(ATan2OptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void ATan2Options::UnPackTo(ATan2OptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<ATan2Options> ATan2Options::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ATan2Options> ATan2Options::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateATan2Options(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<ATan2Options> CreateATan2Options(flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<ATan2Options> CreateATan2Options(::flatbuffers::FlatBufferBuilder &_fbb, const ATan2OptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ATan2OptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ATan2OptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateATan2Options(
       _fbb);
 }
 
-inline UnsortedSegmentMinOptionsT *UnsortedSegmentMinOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline UnsortedSegmentMinOptionsT *UnsortedSegmentMinOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<UnsortedSegmentMinOptionsT>(new UnsortedSegmentMinOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void UnsortedSegmentMinOptions::UnPackTo(UnsortedSegmentMinOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void UnsortedSegmentMinOptions::UnPackTo(UnsortedSegmentMinOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<UnsortedSegmentMinOptions> UnsortedSegmentMinOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentMinOptions> UnsortedSegmentMinOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateUnsortedSegmentMinOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<UnsortedSegmentMinOptions> CreateUnsortedSegmentMinOptions(flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<UnsortedSegmentMinOptions> CreateUnsortedSegmentMinOptions(::flatbuffers::FlatBufferBuilder &_fbb, const UnsortedSegmentMinOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentMinOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const UnsortedSegmentMinOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateUnsortedSegmentMinOptions(
       _fbb);
 }
 
-inline SignOptionsT *SignOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SignOptionsT *SignOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SignOptionsT>(new SignOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void SignOptions::UnPackTo(SignOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SignOptions::UnPackTo(SignOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<SignOptions> SignOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SignOptions> SignOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSignOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<SignOptions> CreateSignOptions(flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SignOptions> CreateSignOptions(::flatbuffers::FlatBufferBuilder &_fbb, const SignOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SignOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SignOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateSignOptions(
       _fbb);
 }
 
-inline BitcastOptionsT *BitcastOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline BitcastOptionsT *BitcastOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<BitcastOptionsT>(new BitcastOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void BitcastOptions::UnPackTo(BitcastOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void BitcastOptions::UnPackTo(BitcastOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<BitcastOptions> BitcastOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BitcastOptions> BitcastOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateBitcastOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<BitcastOptions> CreateBitcastOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BitcastOptions> CreateBitcastOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BitcastOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BitcastOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BitcastOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateBitcastOptions(
       _fbb);
 }
 
-inline BitwiseXorOptionsT *BitwiseXorOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline BitwiseXorOptionsT *BitwiseXorOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<BitwiseXorOptionsT>(new BitwiseXorOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void BitwiseXorOptions::UnPackTo(BitwiseXorOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void BitwiseXorOptions::UnPackTo(BitwiseXorOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<BitwiseXorOptions> BitwiseXorOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BitwiseXorOptions> BitwiseXorOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateBitwiseXorOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<BitwiseXorOptions> CreateBitwiseXorOptions(flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<BitwiseXorOptions> CreateBitwiseXorOptions(::flatbuffers::FlatBufferBuilder &_fbb, const BitwiseXorOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BitwiseXorOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BitwiseXorOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateBitwiseXorOptions(
       _fbb);
 }
 
-inline RightShiftOptionsT *RightShiftOptions::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline RightShiftOptionsT *RightShiftOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<RightShiftOptionsT>(new RightShiftOptionsT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void RightShiftOptions::UnPackTo(RightShiftOptionsT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void RightShiftOptions::UnPackTo(RightShiftOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
 }
 
-inline flatbuffers::Offset<RightShiftOptions> RightShiftOptions::Pack(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RightShiftOptions> RightShiftOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateRightShiftOptions(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<RightShiftOptions> CreateRightShiftOptions(flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<RightShiftOptions> CreateRightShiftOptions(::flatbuffers::FlatBufferBuilder &_fbb, const RightShiftOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const RightShiftOptionsT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const RightShiftOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   return tflite::CreateRightShiftOptions(
       _fbb);
 }
 
-inline OperatorCodeT *OperatorCode::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline DilateOptionsT *DilateOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<DilateOptionsT>(new DilateOptionsT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
+}
+
+inline void DilateOptions::UnPackTo(DilateOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+}
+
+inline ::flatbuffers::Offset<DilateOptions> DilateOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const DilateOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateDilateOptions(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<DilateOptions> CreateDilateOptions(::flatbuffers::FlatBufferBuilder &_fbb, const DilateOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const DilateOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  return tflite::CreateDilateOptions(
+      _fbb);
+}
+
+inline ReduceWindowOptionsT *ReduceWindowOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<ReduceWindowOptionsT>(new ReduceWindowOptionsT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
+}
+
+inline void ReduceWindowOptions::UnPackTo(ReduceWindowOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = reduce_function(); _o->reduce_function = _e; }
+}
+
+inline ::flatbuffers::Offset<ReduceWindowOptions> ReduceWindowOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ReduceWindowOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateReduceWindowOptions(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<ReduceWindowOptions> CreateReduceWindowOptions(::flatbuffers::FlatBufferBuilder &_fbb, const ReduceWindowOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ReduceWindowOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _reduce_function = _o->reduce_function;
+  return tflite::CreateReduceWindowOptions(
+      _fbb,
+      _reduce_function);
+}
+
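// [Editor's sketch, not part of the generated file] The DilateOptions and
// ReduceWindowOptions blocks added above follow the object-API pattern used
// throughout this header: each table X has a mutable struct XT, X::UnPack()/
// UnPackTo() to copy a flatbuffer into it, and X::Pack()/CreateX() to
// serialize it back. A minimal round trip using only names this diff
// introduces (the reduce_function enum value is an assumption about the
// schema):
//
//   ::flatbuffers::FlatBufferBuilder fbb;
//   tflite::ReduceWindowOptionsT opts;                       // mutable object-API struct
//   opts.reduce_function = tflite::ReduceWindowFunction_ADD; // assumed enumerator
//   fbb.Finish(tflite::CreateReduceWindowOptions(fbb, &opts, /*_rehasher=*/nullptr));
//   auto *table = ::flatbuffers::GetRoot<tflite::ReduceWindowOptions>(fbb.GetBufferPointer());
//   std::unique_ptr<tflite::ReduceWindowOptionsT> back(table->UnPack(/*_resolver=*/nullptr));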
+inline OperatorCodeT *OperatorCode::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<OperatorCodeT>(new OperatorCodeT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void OperatorCode::UnPackTo(OperatorCodeT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void OperatorCode::UnPackTo(OperatorCodeT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = deprecated_builtin_code(); _o->deprecated_builtin_code = _e; }
@@ -17297,56 +21024,96 @@ inline void OperatorCode::UnPackTo(OperatorCodeT *_o, const flatbuffers::resolve
   { auto _e = builtin_code(); _o->builtin_code = _e; }
 }
 
-inline flatbuffers::Offset<OperatorCode> OperatorCode::Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<OperatorCode> OperatorCode::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateOperatorCode(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<OperatorCode> CreateOperatorCode(flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<OperatorCode> CreateOperatorCode(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorCodeT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const OperatorCodeT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const OperatorCodeT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _deprecated_builtin_code = _o->deprecated_builtin_code;
   auto _custom_code = _o->custom_code.empty() ? 0 : _fbb.CreateString(_o->custom_code);
   auto _version = _o->version;
-  auto _builtin_code = _o->builtin_code;
-  return tflite::CreateOperatorCode(
+  auto _builtin_code = _o->builtin_code;
+  return tflite::CreateOperatorCode(
+      _fbb,
+      _deprecated_builtin_code,
+      _custom_code,
+      _version,
+      _builtin_code);
+}
+
+inline StableHLOCompositeOptionsT *StableHLOCompositeOptions::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
+  auto _o = std::unique_ptr<StableHLOCompositeOptionsT>(new StableHLOCompositeOptionsT());
+  UnPackTo(_o.get(), _resolver);
+  return _o.release();
+}
+
+inline void StableHLOCompositeOptions::UnPackTo(StableHLOCompositeOptionsT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
+  (void)_o;
+  (void)_resolver;
+  { auto _e = name(); if (_e) _o->name = _e->str(); }
+  { auto _e = decomposition_subgraph_index(); _o->decomposition_subgraph_index = _e; }
+  { auto _e = composite_attributes(); if (_e) { _o->composite_attributes.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->composite_attributes.begin()); } }
+  { auto _e = composite_attributes_format(); _o->composite_attributes_format = _e; }
+  { auto _e = version(); _o->version = _e; }
+}
+
+inline ::flatbuffers::Offset<StableHLOCompositeOptions> StableHLOCompositeOptions::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const StableHLOCompositeOptionsT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  return CreateStableHLOCompositeOptions(_fbb, _o, _rehasher);
+}
+
+inline ::flatbuffers::Offset<StableHLOCompositeOptions> CreateStableHLOCompositeOptions(::flatbuffers::FlatBufferBuilder &_fbb, const StableHLOCompositeOptionsT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
+  (void)_rehasher;
+  (void)_o;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const StableHLOCompositeOptionsT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _name = _o->name.empty() ? 0 : _fbb.CreateString(_o->name);
+  auto _decomposition_subgraph_index = _o->decomposition_subgraph_index;
+  auto _composite_attributes = _o->composite_attributes.size() ? _fbb.CreateVector(_o->composite_attributes) : 0;
+  auto _composite_attributes_format = _o->composite_attributes_format;
+  auto _version = _o->version;
+  return tflite::CreateStableHLOCompositeOptions(
       _fbb,
-      _deprecated_builtin_code,
-      _custom_code,
-      _version,
-      _builtin_code);
+      _name,
+      _decomposition_subgraph_index,
+      _composite_attributes,
+      _composite_attributes_format,
+      _version);
 }
 
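// [Editor's sketch, not part of the generated file] StableHLOCompositeOptions
// is new in this diff. Its composite_attributes field unpacks into a plain
// byte vector (std::copy above); treating those bytes as a serialized
// flexbuffer is an assumption, and every literal below is hypothetical:
//
//   tflite::StableHLOCompositeOptionsT composite;
//   composite.name = "odml.example_composite";   // hypothetical composite name
//   composite.decomposition_subgraph_index = 1;  // hypothetical subgraph index
//   composite.version = 2;
//   ::flatbuffers::FlatBufferBuilder fbb;
//   fbb.Finish(tflite::CreateStableHLOCompositeOptions(fbb, &composite, nullptr));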
-inline OperatorT *Operator::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline OperatorT *Operator::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<OperatorT>(new OperatorT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void Operator::UnPackTo(OperatorT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void Operator::UnPackTo(OperatorT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
   { auto _e = opcode_index(); _o->opcode_index = _e; }
-  { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inputs[_i] = _e->Get(_i); } } }
-  { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->outputs[_i] = _e->Get(_i); } } }
+  { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inputs[_i] = _e->Get(_i); } } else { _o->inputs.resize(0); } }
+  { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->outputs[_i] = _e->Get(_i); } } else { _o->outputs.resize(0); } }
   { auto _e = builtin_options_type(); _o->builtin_options.type = _e; }
   { auto _e = builtin_options(); if (_e) _o->builtin_options.value = tflite::BuiltinOptionsUnion::UnPack(_e, builtin_options_type(), _resolver); }
   { auto _e = custom_options(); if (_e) { _o->custom_options.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->custom_options.begin()); } }
   { auto _e = custom_options_format(); _o->custom_options_format = _e; }
-  { auto _e = mutating_variable_inputs(); if (_e) { _o->mutating_variable_inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->mutating_variable_inputs[_i] = _e->Get(_i) != 0; } } }
-  { auto _e = intermediates(); if (_e) { _o->intermediates.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->intermediates[_i] = _e->Get(_i); } } }
+  { auto _e = mutating_variable_inputs(); if (_e) { _o->mutating_variable_inputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->mutating_variable_inputs[_i] = _e->Get(_i) != 0; } } else { _o->mutating_variable_inputs.resize(0); } }
+  { auto _e = intermediates(); if (_e) { _o->intermediates.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->intermediates[_i] = _e->Get(_i); } } else { _o->intermediates.resize(0); } }
   { auto _e = large_custom_options_offset(); _o->large_custom_options_offset = _e; }
   { auto _e = large_custom_options_size(); _o->large_custom_options_size = _e; }
+  { auto _e = builtin_options_2_type(); _o->builtin_options_2.type = _e; }
+  { auto _e = builtin_options_2(); if (_e) _o->builtin_options_2.value = tflite::BuiltinOptions2Union::UnPack(_e, builtin_options_2_type(), _resolver); }
 }
 
-inline flatbuffers::Offset<Operator> Operator::Pack(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Operator> Operator::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateOperator(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<Operator> CreateOperator(flatbuffers::FlatBufferBuilder &_fbb, const OperatorT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<Operator> CreateOperator(::flatbuffers::FlatBufferBuilder &_fbb, const OperatorT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const OperatorT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const OperatorT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
   auto _opcode_index = _o->opcode_index;
   auto _inputs = _o->inputs.size() ? _fbb.CreateVector(_o->inputs) : 0;
   auto _outputs = _o->outputs.size() ? _fbb.CreateVector(_o->outputs) : 0;
@@ -17358,6 +21125,8 @@ inline flatbuffers::Offset<Operator> CreateOperator(flatbuffers::FlatBufferBuild
   auto _intermediates = _o->intermediates.size() ? _fbb.CreateVector(_o->intermediates) : 0;
   auto _large_custom_options_offset = _o->large_custom_options_offset;
   auto _large_custom_options_size = _o->large_custom_options_size;
+  auto _builtin_options_2_type = _o->builtin_options_2.type;
+  auto _builtin_options_2 = _o->builtin_options_2.Pack(_fbb);
   return tflite::CreateOperator(
       _fbb,
       _opcode_index,
@@ -17370,7 +21139,9 @@ inline flatbuffers::Offset<Operator> CreateOperator(flatbuffers::FlatBufferBuild
       _mutating_variable_inputs,
       _intermediates,
       _large_custom_options_offset,
-      _large_custom_options_size);
+      _large_custom_options_size,
+      _builtin_options_2_type,
+      _builtin_options_2);
 }
 
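// [Editor's sketch, not part of the generated file] Operators now carry a
// second options union (builtin_options_2_type / builtin_options_2) alongside
// the original builtin_options, packed and unpacked above. Attaching one of
// the new option tables through the object API might look like this; the
// BuiltinOptions2_* enumerator name is an assumption, while the .type/.value
// members are the ones used in UnPackTo above:
//
//   tflite::OperatorT op;
//   op.builtin_options_2.type = tflite::BuiltinOptions2_ReduceWindowOptions;  // assumed
//   op.builtin_options_2.value = new tflite::ReduceWindowOptionsT();
//   // CreateOperator() above then emits _o->builtin_options_2.Pack(_fbb).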
 inline SubGraphT::SubGraphT(const SubGraphT &o)
@@ -17392,34 +21163,34 @@ inline SubGraphT &SubGraphT::operator=(SubGraphT o) FLATBUFFERS_NOEXCEPT {
   return *this;
 }
 
-inline SubGraphT *SubGraph::UnPack(const flatbuffers::resolver_function_t *_resolver) const {
+inline SubGraphT *SubGraph::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const {
   auto _o = std::unique_ptr<SubGraphT>(new SubGraphT());
   UnPackTo(_o.get(), _resolver);
   return _o.release();
 }
 
-inline void SubGraph::UnPackTo(SubGraphT *_o, const flatbuffers::resolver_function_t *_resolver) const {
+inline void SubGraph::UnPackTo(SubGraphT *_o, const ::flatbuffers::resolver_function_t *_resolver) const {
   (void)_o;
   (void)_resolver;
-  { auto _e = tensors(); if (_e) { _o->tensors.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->tensors[_i]) { _e->Get(_i)->UnPackTo(_o->tensors[_i].get(), _resolver); } else { _o->tensors[_i] = std::unique_ptr<tflite::TensorT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
-  { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inputs[_i] = _e->Get(_i); } } }
-  { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->outputs[_i] = _e->Get(_i); } } }
-  { auto _e = operators(); if (_e) { _o->operators.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->operators[_i]) { _e->Get(_i)->UnPackTo(_o->operators[_i].get(), _resolver); } else { _o->operators[_i] = std::unique_ptr<tflite::OperatorT>(_e->Get(_i)->UnPack(_resolver)); }; } } }
+  { auto _e = tensors(); if (_e) { _o->tensors.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->tensors[_i]) { _e->Get(_i)->UnPackTo(_o->tensors[_i].get(), _resolver); } else { _o->tensors[_i] = std::unique_ptr<tflite::TensorT>(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->tensors.resize(0); } }
+  { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->inputs[_i] = _e->Get(_i); } } else { _o->inputs.resize(0); } }
+  { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->outputs[_i] = _e->Get(_i); } } else { _o->outputs.resize(0); } }
+  { auto _e = operators(); if (_e) { _o->operators.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->operators[_i]) { _e->Get(_i)->UnPackTo(_o->operators[_i].get(), _resolver); } else { _o->operators[_i] = std::unique_ptr<tflite::OperatorT>(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->operators.resize(0); } }
   { auto _e = name(); if (_e) _o->name = _e->str(); }
 }
 
-inline flatbuffers::Offset<SubGraph> SubGraph::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT* _o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SubGraph> SubGraph::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   return CreateSubGraph(_fbb, _o, _rehasher);
 }
 
-inline flatbuffers::Offset<SubGraph> CreateSubGraph(flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT *_o, const flatbuffers::rehasher_function_t *_rehasher) {
+inline ::flatbuffers::Offset<SubGraph> CreateSubGraph(::flatbuffers::FlatBufferBuilder &_fbb, const SubGraphT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) {
   (void)_rehasher;
   (void)_o;
-  struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SubGraphT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
-  auto _tensors = _o->tensors.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::Tensor>> (_o->tensors.size(), [](size_t i, _VectorArgs *__va) { return CreateTensor(*__va->__fbb, __va->__o->tensors[i].get(), __va->__rehasher); }, &_va ) : 0;
+  struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SubGraphT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va;
+  auto _tensors = _o->tensors.size() ? _fbb.CreateVector<::flatbuffers::Offset<tflite::Tensor>> (_o->tensors.size(), [](size_t i, _VectorArgs *__va) { return CreateTensor(*__va->__fbb, __va->__o->tensors[i].get(), __va->__rehasher); }, &_va ) : 0;
   auto _inputs = _o->inputs.size() ? _fbb.CreateVector(_o->inputs) : 0;
   auto _outputs = _o->outputs.size() ? _fbb.CreateVector(_o->outputs) : 0;
-  auto _operators = _o->operators.size() ? _fbb.CreateVector<flatbuffers::Offset<tflite::Operator>> (_o->operators.size(), [](size_t i, _VectorArgs *__va) { return CreateOperator(*__va->__fbb, __va->__o->operators[i].get(), __va->__rehasher); }, &_va ) : 0;
+  auto _operators = _o->operators.size() ? _fbb.CreateVector<::flatbuffers::Offset<tflite::Operator>> (_o->operators.size(), [](size_t i, _VectorArgs *__va) { return CreateOperator(*__va->__fbb, __va->__o->operators[i].get(), __va->__rehasher); }, &_va ) : 0;
0 : _fbb.CreateString(_o->name); return tflite::CreateSubGraph( _fbb, @@ -17430,13 +21201,13 @@ inline flatbuffers::Offset CreateSubGraph(flatbuffers::FlatBufferBuild _name); } -inline BufferT *Buffer::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline BufferT *Buffer::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new BufferT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void Buffer::UnPackTo(BufferT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void Buffer::UnPackTo(BufferT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = data(); if (_e) { _o->data.resize(_e->size()); std::copy(_e->begin(), _e->end(), _o->data.begin()); } } @@ -17444,14 +21215,14 @@ inline void Buffer::UnPackTo(BufferT *_o, const flatbuffers::resolver_function_t { auto _e = size(); _o->size = _e; } } -inline flatbuffers::Offset Buffer::Pack(flatbuffers::FlatBufferBuilder &_fbb, const BufferT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset Buffer::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const BufferT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateBuffer(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateBuffer(flatbuffers::FlatBufferBuilder &_fbb, const BufferT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateBuffer(::flatbuffers::FlatBufferBuilder &_fbb, const BufferT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const BufferT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const BufferT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; _fbb.ForceVectorAlignment(_o->data.size(), sizeof(uint8_t), 16); auto _data = _o->data.size() ? 
_fbb.CreateVector(_o->data) : 0; auto _offset = _o->offset; @@ -17463,27 +21234,27 @@ inline flatbuffers::Offset CreateBuffer(flatbuffers::FlatBufferBuilder & _size); } -inline MetadataT *Metadata::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline MetadataT *Metadata::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new MetadataT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void Metadata::UnPackTo(MetadataT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void Metadata::UnPackTo(MetadataT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = name(); if (_e) _o->name = _e->str(); } { auto _e = buffer(); _o->buffer = _e; } } -inline flatbuffers::Offset Metadata::Pack(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset Metadata::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const MetadataT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateMetadata(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateMetadata(flatbuffers::FlatBufferBuilder &_fbb, const MetadataT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateMetadata(::flatbuffers::FlatBufferBuilder &_fbb, const MetadataT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const MetadataT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const MetadataT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; auto _name = _o->name.empty() ? 
0 : _fbb.CreateString(_o->name); auto _buffer = _o->buffer; return tflite::CreateMetadata( @@ -17492,27 +21263,27 @@ inline flatbuffers::Offset CreateMetadata(flatbuffers::FlatBufferBuild _buffer); } -inline TensorMapT *TensorMap::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline TensorMapT *TensorMap::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new TensorMapT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void TensorMap::UnPackTo(TensorMapT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void TensorMap::UnPackTo(TensorMapT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = name(); if (_e) _o->name = _e->str(); } { auto _e = tensor_index(); _o->tensor_index = _e; } } -inline flatbuffers::Offset TensorMap::Pack(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset TensorMap::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateTensorMap(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateTensorMap(flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateTensorMap(::flatbuffers::FlatBufferBuilder &_fbb, const TensorMapT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const TensorMapT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const TensorMapT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; auto _name = _o->name.empty() ? 
0 : _fbb.CreateString(_o->name); auto _tensor_index = _o->tensor_index; return tflite::CreateTensorMap( @@ -17538,31 +21309,31 @@ inline SignatureDefT &SignatureDefT::operator=(SignatureDefT o) FLATBUFFERS_NOEX return *this; } -inline SignatureDefT *SignatureDef::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline SignatureDefT *SignatureDef::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new SignatureDefT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void SignatureDef::UnPackTo(SignatureDefT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void SignatureDef::UnPackTo(SignatureDefT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; - { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->inputs[_i]) { _e->Get(_i)->UnPackTo(_o->inputs[_i].get(), _resolver); } else { _o->inputs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } - { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->outputs[_i]) { _e->Get(_i)->UnPackTo(_o->outputs[_i].get(), _resolver); } else { _o->outputs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = inputs(); if (_e) { _o->inputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->inputs[_i]) { _e->Get(_i)->UnPackTo(_o->inputs[_i].get(), _resolver); } else { _o->inputs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->inputs.resize(0); } } + { auto _e = outputs(); if (_e) { _o->outputs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->outputs[_i]) { _e->Get(_i)->UnPackTo(_o->outputs[_i].get(), _resolver); } else { _o->outputs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->outputs.resize(0); } } { auto _e = signature_key(); if (_e) _o->signature_key = _e->str(); } { auto _e = subgraph_index(); _o->subgraph_index = _e; } } -inline flatbuffers::Offset SignatureDef::Pack(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset SignatureDef::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateSignatureDef(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateSignatureDef(flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateSignatureDef(::flatbuffers::FlatBufferBuilder &_fbb, const SignatureDefT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const SignatureDefT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; - auto _inputs = _o->inputs.size() ? _fbb.CreateVector> (_o->inputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorMap(*__va->__fbb, __va->__o->inputs[i].get(), __va->__rehasher); }, &_va ) : 0; - auto _outputs = _o->outputs.size() ? 
_fbb.CreateVector> (_o->outputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorMap(*__va->__fbb, __va->__o->outputs[i].get(), __va->__rehasher); }, &_va ) : 0; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const SignatureDefT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + auto _inputs = _o->inputs.size() ? _fbb.CreateVector<::flatbuffers::Offset> (_o->inputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorMap(*__va->__fbb, __va->__o->inputs[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _outputs = _o->outputs.size() ? _fbb.CreateVector<::flatbuffers::Offset> (_o->outputs.size(), [](size_t i, _VectorArgs *__va) { return CreateTensorMap(*__va->__fbb, __va->__o->outputs[i].get(), __va->__rehasher); }, &_va ) : 0; auto _signature_key = _o->signature_key.empty() ? 0 : _fbb.CreateString(_o->signature_key); auto _subgraph_index = _o->subgraph_index; return tflite::CreateSignatureDef( @@ -17601,41 +21372,41 @@ inline ModelT &ModelT::operator=(ModelT o) FLATBUFFERS_NOEXCEPT { return *this; } -inline ModelT *Model::UnPack(const flatbuffers::resolver_function_t *_resolver) const { +inline ModelT *Model::UnPack(const ::flatbuffers::resolver_function_t *_resolver) const { auto _o = std::unique_ptr(new ModelT()); UnPackTo(_o.get(), _resolver); return _o.release(); } -inline void Model::UnPackTo(ModelT *_o, const flatbuffers::resolver_function_t *_resolver) const { +inline void Model::UnPackTo(ModelT *_o, const ::flatbuffers::resolver_function_t *_resolver) const { (void)_o; (void)_resolver; { auto _e = version(); _o->version = _e; } - { auto _e = operator_codes(); if (_e) { _o->operator_codes.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->operator_codes[_i]) { _e->Get(_i)->UnPackTo(_o->operator_codes[_i].get(), _resolver); } else { _o->operator_codes[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } - { auto _e = subgraphs(); if (_e) { _o->subgraphs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->subgraphs[_i]) { _e->Get(_i)->UnPackTo(_o->subgraphs[_i].get(), _resolver); } else { _o->subgraphs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = operator_codes(); if (_e) { _o->operator_codes.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->operator_codes[_i]) { _e->Get(_i)->UnPackTo(_o->operator_codes[_i].get(), _resolver); } else { _o->operator_codes[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->operator_codes.resize(0); } } + { auto _e = subgraphs(); if (_e) { _o->subgraphs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->subgraphs[_i]) { _e->Get(_i)->UnPackTo(_o->subgraphs[_i].get(), _resolver); } else { _o->subgraphs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->subgraphs.resize(0); } } { auto _e = description(); if (_e) _o->description = _e->str(); } - { auto _e = buffers(); if (_e) { _o->buffers.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->buffers[_i]) { _e->Get(_i)->UnPackTo(_o->buffers[_i].get(), _resolver); } else { _o->buffers[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } - { auto _e = metadata_buffer(); if (_e) { _o->metadata_buffer.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->metadata_buffer[_i] = _e->Get(_i); } } } - { auto _e = 
metadata(); if (_e) { _o->metadata.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->metadata[_i]) { _e->Get(_i)->UnPackTo(_o->metadata[_i].get(), _resolver); } else { _o->metadata[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } - { auto _e = signature_defs(); if (_e) { _o->signature_defs.resize(_e->size()); for (flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->signature_defs[_i]) { _e->Get(_i)->UnPackTo(_o->signature_defs[_i].get(), _resolver); } else { _o->signature_defs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } } + { auto _e = buffers(); if (_e) { _o->buffers.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->buffers[_i]) { _e->Get(_i)->UnPackTo(_o->buffers[_i].get(), _resolver); } else { _o->buffers[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->buffers.resize(0); } } + { auto _e = metadata_buffer(); if (_e) { _o->metadata_buffer.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { _o->metadata_buffer[_i] = _e->Get(_i); } } else { _o->metadata_buffer.resize(0); } } + { auto _e = metadata(); if (_e) { _o->metadata.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->metadata[_i]) { _e->Get(_i)->UnPackTo(_o->metadata[_i].get(), _resolver); } else { _o->metadata[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->metadata.resize(0); } } + { auto _e = signature_defs(); if (_e) { _o->signature_defs.resize(_e->size()); for (::flatbuffers::uoffset_t _i = 0; _i < _e->size(); _i++) { if(_o->signature_defs[_i]) { _e->Get(_i)->UnPackTo(_o->signature_defs[_i].get(), _resolver); } else { _o->signature_defs[_i] = std::unique_ptr(_e->Get(_i)->UnPack(_resolver)); }; } } else { _o->signature_defs.resize(0); } } } -inline flatbuffers::Offset Model::Pack(flatbuffers::FlatBufferBuilder &_fbb, const ModelT* _o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset Model::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ModelT* _o, const ::flatbuffers::rehasher_function_t *_rehasher) { return CreateModel(_fbb, _o, _rehasher); } -inline flatbuffers::Offset CreateModel(flatbuffers::FlatBufferBuilder &_fbb, const ModelT *_o, const flatbuffers::rehasher_function_t *_rehasher) { +inline ::flatbuffers::Offset CreateModel(::flatbuffers::FlatBufferBuilder &_fbb, const ModelT *_o, const ::flatbuffers::rehasher_function_t *_rehasher) { (void)_rehasher; (void)_o; - struct _VectorArgs { flatbuffers::FlatBufferBuilder *__fbb; const ModelT* __o; const flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; + struct _VectorArgs { ::flatbuffers::FlatBufferBuilder *__fbb; const ModelT* __o; const ::flatbuffers::rehasher_function_t *__rehasher; } _va = { &_fbb, _o, _rehasher}; (void)_va; auto _version = _o->version; - auto _operator_codes = _o->operator_codes.size() ? _fbb.CreateVector> (_o->operator_codes.size(), [](size_t i, _VectorArgs *__va) { return CreateOperatorCode(*__va->__fbb, __va->__o->operator_codes[i].get(), __va->__rehasher); }, &_va ) : 0; - auto _subgraphs = _o->subgraphs.size() ? _fbb.CreateVector> (_o->subgraphs.size(), [](size_t i, _VectorArgs *__va) { return CreateSubGraph(*__va->__fbb, __va->__o->subgraphs[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _operator_codes = _o->operator_codes.size() ? 
_fbb.CreateVector<::flatbuffers::Offset> (_o->operator_codes.size(), [](size_t i, _VectorArgs *__va) { return CreateOperatorCode(*__va->__fbb, __va->__o->operator_codes[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _subgraphs = _o->subgraphs.size() ? _fbb.CreateVector<::flatbuffers::Offset> (_o->subgraphs.size(), [](size_t i, _VectorArgs *__va) { return CreateSubGraph(*__va->__fbb, __va->__o->subgraphs[i].get(), __va->__rehasher); }, &_va ) : 0; auto _description = _o->description.empty() ? 0 : _fbb.CreateString(_o->description); - auto _buffers = _o->buffers.size() ? _fbb.CreateVector> (_o->buffers.size(), [](size_t i, _VectorArgs *__va) { return CreateBuffer(*__va->__fbb, __va->__o->buffers[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _buffers = _o->buffers.size() ? _fbb.CreateVector<::flatbuffers::Offset> (_o->buffers.size(), [](size_t i, _VectorArgs *__va) { return CreateBuffer(*__va->__fbb, __va->__o->buffers[i].get(), __va->__rehasher); }, &_va ) : 0; auto _metadata_buffer = _o->metadata_buffer.size() ? _fbb.CreateVector(_o->metadata_buffer) : 0; - auto _metadata = _o->metadata.size() ? _fbb.CreateVector> (_o->metadata.size(), [](size_t i, _VectorArgs *__va) { return CreateMetadata(*__va->__fbb, __va->__o->metadata[i].get(), __va->__rehasher); }, &_va ) : 0; - auto _signature_defs = _o->signature_defs.size() ? _fbb.CreateVector> (_o->signature_defs.size(), [](size_t i, _VectorArgs *__va) { return CreateSignatureDef(*__va->__fbb, __va->__o->signature_defs[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _metadata = _o->metadata.size() ? _fbb.CreateVector<::flatbuffers::Offset> (_o->metadata.size(), [](size_t i, _VectorArgs *__va) { return CreateMetadata(*__va->__fbb, __va->__o->metadata[i].get(), __va->__rehasher); }, &_va ) : 0; + auto _signature_defs = _o->signature_defs.size() ? 
_fbb.CreateVector<::flatbuffers::Offset> (_o->signature_defs.size(), [](size_t i, _VectorArgs *__va) { return CreateSignatureDef(*__va->__fbb, __va->__o->signature_defs[i].get(), __va->__rehasher); }, &_va ) : 0; return tflite::CreateModel( _fbb, _version, @@ -17648,7 +21419,7 @@ inline flatbuffers::Offset CreateModel(flatbuffers::FlatBufferBuilder &_f _signature_defs); } -inline bool VerifyQuantizationDetails(flatbuffers::Verifier &verifier, const void *obj, QuantizationDetails type) { +inline bool VerifyQuantizationDetails(::flatbuffers::Verifier &verifier, const void *obj, QuantizationDetails type) { switch (type) { case QuantizationDetails_NONE: { return true; @@ -17661,10 +21432,10 @@ inline bool VerifyQuantizationDetails(flatbuffers::Verifier &verifier, const voi } } -inline bool VerifyQuantizationDetailsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types) { +inline bool VerifyQuantizationDetailsVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset> *values, const ::flatbuffers::Vector *types) { if (!values || !types) return !values && !types; if (values->size() != types->size()) return false; - for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + for (::flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { if (!VerifyQuantizationDetails( verifier, values->Get(i), types->GetEnum(i))) { return false; @@ -17673,7 +21444,7 @@ inline bool VerifyQuantizationDetailsVector(flatbuffers::Verifier &verifier, con return true; } -inline void *QuantizationDetailsUnion::UnPack(const void *obj, QuantizationDetails type, const flatbuffers::resolver_function_t *resolver) { +inline void *QuantizationDetailsUnion::UnPack(const void *obj, QuantizationDetails type, const ::flatbuffers::resolver_function_t *resolver) { (void)resolver; switch (type) { case QuantizationDetails_CustomQuantization: { @@ -17684,7 +21455,7 @@ inline void *QuantizationDetailsUnion::UnPack(const void *obj, QuantizationDetai } } -inline flatbuffers::Offset QuantizationDetailsUnion::Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher) const { +inline ::flatbuffers::Offset QuantizationDetailsUnion::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher) const { (void)_rehasher; switch (type) { case QuantizationDetails_CustomQuantization: { @@ -17719,7 +21490,7 @@ inline void QuantizationDetailsUnion::Reset() { type = QuantizationDetails_NONE; } -inline bool VerifySparseIndexVector(flatbuffers::Verifier &verifier, const void *obj, SparseIndexVector type) { +inline bool VerifySparseIndexVector(::flatbuffers::Verifier &verifier, const void *obj, SparseIndexVector type) { switch (type) { case SparseIndexVector_NONE: { return true; @@ -17740,10 +21511,10 @@ inline bool VerifySparseIndexVector(flatbuffers::Verifier &verifier, const void } } -inline bool VerifySparseIndexVectorVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types) { +inline bool VerifySparseIndexVectorVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset> *values, const ::flatbuffers::Vector *types) { if (!values || !types) return !values && !types; if (values->size() != types->size()) return false; - for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + for (::flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { if (!VerifySparseIndexVector( verifier, values->Get(i), 
types->GetEnum(i))) { return false; @@ -17752,7 +21523,7 @@ inline bool VerifySparseIndexVectorVector(flatbuffers::Verifier &verifier, const return true; } -inline void *SparseIndexVectorUnion::UnPack(const void *obj, SparseIndexVector type, const flatbuffers::resolver_function_t *resolver) { +inline void *SparseIndexVectorUnion::UnPack(const void *obj, SparseIndexVector type, const ::flatbuffers::resolver_function_t *resolver) { (void)resolver; switch (type) { case SparseIndexVector_Int32Vector: { @@ -17771,7 +21542,7 @@ inline void *SparseIndexVectorUnion::UnPack(const void *obj, SparseIndexVector t } } -inline flatbuffers::Offset SparseIndexVectorUnion::Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher) const { +inline ::flatbuffers::Offset SparseIndexVectorUnion::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher) const { (void)_rehasher; switch (type) { case SparseIndexVector_Int32Vector: { @@ -17832,7 +21603,7 @@ inline void SparseIndexVectorUnion::Reset() { type = SparseIndexVector_NONE; } -inline bool VerifyBuiltinOptions(flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions type) { +inline bool VerifyBuiltinOptions(::flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions type) { switch (type) { case BuiltinOptions_NONE: { return true; @@ -18345,10 +22116,10 @@ inline bool VerifyBuiltinOptions(flatbuffers::Verifier &verifier, const void *ob } } -inline bool VerifyBuiltinOptionsVector(flatbuffers::Verifier &verifier, const flatbuffers::Vector> *values, const flatbuffers::Vector *types) { +inline bool VerifyBuiltinOptionsVector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset> *values, const ::flatbuffers::Vector *types) { if (!values || !types) return !values && !types; if (values->size() != types->size()) return false; - for (flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + for (::flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { if (!VerifyBuiltinOptions( verifier, values->Get(i), types->GetEnum(i))) { return false; @@ -18357,7 +22128,7 @@ inline bool VerifyBuiltinOptionsVector(flatbuffers::Verifier &verifier, const fl return true; } -inline void *BuiltinOptionsUnion::UnPack(const void *obj, BuiltinOptions type, const flatbuffers::resolver_function_t *resolver) { +inline void *BuiltinOptionsUnion::UnPack(const void *obj, BuiltinOptions type, const ::flatbuffers::resolver_function_t *resolver) { (void)resolver; switch (type) { case BuiltinOptions_Conv2DOptions: { @@ -18868,7 +22639,7 @@ inline void *BuiltinOptionsUnion::UnPack(const void *obj, BuiltinOptions type, c } } -inline flatbuffers::Offset BuiltinOptionsUnion::Pack(flatbuffers::FlatBufferBuilder &_fbb, const flatbuffers::rehasher_function_t *_rehasher) const { +inline ::flatbuffers::Offset BuiltinOptionsUnion::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher) const { (void)_rehasher; switch (type) { case BuiltinOptions_Conv2DOptions: { @@ -20528,12 +24299,503 @@ inline void BuiltinOptionsUnion::Reset() { type = BuiltinOptions_NONE; } +inline bool VerifyBuiltinOptions2(::flatbuffers::Verifier &verifier, const void *obj, BuiltinOptions2 type) { + switch (type) { + case BuiltinOptions2_NONE: { + return true; + } + case BuiltinOptions2_StablehloConcatenateOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloBroadcastInDimOptions: { + auto ptr = 
reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloSliceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloConvolutionOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloCustomCallOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloReduceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloScatterOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloCompareOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloDynamicSliceOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloPadOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloIotaOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloDotGeneralOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloReduceWindowOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloSortOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloWhileOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloGatherOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloTransposeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_DilateOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StablehloRngBitGeneratorOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_ReduceWindowOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + case BuiltinOptions2_StableHLOCompositeOptions: { + auto ptr = reinterpret_cast(obj); + return verifier.VerifyTable(ptr); + } + default: return true; + } +} + +inline bool VerifyBuiltinOptions2Vector(::flatbuffers::Verifier &verifier, const ::flatbuffers::Vector<::flatbuffers::Offset> *values, const ::flatbuffers::Vector *types) { + if (!values || !types) return !values && !types; + if (values->size() != types->size()) return false; + for (::flatbuffers::uoffset_t i = 0; i < values->size(); ++i) { + if (!VerifyBuiltinOptions2( + verifier, values->Get(i), types->GetEnum(i))) { + return false; + } + } + return true; +} + +inline void *BuiltinOptions2Union::UnPack(const void *obj, BuiltinOptions2 type, const ::flatbuffers::resolver_function_t *resolver) { + (void)resolver; + switch (type) { + case BuiltinOptions2_StablehloConcatenateOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloBroadcastInDimOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloSliceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case 
BuiltinOptions2_StablehloConvolutionOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloCustomCallOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloReduceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloScatterOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloCompareOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloDynamicSliceOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloPadOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloIotaOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloDotGeneralOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloReduceWindowOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloSortOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloWhileOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloGatherOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloTransposeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_DilateOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StablehloRngBitGeneratorOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_ReduceWindowOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + case BuiltinOptions2_StableHLOCompositeOptions: { + auto ptr = reinterpret_cast(obj); + return ptr->UnPack(resolver); + } + default: return nullptr; + } +} + +inline ::flatbuffers::Offset BuiltinOptions2Union::Pack(::flatbuffers::FlatBufferBuilder &_fbb, const ::flatbuffers::rehasher_function_t *_rehasher) const { + (void)_rehasher; + switch (type) { + case BuiltinOptions2_StablehloConcatenateOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloConcatenateOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloBroadcastInDimOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloBroadcastInDimOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloSliceOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloSliceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloConvolutionOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloConvolutionOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloCustomCallOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloCustomCallOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloReduceOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloReduceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloScatterOptions: { + auto ptr = reinterpret_cast(value); + return 
CreateStablehloScatterOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloCompareOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloCompareOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloDynamicSliceOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloDynamicSliceOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloPadOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloPadOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloIotaOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloIotaOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloDotGeneralOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloDotGeneralOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloReduceWindowOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloReduceWindowOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloSortOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloSortOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloWhileOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloWhileOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloGatherOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloGatherOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloTransposeOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloTransposeOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_DilateOptions: { + auto ptr = reinterpret_cast(value); + return CreateDilateOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StablehloRngBitGeneratorOptions: { + auto ptr = reinterpret_cast(value); + return CreateStablehloRngBitGeneratorOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_ReduceWindowOptions: { + auto ptr = reinterpret_cast(value); + return CreateReduceWindowOptions(_fbb, ptr, _rehasher).Union(); + } + case BuiltinOptions2_StableHLOCompositeOptions: { + auto ptr = reinterpret_cast(value); + return CreateStableHLOCompositeOptions(_fbb, ptr, _rehasher).Union(); + } + default: return 0; + } +} + +inline BuiltinOptions2Union::BuiltinOptions2Union(const BuiltinOptions2Union &u) : type(u.type), value(nullptr) { + switch (type) { + case BuiltinOptions2_StablehloConcatenateOptions: { + value = new tflite::StablehloConcatenateOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloBroadcastInDimOptions: { + value = new tflite::StablehloBroadcastInDimOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloSliceOptions: { + value = new tflite::StablehloSliceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloConvolutionOptions: { + value = new tflite::StablehloConvolutionOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloCustomCallOptions: { + value = new tflite::StablehloCustomCallOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloReduceOptions: { + value = new tflite::StablehloReduceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloScatterOptions: { + value = new tflite::StablehloScatterOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloCompareOptions: { + value = 
new tflite::StablehloCompareOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloDynamicSliceOptions: { + value = new tflite::StablehloDynamicSliceOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloPadOptions: { + value = new tflite::StablehloPadOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloIotaOptions: { + value = new tflite::StablehloIotaOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloDotGeneralOptions: { + value = new tflite::StablehloDotGeneralOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloReduceWindowOptions: { + value = new tflite::StablehloReduceWindowOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloSortOptions: { + value = new tflite::StablehloSortOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloWhileOptions: { + value = new tflite::StablehloWhileOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloGatherOptions: { + value = new tflite::StablehloGatherOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloTransposeOptions: { + value = new tflite::StablehloTransposeOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_DilateOptions: { + value = new tflite::DilateOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StablehloRngBitGeneratorOptions: { + value = new tflite::StablehloRngBitGeneratorOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_ReduceWindowOptions: { + value = new tflite::ReduceWindowOptionsT(*reinterpret_cast(u.value)); + break; + } + case BuiltinOptions2_StableHLOCompositeOptions: { + value = new tflite::StableHLOCompositeOptionsT(*reinterpret_cast(u.value)); + break; + } + default: + break; + } +} + +inline void BuiltinOptions2Union::Reset() { + switch (type) { + case BuiltinOptions2_StablehloConcatenateOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloBroadcastInDimOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloSliceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloConvolutionOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloCustomCallOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloReduceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloScatterOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloCompareOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloDynamicSliceOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloPadOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloIotaOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloDotGeneralOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloReduceWindowOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloSortOptions: { + auto ptr = 
reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloWhileOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloGatherOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloTransposeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_DilateOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StablehloRngBitGeneratorOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_ReduceWindowOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + case BuiltinOptions2_StableHLOCompositeOptions: { + auto ptr = reinterpret_cast(value); + delete ptr; + break; + } + default: break; + } + value = nullptr; + type = BuiltinOptions2_NONE; +} + inline const tflite::Model *GetModel(const void *buf) { - return flatbuffers::GetRoot(buf); + return ::flatbuffers::GetRoot(buf); } inline const tflite::Model *GetSizePrefixedModel(const void *buf) { - return flatbuffers::GetSizePrefixedRoot(buf); + return ::flatbuffers::GetSizePrefixedRoot(buf); } inline const char *ModelIdentifier() { @@ -20541,22 +24803,22 @@ inline const char *ModelIdentifier() { } inline bool ModelBufferHasIdentifier(const void *buf) { - return flatbuffers::BufferHasIdentifier( + return ::flatbuffers::BufferHasIdentifier( buf, ModelIdentifier()); } inline bool SizePrefixedModelBufferHasIdentifier(const void *buf) { - return flatbuffers::BufferHasIdentifier( + return ::flatbuffers::BufferHasIdentifier( buf, ModelIdentifier(), true); } inline bool VerifyModelBuffer( - flatbuffers::Verifier &verifier) { + ::flatbuffers::Verifier &verifier) { return verifier.VerifyBuffer(ModelIdentifier()); } inline bool VerifySizePrefixedModelBuffer( - flatbuffers::Verifier &verifier) { + ::flatbuffers::Verifier &verifier) { return verifier.VerifySizePrefixedBuffer(ModelIdentifier()); } @@ -20565,26 +24827,26 @@ inline const char *ModelExtension() { } inline void FinishModelBuffer( - flatbuffers::FlatBufferBuilder &fbb, - flatbuffers::Offset root) { + ::flatbuffers::FlatBufferBuilder &fbb, + ::flatbuffers::Offset root) { fbb.Finish(root, ModelIdentifier()); } inline void FinishSizePrefixedModelBuffer( - flatbuffers::FlatBufferBuilder &fbb, - flatbuffers::Offset root) { + ::flatbuffers::FlatBufferBuilder &fbb, + ::flatbuffers::Offset root) { fbb.FinishSizePrefixed(root, ModelIdentifier()); } inline std::unique_ptr UnPackModel( const void *buf, - const flatbuffers::resolver_function_t *res = nullptr) { + const ::flatbuffers::resolver_function_t *res = nullptr) { return std::unique_ptr(GetModel(buf)->UnPack(res)); } inline std::unique_ptr UnPackSizePrefixedModel( const void *buf, - const flatbuffers::resolver_function_t *res = nullptr) { + const ::flatbuffers::resolver_function_t *res = nullptr) { return std::unique_ptr(GetSizePrefixedModel(buf)->UnPack(res)); } diff --git a/tensorflow/lite/schema/schema_utils.cc b/tensorflow/lite/schema/schema_utils.cc index fc19290b862..285873de24d 100644 --- a/tensorflow/lite/schema/schema_utils.cc +++ b/tensorflow/lite/schema/schema_utils.cc @@ -21,7 +21,7 @@ limitations under the License. 
 namespace tflite {
 
 // The following GetBuiltinCode methods are the utility methods for reading
-// builtin operatore code, ensuring compatibility issues between v3 and v3a
+// builtin operator code, avoiding compatibility issues between v3 and v3a
 // schema. The maximum value of the two fields will always be the correct
 // value as follows:
 //
@@ -29,7 +29,7 @@ namespace tflite {
 //
 // The `builtin_code` field is not available in the v3 models. Flatbuffer
 // library will feed zero value, which is the default value in the v3a schema.
-// The actual builtin operatore code value will exist in the
+// The actual builtin operator code value will exist in the
 // `deprecated_builtin_code` field. At the same time, it implies that
 // `deprecated_builtin_code` >= `builtin_code` and the maximum value of the two
 // fields will be the same as `deprecated_builtin_code`.
diff --git a/tensorflow/lite/tools/BUILD b/tensorflow/lite/tools/BUILD
index b5073c93a21..e7d51147cb7 100644
--- a/tensorflow/lite/tools/BUILD
+++ b/tensorflow/lite/tools/BUILD
@@ -7,7 +7,7 @@ py_library(
     visibility = ["//:__subpackages__"],
     deps = [
         "@flatbuffers//:runtime_py",
-        requirement("tensorflow-cpu"),
+        requirement("tensorflow"),
         "//tensorflow/lite/python:schema_py",
         "//tensorflow/lite/python:schema_util",
     ],
@@ -19,7 +19,7 @@ py_library(
     srcs_version = "PY3",
     deps = [
         "@flatbuffers//:runtime_py",
-        requirement("tensorflow-cpu"),
+        requirement("tensorflow"),
         "//tensorflow/lite/python:schema_py",
     ],
 )
@@ -56,7 +56,7 @@ py_test(
     deps = [
         ":flatbuffer_utils",
         ":test_utils",
-        requirement("tensorflow-cpu"),
+        requirement("tensorflow"),
     ],
 )
@@ -68,6 +68,6 @@ py_test(
     deps = [
         ":test_utils",
         ":visualize",
-        requirement("tensorflow-cpu"),
+        requirement("tensorflow"),
     ],
 )
diff --git a/tensorflow/lite/tools/flatbuffer_utils.py b/tensorflow/lite/tools/flatbuffer_utils.py
index f448bfa3c61..1a6c61e4b9e 100644
--- a/tensorflow/lite/tools/flatbuffer_utils.py
+++ b/tensorflow/lite/tools/flatbuffer_utils.py
@@ -304,6 +304,27 @@ def byte_swap_buffer_content(buffer, chunksize, from_endiness, to_endiness):
     )
 
 
+def byte_swap_string_content(buffer, from_endiness, to_endiness):
+  """Helper function for byte-swapping the string buffer.
+
+  Args:
+    buffer: TFLite string buffer in from_endiness format.
+    from_endiness: The original endianness format of the string buffer.
+    to_endiness: The destination endianness format of the string buffer.
+  """
+  num_of_strings = int.from_bytes(buffer.data[0:4], from_endiness)
+  string_content = bytearray(buffer.data[4 * (num_of_strings + 2) :])
+  prefix_data = b''.join(
+      [
+          int.from_bytes(buffer.data[i : i + 4], from_endiness).to_bytes(
+              4, to_endiness
+          )
+          for i in range(0, (num_of_strings + 1) * 4 + 1, 4)
+      ]
+  )
+  buffer.data = prefix_data + string_content
+
+
 def byte_swap_tflite_model_obj(model, from_endiness, to_endiness):
   """Byte swaps the buffers field in a TFLite model.
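As an aside on the new byte_swap_string_content() helper above: a TFLite string buffer is an int32 string count, followed by count + 1 int32 offsets (relative to the start of the buffer), followed by the raw string bytes, so only the 4-byte prefix words need swapping. A minimal round-trip sketch under stated assumptions — FakeBuffer is a hypothetical stand-in for the schema's Buffer object, and the import path is assumed:

import struct

from tensorflow.lite.tools.flatbuffer_utils import byte_swap_string_content  # assumed import path


class FakeBuffer:  # hypothetical stand-in for a schema_fb Buffer object
  def __init__(self, data):
    self.data = data


# Little-endian string buffer holding ["ab", "c"]: count = 2, then 3 offsets
# (16, 18, 19) measured from the buffer start, then the 3 payload bytes.
buf = FakeBuffer(struct.pack('<4i', 2, 16, 18, 19) + b'abc')
byte_swap_string_content(buf, 'little', 'big')
# Only the four 4-byte prefix words were swapped; the payload is untouched.
assert buf.data == struct.pack('>4i', 2, 16, 18, 19) + b'abc'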
@@ -341,7 +362,11 @@ def byte_swap_tflite_model_obj(model, from_endiness, to_endiness):
           and tensor.buffer not in buffer_swapped
           and model.buffers[tensor.buffer].data is not None
       ):
-        if tensor.type in types_of_16_bits:
+        if tensor.type == schema_fb.TensorType.STRING:
+          byte_swap_string_content(
+              model.buffers[tensor.buffer], from_endiness, to_endiness
+          )
+        elif tensor.type in types_of_16_bits:
           byte_swap_buffer_content(
               model.buffers[tensor.buffer], 2, from_endiness, to_endiness
           )
diff --git a/tensorflow/lite/tools/randomize_weights.py b/tensorflow/lite/tools/randomize_weights.py
index 2b36fb179be..4baf50ded33 100644
--- a/tensorflow/lite/tools/randomize_weights.py
+++ b/tensorflow/lite/tools/randomize_weights.py
@@ -30,6 +30,15 @@
                           'i.e., to be left unmodified.')
 flags.DEFINE_multi_string(
     'ops_to_skip', [], 'Ops in the TFLite model to be skipped / unmodified.')
+flags.DEFINE_multi_string(
+    'ops_operands_to_skip',
+    [],
+    'Op operand indices in the TFLite model to be skipped / unmodified. It'
+    ' should be specified in the format'
+    ' <op_name>:<operand_index>[,<operand_index>]. For example,'
+    ' TRANSPOSE_CONV:0,2 stands for skipping the TRANSPOSE_CONV operands'
+    ' indexed 0 and 2',
+)
 flags.DEFINE_integer('random_seed', 0, 'Input to the random number generator.')
 
 flags.mark_flag_as_required('input_tflite_file')
@@ -39,15 +48,31 @@ def main(_):
   buffers_to_skip = FLAGS.buffers_to_skip
   ops_to_skip = [op.upper() for op in FLAGS.ops_to_skip]
 
+  ops_operands_to_skip = {}
+  for op_operands_to_skip in FLAGS.ops_operands_to_skip:
+    op_name, indices = op_operands_to_skip.split(':')
+    op_name_upper = op_name.upper()
+    if op_name_upper in ops_operands_to_skip:
+      raise ValueError(
+          'Indices for the same op must be specified only once; found multiple'
+          f' specifications for op {op_name}.'
+      )
+    ops_operands_to_skip[op_name_upper] = list(map(int, indices.split(',')))
+
   model = flatbuffer_utils.read_model(FLAGS.input_tflite_file)
 
-  # Add in buffers for ops in ops_to_skip to the list of skipped buffers.
+  # Add in buffers for ops in ops_to_skip or ops_operands_to_skip to the list of
+  # skipped buffers.
   for graph in model.subgraphs:
     for op in graph.operators:
       op_name = flatbuffer_utils.opcode_to_name(model, op.opcodeIndex)
-      if op_name.upper() in ops_to_skip:
+      op_name_upper = op_name.upper()
+      if op_name_upper in ops_to_skip:
         for input_idx in op.inputs:
           buffers_to_skip.append(graph.tensors[input_idx].buffer)
+      if op_name_upper in ops_operands_to_skip:
+        for operand_idx in ops_operands_to_skip[op_name_upper]:
+          buffers_to_skip.append(graph.tensors[op.inputs[operand_idx]].buffer)
 
   flatbuffer_utils.randomize_weights(model, FLAGS.random_seed, FLAGS.buffers_to_skip)
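For clarity on the new --ops_operands_to_skip flag above, here is a minimal sketch of the value format its parsing loop accepts; the FULLY_CONNECTED entry is only an illustrative second spec, not something from the patch:

# Each spec is <op_name>:<comma-separated operand indices>; an op may appear
# at most once across all specs, otherwise the script raises ValueError.
ops_operands_to_skip = {}
for spec in ['TRANSPOSE_CONV:0,2', 'FULLY_CONNECTED:1']:
  op_name, indices = spec.split(':')
  ops_operands_to_skip[op_name.upper()] = list(map(int, indices.split(',')))
assert ops_operands_to_skip == {'TRANSPOSE_CONV': [0, 2], 'FULLY_CONNECTED': [1]}

Given TRANSPOSE_CONV:0,2, the loop over the model's operators then leaves the buffers backing op.inputs[0] and op.inputs[2] of every TRANSPOSE_CONV operator unmodified while the remaining weights are randomized.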
diff --git a/third_party/flatbuffers/BUILD.external b/third_party/flatbuffers/BUILD.oss
similarity index 86%
rename from third_party/flatbuffers/BUILD.external
rename to third_party/flatbuffers/BUILD.oss
index dfd3a16966e..8f9d4ebe346 100644
--- a/third_party/flatbuffers/BUILD.external
+++ b/third_party/flatbuffers/BUILD.oss
@@ -2,30 +2,23 @@ load(":build_defs.bzl", "flatbuffer_py_strip_prefix_srcs")
 
 package(default_visibility = ["//visibility:public"])
 
-licenses(["notice"])
+licenses(["notice"])  # Apache 2.0
 
-exports_files(["LICENSE.txt"])
-
-licenses(["notice"])
+exports_files(["LICENSE"])
 
 config_setting(
-    name = "freebsd",
+    name = "platform_freebsd",
     values = {"cpu": "freebsd"},
 )
 
-config_setting(
-    name = "windows",
-    values = {"cpu": "x64_windows"},
-)
-
 config_setting(
     name = "platform_openbsd",
     values = {"cpu": "openbsd"},
 )
 
 config_setting(
-    name = "platform_freebsd",
-    values = {"cpu": "freebsd"},
+    name = "windows",
+    values = {"cpu": "x64_windows"},
 )
 
 load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_library")
@@ -47,12 +40,13 @@ filegroup(
         "include/flatbuffers/allocator.h",
         "include/flatbuffers/array.h",
         "include/flatbuffers/base.h",
-        "include/flatbuffers/bfbs_generator.h",
         "include/flatbuffers/buffer.h",
         "include/flatbuffers/buffer_ref.h",
+        "include/flatbuffers/code_generator.h",
         "include/flatbuffers/code_generators.h",
         "include/flatbuffers/default_allocator.h",
         "include/flatbuffers/detached_buffer.h",
+        "include/flatbuffers/file_manager.h",
         "include/flatbuffers/flatbuffer_builder.h",
         "include/flatbuffers/flatbuffers.h",
         "include/flatbuffers/flex_flat_util.h",
@@ -73,7 +67,7 @@ filegroup(
         "include/flatbuffers/vector_downward.h",
         "include/flatbuffers/verifier.h",
     ],
-    visibility = ["//:__subpackages__"],
+    visibility = ["//visibility:public"],
 )
 
 # Public flatc compiler library.
@@ -90,9 +84,11 @@ cc_library(
 cc_binary(
     name = "flatc",
     linkopts = select({
-        ":freebsd": [
+        ":platform_freebsd": [
            "-lm",
        ],
+        # If Visual Studio 2022 developers face linking errors,
+        # change the line below to ":windows": ["/DEFAULTLIB:msvcrt.lib"],
         ":windows": [],
         "//conditions:default": [
             "-lm",
         ],
     }),
@@ -110,7 +106,7 @@ filegroup(
     srcs = [
         "include/flatbuffers/flatc.h",
     ],
-    visibility = ["//:__subpackages__"],
+    visibility = ["//visibility:public"],
 )
 
 # Library used by flatbuffer_cc_library rules.
@@ -120,22 +116,13 @@ cc_library(
         "include/flatbuffers/allocator.h",
         "include/flatbuffers/array.h",
         "include/flatbuffers/base.h",
-        "include/flatbuffers/bfbs_generator.h",
         "include/flatbuffers/buffer.h",
         "include/flatbuffers/buffer_ref.h",
-        "include/flatbuffers/code_generators.h",
         "include/flatbuffers/default_allocator.h",
         "include/flatbuffers/detached_buffer.h",
         "include/flatbuffers/flatbuffer_builder.h",
         "include/flatbuffers/flatbuffers.h",
         "include/flatbuffers/flexbuffers.h",
-        "include/flatbuffers/grpc.h",
-        "include/flatbuffers/hash.h",
-        "include/flatbuffers/idl.h",
-        "include/flatbuffers/minireflect.h",
-        "include/flatbuffers/reflection.h",
-        "include/flatbuffers/reflection_generated.h",
-        "include/flatbuffers/registry.h",
         "include/flatbuffers/stl_emulation.h",
         "include/flatbuffers/string.h",
         "include/flatbuffers/struct.h",
@@ -187,4 +174,4 @@ py_library(
     name = "runtime_py",
     srcs = [":runtime_py_srcs"],
     visibility = ["//visibility:public"],
-)
\ No newline at end of file
+)
diff --git a/third_party/flatbuffers/build_defs.bzl b/third_party/flatbuffers/build_defs.bzl
index b7f98b96763..b26381e941b 100644
--- a/third_party/flatbuffers/build_defs.bzl
+++ b/third_party/flatbuffers/build_defs.bzl
@@ -1,5 +1,7 @@
 """BUILD rules for generating flatbuffer files."""
 
+load("@rules_python//python:defs.bzl", "py_library")
+
 flatc_path = "@flatbuffers//:flatc"
 zip_files = "//tensorflow/lite/tools:zip_files"
 
@@ -277,6 +279,11 @@ def _gen_flatbuffer_srcs_impl(ctx):
     else:
         no_includes_statement = []
 
+    if ctx.attr.language_flag == "--python":
+        onefile_statement = ["--gen-onefile"]
+    else:
+        onefile_statement = []
+
     # Need to generate all files in a directory.
     if not outputs:
         outputs = [ctx.actions.declare_directory("{}_all".format(ctx.attr.name))]
@@ -312,6 +319,7 @@ def _gen_flatbuffer_srcs_impl(ctx):
                 "-I",
                 ctx.bin_dir.path,
             ] + no_includes_statement +
+            onefile_statement +
            include_paths_cmd_line + [
                 "--no-union-value-namespacing",
                 "--gen-object-api",
@@ -357,7 +365,6 @@ _gen_flatbuffer_srcs = rule(
             cfg = "exec",
         ),
     },
-    output_to_genfiles = True,
 )
 
 def flatbuffer_py_strip_prefix_srcs(name, srcs = [], strip_prefix = ""):
@@ -392,6 +399,7 @@ def _concat_flatbuffer_py_srcs_impl(ctx):
             ctx.attr.deps[0].files.to_list()[0].path,
             ctx.outputs.out.path,
         ),
+        use_default_shell_env = True,
     )
 
 _concat_flatbuffer_py_srcs = rule(
@@ -399,7 +407,6 @@ def _concat_flatbuffer_py_srcs_impl(ctx):
     attrs = {
         "deps": attr.label_list(mandatory = True),
     },
-    output_to_genfiles = True,
     outputs = {"out": "%{name}.py"},
 )
 
@@ -430,6 +437,8 @@ def flatbuffer_py_library(
         deps = deps,
         include_paths = include_paths,
     )
+
+    # TODO(b/235550563): Remove the concatenation rule with the 2.0.6 update.
all_srcs_no_include = "{}_srcs_no_include".format(name) _gen_flatbuffer_srcs( name = all_srcs_no_include, @@ -446,7 +455,7 @@ def flatbuffer_py_library( ":{}".format(all_srcs_no_include), ], ) - native.py_library( + py_library( name = name, srcs = [ ":{}".format(concat_py_srcs), diff --git a/third_party/flatbuffers/workspace.bzl b/third_party/flatbuffers/workspace.bzl index e799a708945..812ade7c226 100644 --- a/third_party/flatbuffers/workspace.bzl +++ b/third_party/flatbuffers/workspace.bzl @@ -5,12 +5,12 @@ load("//third_party:repo.bzl", "tf_http_archive") def repo(): tf_http_archive( name = "flatbuffers", - strip_prefix = "flatbuffers-a66de58af9565586832c276fbb4251fc416bf07f", - sha256 = "da06ac2fc6fed8e38b6392f5a20fa24a4290cecaadd87aef16b6b84960408680", + strip_prefix = "flatbuffers-23.5.26", + sha256 = "1cce06b17cddd896b6d73cc047e36a254fb8df4d7ea18a46acf16c4c0cd3f3f3", urls = [ - "https://github.com/google/flatbuffers/archive/a66de58af9565586832c276fbb4251fc416bf07f.tar.gz", + "https://github.com/google/flatbuffers/archive/v23.5.26.tar.gz", ], - build_file = "//third_party/flatbuffers:BUILD.external", + build_file = "//third_party/flatbuffers:BUILD.oss", system_build_file = "//third_party/flatbuffers:BUILD.system", link_files = { "//third_party/flatbuffers:build_defs.bzl": "build_defs.bzl", diff --git a/third_party/hexagon/fully_connected.cc b/third_party/hexagon/fully_connected.cc index c27c238003c..99ee1f3c09d 100644 --- a/third_party/hexagon/fully_connected.cc +++ b/third_party/hexagon/fully_connected.cc @@ -129,4 +129,8 @@ TFLMRegistration Register_FULLY_CONNECTED() { HexagonFullyConnectedEval); } +TFLMInferenceRegistration RegisterInference_FULLY_CONNECTED() { + return tflite::micro::RegisterOp(HexagonFullyConnectedEval); +} + } // namespace tflite diff --git a/third_party/python_requirements.in b/third_party/python_requirements.in index 361431133a4..1b387b628f2 100644 --- a/third_party/python_requirements.in +++ b/third_party/python_requirements.in @@ -26,7 +26,8 @@ # is sensitive to the Python environment (interpreter version, etc.) in which # it is run. 
-tensorflow-cpu +tensorflow +twine numpy mako pillow diff --git a/third_party/python_requirements.txt b/third_party/python_requirements.txt index d0021b02055..f3e68a5e6c8 100644 --- a/third_party/python_requirements.txt +++ b/third_party/python_requirements.txt @@ -1,342 +1,517 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: # # bazel run //third_party:python_requirements.update # -absl-py==1.4.0 \ - --hash=sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47 \ - --hash=sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d +absl-py==2.0.0 \ + --hash=sha256:9a28abb62774ae4e8edbe2dd4c49ffcd45a6a848952a5eccc6a49f3f0fc1e2f3 \ + --hash=sha256:d9690211c5fcfefcdd1a45470ac2b5c5acd45241c3af71eed96bc5441746c0d5 # via # tensorboard - # tensorflow-cpu + # tensorflow astunparse==1.6.3 \ --hash=sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872 \ --hash=sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8 - # via tensorflow-cpu -cachetools==5.3.0 \ - --hash=sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14 \ - --hash=sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4 + # via tensorflow +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth -certifi==2023.5.7 \ - --hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \ - --hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716 +certifi==2023.11.17 \ + --hash=sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1 \ + --hash=sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474 # via requests -charset-normalizer==3.1.0 \ - --hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \ - --hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \ - --hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \ - --hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \ - --hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \ - --hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \ - --hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \ - --hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \ - --hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \ - --hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \ - --hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \ - --hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \ - --hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \ - --hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \ - --hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \ - --hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \ - --hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \ - --hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \ - --hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \ - 
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \ - --hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \ - --hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \ - --hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \ - --hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \ - --hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \ - --hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \ - --hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \ - --hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \ - --hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \ - --hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \ - --hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \ - --hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \ - --hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \ - --hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \ - --hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \ - --hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \ - --hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \ - --hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \ - --hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \ - --hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \ - --hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \ - --hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \ - --hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \ - --hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \ - --hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \ - --hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \ - --hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \ - --hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \ - --hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \ - --hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \ - --hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \ - --hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \ - --hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \ - --hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \ - --hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \ - --hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \ - --hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \ - --hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \ - --hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \ - --hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \ - --hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \ - 
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \ - --hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \ - --hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \ - --hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \ - --hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \ - --hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \ - --hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \ - --hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \ - --hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \ - --hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \ - --hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \ - --hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \ - --hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \ - --hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab +cffi==1.16.0 \ + --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + 
--hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 + # via cryptography +charset-normalizer==3.3.2 \ + --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ + --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ + --hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ + --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ + --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ + --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ + --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ + --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ + --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ + --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ + --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ + --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ + --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ + --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ + --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ + --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ + --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ + --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ + 
--hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ + --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ + --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ + --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ + --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ + --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ + --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ + --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ + --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ + --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ + --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ + --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ + --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ + --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ + --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ + --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ + --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ + --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ + --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ + --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ + --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ + --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ + --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ + --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ + --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ + --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ + --hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ + --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ + --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ + --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ + --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ + --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ + --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ + --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ + --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ + --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ + --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ + --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ + --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ + --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ + --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ + --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ + 
--hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ + --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ + --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ + --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ + --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ + --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ + --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ + --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ + --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ + --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ + --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ + --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ + --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ + --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ + --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ + --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ + --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ + --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ + --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ + --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ + --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ + --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ + --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ + --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ + --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ + --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ + --hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ + --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ + --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ + --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 # via requests -flatbuffers==23.5.9 \ - --hash=sha256:93a506b6ab771c79ce816e7b35a93ed08ec5b4c9edb811101a22c44a4152f018 \ - --hash=sha256:a02eb8c2d61cba153cd211937de8f8f7764b6a7510971b2c4684ed8b02e6e571 - # via tensorflow-cpu -gast==0.4.0 \ - --hash=sha256:40feb7b8b8434785585ab224d1568b857edb18297e5a3047f1ba012bc83b42c1 \ - --hash=sha256:b7adcdd5adbebf1adf17378da5ba3f543684dbec47b1cda1f3997e573cd542c4 - # via tensorflow-cpu -google-auth==2.18.0 \ - --hash=sha256:c66b488a8b005b23ccb97b1198b6cece516c91869091ac5b7c267422db2733c7 \ - --hash=sha256:ef3f3a67fa54d421a1c155864570f9a8de9179cedc937bda496b7a8ca338e936 +cryptography==41.0.7 \ + --hash=sha256:079b85658ea2f59c4f43b70f8119a52414cdb7be34da5d019a77bf96d473b960 \ + --hash=sha256:09616eeaef406f99046553b8a40fbf8b1e70795a91885ba4c96a70793de5504a \ + --hash=sha256:13f93ce9bea8016c253b34afc6bd6a75993e5c40672ed5405a9c832f0d4a00bc \ + --hash=sha256:37a138589b12069efb424220bf78eac59ca68b95696fc622b6ccc1c0a197204a \ + --hash=sha256:3c78451b78313fa81607fa1b3f1ae0a5ddd8014c38a02d9db0616133987b9cdf \ + 
--hash=sha256:43f2552a2378b44869fe8827aa19e69512e3245a219104438692385b0ee119d1 \ + --hash=sha256:48a0476626da912a44cc078f9893f292f0b3e4c739caf289268168d8f4702a39 \ + --hash=sha256:49f0805fc0b2ac8d4882dd52f4a3b935b210935d500b6b805f321addc8177406 \ + --hash=sha256:5429ec739a29df2e29e15d082f1d9ad683701f0ec7709ca479b3ff2708dae65a \ + --hash=sha256:5a1b41bc97f1ad230a41657d9155113c7521953869ae57ac39ac7f1bb471469a \ + --hash=sha256:68a2dec79deebc5d26d617bfdf6e8aab065a4f34934b22d3b5010df3ba36612c \ + --hash=sha256:7a698cb1dac82c35fcf8fe3417a3aaba97de16a01ac914b89a0889d364d2f6be \ + --hash=sha256:841df4caa01008bad253bce2a6f7b47f86dc9f08df4b433c404def869f590a15 \ + --hash=sha256:90452ba79b8788fa380dfb587cca692976ef4e757b194b093d845e8d99f612f2 \ + --hash=sha256:928258ba5d6f8ae644e764d0f996d61a8777559f72dfeb2eea7e2fe0ad6e782d \ + --hash=sha256:af03b32695b24d85a75d40e1ba39ffe7db7ffcb099fe507b39fd41a565f1b157 \ + --hash=sha256:b640981bf64a3e978a56167594a0e97db71c89a479da8e175d8bb5be5178c003 \ + --hash=sha256:c5ca78485a255e03c32b513f8c2bc39fedb7f5c5f8535545bdc223a03b24f248 \ + --hash=sha256:c7f3201ec47d5207841402594f1d7950879ef890c0c495052fa62f58283fde1a \ + --hash=sha256:d5ec85080cce7b0513cfd233914eb8b7bbd0633f1d1703aa28d1dd5a72f678ec \ + --hash=sha256:d6c391c021ab1f7a82da5d8d0b3cee2f4b2c455ec86c8aebbc84837a631ff309 \ + --hash=sha256:e3114da6d7f95d2dee7d3f4eec16dacff819740bbab931aff8648cb13c5ff5e7 \ + --hash=sha256:f983596065a18a2183e7f79ab3fd4c475205b839e02cbc0efbbf9666c4b3083d + # via secretstorage +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b + # via readme-renderer +flatbuffers==23.5.26 \ + --hash=sha256:9ea1144cac05ce5d86e2859f431c6cd5e66cd9c78c558317c7955fb8d4c78d89 \ + --hash=sha256:c0ff356da363087b915fde4b8b45bdda73432fc17cddb3c8157472eab1422ad1 + # via tensorflow +gast==0.5.4 \ + --hash=sha256:6fc4fa5fa10b72fb8aab4ae58bcb023058386e67b6fa2e3e34cec5c769360316 \ + --hash=sha256:9c270fe5f4b130969b54174de7db4e764b09b4f7f67ccfc32480e29f78348d97 + # via tensorflow +google-auth==2.26.2 \ + --hash=sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424 \ + --hash=sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81 # via # google-auth-oauthlib # tensorboard -google-auth-oauthlib==1.0.0 \ - --hash=sha256:95880ca704928c300f48194d1770cf5b1462835b6e49db61445a520f793fd5fb \ - --hash=sha256:e375064964820b47221a7e1b7ee1fd77051b6323c3f9e3e19785f78ab67ecfc5 +google-auth-oauthlib==1.2.0 \ + --hash=sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8 \ + --hash=sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf # via tensorboard google-pasta==0.2.0 \ --hash=sha256:4612951da876b1a10fe3960d7226f0c7682cf901e16ac06e473b267a5afa8954 \ --hash=sha256:b32482794a366b5366a32c92a9a9201b107821889935a02b3e51f6b432ea84ed \ --hash=sha256:c9f2c8dfc8f96d0d5808299920721be30c9eec37f2389f28904f454565c8a16e - # via tensorflow-cpu -grpcio==1.54.0 \ - --hash=sha256:02000b005bc8b72ff50c477b6431e8886b29961159e8b8d03c00b3dd9139baed \ - --hash=sha256:031bbd26656e0739e4b2c81c172155fb26e274b8d0312d67aefc730bcba915b6 \ - --hash=sha256:1209d6b002b26e939e4c8ea37a3d5b4028eb9555394ea69fb1adbd4b61a10bb8 \ - --hash=sha256:125ed35aa3868efa82eabffece6264bf638cfdc9f0cd58ddb17936684aafd0f8 \ - --hash=sha256:1382bc499af92901c2240c4d540c74eae8a671e4fe9839bfeefdfcc3a106b5e2 \ - 
--hash=sha256:16bca8092dd994f2864fdab278ae052fad4913f36f35238b2dd11af2d55a87db \ - --hash=sha256:1c59d899ee7160638613a452f9a4931de22623e7ba17897d8e3e348c2e9d8d0b \ - --hash=sha256:1d109df30641d050e009105f9c9ca5a35d01e34d2ee2a4e9c0984d392fd6d704 \ - --hash=sha256:1fa7d6ddd33abbd3c8b3d7d07c56c40ea3d1891ce3cd2aa9fa73105ed5331866 \ - --hash=sha256:21c4a1aae861748d6393a3ff7867473996c139a77f90326d9f4104bebb22d8b8 \ - --hash=sha256:224166f06ccdaf884bf35690bf4272997c1405de3035d61384ccb5b25a4c1ca8 \ - --hash=sha256:2262bd3512ba9e9f0e91d287393df6f33c18999317de45629b7bd46c40f16ba9 \ - --hash=sha256:2585b3c294631a39b33f9f967a59b0fad23b1a71a212eba6bc1e3ca6e6eec9ee \ - --hash=sha256:27fb030a4589d2536daec5ff5ba2a128f4f155149efab578fe2de2cb21596d3d \ - --hash=sha256:30fbbce11ffeb4f9f91c13fe04899aaf3e9a81708bedf267bf447596b95df26b \ - --hash=sha256:3930669c9e6f08a2eed824738c3d5699d11cd47a0ecc13b68ed11595710b1133 \ - --hash=sha256:3b170e441e91e4f321e46d3cc95a01cb307a4596da54aca59eb78ab0fc03754d \ - --hash=sha256:3db71c6f1ab688d8dfc102271cedc9828beac335a3a4372ec54b8bf11b43fd29 \ - --hash=sha256:48cb7af77238ba16c77879009003f6b22c23425e5ee59cb2c4c103ec040638a5 \ - --hash=sha256:49eace8ea55fbc42c733defbda1e4feb6d3844ecd875b01bb8b923709e0f5ec8 \ - --hash=sha256:533eaf5b2a79a3c6f35cbd6a095ae99cac7f4f9c0e08bdcf86c130efd3c32adf \ - --hash=sha256:5942a3e05630e1ef5b7b5752e5da6582460a2e4431dae603de89fc45f9ec5aa9 \ - --hash=sha256:62117486460c83acd3b5d85c12edd5fe20a374630475388cfc89829831d3eb79 \ - --hash=sha256:650f5f2c9ab1275b4006707411bb6d6bc927886874a287661c3c6f332d4c068b \ - --hash=sha256:6dc1e2c9ac292c9a484ef900c568ccb2d6b4dfe26dfa0163d5bc815bb836c78d \ - --hash=sha256:73c238ef6e4b64272df7eec976bb016c73d3ab5a6c7e9cd906ab700523d312f3 \ - --hash=sha256:775a2f70501370e5ba54e1ee3464413bff9bd85bd9a0b25c989698c44a6fb52f \ - --hash=sha256:860fcd6db7dce80d0a673a1cc898ce6bc3d4783d195bbe0e911bf8a62c93ff3f \ - --hash=sha256:87f47bf9520bba4083d65ab911f8f4c0ac3efa8241993edd74c8dd08ae87552f \ - --hash=sha256:960b176e0bb2b4afeaa1cd2002db1e82ae54c9b6e27ea93570a42316524e77cf \ - --hash=sha256:a7caf553ccaf715ec05b28c9b2ab2ee3fdb4036626d779aa09cf7cbf54b71445 \ - --hash=sha256:a947d5298a0bbdd4d15671024bf33e2b7da79a70de600ed29ba7e0fef0539ebb \ - --hash=sha256:a97b0d01ae595c997c1d9d8249e2d2da829c2d8a4bdc29bb8f76c11a94915c9a \ - --hash=sha256:b7655f809e3420f80ce3bf89737169a9dce73238af594049754a1128132c0da4 \ - --hash=sha256:c33744d0d1a7322da445c0fe726ea6d4e3ef2dfb0539eadf23dce366f52f546c \ - --hash=sha256:c55a9cf5cba80fb88c850915c865b8ed78d5e46e1f2ec1b27692f3eaaf0dca7e \ - --hash=sha256:d2f62fb1c914a038921677cfa536d645cb80e3dd07dc4859a3c92d75407b90a5 \ - --hash=sha256:d8ae6e0df3a608e99ee1acafaafd7db0830106394d54571c1ece57f650124ce9 \ - --hash=sha256:e355ee9da9c1c03f174efea59292b17a95e0b7b4d7d2a389265f731a9887d5a9 \ - --hash=sha256:e3e526062c690517b42bba66ffe38aaf8bc99a180a78212e7b22baa86902f690 \ - --hash=sha256:eb0807323572642ab73fd86fe53d88d843ce617dd1ddf430351ad0759809a0ae \ - --hash=sha256:ebff0738be0499d7db74d20dca9f22a7b27deae31e1bf92ea44924fd69eb6251 \ - --hash=sha256:ed36e854449ff6c2f8ee145f94851fe171298e1e793f44d4f672c4a0d78064e7 \ - --hash=sha256:ed3d458ded32ff3a58f157b60cc140c88f7ac8c506a1c567b2a9ee8a2fd2ce54 \ - --hash=sha256:f4a7dca8ccd8023d916b900aa3c626f1bd181bd5b70159479b142f957ff420e4 + # via tensorflow +grpcio==1.60.0 \ + --hash=sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6 \ + --hash=sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328 \ + 
--hash=sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead \ + --hash=sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5 \ + --hash=sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491 \ + --hash=sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96 \ + --hash=sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444 \ + --hash=sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951 \ + --hash=sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf \ + --hash=sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253 \ + --hash=sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629 \ + --hash=sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae \ + --hash=sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43 \ + --hash=sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b \ + --hash=sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14 \ + --hash=sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab \ + --hash=sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390 \ + --hash=sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2 \ + --hash=sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0 \ + --hash=sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590 \ + --hash=sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508 \ + --hash=sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b \ + --hash=sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08 \ + --hash=sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13 \ + --hash=sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca \ + --hash=sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03 \ + --hash=sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748 \ + --hash=sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860 \ + --hash=sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d \ + --hash=sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353 \ + --hash=sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e \ + --hash=sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c \ + --hash=sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134 \ + --hash=sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415 \ + --hash=sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320 \ + --hash=sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179 \ + --hash=sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324 \ + --hash=sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18 \ + --hash=sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df \ + --hash=sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e \ + --hash=sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b \ + --hash=sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6 \ + --hash=sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d \ + --hash=sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff \ + 
--hash=sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968 \ + --hash=sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619 \ + --hash=sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139 \ + --hash=sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55 \ + --hash=sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454 \ + --hash=sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65 \ + --hash=sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a \ + --hash=sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19 \ + --hash=sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b \ + --hash=sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd # via # tensorboard - # tensorflow-cpu -h5py==3.8.0 \ - --hash=sha256:03890b1c123d024fb0239a3279737d5432498c1901c354f8b10d8221d1d16235 \ - --hash=sha256:0fef76e10b9216657fa37e7edff6d8be0709b25bd5066474c229b56cf0098df9 \ - --hash=sha256:26ffc344ec9984d2cd3ca0265007299a8bac8d85c1ad48f4639d8d3aed2af171 \ - --hash=sha256:290e00fa2de74a10688d1bac98d5a9cdd43f14f58e562c580b5b3dfbd358ecae \ - --hash=sha256:33b15aae79e9147aebe1d0e54099cbcde8d65e3e227cd5b59e49b1272aa0e09d \ - --hash=sha256:36761693efbe53df179627a775476dcbc37727d6e920958277a7efbc18f1fb73 \ - --hash=sha256:377865821fe80ad984d003723d6f8890bd54ceeb5981b43c0313b9df95411b30 \ - --hash=sha256:49bc857635f935fa30e92e61ac1e87496df8f260a6945a3235e43a9890426866 \ - --hash=sha256:4a506fc223def428f4329e7e1f9fe1c8c593eab226e7c0942c8d75308ad49950 \ - --hash=sha256:533d7dad466ddb7e3b30af274b630eb7c1a6e4ddf01d1c373a0334dc2152110a \ - --hash=sha256:5fd2252d1fc364ba0e93dd0b7089f4906b66805cb4e6aca7fa8874ac08649647 \ - --hash=sha256:6fead82f0c4000cf38d53f9c030780d81bfa0220218aee13b90b7701c937d95f \ - --hash=sha256:7f3350fc0a8407d668b13247861c2acd23f7f5fe7d060a3ad9b0820f5fcbcae0 \ - --hash=sha256:8f55d9c6c84d7d09c79fb85979e97b81ec6071cc776a97eb6b96f8f6ec767323 \ - --hash=sha256:98a240cd4c1bfd568aaa52ec42d263131a2582dab82d74d3d42a0d954cac12be \ - --hash=sha256:9f6f6ffadd6bfa9b2c5b334805eb4b19ca0a5620433659d8f7fb86692c40a359 \ - --hash=sha256:b685453e538b2b5934c58a644ac3f3b3d0cec1a01b6fb26d57388e9f9b674ad0 \ - --hash=sha256:b7865de06779b14d98068da387333ad9bf2756b5b579cc887fac169bc08f87c3 \ - --hash=sha256:bacaa1c16810dd2b3e4417f8e730971b7c4d53d234de61fe4a918db78e80e1e4 \ - --hash=sha256:bae730580ae928de409d63cbe4fdca4c82c3ad2bed30511d19d34e995d63c77e \ - --hash=sha256:c3389b63222b1c7a158bb7fe69d11ca00066740ec5574596d47a2fe5317f563a \ - --hash=sha256:c873ba9fd4fa875ad62ce0e4891725e257a8fe7f5abdbc17e51a5d54819be55c \ - --hash=sha256:db03e3f2c716205fbdabb34d0848459840585225eb97b4f08998c743821ca323 \ - --hash=sha256:f47f757d1b76f0ecb8aa0508ec8d1b390df67a8b67ee2515dc1b046f3a1596ea \ - --hash=sha256:f891b17e3a3e974e93f9e34e7cca9f530806543571ce078998676a555837d91d - # via tensorflow-cpu -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 + # tensorflow +h5py==3.10.0 \ + --hash=sha256:012ab448590e3c4f5a8dd0f3533255bc57f80629bf7c5054cf4c87b30085063c \ + --hash=sha256:212bb997a91e6a895ce5e2f365ba764debeaef5d2dca5c6fb7098d66607adf99 \ + --hash=sha256:2381e98af081b6df7f6db300cd88f88e740649d77736e4b53db522d8874bf2dc \ + --hash=sha256:2c8e4fda19eb769e9a678592e67eaec3a2f069f7570c82d2da909c077aa94339 \ + 
--hash=sha256:3074ec45d3dc6e178c6f96834cf8108bf4a60ccb5ab044e16909580352010a97 \ + --hash=sha256:3c97d03f87f215e7759a354460fb4b0d0f27001450b18b23e556e7856a0b21c3 \ + --hash=sha256:43a61b2c2ad65b1fabc28802d133eed34debcc2c8b420cb213d3d4ef4d3e2229 \ + --hash=sha256:492305a074327e8d2513011fa9fffeb54ecb28a04ca4c4227d7e1e9616d35641 \ + --hash=sha256:5dfc65ac21fa2f630323c92453cadbe8d4f504726ec42f6a56cf80c2f90d6c52 \ + --hash=sha256:667fe23ab33d5a8a6b77970b229e14ae3bb84e4ea3382cc08567a02e1499eedd \ + --hash=sha256:6c013d2e79c00f28ffd0cc24e68665ea03ae9069e167087b2adb5727d2736a52 \ + --hash=sha256:781a24263c1270a62cd67be59f293e62b76acfcc207afa6384961762bb88ea03 \ + --hash=sha256:86df4c2de68257b8539a18646ceccdcf2c1ce6b1768ada16c8dcfb489eafae20 \ + --hash=sha256:90286b79abd085e4e65e07c1bd7ee65a0f15818ea107f44b175d2dfe1a4674b7 \ + --hash=sha256:92273ce69ae4983dadb898fd4d3bea5eb90820df953b401282ee69ad648df684 \ + --hash=sha256:93dd840bd675787fc0b016f7a05fc6efe37312a08849d9dd4053fd0377b1357f \ + --hash=sha256:9450464b458cca2c86252b624279115dcaa7260a40d3cb1594bf2b410a2bd1a3 \ + --hash=sha256:ae2f0201c950059676455daf92700eeb57dcf5caaf71b9e1328e6e6593601770 \ + --hash=sha256:aece0e2e1ed2aab076c41802e50a0c3e5ef8816d60ece39107d68717d4559824 \ + --hash=sha256:b963fb772964fc1d1563c57e4e2e874022ce11f75ddc6df1a626f42bd49ab99f \ + --hash=sha256:ba9ab36be991119a3ff32d0c7cbe5faf9b8d2375b5278b2aea64effbeba66039 \ + --hash=sha256:d4682b94fd36ab217352be438abd44c8f357c5449b8995e63886b431d260f3d3 \ + --hash=sha256:d93adc48ceeb33347eb24a634fb787efc7ae4644e6ea4ba733d099605045c049 \ + --hash=sha256:f42e6c30698b520f0295d70157c4e202a9e402406f50dc08f5a7bc416b24e52d \ + --hash=sha256:fd6f6d1384a9f491732cee233b99cd4bfd6e838a8815cc86722f9d2ee64032af + # via tensorflow +idna==3.6 \ + --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \ + --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f # via requests -jax==0.4.9 \ - --hash=sha256:1ed135cd08f48e4baf10f6eafdb4a4cdae781f9052b5838c09c91a9f4fa75f09 - # via tensorflow-cpu -keras==2.12.0 \ - --hash=sha256:35c39534011e909645fb93515452e98e1a0ce23727b55d4918b9c58b2308c15e - # via tensorflow-cpu -libclang==16.0.0 \ - --hash=sha256:2adce42ae652f312245b8f4eda6f30b4076fb61f7619f2dfd0a0c31dee4c32b9 \ - --hash=sha256:65258a6bb3e7dc31dc9b26f8d42f53c9d3b959643ade291fcd1aef4855303ca6 \ - --hash=sha256:7b6686b67a0daa84b4c614bcc119578329fc4fbb52b919565b7376b507c4793b \ - --hash=sha256:a043138caaf2cb076ebb060c6281ec95612926645d425c691991fc9df00e8a24 \ - --hash=sha256:af55a4aa86fdfe6b2ec68bc8cfe5fdac6c448d591ca7648be86ca17099b41ca8 \ - --hash=sha256:bf4628fc4da7a1dd06a244f9b8e121c5ec68076a763c59d6b13cbb103acc935b \ - --hash=sha256:eb59652cb0559c0e71784ff4c8ba24c14644becc907b1446563ecfaa622d523b \ - --hash=sha256:ee20bf93e3dd330f71fc50cdbf13b92ced0aec8e540be64251db53502a9b33f7 - # via tensorflow-cpu -mako==1.2.4 \ - --hash=sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818 \ - --hash=sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34 +importlib-metadata==7.0.1 \ + --hash=sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e \ + --hash=sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc + # via + # keyring + # twine + # yapf +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 + # via keyring +jeepney==0.8.0 \ + 
--hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +keras==2.15.0 \ + --hash=sha256:2dcc6d2e30cf9c951064b63c1f4c404b966c59caf09e01f3549138ec8ee0dd1f \ + --hash=sha256:81871d298c064dc4ac6b58440fdae67bfcf47c8d7ad28580fab401834c06a575 + # via tensorflow +keyring==24.3.0 \ + --hash=sha256:4446d35d636e6a10b8bce7caa66913dd9eca5fd222ca03a3d42c38608ac30836 \ + --hash=sha256:e730ecffd309658a08ee82535a3b5ec4b4c8669a9be11efb66249d8e0aeb9a25 + # via twine +libclang==16.0.6 \ + --hash=sha256:1e940048f51d0b0999099a9b78629ab8a64b62af5e9ff1b2b062439c21ee244d \ + --hash=sha256:4a9acbfd9c135a72f80d5dbff7588dfb0c81458244a89b9e83526e8595880e0a \ + --hash=sha256:4acdde39dfe410c877b4ccc0d4b57eb952100e4ee26bbdf6cfdb88e2033a7d31 \ + --hash=sha256:8130482120500476a027171f8f3c8dfc2536b591716eea71fc5da22cae13131b \ + --hash=sha256:88bc7e7b393c32e41e03ba77ef02fdd647da1f764c2cd028e69e0837080b79f6 \ + --hash=sha256:9dcdc730939788b8b69ffd6d5d75fe5366e3ee007f1e36a99799ec0b0c001492 \ + --hash=sha256:d80ed5827736ed5ec2bcedf536720476fd9d4fa4c79ef0cb24aea4c59332f361 \ + --hash=sha256:da9e47ebc3f0a6d90fb169ef25f9fbcd29b4a4ef97a8b0e3e3a17800af1423f4 \ + --hash=sha256:daab4a11dae228f1efa9efa3fe638b493b14d8d52c71fb3c7019e2f1df4514c2 \ + --hash=sha256:e1a5ad1e895e5443e205568c85c04b4608e4e973dae42f4dfd9cb46c81d1486b \ + --hash=sha256:f04e3060ae1f207f234d0608900c99c50edcb743e5e18276d78da2ddd727d39f + # via tensorflow +mako==1.3.0 \ + --hash=sha256:57d4e997349f1a92035aa25c17ace371a4213f2ca42f99bee9a602500cfd54d9 \ + --hash=sha256:e3a9d388fd00e87043edbe8792f45880ac0114e9c4adc69f6e9bfb2c55e3b11b # via -r third_party/python_requirements.in -markdown==3.4.3 \ - --hash=sha256:065fd4df22da73a625f14890dd77eb8040edcbd68794bcd35943be14490608b2 \ - --hash=sha256:8bf101198e004dc93e84a12a7395e31aac6a9c9942848ae1d99b9d72cf9b3520 +markdown==3.5.2 \ + --hash=sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd \ + --hash=sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8 # via tensorboard -markupsafe==2.1.2 \ - --hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \ - --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \ - --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \ - --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \ - --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \ - --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \ - --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \ - --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \ - --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \ - --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \ - --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \ - --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \ - --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \ - --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \ - --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \ - --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \ - 
--hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \ - --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \ - --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \ - --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \ - --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \ - --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \ - --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \ - --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \ - --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \ - --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \ - --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \ - --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \ - --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \ - --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \ - --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \ - --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \ - --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \ - --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \ - --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \ - --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \ - --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \ - --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \ - --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \ - --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \ - --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \ - --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \ - --hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \ - --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \ - --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \ - --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \ - --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \ - --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \ - --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \ - --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + 
--hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + 
--hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via # mako # werkzeug -ml-dtypes==0.1.0 \ - --hash=sha256:273c306db846005b83a98c9c7ec3dc8fa20e8f11c3772c8e8c20cc12d8abfd4b \ - --hash=sha256:2de6c81b0da398d54aabdd7de599f2dfc43e30b65d9fad379a69f4cc4ae165d3 \ - --hash=sha256:36e8518c8fd2c38729f020125f39ef07b045f5c16d0846320c7252d7773285ee \ - --hash=sha256:377f2d5cfbf809b59188e0bfda4a0774e658541f575b637fee4850d99c2f9fdc \ - --hash=sha256:41b6beeaea47e2466b94068664c9a45b2a65dd023aa4e5deeb5a73303661344e \ - --hash=sha256:77970beeb3cf6ac559c4b6b393f24778a5abd34fafbaad82d5a0d17d0f148936 \ - --hash=sha256:87aa1cf83d41fed5a40fc27ee57ac4c1bf904e940f082531d3d58f1c318b5928 \ - --hash=sha256:8c5c9fe086756fbc1bf51296431d64429536093cf6e2ba592e042d7fc07c8514 \ - --hash=sha256:8de9bbf5bed587a1166699447ea14d1e8fe66d4e812811e37bf2f4d988475476 \ - --hash=sha256:99fab8262d175c49bf1655c229244f301274e8289449c350ba4d5b95ade07d9a \ - --hash=sha256:a29fbf128583673eca0f43def1dbe77e02c1e8b8a8331db2877bbb57d091ef11 \ - --hash=sha256:ad765159ac6c18d5ee7d325fcf34d3106a9d9d7a49713d998f5cfa330a1459b4 \ - --hash=sha256:b9c5578dffd85637a7dd437192de18bc1a14eb6ba7d53ef40de3f84c51c789e5 \ - --hash=sha256:c1fc0afe63ce99069f9d7e0693a61cfd0aea90241fc3821af9953d0c11f4048a \ - --hash=sha256:c9218175b06764b8ddc95cb18d11a6c4b48a4b103a31c9ea2b2c3cd0cfc369f8 \ - --hash=sha256:dee8ea629b8e3e20c6649852c1b9deacfa13384ab9337f2c9e717e401d102f23 \ - --hash=sha256:ffb7882dd46399217dc54f37affc899e0a29a4cfb63e5bf733ac0baf4a179c77 - # via jax -numpy==1.23.5 \ - --hash=sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d \ - --hash=sha256:06005a2ef6014e9956c09ba07654f9837d9e26696a0470e42beedadb78c11b07 \ - --hash=sha256:09b7847f7e83ca37c6e627682f145856de331049013853f344f37b0c9690e3df \ - --hash=sha256:0aaee12d8883552fadfc41e96b4c82ee7d794949e2a7c3b3a7201e968c7ecab9 \ - --hash=sha256:0cbe9848fad08baf71de1a39e12d1b6310f1d5b2d0ea4de051058e6e1076852d \ - --hash=sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a \ - --hash=sha256:33161613d2269025873025b33e879825ec7b1d831317e68f4f2f0f84ed14c719 \ - --hash=sha256:5039f55555e1eab31124a5768898c9e22c25a65c1e0037f4d7c495a45778c9f2 \ - --hash=sha256:522e26bbf6377e4d76403826ed689c295b0b238f46c28a7251ab94716da0b280 \ - --hash=sha256:56e454c7833e94ec9769fa0f86e6ff8e42ee38ce0ce1fa4cbb747ea7e06d56aa \ - --hash=sha256:58f545efd1108e647604a1b5aa809591ccd2540f468a880bedb97247e72db387 \ - --hash=sha256:5e05b1c973a9f858c74367553e236f287e749465f773328c8ef31abe18f691e1 \ - --hash=sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43 \ - 
--hash=sha256:8969bfd28e85c81f3f94eb4a66bc2cf1dbdc5c18efc320af34bffc54d6b1e38f \ - --hash=sha256:92c8c1e89a1f5028a4c6d9e3ccbe311b6ba53694811269b992c0b224269e2398 \ - --hash=sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63 \ - --hash=sha256:a7ac231a08bb37f852849bbb387a20a57574a97cfc7b6cabb488a4fc8be176de \ - --hash=sha256:abdde9f795cf292fb9651ed48185503a2ff29be87770c3b8e2a14b0cd7aa16f8 \ - --hash=sha256:af1da88f6bc3d2338ebbf0e22fe487821ea4d8e89053e25fa59d1d79786e7481 \ - --hash=sha256:b2a9ab7c279c91974f756c84c365a669a887efa287365a8e2c418f8b3ba73fb0 \ - --hash=sha256:bf837dc63ba5c06dc8797c398db1e223a466c7ece27a1f7b5232ba3466aafe3d \ - --hash=sha256:ca51fcfcc5f9354c45f400059e88bc09215fb71a48d3768fb80e357f3b457e1e \ - --hash=sha256:ce571367b6dfe60af04e04a1834ca2dc5f46004ac1cc756fb95319f64c095a96 \ - --hash=sha256:d208a0f8729f3fb790ed18a003f3a57895b989b40ea4dce4717e9cf4af62c6bb \ - --hash=sha256:dbee87b469018961d1ad79b1a5d50c0ae850000b639bcb1b694e9981083243b6 \ - --hash=sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d \ - --hash=sha256:f063b69b090c9d918f9df0a12116029e274daf0181df392839661c4c7ec9018a \ - --hash=sha256:f9a909a8bae284d46bbfdefbdd4a262ba19d3bc9921b1e76126b1d21c3c34135 +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +ml-dtypes==0.2.0 \ + --hash=sha256:022d5a4ee6be14569c2a9d1549e16f1ec87ca949681d0dca59995445d5fcdd5b \ + --hash=sha256:1749b60348da71fd3c2ab303fdbc1965958dc50775ead41f5669c932a341cafd \ + --hash=sha256:32107e7fa9f62db9a5281de923861325211dfff87bd23faefb27b303314635ab \ + --hash=sha256:35b984cddbe8173b545a0e3334fe56ea1a5c3eb67c507f60d0cfde1d3fa8f8c2 \ + --hash=sha256:36d28b8861a8931695e5a31176cad5ae85f6504906650dea5598fbec06c94606 \ + --hash=sha256:50845af3e9a601810751b55091dee6c2562403fa1cb4e0123675cf3a4fc2c17a \ + --hash=sha256:6488eb642acaaf08d8020f6de0a38acee7ac324c1e6e92ee0c0fea42422cb797 \ + --hash=sha256:75015818a7fccf99a5e8ed18720cb430f3e71a8838388840f4cdf225c036c983 \ + --hash=sha256:80d304c836d73f10605c58ccf7789c171cc229bfb678748adfb7cea2510dfd0e \ + --hash=sha256:832a019a1b6db5c4422032ca9940a990fa104eee420f643713241b3a518977fa \ + --hash=sha256:8faaf0897942c8253dd126662776ba45f0a5861968cf0f06d6d465f8a7bc298a \ + --hash=sha256:bc29a0524ef5e23a7fbb8d881bdecabeb3fc1d19d9db61785d077a86cb94fab2 \ + --hash=sha256:df6a76e1c8adf484feb138ed323f9f40a7b6c21788f120f7c78bec20ac37ee81 \ + --hash=sha256:e70047ec2c83eaee01afdfdabee2c5b0c133804d90d0f7db4dd903360fcc537c \ + --hash=sha256:e85ba8e24cf48d456e564688e981cf379d4c8e644db0a2f719b78de281bac2ca \ + --hash=sha256:f00c71c8c63e03aff313bc6a7aeaac9a4f1483a921a6ffefa6d4404efd1af3d0 \ + --hash=sha256:f08c391c2794f2aad358e6f4c70785a9a7b1df980ef4c232b3ccd4f6fe39f719 + # via tensorflow +more-itertools==10.2.0 \ + --hash=sha256:686b06abe565edfab151cb8fd385a05651e1fdf8f0a14191e4439283421f8684 \ + --hash=sha256:8fccb480c43d3e99a00087634c06dd02b0d50fbf088b380de5a41a015ec239e1 + # via jaraco-classes +nh3==0.2.15 \ + --hash=sha256:0d02d0ff79dfd8208ed25a39c12cbda092388fff7f1662466e27d97ad011b770 \ + --hash=sha256:3277481293b868b2715907310c7be0f1b9d10491d5adf9fce11756a97e97eddf \ + --hash=sha256:3b803a5875e7234907f7d64777dfde2b93db992376f3d6d7af7f3bc347deb305 \ + --hash=sha256:427fecbb1031db085eaac9931362adf4a796428ef0163070c484b5a768e71601 \ + --hash=sha256:5f0d77272ce6d34db6c87b4f894f037d55183d9518f948bba236fe81e2bb4e28 \ + 
--hash=sha256:60684857cfa8fdbb74daa867e5cad3f0c9789415aba660614fe16cd66cbb9ec7 \ + --hash=sha256:6f42f99f0cf6312e470b6c09e04da31f9abaadcd3eb591d7d1a88ea931dca7f3 \ + --hash=sha256:86e447a63ca0b16318deb62498db4f76fc60699ce0a1231262880b38b6cff911 \ + --hash=sha256:8d595df02413aa38586c24811237e95937ef18304e108b7e92c890a06793e3bf \ + --hash=sha256:9c0d415f6b7f2338f93035bba5c0d8c1b464e538bfbb1d598acd47d7969284f0 \ + --hash=sha256:a5167a6403d19c515217b6bcaaa9be420974a6ac30e0da9e84d4fc67a5d474c5 \ + --hash=sha256:ac19c0d68cd42ecd7ead91a3a032fdfff23d29302dbb1311e641a130dfefba97 \ + --hash=sha256:b1e97221cedaf15a54f5243f2c5894bb12ca951ae4ddfd02a9d4ea9df9e1a29d \ + --hash=sha256:bc2d086fb540d0fa52ce35afaded4ea526b8fc4d3339f783db55c95de40ef02e \ + --hash=sha256:d1e30ff2d8d58fb2a14961f7aac1bbb1c51f9bdd7da727be35c63826060b0bf3 \ + --hash=sha256:f3b53ba93bb7725acab1e030bc2ecd012a817040fd7851b332f86e2f9bb98dc6 + # via readme-renderer +numpy==1.26.3 \ + --hash=sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd \ + --hash=sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b \ + --hash=sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e \ + --hash=sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f \ + --hash=sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f \ + --hash=sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178 \ + --hash=sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3 \ + --hash=sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4 \ + --hash=sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e \ + --hash=sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0 \ + --hash=sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00 \ + --hash=sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419 \ + --hash=sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4 \ + --hash=sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6 \ + --hash=sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166 \ + --hash=sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b \ + --hash=sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3 \ + --hash=sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf \ + --hash=sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2 \ + --hash=sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2 \ + --hash=sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36 \ + --hash=sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03 \ + --hash=sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce \ + --hash=sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6 \ + --hash=sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13 \ + --hash=sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5 \ + --hash=sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e \ + --hash=sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485 \ + --hash=sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137 \ + --hash=sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374 \ + --hash=sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58 \ + 
--hash=sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b \ + --hash=sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb \ + --hash=sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b \ + --hash=sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda \ + --hash=sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511 # via # -r third_party/python_requirements.in # h5py - # jax # ml-dtypes # opt-einsum - # scipy # tensorboard - # tensorflow-cpu + # tensorflow oauthlib==3.2.2 \ --hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \ --hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918 @@ -344,102 +519,110 @@ oauthlib==3.2.2 \ opt-einsum==3.3.0 \ --hash=sha256:2455e59e3947d3c275477df7f5205b30635e266fe6dc300e3d9f9646bfcea147 \ --hash=sha256:59f6475f77bbc37dcf7cd748519c0ec60722e91e63ca114e68821c0c54a46549 - # via - # jax - # tensorflow-cpu -packaging==23.1 \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f - # via tensorflow-cpu -pillow==9.5.0 \ - --hash=sha256:07999f5834bdc404c442146942a2ecadd1cb6292f5229f4ed3b31e0a108746b1 \ - --hash=sha256:0852ddb76d85f127c135b6dd1f0bb88dbb9ee990d2cd9aa9e28526c93e794fba \ - --hash=sha256:1781a624c229cb35a2ac31cc4a77e28cafc8900733a864870c49bfeedacd106a \ - --hash=sha256:1e7723bd90ef94eda669a3c2c19d549874dd5badaeefabefd26053304abe5799 \ - --hash=sha256:229e2c79c00e85989a34b5981a2b67aa079fd08c903f0aaead522a1d68d79e51 \ - --hash=sha256:22baf0c3cf0c7f26e82d6e1adf118027afb325e703922c8dfc1d5d0156bb2eeb \ - --hash=sha256:252a03f1bdddce077eff2354c3861bf437c892fb1832f75ce813ee94347aa9b5 \ - --hash=sha256:2dfaaf10b6172697b9bceb9a3bd7b951819d1ca339a5ef294d1f1ac6d7f63270 \ - --hash=sha256:322724c0032af6692456cd6ed554bb85f8149214d97398bb80613b04e33769f6 \ - --hash=sha256:35f6e77122a0c0762268216315bf239cf52b88865bba522999dc38f1c52b9b47 \ - --hash=sha256:375f6e5ee9620a271acb6820b3d1e94ffa8e741c0601db4c0c4d3cb0a9c224bf \ - --hash=sha256:3ded42b9ad70e5f1754fb7c2e2d6465a9c842e41d178f262e08b8c85ed8a1d8e \ - --hash=sha256:432b975c009cf649420615388561c0ce7cc31ce9b2e374db659ee4f7d57a1f8b \ - --hash=sha256:482877592e927fd263028c105b36272398e3e1be3269efda09f6ba21fd83ec66 \ - --hash=sha256:489f8389261e5ed43ac8ff7b453162af39c3e8abd730af8363587ba64bb2e865 \ - --hash=sha256:54f7102ad31a3de5666827526e248c3530b3a33539dbda27c6843d19d72644ec \ - --hash=sha256:560737e70cb9c6255d6dcba3de6578a9e2ec4b573659943a5e7e4af13f298f5c \ - --hash=sha256:5671583eab84af046a397d6d0ba25343c00cd50bce03787948e0fff01d4fd9b1 \ - --hash=sha256:5ba1b81ee69573fe7124881762bb4cd2e4b6ed9dd28c9c60a632902fe8db8b38 \ - --hash=sha256:5d4ebf8e1db4441a55c509c4baa7a0587a0210f7cd25fcfe74dbbce7a4bd1906 \ - --hash=sha256:60037a8db8750e474af7ffc9faa9b5859e6c6d0a50e55c45576bf28be7419705 \ - --hash=sha256:608488bdcbdb4ba7837461442b90ea6f3079397ddc968c31265c1e056964f1ef \ - --hash=sha256:6608ff3bf781eee0cd14d0901a2b9cc3d3834516532e3bd673a0a204dc8615fc \ - --hash=sha256:662da1f3f89a302cc22faa9f14a262c2e3951f9dbc9617609a47521c69dd9f8f \ - --hash=sha256:7002d0797a3e4193c7cdee3198d7c14f92c0836d6b4a3f3046a64bd1ce8df2bf \ - --hash=sha256:763782b2e03e45e2c77d7779875f4432e25121ef002a41829d8868700d119392 \ - --hash=sha256:77165c4a5e7d5a284f10a6efaa39a0ae8ba839da344f20b111d62cc932fa4e5d \ - --hash=sha256:7c9af5a3b406a50e313467e3565fc99929717f780164fe6fbb7704edba0cebbe \ - 
--hash=sha256:7ec6f6ce99dab90b52da21cf0dc519e21095e332ff3b399a357c187b1a5eee32 \ - --hash=sha256:833b86a98e0ede388fa29363159c9b1a294b0905b5128baf01db683672f230f5 \ - --hash=sha256:84a6f19ce086c1bf894644b43cd129702f781ba5751ca8572f08aa40ef0ab7b7 \ - --hash=sha256:8507eda3cd0608a1f94f58c64817e83ec12fa93a9436938b191b80d9e4c0fc44 \ - --hash=sha256:85ec677246533e27770b0de5cf0f9d6e4ec0c212a1f89dfc941b64b21226009d \ - --hash=sha256:8aca1152d93dcc27dc55395604dcfc55bed5f25ef4c98716a928bacba90d33a3 \ - --hash=sha256:8d935f924bbab8f0a9a28404422da8af4904e36d5c33fc6f677e4c4485515625 \ - --hash=sha256:8f36397bf3f7d7c6a3abdea815ecf6fd14e7fcd4418ab24bae01008d8d8ca15e \ - --hash=sha256:91ec6fe47b5eb5a9968c79ad9ed78c342b1f97a091677ba0e012701add857829 \ - --hash=sha256:965e4a05ef364e7b973dd17fc765f42233415974d773e82144c9bbaaaea5d089 \ - --hash=sha256:96e88745a55b88a7c64fa49bceff363a1a27d9a64e04019c2281049444a571e3 \ - --hash=sha256:99eb6cafb6ba90e436684e08dad8be1637efb71c4f2180ee6b8f940739406e78 \ - --hash=sha256:9adf58f5d64e474bed00d69bcd86ec4bcaa4123bfa70a65ce72e424bfb88ed96 \ - --hash=sha256:9b1af95c3a967bf1da94f253e56b6286b50af23392a886720f563c547e48e964 \ - --hash=sha256:a0aa9417994d91301056f3d0038af1199eb7adc86e646a36b9e050b06f526597 \ - --hash=sha256:a0f9bb6c80e6efcde93ffc51256d5cfb2155ff8f78292f074f60f9e70b942d99 \ - --hash=sha256:a127ae76092974abfbfa38ca2d12cbeddcdeac0fb71f9627cc1135bedaf9d51a \ - --hash=sha256:aaf305d6d40bd9632198c766fb64f0c1a83ca5b667f16c1e79e1661ab5060140 \ - --hash=sha256:aca1c196f407ec7cf04dcbb15d19a43c507a81f7ffc45b690899d6a76ac9fda7 \ - --hash=sha256:ace6ca218308447b9077c14ea4ef381ba0b67ee78d64046b3f19cf4e1139ad16 \ - --hash=sha256:b416f03d37d27290cb93597335a2f85ed446731200705b22bb927405320de903 \ - --hash=sha256:bf548479d336726d7a0eceb6e767e179fbde37833ae42794602631a070d630f1 \ - --hash=sha256:c1170d6b195555644f0616fd6ed929dfcf6333b8675fcca044ae5ab110ded296 \ - --hash=sha256:c380b27d041209b849ed246b111b7c166ba36d7933ec6e41175fd15ab9eb1572 \ - --hash=sha256:c446d2245ba29820d405315083d55299a796695d747efceb5717a8b450324115 \ - --hash=sha256:c830a02caeb789633863b466b9de10c015bded434deb3ec87c768e53752ad22a \ - --hash=sha256:cb841572862f629b99725ebaec3287fc6d275be9b14443ea746c1dd325053cbd \ - --hash=sha256:cfa4561277f677ecf651e2b22dc43e8f5368b74a25a8f7d1d4a3a243e573f2d4 \ - --hash=sha256:cfcc2c53c06f2ccb8976fb5c71d448bdd0a07d26d8e07e321c103416444c7ad1 \ - --hash=sha256:d3c6b54e304c60c4181da1c9dadf83e4a54fd266a99c70ba646a9baa626819eb \ - --hash=sha256:d3d403753c9d5adc04d4694d35cf0391f0f3d57c8e0030aac09d7678fa8030aa \ - --hash=sha256:d9c206c29b46cfd343ea7cdfe1232443072bbb270d6a46f59c259460db76779a \ - --hash=sha256:e49eb4e95ff6fd7c0c402508894b1ef0e01b99a44320ba7d8ecbabefddcc5569 \ - --hash=sha256:f8286396b351785801a976b1e85ea88e937712ee2c3ac653710a4a57a8da5d9c \ - --hash=sha256:f8fc330c3370a81bbf3f88557097d1ea26cd8b019d6433aa59f71195f5ddebbf \ - --hash=sha256:fbd359831c1657d69bb81f0db962905ee05e5e9451913b18b831febfe0519082 \ - --hash=sha256:fe7e1c262d3392afcf5071df9afa574544f28eac825284596ac6db56e6d11062 \ - --hash=sha256:fed1e1cf6a42577953abbe8e6cf2fe2f566daebde7c34724ec8803c4c0cda579 + # via tensorflow +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 + # via tensorflow +pillow==10.2.0 \ + --hash=sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8 \ + --hash=sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39 \ 
+ --hash=sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac \ + --hash=sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869 \ + --hash=sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e \ + --hash=sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04 \ + --hash=sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9 \ + --hash=sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e \ + --hash=sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe \ + --hash=sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef \ + --hash=sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56 \ + --hash=sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa \ + --hash=sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f \ + --hash=sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f \ + --hash=sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e \ + --hash=sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a \ + --hash=sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2 \ + --hash=sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2 \ + --hash=sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5 \ + --hash=sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a \ + --hash=sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2 \ + --hash=sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213 \ + --hash=sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563 \ + --hash=sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591 \ + --hash=sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c \ + --hash=sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2 \ + --hash=sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb \ + --hash=sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757 \ + --hash=sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0 \ + --hash=sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452 \ + --hash=sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad \ + --hash=sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01 \ + --hash=sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f \ + --hash=sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5 \ + --hash=sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61 \ + --hash=sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e \ + --hash=sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b \ + --hash=sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068 \ + --hash=sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9 \ + --hash=sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588 \ + --hash=sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483 \ + --hash=sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f \ + --hash=sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67 \ + --hash=sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7 \ + 
--hash=sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311 \ + --hash=sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6 \ + --hash=sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72 \ + --hash=sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6 \ + --hash=sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129 \ + --hash=sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13 \ + --hash=sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67 \ + --hash=sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c \ + --hash=sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516 \ + --hash=sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e \ + --hash=sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e \ + --hash=sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364 \ + --hash=sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023 \ + --hash=sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1 \ + --hash=sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04 \ + --hash=sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d \ + --hash=sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a \ + --hash=sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7 \ + --hash=sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb \ + --hash=sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4 \ + --hash=sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e \ + --hash=sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1 \ + --hash=sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48 \ + --hash=sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868 # via -r third_party/python_requirements.in -protobuf==4.23.0 \ - --hash=sha256:03eee35b60317112a72d19c54d0bff7bc58ff12fea4cd7b018232bd99758ffdf \ - --hash=sha256:2b94bd6df92d71bd1234a2ffe7ce96ddf6d10cf637a18d6b55ad0a89fbb7fc21 \ - --hash=sha256:36f5370a930cb77c8ad2f4135590c672d0d2c72d4a707c7d0058dce4b4b4a598 \ - --hash=sha256:5f1eba1da2a2f3f7df469fccddef3cc060b8a16cfe3cc65961ad36b4dbcf59c5 \ - --hash=sha256:6c16657d6717a0c62d5d740cb354fbad1b0d8cb811669e06fc1caa0ff4799ddd \ - --hash=sha256:6fe180b56e1169d72ecc4acbd39186339aed20af5384531b8e8979b02bbee159 \ - --hash=sha256:7cb5b9a05ce52c6a782bb97de52679bd3438ff2b7460eff5da348db65650f227 \ - --hash=sha256:9744e934ea5855d12191040ea198eaf704ac78665d365a89d9572e3b627c2688 \ - --hash=sha256:9f5a0fbfcdcc364f3986f9ed9f8bb1328fb84114fd790423ff3d7fdb0f85c2d1 \ - --hash=sha256:baca40d067dddd62141a129f244703160d278648b569e90bb0e3753067644711 \ - --hash=sha256:d5a35ff54e3f62e8fc7be02bb0d2fbc212bba1a5a9cc2748090690093996f07b \ - --hash=sha256:e62fb869762b4ba18666370e2f8a18f17f8ab92dd4467295c6d38be6f8fef60b \ - --hash=sha256:ebde3a023b8e11bfa6c890ef34cd6a8b47d586f26135e86c21344fe433daf2e2 +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 + # via twine +platformdirs==4.1.0 \ + --hash=sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380 \ + --hash=sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420 + # via yapf +protobuf==4.23.4 \ + 
--hash=sha256:0a5759f5696895de8cc913f084e27fd4125e8fb0914bb729a17816a33819f474 \ + --hash=sha256:351cc90f7d10839c480aeb9b870a211e322bf05f6ab3f55fcb2f51331f80a7d2 \ + --hash=sha256:5fea3c64d41ea5ecf5697b83e41d09b9589e6f20b677ab3c48e5f242d9b7897b \ + --hash=sha256:6dd9b9940e3f17077e820b75851126615ee38643c2c5332aa7a359988820c720 \ + --hash=sha256:7b19b6266d92ca6a2a87effa88ecc4af73ebc5cfde194dc737cf8ef23a9a3b12 \ + --hash=sha256:8547bf44fe8cec3c69e3042f5c4fb3e36eb2a7a013bb0a44c018fc1e427aafbd \ + --hash=sha256:9053df6df8e5a76c84339ee4a9f5a2661ceee4a0dab019e8663c50ba324208b0 \ + --hash=sha256:c3e0939433c40796ca4cfc0fac08af50b00eb66a40bbbc5dee711998fb0bbc1e \ + --hash=sha256:ccd9430c0719dce806b93f89c91de7977304729e55377f872a92465d548329a9 \ + --hash=sha256:e1c915778d8ced71e26fcf43c0866d7499891bca14c4368448a82edc61fdbc70 \ + --hash=sha256:e9d0be5bf34b275b9f87ba7407796556abeeba635455d036c7351f7c183ef8ff \ + --hash=sha256:effeac51ab79332d44fba74660d40ae79985901ac21bca408f8dc335a81aa597 \ + --hash=sha256:fee88269a090ada09ca63551bf2f573eb2424035bcf2cb1b121895b01a46594a # via # -r third_party/python_requirements.in # tensorboard - # tensorflow-cpu -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # tensorflow +pyasn1==0.5.1 \ + --hash=sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58 \ + --hash=sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c # via # pyasn1-modules # rsa @@ -447,120 +630,124 @@ pyasn1-modules==0.3.0 \ --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth -requests==2.30.0 \ - --hash=sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294 \ - --hash=sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4 +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.17.2 \ + --hash=sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c \ + --hash=sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367 + # via + # readme-renderer + # rich +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 + # via twine +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # requests-oauthlib + # requests-toolbelt # tensorboard + # twine requests-oauthlib==1.3.1 \ --hash=sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5 \ --hash=sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a # via google-auth-oauthlib +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==13.7.0 \ + 
--hash=sha256:5cb5123b5cf9ee70584244246816e9114227e0b98ad9176eede6ad54bf5403fa \ + --hash=sha256:6da14c108c4866ee9520bbffa71f6fe3962e193b7da68720583850cd4548e235 + # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 # via google-auth -scipy==1.10.1 \ - --hash=sha256:049a8bbf0ad95277ffba9b3b7d23e5369cc39e66406d60422c8cfef40ccc8415 \ - --hash=sha256:07c3457ce0b3ad5124f98a86533106b643dd811dd61b548e78cf4c8786652f6f \ - --hash=sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd \ - --hash=sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f \ - --hash=sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d \ - --hash=sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601 \ - --hash=sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5 \ - --hash=sha256:39becb03541f9e58243f4197584286e339029e8908c46f7221abeea4b749fa88 \ - --hash=sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f \ - --hash=sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e \ - --hash=sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2 \ - --hash=sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353 \ - --hash=sha256:5678f88c68ea866ed9ebe3a989091088553ba12c6090244fdae3e467b1139c35 \ - --hash=sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6 \ - --hash=sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea \ - --hash=sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35 \ - --hash=sha256:bce5869c8d68cf383ce240e44c1d9ae7c06078a9396df68ce88a1230f93a30c1 \ - --hash=sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9 \ - --hash=sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5 \ - --hash=sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019 \ - --hash=sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1 - # via jax +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +setuptools==69.0.3 \ + --hash=sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05 \ + --hash=sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78 + # via + # tensorboard + # tensorflow six==1.16.0 \ --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via # astunparse - # google-auth # google-pasta - # tensorflow-cpu -tensorboard==2.12.3 \ - --hash=sha256:b4a69366784bc347e02fbe7d847e01896a649ca52f8948a11005e205dcf724fb - # via tensorflow-cpu -tensorboard-data-server==0.7.0 \ - --hash=sha256:64aa1be7c23e80b1a42c13b686eb0875bb70f5e755f4d2b8de5c1d880cf2267f \ - --hash=sha256:753d4214799b31da7b6d93837959abebbc6afa86e69eacf1e9a317a48daa31eb \ - --hash=sha256:eb7fa518737944dbf4f0cf83c2e40a7ac346bf91be2e6a0215de98be74e85454 + # tensorboard + # tensorflow +tensorboard==2.15.1 \ + --hash=sha256:c46c1d1cf13a458c429868a78b2531d8ff5f682058d69ec0840b0bc7a38f1c0f + # via tensorflow +tensorboard-data-server==0.7.2 \ + --hash=sha256:7e0610d205889588983836ec05dc098e80f97b7e7bbff7e994ebb78f578d0ddb \ + 
--hash=sha256:9fe5d24221b29625dbc7328b0436ca7fc1c23de4acf4d272f1180856e32f9f60 \ + --hash=sha256:ef687163c24185ae9754ed5650eb5bc4d84ff257aabdc33f0cc6f74d8ba54530 # via tensorboard -tensorflow-cpu==2.12.0 \ - --hash=sha256:361b19b5a64bf611beccd22de1fc04f614a8c157ac99893d9702ed24932018d6 \ - --hash=sha256:374b15d1cec1a62006e388062e89dd4899a121272d41ea5d3fcbcc96e2d875c9 \ - --hash=sha256:55685b9a19c8ecb2587fb53914c045b188ed0289a2c6495e4e59d5fb082da9cc \ - --hash=sha256:5beeb99d2a1cc1383ca981513c35a4a18157e52d91a89e69c94cb7b7e411f0d8 \ - --hash=sha256:734ce850e2b3493041bdc071b594f0f78d35e4bfce5a7e0a98d449b20420e01d \ - --hash=sha256:8fdb636736f95094368bc7d26bb3b8ed93ba820cc5d95f847e00bf4a7645463d \ - --hash=sha256:a406f751180fe5282776e8bc84f39a2dc2b796c3ae35fbe20e4edc86ec580dd3 \ - --hash=sha256:b6ba926f9a56cdf0657defc6d046735e31ded383054f67c1a16ef2b0511f68d7 \ - --hash=sha256:b9c8f0d0658da8a5b25a4fe5ca315f86c449eb11e30d79cea49c7658be75a825 \ - --hash=sha256:d5ad746bf8c87d9a9fcea4698828ba1d101a7f7bfd323a2571130374a192578b \ - --hash=sha256:e8c7047552a2d759f3e65ac13e36dd24bb5fec2e6576e848287811ec44b3d62f \ - --hash=sha256:ef4f142b6fe75fcc71ada6331ed2a15ed61b7034187049d0ef1dac482d52db78 +tensorflow==2.15.0.post1 \ + --hash=sha256:8716acd8eb2950db126d74a419ac1ed870558ba34efc7d8e506165ad5896b261 \ + --hash=sha256:c2dc9b6a519a9caad80430220c17f604e9ae76f3b5007c07cc8a5321e9f9cad0 \ + --hash=sha256:d4b3926a3759e08a1e818f01104c25e6952da6fb1d7a77c75d9f1d8407f72593 # via -r third_party/python_requirements.in -tensorflow-estimator==2.12.0 \ - --hash=sha256:59b191bead4883822de3d63ac02ace11a83bfe6c10d64d0c4dfde75a50e60ca1 - # via tensorflow-cpu -tensorflow-io-gcs-filesystem==0.32.0 \ - --hash=sha256:045d51bba586390d0545fcd8a18727d62b175eb142f6f4c6d719d39de40774cd \ - --hash=sha256:05e65d3cb6c93a7929b384d86c6369c63cbbab8a770440a3d95e094878403f9f \ - --hash=sha256:122be149e5f6a030f5c2901be0cc3cb07619232f7b03889e2cdf3da1c0d4f92f \ - --hash=sha256:1ce80e1555d6ee88dda67feddf366cc8b30252b5837a7a17303df7b06a71fc2e \ - --hash=sha256:21de7dcc06eb1e7de3c022b0072d90ba35ef886578149663437aa7a6fb5bf6b3 \ - --hash=sha256:28202492d904a6e280cf27560791e87ac1c7566000db82065d63a70c27008af2 \ - --hash=sha256:336d9b3fe6b55aea149c4f6aa1fd6ffaf27d4e5c37e55a182340b47caba38846 \ - --hash=sha256:5635df0bbe40f971dc1b946e3372744b0bdfda45c38ffcd28ef53a32bb8da4da \ - --hash=sha256:74a7e25e83d4117a7ebb09a3f247553a5497393ab48c3ee0cf0d17b405026817 \ - --hash=sha256:79fdd02103b8ae9f8b89af41f744c013fa1caaea709de19833917795e3063857 \ - --hash=sha256:7f15fd22e592661b10de317be2f42a0f84be7bfc5e6a565fcfcb04b60d625b78 \ - --hash=sha256:8214cdf85bea694160f9035ff395221c1e25e119784ccb4c104919b1f5dec84e \ - --hash=sha256:842f5f09cd756bdb3b4d0b5571b3a6f72fd534d42da938b9acf0ef462995eada \ - --hash=sha256:db682e9a510c27dd35710ba5a2c62c371e25b727741b2fe3a920355fa501e947 - # via tensorflow-cpu -termcolor==2.3.0 \ - --hash=sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475 \ - --hash=sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a - # via tensorflow-cpu +tensorflow-estimator==2.15.0 \ + --hash=sha256:aedf21eec7fb2dc91150fc91a1ce12bc44dbb72278a08b58e79ff87c9e28f153 + # via tensorflow +tensorflow-io-gcs-filesystem==0.35.0 \ + --hash=sha256:0fce1466bdb91096b6d22e7df17358ba228bcb92db5cff83f2f9f1c68eb26788 \ + --hash=sha256:1856fe321fdb75f3386d92109c60db6ef097f610b450f9cc69d76444fb9980d1 \ + --hash=sha256:35b6eca7225c815d962254327195f191d88c3c9c2278a5ab23e0ac834acbadbb \ + 
--hash=sha256:5521721b38105496d4b43a4ffb0af5b04cc4873d464f26fbceddf8d63815ce98 \ + --hash=sha256:6e997389bfe008210cbd97c0c738d64282a2f03ad4d0536013bb0a9efde0c283 \ + --hash=sha256:ac8f1de60fdf9c734aea967b98555e366ac8743f77bca15c49eff023f587076b \ + --hash=sha256:b8fb3402fb1457482c386ea19371bc76383412ae9ea4396edb1e8adb4ba76f21 \ + --hash=sha256:c4f786eebd98d401565374722f2e67f3878675b0d87489cbaa13c70ee6ac370a \ + --hash=sha256:dd8f30908bf8b7b2a017d6b145720d105aff7f998422671b71729708ec7b2fe4 \ + --hash=sha256:eb6bf8f5b40207ecb17e7fdc3b4fc824a8361267c14e9528c1688e16de135cb7 + # via tensorflow +termcolor==2.4.0 \ + --hash=sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63 \ + --hash=sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a + # via tensorflow tomli==2.0.1 \ --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f # via yapf -typing-extensions==4.5.0 \ - --hash=sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb \ - --hash=sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4 - # via tensorflow-cpu -urllib3==1.26.15 \ - --hash=sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305 \ - --hash=sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 + # via -r third_party/python_requirements.in +typing-extensions==4.9.0 \ + --hash=sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783 \ + --hash=sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd + # via tensorflow +urllib3==2.1.0 \ + --hash=sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3 \ + --hash=sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54 # via - # google-auth # requests -werkzeug==2.3.4 \ - --hash=sha256:1d5a58e0377d1fe39d061a5de4469e414e78ccb1e1e59c0f5ad6fa1c36c52b76 \ - --hash=sha256:48e5e61472fee0ddee27ebad085614ebedb7af41e88f687aaf881afb723a162f + # twine +werkzeug==3.0.1 \ + --hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \ + --hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10 # via tensorboard -wheel==0.40.0 \ - --hash=sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873 \ - --hash=sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247 - # via - # astunparse - # tensorboard +wheel==0.42.0 \ + --hash=sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d \ + --hash=sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8 + # via astunparse wrapt==1.14.1 \ --hash=sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3 \ --hash=sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b \ @@ -568,23 +755,30 @@ wrapt==1.14.1 \ --hash=sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2 \ --hash=sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656 \ --hash=sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3 \ + --hash=sha256:2020f391008ef874c6d9e208b24f28e31bcb85ccff4f335f15a3251d222b92d9 \ --hash=sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff \ + --hash=sha256:240b1686f38ae665d1b15475966fe0472f78e71b1b4903c143a842659c8e4cb9 
\ --hash=sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310 \ + --hash=sha256:26046cd03936ae745a502abf44dac702a5e6880b2b01c29aea8ddf3353b68224 \ --hash=sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a \ --hash=sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57 \ --hash=sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069 \ + --hash=sha256:2feecf86e1f7a86517cab34ae6c2f081fd2d0dac860cb0c0ded96d799d20b335 \ --hash=sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383 \ --hash=sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe \ + --hash=sha256:358fe87cc899c6bb0ddc185bf3dbfa4ba646f05b1b0b9b5a27c2cb92c2cea204 \ --hash=sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87 \ --hash=sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d \ --hash=sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b \ --hash=sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907 \ + --hash=sha256:49ef582b7a1152ae2766557f0550a9fcbf7bbd76f43fbdc94dd3bf07cc7168be \ --hash=sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f \ --hash=sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0 \ --hash=sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28 \ --hash=sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1 \ --hash=sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853 \ --hash=sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc \ + --hash=sha256:6447e9f3ba72f8e2b985a1da758767698efa72723d5b59accefd716e9e8272bf \ --hash=sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3 \ --hash=sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3 \ --hash=sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164 \ @@ -607,8 +801,10 @@ wrapt==1.14.1 \ --hash=sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4 \ --hash=sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d \ --hash=sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d \ + --hash=sha256:a9008dad07d71f68487c91e96579c8567c98ca4c3881b9b113bc7b33e9fd78b8 \ --hash=sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8 \ --hash=sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5 \ + --hash=sha256:acae32e13a4153809db37405f5eba5bac5fbe2e2ba61ab227926a22901051c0a \ --hash=sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471 \ --hash=sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00 \ --hash=sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68 \ @@ -623,19 +819,16 @@ wrapt==1.14.1 \ --hash=sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb \ --hash=sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b \ --hash=sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f \ + --hash=sha256:ecee4132c6cd2ce5308e21672015ddfed1ff975ad0ac8d27168ea82e71413f55 \ --hash=sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462 \ --hash=sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015 \ --hash=sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af - # via tensorflow-cpu -yapf==0.33.0 \ - --hash=sha256:4c2b59bd5ffe46f3a7da48df87596877189148226ce267c16e8b44240e51578d \ - 
--hash=sha256:da62bdfea3df3673553351e6246abed26d9fe6780e548a5af9e70f6d2b4f5b9a
+    # via tensorflow
+yapf==0.40.2 \
+    --hash=sha256:4dab8a5ed7134e26d57c1647c7483afb3f136878b579062b786c9ba16b94637b \
+    --hash=sha256:adc8b5dd02c0143108878c499284205adb258aad6db6634e5b869e7ee2bd548b
     # via -r third_party/python_requirements.in
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==67.7.2 \
-    --hash=sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b \
-    --hash=sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990
-    # via
-    #   tensorboard
-    #   tensorflow-cpu
+zipp==3.17.0 \
+    --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \
+    --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0
+    # via importlib-metadata
diff --git a/third_party/ruy/BUILD b/third_party/ruy/BUILD
index 4c36181fef3..518fea8f08e 100644
--- a/third_party/ruy/BUILD
+++ b/third_party/ruy/BUILD
@@ -4,5 +4,3 @@ package(
     default_visibility = ["//visibility:public"],
     licenses = ["notice"],
 )
-
-exports_files(["LICENSE"])
diff --git a/third_party/xtensa/nnlib_hifi4/BUILD b/third_party/xtensa/nnlib_hifi4/BUILD
new file mode 100644
index 00000000000..aadcd9f045f
--- /dev/null
+++ b/third_party/xtensa/nnlib_hifi4/BUILD
@@ -0,0 +1,7 @@
+alias(
+    name = "nnlib_hifi4_lib",
+    actual = "@nnlib_hifi4//:lib",
+    visibility = [
+        "//visibility:public",
+    ],
+)
diff --git a/third_party/xtensa/nnlib_hifi4/nnlib_hifi4.BUILD b/third_party/xtensa/nnlib_hifi4/nnlib_hifi4.BUILD
new file mode 100644
index 00000000000..44f477701b4
--- /dev/null
+++ b/third_party/xtensa/nnlib_hifi4/nnlib_hifi4.BUILD
@@ -0,0 +1,43 @@
+constraint_setting(
+    name = "compatible_constraint",
+)
+
+# Set this constraint_value on your platform to indicate compatibility with
+# this library.
+constraint_value(
+    name = "compatible",
+    constraint_setting = ":compatible_constraint",
+    visibility = [
+        "//visibility:public",
+    ],
+)
+
+cc_library(
+    name = "lib",
+    srcs = glob(["xa_nnlib/algo/**/*.c"]),
+    hdrs = glob([
+        "xa_nnlib/algo/**/*.h",
+        "xa_nnlib/include/**/*.h",
+    ]),
+    copts = ["-Wno-unused-parameter"],
+    defines = [
+        "NNLIB_V2=1",
+        "MODEL_INT16=1",
+        "EIGEN_NO_MALLOC=1",
+        "hifi4=1",
+    ],
+    includes = [
+        "xa_nnlib",
+        "xa_nnlib/algo/common/include",
+        "xa_nnlib/algo/kernels",
+        "xa_nnlib/algo/ndsp/hifi4/include",
+        "xa_nnlib/include",
+        "xa_nnlib/include/nnlib",
+    ],
+    target_compatible_with = [
+        ":compatible",
+    ],
+    visibility = [
+        "//visibility:public",
+    ],
+)
diff --git a/third_party_static/flatbuffers/LICENSE b/third_party_static/flatbuffers/LICENSE
new file mode 100644
index 00000000000..d6456956733
--- /dev/null
+++ b/third_party_static/flatbuffers/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity.
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/third_party_static/flatbuffers/include/flatbuffers/allocator.h b/third_party_static/flatbuffers/include/flatbuffers/allocator.h
new file mode 100644
index 00000000000..30427190b6c
--- /dev/null
+++ b/third_party_static/flatbuffers/include/flatbuffers/allocator.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_ALLOCATOR_H_
+#define FLATBUFFERS_ALLOCATOR_H_
+
+#include "flatbuffers/base.h"
+
+namespace flatbuffers {
+
+// Allocator interface. This is flatbuffers-specific and meant only for
+// `vector_downward` usage.
+class Allocator {
+ public:
+  virtual ~Allocator() {}
+
+  // Allocate `size` bytes of memory.
+  virtual uint8_t *allocate(size_t size) = 0;
+
+  // Deallocate `size` bytes of memory at `p` allocated by this allocator.
+  virtual void deallocate(uint8_t *p, size_t size) = 0;
+
+  // Reallocate `new_size` bytes of memory, replacing the old region of size
+  // `old_size` at `p`. In contrast to a normal realloc, this grows downwards,
+  // and is intended specifically for `vector_downward` use.
+  // `in_use_back` and `in_use_front` indicate how much of `old_size` is
+  // actually in use at each end, and needs to be copied.
+  virtual uint8_t *reallocate_downward(uint8_t *old_p, size_t old_size,
+                                       size_t new_size, size_t in_use_back,
+                                       size_t in_use_front) {
+    FLATBUFFERS_ASSERT(new_size > old_size);  // vector_downward only grows
+    uint8_t *new_p = allocate(new_size);
+    memcpy_downward(old_p, old_size, new_p, new_size, in_use_back,
+                    in_use_front);
+    deallocate(old_p, old_size);
+    return new_p;
+  }
+
+ protected:
+  // Called by `reallocate_downward` to copy memory from `old_p` of `old_size`
+  // to `new_p` of `new_size`. Only memory of size `in_use_front` and
+  // `in_use_back` will be copied from the front and back of the old memory
+  // allocation.
+  void memcpy_downward(uint8_t *old_p, size_t old_size, uint8_t *new_p,
+                       size_t new_size, size_t in_use_back,
+                       size_t in_use_front) {
+    memcpy(new_p + new_size - in_use_back, old_p + old_size - in_use_back,
+           in_use_back);
+    memcpy(new_p, old_p, in_use_front);
+  }
+};
+
+}  // namespace flatbuffers
+
+#endif  // FLATBUFFERS_ALLOCATOR_H_
diff --git a/third_party_static/flatbuffers/include/flatbuffers/array.h b/third_party_static/flatbuffers/include/flatbuffers/array.h
new file mode 100644
index 00000000000..f4bfbf054c4
--- /dev/null
+++ b/third_party_static/flatbuffers/include/flatbuffers/array.h
@@ -0,0 +1,256 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_ARRAY_H_
+#define FLATBUFFERS_ARRAY_H_
+
+#include <cstdint>
+#include <memory>
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/stl_emulation.h"
+#include "flatbuffers/vector.h"
+
+namespace flatbuffers {
+
+// This is used as a helper type for accessing arrays.
+template<typename T, uint16_t length> class Array {
+  // Array can carry only POD data types (scalars or structs).
+  typedef typename flatbuffers::bool_constant<flatbuffers::is_scalar<T>::value>
+      scalar_tag;
+  typedef
+      typename flatbuffers::conditional<scalar_tag::value, T, const T *>::type
+          IndirectHelperType;
+
+ public:
+  typedef uint16_t size_type;
+  typedef typename IndirectHelper<IndirectHelperType>::return_type return_type;
+  typedef VectorConstIterator<T, return_type, uoffset_t> const_iterator;
+  typedef VectorReverseIterator<const_iterator> const_reverse_iterator;
+
+  // If T is a LE-scalar or a struct (!scalar_tag::value).
+  static FLATBUFFERS_CONSTEXPR bool is_span_observable =
+      (scalar_tag::value && (FLATBUFFERS_LITTLEENDIAN || sizeof(T) == 1)) ||
+      !scalar_tag::value;
+
+  FLATBUFFERS_CONSTEXPR uint16_t size() const { return length; }
+
+  return_type Get(uoffset_t i) const {
+    FLATBUFFERS_ASSERT(i < size());
+    return IndirectHelper<IndirectHelperType>::Read(Data(), i);
+  }
+
+  return_type operator[](uoffset_t i) const { return Get(i); }
+
+  // If this is a Vector of enums, T will be its storage type, not the enum
+  // type. This function makes it convenient to retrieve value with enum
+  // type E.
+  template<typename E> E GetEnum(uoffset_t i) const {
+    return static_cast<E>(Get(i));
+  }
+
+  const_iterator begin() const { return const_iterator(Data(), 0); }
+  const_iterator end() const { return const_iterator(Data(), size()); }
+
+  const_reverse_iterator rbegin() const {
+    return const_reverse_iterator(end());
+  }
+  const_reverse_iterator rend() const {
+    return const_reverse_iterator(begin());
+  }
+
+  const_iterator cbegin() const { return begin(); }
+  const_iterator cend() const { return end(); }
+
+  const_reverse_iterator crbegin() const { return rbegin(); }
+  const_reverse_iterator crend() const { return rend(); }
+
+  // Get a mutable pointer to elements inside this array.
+  // This method used to mutate arrays of structs followed by a @p Mutate
+  // operation. For primitive types use @p Mutate directly.
+  // @warning Assignments and reads to/from the dereferenced pointer are not
+  // automatically converted to the correct endianness.
+  typename flatbuffers::conditional<scalar_tag::value, void, T *>::type
+  GetMutablePointer(uoffset_t i) const {
+    FLATBUFFERS_ASSERT(i < size());
+    return const_cast<T *>(&data()[i]);
+  }
+
+  // Change elements if you have a non-const pointer to this object.
+  void Mutate(uoffset_t i, const T &val) { MutateImpl(scalar_tag(), i, val); }
+
+  // The raw data in little endian format. Use with care.
+  const uint8_t *Data() const { return data_; }
+
+  uint8_t *Data() { return data_; }
+
+  // Similarly, but typed, much like std::vector::data
+  const T *data() const { return reinterpret_cast<const T *>(Data()); }
+  T *data() { return reinterpret_cast<T *>(Data()); }
+
+  // Copy data from a span with endian conversion.
+  // If this Array and the span overlap, the behavior is undefined.
+  void CopyFromSpan(flatbuffers::span<const T, length> src) {
+    const auto p1 = reinterpret_cast<const uint8_t *>(src.data());
+    const auto p2 = Data();
+    FLATBUFFERS_ASSERT(!(p1 >= p2 && p1 < (p2 + length)) &&
+                       !(p2 >= p1 && p2 < (p1 + length)));
+    (void)p1;
+    (void)p2;
+    CopyFromSpanImpl(flatbuffers::bool_constant<is_span_observable>(), src);
+  }
+
+ protected:
+  void MutateImpl(flatbuffers::true_type, uoffset_t i, const T &val) {
+    FLATBUFFERS_ASSERT(i < size());
+    WriteScalar(data() + i, val);
+  }
+
+  void MutateImpl(flatbuffers::false_type, uoffset_t i, const T &val) {
+    *(GetMutablePointer(i)) = val;
+  }
+
+  void CopyFromSpanImpl(flatbuffers::true_type,
+                        flatbuffers::span<const T, length> src) {
+    // Use std::memcpy() instead of std::copy() to avoid performance
+    // degradation due to aliasing if T is char or unsigned char.
+    // The size is known at compile time, so memcpy would be inlined.
+    std::memcpy(data(), src.data(), length * sizeof(T));
+  }
+
+  // Copy data from flatbuffers::span with endian conversion.
+  void CopyFromSpanImpl(flatbuffers::false_type,
+                        flatbuffers::span<const T, length> src) {
+    for (size_type k = 0; k < length; k++) { Mutate(k, src[k]); }
+  }
+
+  // This class is only used to access pre-existing data. Don't ever
+  // try to construct these manually.
+  // 'constexpr' allows us to use 'size()' at compile time.
+  // @note Must not use 'FLATBUFFERS_CONSTEXPR' here, as const is not allowed on
+  //  a constructor.
+#if defined(__cpp_constexpr)
+  constexpr Array();
+#else
+  Array();
+#endif
+
+  uint8_t data_[length * sizeof(T)];
+
+ private:
+  // This class is a pointer. Copying will therefore create an invalid object.
+  // Private and unimplemented copy constructor.
+  Array(const Array &);
+  Array &operator=(const Array &);
+};
+
+// Specialization for Array[struct] with access using Offset<void> pointer.
+// This specialization used by idl_gen_text.cpp.
+template<typename T, uint16_t length, template <typename> class OffsetT>
+class Array<OffsetT<T>, length> {
+  static_assert(flatbuffers::is_same<T, void>::value, "unexpected type T");
+
+ public:
+  typedef const void *return_type;
+  typedef uint16_t size_type;
+
+  const uint8_t *Data() const { return data_; }
+
+  // Make idl_gen_text.cpp::PrintContainer happy.
+  return_type operator[](uoffset_t) const {
+    FLATBUFFERS_ASSERT(false);
+    return nullptr;
+  }
+
+ private:
+  // This class is only used to access pre-existing data.
+  Array();
+  Array(const Array &);
+  Array &operator=(const Array &);
+
+  uint8_t data_[1];
+};
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<U, N> make_span(Array<U, N> &arr)
+    FLATBUFFERS_NOEXCEPT {
+  static_assert(
+      Array<U, N>::is_span_observable,
+      "wrong type U, only plain struct, LE-scalar, or byte types are allowed");
+  return span<U, N>(arr.data(), N);
+}
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const U, N> make_span(
+    const Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
+  static_assert(
+      Array<U, N>::is_span_observable,
+      "wrong type U, only plain struct, LE-scalar, or byte types are allowed");
+  return span<const U, N>(arr.data(), N);
+}
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<uint8_t, sizeof(U) * N>
+make_bytes_span(Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
+  static_assert(Array<U, N>::is_span_observable,
+                "internal error, Array might hold only scalars or structs");
+  return span<uint8_t, sizeof(U) * N>(arr.Data(), sizeof(U) * N);
+}
+
+template<class U, uint16_t N>
+FLATBUFFERS_CONSTEXPR_CPP11 flatbuffers::span<const uint8_t, sizeof(U) * N>
+make_bytes_span(const Array<U, N> &arr) FLATBUFFERS_NOEXCEPT {
+  static_assert(Array<U, N>::is_span_observable,
+                "internal error, Array might hold only scalars or structs");
+  return span<const uint8_t, sizeof(U) * N>(arr.Data(), sizeof(U) * N);
+}
+
+// Cast a raw T[length] to a raw flatbuffers::Array<T, length>
+// without endian conversion. Use with care.
+// TODO: move these Cast-methods to `internal` namespace.
+template<typename T, uint16_t length>
+Array<T, length> &CastToArray(T (&arr)[length]) {
+  return *reinterpret_cast<Array<T, length> *>(arr);
+}
+
+template<typename T, uint16_t length>
+const Array<T, length> &CastToArray(const T (&arr)[length]) {
+  return *reinterpret_cast<const Array<T, length> *>(arr);
+}
+
+template<typename E, typename T, uint16_t length>
+Array<E, length> &CastToArrayOfEnum(T (&arr)[length]) {
+  static_assert(sizeof(E) == sizeof(T), "invalid enum type E");
+  return *reinterpret_cast<Array<E, length> *>(arr);
+}
+
+template<typename E, typename T, uint16_t length>
+const Array<E, length> &CastToArrayOfEnum(const T (&arr)[length]) {
+  static_assert(sizeof(E) == sizeof(T), "invalid enum type E");
+  return *reinterpret_cast<const Array<E, length> *>(arr);
+}
+
+template<typename T, uint16_t length>
+bool operator==(const Array<T, length> &lhs,
+                const Array<T, length> &rhs) noexcept {
+  return std::addressof(lhs) == std::addressof(rhs) ||
+         (lhs.size() == rhs.size() &&
+          std::memcmp(lhs.Data(), rhs.Data(), rhs.size() * sizeof(T)) == 0);
+}
+
+}  // namespace flatbuffers
+
+#endif  // FLATBUFFERS_ARRAY_H_
diff --git a/third_party_static/flatbuffers/include/flatbuffers/base.h b/third_party_static/flatbuffers/include/flatbuffers/base.h
new file mode 100644
index 00000000000..1a631641176
--- /dev/null
+++ b/third_party_static/flatbuffers/include/flatbuffers/base.h
@@ -0,0 +1,505 @@
+#ifndef FLATBUFFERS_BASE_H_
+#define FLATBUFFERS_BASE_H_
+
+// For TFLM, we always want FLATBUFFERS_LOCALE_INDEPENDENT to be defined as 0.
+// We could achieve this by adding -DFLATBUFFERS_LOCALE_INDEPENDENT=0 to the
+// TFLM Makefile. However, for (at least) the Arduino, adding additional build
+// flags during the compilation can be a bit awkward. As such, we have instead
+// made a decision to change the default to be FLATBUFFERS_LOCALE_INDEPENDENT=0
+// for TFLM to make it easier for external IDE integration.
+#ifndef FLATBUFFERS_LOCALE_INDEPENDENT
+#define FLATBUFFERS_LOCALE_INDEPENDENT 0
+#endif
+
+// clang-format off
+
+// If activate should be declared and included first.
+#if defined(FLATBUFFERS_MEMORY_LEAK_TRACKING) && \
+    defined(_MSC_VER) && defined(_DEBUG)
+  // The _CRTDBG_MAP_ALLOC inside <stdlib.h> will replace
+  // calloc/free (etc) to its debug version using #define directives.
+  #define _CRTDBG_MAP_ALLOC
+  #include <stdlib.h>
+  #include <crtdbg.h>
+  // Replace operator new by trace-enabled version.
+  #define DEBUG_NEW new(_NORMAL_BLOCK, __FILE__, __LINE__)
+  #define new DEBUG_NEW
+#endif
+
+#if !defined(FLATBUFFERS_ASSERT)
+#include <assert.h>
+#define FLATBUFFERS_ASSERT assert
+#elif defined(FLATBUFFERS_ASSERT_INCLUDE)
+// Include file with forward declaration
+#include FLATBUFFERS_ASSERT_INCLUDE
+#endif
+
+#ifndef ARDUINO
+#include <cstdio>
+#endif
+
+#include <cstdint>
+#include <cstddef>
+#include <cstring>
+
+#if defined(ARDUINO) && !defined(ARDUINOSTL_M_H) && defined(__AVR__)
+  #include <utility.h>
+#else
+  #include <utility>
+#endif
+
+#include <string>
+#include <type_traits>
+#include <vector>
+#include <set>
+#include <algorithm>
+#include <iterator>
+#include <limits>
+#include <memory>
+
+#if defined(__unix__) && !defined(FLATBUFFERS_LOCALE_INDEPENDENT)
+  #include <unistd.h>
+#endif
+
+#ifdef __ANDROID__
+  #include <android/api-level.h>
+#endif
+
+#if defined(__ICCARM__)
+#include <intrinsics.h>
+#endif
+
+// Note the __clang__ check is needed, because clang presents itself
+// as an older GNUC compiler (4.2).
+// Clang 3.3 and later implement all of the ISO C++ 2011 standard.
+// Clang 3.4 and later implement all of the ISO C++ 2014 standard.
+// http://clang.llvm.org/cxx_status.html
+
+// Note the MSVC value '__cplusplus' may be incorrect:
+// The '__cplusplus' predefined macro in the MSVC stuck at the value 199711L,
+// indicating (erroneously!) that the compiler conformed to the C++98 Standard.
+// This value should be correct starting from MSVC2017-15.7-Preview-3.
+// The '__cplusplus' will be valid only if MSVC2017-15.7-P3 and the `/Zc:__cplusplus` switch is set.
+// Workaround (for details see MSDN):
+// Use the _MSC_VER and _MSVC_LANG definition instead of the __cplusplus for compatibility.
+// The _MSVC_LANG macro reports the Standard version regardless of the '/Zc:__cplusplus' switch.
+
+#if defined(__GNUC__) && !defined(__clang__)
+  #define FLATBUFFERS_GCC (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
+#else
+  #define FLATBUFFERS_GCC 0
+#endif
+
+#if defined(__clang__)
+  #define FLATBUFFERS_CLANG (__clang_major__ * 10000 + __clang_minor__ * 100 + __clang_patchlevel__)
+#else
+  #define FLATBUFFERS_CLANG 0
+#endif
+
+/// @cond FLATBUFFERS_INTERNAL
+#if __cplusplus <= 199711L && \
+    (!defined(_MSC_VER) || _MSC_VER < 1600) && \
+    (!defined(__GNUC__) || \
+      (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ < 40400))
+  #error A C++11 compatible compiler with support for the auto typing is \
+  required for FlatBuffers.
+  #error __cplusplus _MSC_VER __GNUC__ __GNUC_MINOR__ __GNUC_PATCHLEVEL__
+#endif
+
+#if !defined(__clang__) && \
+    defined(__GNUC__) && \
+    (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ < 40600)
+  // Backwards compatibility for g++ 4.4, and 4.5 which don't have the nullptr
+  // and constexpr keywords. Note the __clang__ check is needed, because clang
+  // presents itself as an older GNUC compiler.
+  #ifndef nullptr_t
+    const class nullptr_t {
+    public:
+      template<class T> inline operator T*() const { return 0; }
+    private:
+      void operator&() const;
+    } nullptr = {};
+  #endif
+  #ifndef constexpr
+    #define constexpr const
+  #endif
+#endif
+
+// The wire format uses a little endian encoding (since that's efficient for
+// the common platforms).
+#if defined(__s390x__)
+  #define FLATBUFFERS_LITTLEENDIAN 0
+#endif // __s390x__
+#if !defined(FLATBUFFERS_LITTLEENDIAN)
+  #if defined(__GNUC__) || defined(__clang__) || defined(__ICCARM__)
+    #if (defined(__BIG_ENDIAN__) || \
+         (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__))
+      #define FLATBUFFERS_LITTLEENDIAN 0
+    #else
+      #define FLATBUFFERS_LITTLEENDIAN 1
+    #endif // __BIG_ENDIAN__
+  #elif defined(_MSC_VER)
+    #if defined(_M_PPC)
+      #define FLATBUFFERS_LITTLEENDIAN 0
+    #else
+      #define FLATBUFFERS_LITTLEENDIAN 1
+    #endif
+  #else
+    #error Unable to determine endianness, define FLATBUFFERS_LITTLEENDIAN.
+  #endif
+#endif // !defined(FLATBUFFERS_LITTLEENDIAN)
+
+#define FLATBUFFERS_VERSION_MAJOR 23
+#define FLATBUFFERS_VERSION_MINOR 5
+#define FLATBUFFERS_VERSION_REVISION 26
+#define FLATBUFFERS_STRING_EXPAND(X) #X
+#define FLATBUFFERS_STRING(X) FLATBUFFERS_STRING_EXPAND(X)
+namespace flatbuffers {
+  // Returns version as string "MAJOR.MINOR.REVISION".
+  const char* FLATBUFFERS_VERSION();
+}
+
+#if (!defined(_MSC_VER) || _MSC_VER > 1600) && \
+    (!defined(__GNUC__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 407)) || \
+    defined(__clang__)
+  #define FLATBUFFERS_FINAL_CLASS final
+  #define FLATBUFFERS_OVERRIDE override
+  #define FLATBUFFERS_EXPLICIT_CPP11 explicit
+  #define FLATBUFFERS_VTABLE_UNDERLYING_TYPE : flatbuffers::voffset_t
+#else
+  #define FLATBUFFERS_FINAL_CLASS
+  #define FLATBUFFERS_OVERRIDE
+  #define FLATBUFFERS_EXPLICIT_CPP11
+  #define FLATBUFFERS_VTABLE_UNDERLYING_TYPE
+#endif
+
+#if (!defined(_MSC_VER) || _MSC_VER >= 1900) && \
+    (!defined(__GNUC__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 406)) || \
+    (defined(__cpp_constexpr) && __cpp_constexpr >= 200704)
+  #define FLATBUFFERS_CONSTEXPR constexpr
+  #define FLATBUFFERS_CONSTEXPR_CPP11 constexpr
+  #define FLATBUFFERS_CONSTEXPR_DEFINED
+#else
+  #define FLATBUFFERS_CONSTEXPR const
+  #define FLATBUFFERS_CONSTEXPR_CPP11
+#endif
+
+#if (defined(__cplusplus) && __cplusplus >= 201402L) || \
+    (defined(__cpp_constexpr) && __cpp_constexpr >= 201304)
+  #define FLATBUFFERS_CONSTEXPR_CPP14 FLATBUFFERS_CONSTEXPR_CPP11
+#else
+  #define FLATBUFFERS_CONSTEXPR_CPP14
+#endif
+
+#if (defined(__GXX_EXPERIMENTAL_CXX0X__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 406)) || \
+    (defined(_MSC_FULL_VER) && (_MSC_FULL_VER >= 190023026)) || \
+    defined(__clang__)
+  #define FLATBUFFERS_NOEXCEPT noexcept
+#else
+  #define FLATBUFFERS_NOEXCEPT
+#endif
+
+// NOTE: the FLATBUFFERS_DELETE_FUNC macro may change the access mode to
+// private, so be sure to put it at the end or reset access mode explicitly.
+#if (!defined(_MSC_VER) || _MSC_FULL_VER >= 180020827) && \
+    (!defined(__GNUC__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 404)) || \
+    defined(__clang__)
+  #define FLATBUFFERS_DELETE_FUNC(func) func = delete
+#else
+  #define FLATBUFFERS_DELETE_FUNC(func) private: func
+#endif
+
+#if (!defined(_MSC_VER) || _MSC_VER >= 1900) && \
+    (!defined(__GNUC__) || (__GNUC__ * 100 + __GNUC_MINOR__ >= 409)) || \
+    defined(__clang__)
+  #define FLATBUFFERS_DEFAULT_DECLARATION
+#endif
+
+// Check if we can use template aliases
+// Not possible if Microsoft Compiler before 2012
+// Possible is the language feature __cpp_alias_templates is defined well
+// Or possible if the C++ std is C+11 or newer
+#if (defined(_MSC_VER) && _MSC_VER > 1700 /* MSVC2012 */) \
+    || (defined(__cpp_alias_templates) && __cpp_alias_templates >= 200704) \
+    || (defined(__cplusplus) && __cplusplus >= 201103L)
+  #define FLATBUFFERS_TEMPLATES_ALIASES
+#endif
+
+#ifndef FLATBUFFERS_HAS_STRING_VIEW
+  // Only provide flatbuffers::string_view if __has_include can be used
+  // to detect a header that provides an implementation
+  #if defined(__has_include)
+    // Check for std::string_view (in c++17)
+    #if __has_include(<string_view>) && (__cplusplus >= 201606 || (defined(_HAS_CXX17) && _HAS_CXX17))
+      #include <string_view>
+      namespace flatbuffers {
+        typedef std::string_view string_view;
+      }
+      #define FLATBUFFERS_HAS_STRING_VIEW 1
+    // Check for std::experimental::string_view (in c++14, compiler-dependent)
+    #elif __has_include(<experimental/string_view>) && (__cplusplus >= 201411)
+      #include <experimental/string_view>
+      namespace flatbuffers {
+        typedef std::experimental::string_view string_view;
+      }
+      #define FLATBUFFERS_HAS_STRING_VIEW 1
+    // Check for absl::string_view
+    #elif __has_include("absl/strings/string_view.h") && \
+          __has_include("absl/base/config.h") && \
+          (__cplusplus >= 201411)
+      #include "absl/base/config.h"
+      #if !defined(ABSL_USES_STD_STRING_VIEW)
+        #include "absl/strings/string_view.h"
+        namespace flatbuffers {
+          typedef absl::string_view string_view;
+        }
+        #define FLATBUFFERS_HAS_STRING_VIEW 1
+      #endif
+    #endif
+  #endif // __has_include
+#endif // !FLATBUFFERS_HAS_STRING_VIEW
+
+#ifndef FLATBUFFERS_GENERAL_HEAP_ALLOC_OK
+  // Allow heap allocations to be used
+  #define FLATBUFFERS_GENERAL_HEAP_ALLOC_OK 1
+#endif // !FLATBUFFERS_GENERAL_HEAP_ALLOC_OK
+
+#ifndef FLATBUFFERS_HAS_NEW_STRTOD
+  // Modern (C++11) strtod and strtof functions are available for use.
+  // 1) nan/inf strings as argument of strtod;
+  // 2) hex-float as argument of strtod/strtof.
+  #if (defined(_MSC_VER) && _MSC_VER >= 1900) || \
+      (defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 409)) || \
+      (defined(__clang__))
+    #define FLATBUFFERS_HAS_NEW_STRTOD 1
+  #endif
+#endif // !FLATBUFFERS_HAS_NEW_STRTOD
+
+#ifndef FLATBUFFERS_LOCALE_INDEPENDENT
+  // Enable locale independent functions {strtof_l, strtod_l,strtoll_l,
+  // strtoull_l}.
+  #if (defined(_MSC_VER) && _MSC_VER >= 1800) || \
+      (defined(__ANDROID_API__) && __ANDROID_API__>= 21) || \
+      (defined(_XOPEN_VERSION) && (_XOPEN_VERSION >= 700)) && \
+      (!defined(__Fuchsia__) && !defined(__ANDROID_API__))
+    #define FLATBUFFERS_LOCALE_INDEPENDENT 1
+  #else
+    #define FLATBUFFERS_LOCALE_INDEPENDENT 0
+  #endif
+#endif // !FLATBUFFERS_LOCALE_INDEPENDENT
+
+// Suppress Undefined Behavior Sanitizer (recoverable only). Usage:
+// - FLATBUFFERS_SUPPRESS_UBSAN("undefined")
+// - FLATBUFFERS_SUPPRESS_UBSAN("signed-integer-overflow")
+#if defined(__clang__) && (__clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >=7))
+  #define FLATBUFFERS_SUPPRESS_UBSAN(type) __attribute__((no_sanitize(type)))
+#elif defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 409)
+  #define FLATBUFFERS_SUPPRESS_UBSAN(type) __attribute__((no_sanitize_undefined))
+#else
+  #define FLATBUFFERS_SUPPRESS_UBSAN(type)
+#endif
+
+// This is constexpr function used for checking compile-time constants.
+// Avoid `#pragma warning(disable: 4127) // C4127: expression is constant`.
+template<typename T> FLATBUFFERS_CONSTEXPR inline bool IsConstTrue(T t) {
+  return !!t;
+}
+
+// Enable C++ attribute [[]] if std:c++17 or higher.
+#if ((__cplusplus >= 201703L) \
+    || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
+  // All attributes unknown to an implementation are ignored without causing an error.
+  #define FLATBUFFERS_ATTRIBUTE(attr) attr
+
+  #define FLATBUFFERS_FALLTHROUGH() [[fallthrough]]
+#else
+  #define FLATBUFFERS_ATTRIBUTE(attr)
+
+  #if FLATBUFFERS_CLANG >= 30800
+    #define FLATBUFFERS_FALLTHROUGH() [[clang::fallthrough]]
+  #elif FLATBUFFERS_GCC >= 70300
+    #define FLATBUFFERS_FALLTHROUGH() [[gnu::fallthrough]]
+  #else
+    #define FLATBUFFERS_FALLTHROUGH()
+  #endif
+#endif
+
+/// @endcond
+
+/// @file
+namespace flatbuffers {
+
+/// @cond FLATBUFFERS_INTERNAL
+// Our default offset / size type, 32bit on purpose on 64bit systems.
+// Also, using a consistent offset type maintains compatibility of serialized
+// offset values between 32bit and 64bit systems.
+typedef uint32_t uoffset_t;
+typedef uint64_t uoffset64_t;
+
+// Signed offsets for references that can go in both directions.
+typedef int32_t soffset_t;
+typedef int64_t soffset64_t;
+
+// Offset/index used in v-tables, can be changed to uint8_t in
+// format forks to save a bit of space if desired.
+typedef uint16_t voffset_t;
+
+typedef uintmax_t largest_scalar_t;
+
+// In 32bits, this evaluates to 2GB - 1
+#define FLATBUFFERS_MAX_BUFFER_SIZE std::numeric_limits<::flatbuffers::soffset_t>::max()
+#define FLATBUFFERS_MAX_64_BUFFER_SIZE std::numeric_limits<::flatbuffers::soffset64_t>::max()
+
+// The minimum size buffer that can be a valid flatbuffer.
+// Includes the offset to the root table (uoffset_t), the offset to the vtable
+// of the root table (soffset_t), the size of the vtable (uint16_t), and the
+// size of the referring table (uint16_t).
+#define FLATBUFFERS_MIN_BUFFER_SIZE sizeof(uoffset_t) + sizeof(soffset_t) + \
+  sizeof(uint16_t) + sizeof(uint16_t)
+
+// We support aligning the contents of buffers up to this size.
+#ifndef FLATBUFFERS_MAX_ALIGNMENT
+  #define FLATBUFFERS_MAX_ALIGNMENT 32
+#endif
+
+/// @brief The length of a FlatBuffer file header.
+static const size_t kFileIdentifierLength = 4;
+
+inline bool VerifyAlignmentRequirements(size_t align, size_t min_align = 1) {
+  return (min_align <= align) && (align <= (FLATBUFFERS_MAX_ALIGNMENT)) &&
+         (align & (align - 1)) == 0;  // must be power of 2
+}
+
+#if defined(_MSC_VER)
+  #pragma warning(disable: 4351) // C4351: new behavior: elements of array ... will be default initialized
+  #pragma warning(push)
+  #pragma warning(disable: 4127) // C4127: conditional expression is constant
+#endif
+
+template<typename T> T EndianSwap(T t) {
+  #if defined(_MSC_VER)
+    #define FLATBUFFERS_BYTESWAP16 _byteswap_ushort
+    #define FLATBUFFERS_BYTESWAP32 _byteswap_ulong
+    #define FLATBUFFERS_BYTESWAP64 _byteswap_uint64
+  #elif defined(__ICCARM__)
+    #define FLATBUFFERS_BYTESWAP16 __REV16
+    #define FLATBUFFERS_BYTESWAP32 __REV
+    #define FLATBUFFERS_BYTESWAP64(x) \
+       ((__REV(static_cast<uint32_t>(x >> 32U))) | (static_cast<uint64_t>(__REV(static_cast<uint32_t>(x)))) << 32U)
+  #else
+    #if defined(__GNUC__) && __GNUC__ * 100 + __GNUC_MINOR__ < 408 && !defined(__clang__)
+      // __builtin_bswap16 was missing prior to GCC 4.8.
+      #define FLATBUFFERS_BYTESWAP16(x) \
+        static_cast<uint16_t>(__builtin_bswap32(static_cast<uint32_t>(x) << 16))
+    #else
+      #define FLATBUFFERS_BYTESWAP16 __builtin_bswap16
+    #endif
+    #define FLATBUFFERS_BYTESWAP32 __builtin_bswap32
+    #define FLATBUFFERS_BYTESWAP64 __builtin_bswap64
+  #endif
+  if (sizeof(T) == 1) {   // Compile-time if-then's.
+    return t;
+  } else if (sizeof(T) == 2) {
+    union { T t; uint16_t i; } u = { t };
+    u.i = FLATBUFFERS_BYTESWAP16(u.i);
+    return u.t;
+  } else if (sizeof(T) == 4) {
+    union { T t; uint32_t i; } u = { t };
+    u.i = FLATBUFFERS_BYTESWAP32(u.i);
+    return u.t;
+  } else if (sizeof(T) == 8) {
+    union { T t; uint64_t i; } u = { t };
+    u.i = FLATBUFFERS_BYTESWAP64(u.i);
+    return u.t;
+  } else {
+    FLATBUFFERS_ASSERT(0);
+    return t;
+  }
+}
+
+#if defined(_MSC_VER)
+  #pragma warning(pop)
+#endif
+
+
+template<typename T> T EndianScalar(T t) {
+  #if FLATBUFFERS_LITTLEENDIAN
+    return t;
+  #else
+    return EndianSwap(t);
+  #endif
+}
+
+template<typename T>
+// UBSAN: C++ aliasing type rules, see std::bit_cast<> for details.
+FLATBUFFERS_SUPPRESS_UBSAN("alignment")
+T ReadScalar(const void *p) {
+  return EndianScalar(*reinterpret_cast<const T *>(p));
+}
+
+// See https://github.com/google/flatbuffers/issues/5950
+
+#if (FLATBUFFERS_GCC >= 100000) && (FLATBUFFERS_GCC < 110000)
+  #pragma GCC diagnostic push
+  #pragma GCC diagnostic ignored "-Wstringop-overflow"
+#endif
+
+template<typename T>
+// UBSAN: C++ aliasing type rules, see std::bit_cast<> for details.
+FLATBUFFERS_SUPPRESS_UBSAN("alignment") +void WriteScalar(void *p, T t) { + *reinterpret_cast(p) = EndianScalar(t); +} + +template struct Offset; +template FLATBUFFERS_SUPPRESS_UBSAN("alignment") void WriteScalar(void *p, Offset t) { + *reinterpret_cast(p) = EndianScalar(t.o); +} + +#if (FLATBUFFERS_GCC >= 100000) && (FLATBUFFERS_GCC < 110000) + #pragma GCC diagnostic pop +#endif + +// Computes how many bytes you'd have to pad to be able to write an +// "scalar_size" scalar if the buffer had grown to "buf_size" (downwards in +// memory). +FLATBUFFERS_SUPPRESS_UBSAN("unsigned-integer-overflow") +inline size_t PaddingBytes(size_t buf_size, size_t scalar_size) { + return ((~buf_size) + 1) & (scalar_size - 1); +} + +// Generic 'operator==' with conditional specialisations. +// T e - new value of a scalar field. +// T def - default of scalar (is known at compile-time). +template inline bool IsTheSameAs(T e, T def) { return e == def; } + +#if defined(FLATBUFFERS_NAN_DEFAULTS) && \ + defined(FLATBUFFERS_HAS_NEW_STRTOD) && (FLATBUFFERS_HAS_NEW_STRTOD > 0) +// Like `operator==(e, def)` with weak NaN if T=(float|double). +template inline bool IsFloatTheSameAs(T e, T def) { + return (e == def) || ((def != def) && (e != e)); +} +template<> inline bool IsTheSameAs(float e, float def) { + return IsFloatTheSameAs(e, def); +} +template<> inline bool IsTheSameAs(double e, double def) { + return IsFloatTheSameAs(e, def); +} +#endif + +// Check 'v' is out of closed range [low; high]. +// Workaround for GCC warning [-Werror=type-limits]: +// comparison is always true due to limited range of data type. +template +inline bool IsOutRange(const T &v, const T &low, const T &high) { + return (v < low) || (high < v); +} + +// Check 'v' is in closed range [low; high]. +template +inline bool IsInRange(const T &v, const T &low, const T &high) { + return !IsOutRange(v, low, high); +} + +} // namespace flatbuffers +#endif // FLATBUFFERS_BASE_H_ diff --git a/third_party_static/flatbuffers/include/flatbuffers/buffer.h b/third_party_static/flatbuffers/include/flatbuffers/buffer.h new file mode 100644 index 00000000000..94d4f7903be --- /dev/null +++ b/third_party_static/flatbuffers/include/flatbuffers/buffer.h @@ -0,0 +1,199 @@ +/* + * Copyright 2021 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FLATBUFFERS_BUFFER_H_ +#define FLATBUFFERS_BUFFER_H_ + +#include + +#include "flatbuffers/base.h" + +namespace flatbuffers { + +// Wrapper for uoffset_t to allow safe template specialization. +// Value is allowed to be 0 to indicate a null object (see e.g. AddOffset). +template struct Offset { + // The type of offset to use. + typedef uoffset_t offset_type; + + offset_type o; + Offset() : o(0) {} + Offset(const offset_type _o) : o(_o) {} + Offset<> Union() const { return o; } + bool IsNull() const { return !o; } +}; + +// Wrapper for uoffset64_t Offsets. +template struct Offset64 { + // The type of offset to use. 
+ typedef uoffset64_t offset_type; + + offset_type o; + Offset64() : o(0) {} + Offset64(const offset_type offset) : o(offset) {} + Offset64<> Union() const { return o; } + bool IsNull() const { return !o; } +}; + +// Litmus check for ensuring the Offsets are the expected size. +static_assert(sizeof(Offset<>) == 4, "Offset has wrong size"); +static_assert(sizeof(Offset64<>) == 8, "Offset64 has wrong size"); + +inline void EndianCheck() { + int endiantest = 1; + // If this fails, see FLATBUFFERS_LITTLEENDIAN above. + FLATBUFFERS_ASSERT(*reinterpret_cast(&endiantest) == + FLATBUFFERS_LITTLEENDIAN); + (void)endiantest; +} + +template FLATBUFFERS_CONSTEXPR size_t AlignOf() { + // clang-format off + #ifdef _MSC_VER + return __alignof(T); + #else + #ifndef alignof + return __alignof__(T); + #else + return alignof(T); + #endif + #endif + // clang-format on +} + +// Lexicographically compare two strings (possibly containing nulls), and +// return true if the first is less than the second. +static inline bool StringLessThan(const char *a_data, uoffset_t a_size, + const char *b_data, uoffset_t b_size) { + const auto cmp = memcmp(a_data, b_data, (std::min)(a_size, b_size)); + return cmp == 0 ? a_size < b_size : cmp < 0; +} + +// When we read serialized data from memory, in the case of most scalars, +// we want to just read T, but in the case of Offset, we want to actually +// perform the indirection and return a pointer. +// The template specialization below does just that. +// It is wrapped in a struct since function templates can't overload on the +// return type like this. +// The typedef is for the convenience of callers of this function +// (avoiding the need for a trailing return decltype) +template struct IndirectHelper { + typedef T return_type; + typedef T mutable_return_type; + static const size_t element_stride = sizeof(T); + + static return_type Read(const uint8_t *p, const size_t i) { + return EndianScalar((reinterpret_cast(p))[i]); + } + static mutable_return_type Read(uint8_t *p, const size_t i) { + return reinterpret_cast( + Read(const_cast(p), i)); + } +}; + +// For vector of Offsets. +template class OffsetT> +struct IndirectHelper> { + typedef const T *return_type; + typedef T *mutable_return_type; + typedef typename OffsetT::offset_type offset_type; + static const offset_type element_stride = sizeof(offset_type); + + static return_type Read(const uint8_t *const p, const offset_type i) { + // Offsets are relative to themselves, so first update the pointer to + // point to the offset location. + const uint8_t *const offset_location = p + i * element_stride; + + // Then read the scalar value of the offset (which may be 32 or 64-bits) and + // then determine the relative location from the offset location. + return reinterpret_cast( + offset_location + ReadScalar(offset_location)); + } + static mutable_return_type Read(uint8_t *const p, const offset_type i) { + // Offsets are relative to themselves, so first update the pointer to + // point to the offset location. + uint8_t *const offset_location = p + i * element_stride; + + // Then read the scalar value of the offset (which may be 32 or 64-bits) and + // then determine the relative location from the offset location. + return reinterpret_cast( + offset_location + ReadScalar(offset_location)); + } +}; + +// For vector of structs. 
+template<typename T> struct IndirectHelper<const T *> {
+  typedef const T *return_type;
+  typedef T *mutable_return_type;
+  static const size_t element_stride = sizeof(T);
+
+  static return_type Read(const uint8_t *const p, const size_t i) {
+    // Structs are stored inline, relative to the first struct pointer.
+    return reinterpret_cast<return_type>(p + i * element_stride);
+  }
+  static mutable_return_type Read(uint8_t *const p, const size_t i) {
+    // Structs are stored inline, relative to the first struct pointer.
+    return reinterpret_cast<mutable_return_type>(p + i * element_stride);
+  }
+};
+
+/// @brief Get a pointer to the file_identifier section of the buffer.
+/// @return Returns a const char pointer to the start of the file_identifier
+/// characters in the buffer. The returned char * has length
+/// 'flatbuffers::FlatBufferBuilder::kFileIdentifierLength'.
+/// This function is UNDEFINED for FlatBuffers whose schema does not include
+/// a file_identifier (likely points at padding or the start of the root
+/// vtable).
+inline const char *GetBufferIdentifier(const void *buf,
+                                       bool size_prefixed = false) {
+  return reinterpret_cast<const char *>(buf) +
+         ((size_prefixed) ? 2 * sizeof(uoffset_t) : sizeof(uoffset_t));
+}
+
+// Helper to see if the identifier in a buffer has the expected value.
+inline bool BufferHasIdentifier(const void *buf, const char *identifier,
+                                bool size_prefixed = false) {
+  return strncmp(GetBufferIdentifier(buf, size_prefixed), identifier,
+                 flatbuffers::kFileIdentifierLength) == 0;
+}
+
+/// @cond FLATBUFFERS_INTERNAL
+// Helpers to get a typed pointer to the root object contained in the buffer.
+template<typename T> T *GetMutableRoot(void *buf) {
+  if (!buf) return nullptr;
+  EndianCheck();
+  return reinterpret_cast<T *>(
+      reinterpret_cast<uint8_t *>(buf) +
+      EndianScalar(*reinterpret_cast<uoffset_t *>(buf)));
+}
+
+template<typename T, typename SizeT = uoffset_t>
+T *GetMutableSizePrefixedRoot(void *buf) {
+  return GetMutableRoot<T>(reinterpret_cast<uint8_t *>(buf) + sizeof(SizeT));
+}
+
+template<typename T> const T *GetRoot(const void *buf) {
+  return GetMutableRoot<T>(const_cast<void *>(buf));
+}
+
+template<typename T, typename SizeT = uoffset_t>
+const T *GetSizePrefixedRoot(const void *buf) {
+  return GetRoot<T>(reinterpret_cast<const uint8_t *>(buf) + sizeof(SizeT));
+}
+
+}  // namespace flatbuffers
+
+#endif  // FLATBUFFERS_BUFFER_H_
diff --git a/third_party_static/flatbuffers/include/flatbuffers/buffer_ref.h b/third_party_static/flatbuffers/include/flatbuffers/buffer_ref.h
new file mode 100644
index 00000000000..f70941fc64d
--- /dev/null
+++ b/third_party_static/flatbuffers/include/flatbuffers/buffer_ref.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_BUFFER_REF_H_
+#define FLATBUFFERS_BUFFER_REF_H_
+
+#include "flatbuffers/base.h"
+#include "flatbuffers/verifier.h"
+
+namespace flatbuffers {
+
+// Convenient way to bundle a buffer and its length, to pass it around
+// typed by its root.
+// A BufferRef does not own its buffer.
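To make the offset rules above concrete: every serialized offset, including the root uoffset_t at the very start of the buffer, is relative to its own location. A sketch of typical root access (editorial illustration; `MyTable` and the identifier "MYID" are hypothetical stand-ins for a schema-generated type and its file_identifier):

    #include "flatbuffers/buffer.h"

    const MyTable *ReadRoot(const void *buf) {
      // Optionally check the 4-byte file_identifier stored after the root offset.
      if (!flatbuffers::BufferHasIdentifier(buf, "MYID")) return nullptr;
      // GetRoot reads the uoffset_t at the start of the buffer and adds it to
      // the buffer's base address, the same arithmetic IndirectHelper performs
      // for each element of a vector of offsets.
      return flatbuffers::GetRoot<MyTable>(buf);
    }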
+struct BufferRefBase {};  // for std::is_base_of
+
+template<typename T> struct BufferRef : BufferRefBase {
+  BufferRef() : buf(nullptr), len(0), must_free(false) {}
+  BufferRef(uint8_t *_buf, uoffset_t _len)
+      : buf(_buf), len(_len), must_free(false) {}
+
+  ~BufferRef() {
+    if (must_free) free(buf);
+  }
+
+  const T *GetRoot() const { return flatbuffers::GetRoot<T>(buf); }
+
+  bool Verify() {
+    Verifier verifier(buf, len);
+    return verifier.VerifyBuffer<T>(nullptr);
+  }
+
+  uint8_t *buf;
+  uoffset_t len;
+  bool must_free;
+};
+
+}  // namespace flatbuffers
+
+#endif  // FLATBUFFERS_BUFFER_REF_H_
diff --git a/third_party_static/flatbuffers/include/flatbuffers/code_generator.h b/third_party_static/flatbuffers/include/flatbuffers/code_generator.h
new file mode 100644
index 00000000000..2971e556eec
--- /dev/null
+++ b/third_party_static/flatbuffers/include/flatbuffers/code_generator.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2023 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_CODE_GENERATOR_H_
+#define FLATBUFFERS_CODE_GENERATOR_H_
+
+#include <string>
+
+#include "flatbuffers/idl.h"
+
+namespace flatbuffers {
+
+struct CodeGenOptions {
+  std::string output_path;
+};
+
+// A code generator interface for converting a flatbuffer schema into code.
+class CodeGenerator {
+ public:
+  virtual ~CodeGenerator() = default;
+
+  enum Status {
+    OK = 0,
+    ERROR = 1,
+    FAILED_VERIFICATION = 2,
+    NOT_IMPLEMENTED = 3
+  };
+
+  std::string status_detail;
+
+  // Generate code from the provided `parser`.
+  //
+  // DEPRECATED: prefer using the other overload of GenerateCode for bfbs.
+  virtual Status GenerateCode(const Parser &parser, const std::string &path,
+                              const std::string &filename) = 0;
+
+  // Generate code from the provided `parser` and place it in the output.
+  virtual Status GenerateCodeString(const Parser &parser,
+                                    const std::string &filename,
+                                    std::string &output) {
+    (void)parser;
+    (void)filename;
+    (void)output;
+    return Status::NOT_IMPLEMENTED;
+  }
+
+  // Generate code from the provided `buffer` of given `length`. The buffer is
+  // a serialized reflection.fbs.
+  virtual Status GenerateCode(const uint8_t *buffer, int64_t length,
+                              const CodeGenOptions &options) = 0;
+
+  virtual Status GenerateMakeRule(const Parser &parser, const std::string &path,
+                                  const std::string &filename,
+                                  std::string &output) = 0;
+
+  virtual Status GenerateGrpcCode(const Parser &parser, const std::string &path,
+                                  const std::string &filename) = 0;
+
+  virtual Status GenerateRootFile(const Parser &parser,
+                                  const std::string &path) = 0;
+
+  virtual bool IsSchemaOnly() const = 0;
+
+  virtual bool SupportsBfbsGeneration() const = 0;
+
+  virtual bool SupportsRootFileGeneration() const = 0;
+
+  virtual IDLOptions::Language Language() const = 0;
+
+  virtual std::string LanguageName() const = 0;
+
+ protected:
+  CodeGenerator() = default;
+
+ private:
+  // Copying is not supported.
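The BufferRef defined above bundles a raw pointer, a length, and an ownership flag so a typed buffer can be handed around and verified before use. A usage sketch (editorial illustration, again with a hypothetical `MyTable`):

    #include "flatbuffers/buffer_ref.h"

    void Consume(uint8_t *data, flatbuffers::uoffset_t size) {
      // The two-argument constructor does not take ownership of `data`.
      flatbuffers::BufferRef<MyTable> ref(data, size);
      if (ref.Verify()) {
        const MyTable *root = ref.GetRoot();
        (void)root;  // ... use the verified root ...
      }
    }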
+ CodeGenerator(const CodeGenerator &) = delete; + CodeGenerator &operator=(const CodeGenerator &) = delete; +}; + +} // namespace flatbuffers + +#endif // FLATBUFFERS_CODE_GENERATOR_H_ diff --git a/third_party_static/flatbuffers/include/flatbuffers/code_generators.h b/third_party_static/flatbuffers/include/flatbuffers/code_generators.h new file mode 100644 index 00000000000..fc030d43943 --- /dev/null +++ b/third_party_static/flatbuffers/include/flatbuffers/code_generators.h @@ -0,0 +1,238 @@ +/* + * Copyright 2014 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FLATBUFFERS_CODE_GENERATORS_H_ +#define FLATBUFFERS_CODE_GENERATORS_H_ + +#include +#include + +#include "flatbuffers/idl.h" + +namespace flatbuffers { + +// Utility class to assist in generating code through use of text templates. +// +// Example code: +// CodeWriter code("\t"); +// code.SetValue("NAME", "Foo"); +// code += "void {{NAME}}() { printf("%s", "{{NAME}}"); }"; +// code.SetValue("NAME", "Bar"); +// code += "void {{NAME}}() { printf("%s", "{{NAME}}"); }"; +// std::cout << code.ToString() << std::endl; +// +// Output: +// void Foo() { printf("%s", "Foo"); } +// void Bar() { printf("%s", "Bar"); } +class CodeWriter { + public: + CodeWriter(std::string pad = std::string()) + : pad_(pad), cur_ident_lvl_(0), ignore_ident_(false) {} + + // Clears the current "written" code. + void Clear() { + stream_.str(""); + stream_.clear(); + } + + // Associates a key with a value. All subsequent calls to operator+=, where + // the specified key is contained in {{ and }} delimiters will be replaced by + // the given value. + void SetValue(const std::string &key, const std::string &value) { + value_map_[key] = value; + } + + std::string GetValue(const std::string &key) const { + const auto it = value_map_.find(key); + return it == value_map_.end() ? "" : it->second; + } + + // Appends the given text to the generated code as well as a newline + // character. Any text within {{ and }} delimiters is replaced by values + // previously stored in the CodeWriter by calling SetValue above. The newline + // will be suppressed if the text ends with the \\ character. + void operator+=(std::string text); + + // Returns the current contents of the CodeWriter as a std::string. 
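Beyond the {{KEY}} substitution shown in the class comment above, CodeWriter also tracks an indentation level that is applied to each appended line. A small sketch of both features together (assuming the header is included and `std::string` is available):

    flatbuffers::CodeWriter code("  ");  // two spaces per indent level
    code.SetValue("NAME", "Foo");
    code += "void {{NAME}}() {";
    code.IncrementIdentLevel();
    code += "return;";                   // emitted as "  return;"
    code.DecrementIdentLevel();
    code += "}";
    const std::string generated = code.ToString();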
+ std::string ToString() const { return stream_.str(); } + + // Increase ident level for writing code + void IncrementIdentLevel() { cur_ident_lvl_++; } + // Decrease ident level for writing code + void DecrementIdentLevel() { + if (cur_ident_lvl_) cur_ident_lvl_--; + } + + void SetPadding(const std::string &padding) { pad_ = padding; } + + private: + std::map value_map_; + std::stringstream stream_; + std::string pad_; + int cur_ident_lvl_; + bool ignore_ident_; + + // Add ident padding (tab or space) based on ident level + void AppendIdent(std::stringstream &stream); +}; + +class BaseGenerator { + public: + virtual bool generate() = 0; + + static std::string NamespaceDir(const Parser &parser, const std::string &path, + const Namespace &ns, + const bool dasherize = false); + + std::string GeneratedFileName(const std::string &path, + const std::string &file_name, + const IDLOptions &options) const; + + protected: + BaseGenerator(const Parser &parser, const std::string &path, + const std::string &file_name, std::string qualifying_start, + std::string qualifying_separator, std::string default_extension) + : parser_(parser), + path_(path), + file_name_(file_name), + qualifying_start_(qualifying_start), + qualifying_separator_(qualifying_separator), + default_extension_(default_extension) {} + virtual ~BaseGenerator() {} + + // No copy/assign. + BaseGenerator &operator=(const BaseGenerator &); + BaseGenerator(const BaseGenerator &); + + std::string NamespaceDir(const Namespace &ns, + const bool dasherize = false) const; + + static const char *FlatBuffersGeneratedWarning(); + + static std::string FullNamespace(const char *separator, const Namespace &ns); + + static std::string LastNamespacePart(const Namespace &ns); + + // tracks the current namespace for early exit in WrapInNameSpace + // c++, java and csharp returns a different namespace from + // the following default (no early exit, always fully qualify), + // which works for js and php + virtual const Namespace *CurrentNameSpace() const { return nullptr; } + + // Ensure that a type is prefixed with its namespace even within + // its own namespace to avoid conflict between generated method + // names and similarly named classes or structs + std::string WrapInNameSpace(const Namespace *ns, + const std::string &name) const; + + std::string WrapInNameSpace(const Definition &def, + const std::string &suffix = "") const; + + std::string GetNameSpace(const Definition &def) const; + + const Parser &parser_; + const std::string &path_; + const std::string &file_name_; + const std::string qualifying_start_; + const std::string qualifying_separator_; + const std::string default_extension_; +}; + +struct CommentConfig { + const char *first_line; + const char *content_line_prefix; + const char *last_line; +}; + +extern void GenComment(const std::vector &dc, + std::string *code_ptr, const CommentConfig *config, + const char *prefix = ""); + +class FloatConstantGenerator { + public: + virtual ~FloatConstantGenerator() {} + std::string GenFloatConstant(const FieldDef &field) const; + + private: + virtual std::string Value(double v, const std::string &src) const = 0; + virtual std::string Inf(double v) const = 0; + virtual std::string NaN(double v) const = 0; + + virtual std::string Value(float v, const std::string &src) const = 0; + virtual std::string Inf(float v) const = 0; + virtual std::string NaN(float v) const = 0; + + template + std::string GenFloatConstantImpl(const FieldDef &field) const; +}; + +class SimpleFloatConstantGenerator : public 
FloatConstantGenerator { + public: + SimpleFloatConstantGenerator(const char *nan_number, + const char *pos_inf_number, + const char *neg_inf_number); + + private: + std::string Value(double v, + const std::string &src) const FLATBUFFERS_OVERRIDE; + std::string Inf(double v) const FLATBUFFERS_OVERRIDE; + std::string NaN(double v) const FLATBUFFERS_OVERRIDE; + + std::string Value(float v, const std::string &src) const FLATBUFFERS_OVERRIDE; + std::string Inf(float v) const FLATBUFFERS_OVERRIDE; + std::string NaN(float v) const FLATBUFFERS_OVERRIDE; + + const std::string nan_number_; + const std::string pos_inf_number_; + const std::string neg_inf_number_; +}; + +// C++, C#, Java like generator. +class TypedFloatConstantGenerator : public FloatConstantGenerator { + public: + TypedFloatConstantGenerator(const char *double_prefix, + const char *single_prefix, const char *nan_number, + const char *pos_inf_number, + const char *neg_inf_number = ""); + + private: + std::string Value(double v, + const std::string &src) const FLATBUFFERS_OVERRIDE; + std::string Inf(double v) const FLATBUFFERS_OVERRIDE; + + std::string NaN(double v) const FLATBUFFERS_OVERRIDE; + + std::string Value(float v, const std::string &src) const FLATBUFFERS_OVERRIDE; + std::string Inf(float v) const FLATBUFFERS_OVERRIDE; + std::string NaN(float v) const FLATBUFFERS_OVERRIDE; + + std::string MakeNaN(const std::string &prefix) const; + std::string MakeInf(bool neg, const std::string &prefix) const; + + const std::string double_prefix_; + const std::string single_prefix_; + const std::string nan_number_; + const std::string pos_inf_number_; + const std::string neg_inf_number_; +}; + +std::string JavaCSharpMakeRule(const bool java, const Parser &parser, + const std::string &path, + const std::string &file_name); + +} // namespace flatbuffers + +#endif // FLATBUFFERS_CODE_GENERATORS_H_ diff --git a/third_party_static/flatbuffers/include/flatbuffers/default_allocator.h b/third_party_static/flatbuffers/include/flatbuffers/default_allocator.h new file mode 100644 index 00000000000..975d9380249 --- /dev/null +++ b/third_party_static/flatbuffers/include/flatbuffers/default_allocator.h @@ -0,0 +1,58 @@ +/* + * Copyright 2021 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FLATBUFFERS_DEFAULT_ALLOCATOR_H_ +#define FLATBUFFERS_DEFAULT_ALLOCATOR_H_ + +#include "flatbuffers/allocator.h" +#include "flatbuffers/base.h" + +namespace flatbuffers { + +// DefaultAllocator uses new/delete to allocate memory regions +class DefaultAllocator : public Allocator { + public: + uint8_t *allocate(size_t size) FLATBUFFERS_OVERRIDE { + return new uint8_t[size]; + } + + void deallocate(uint8_t *p, size_t) FLATBUFFERS_OVERRIDE { delete[] p; } + + static void dealloc(void *p, size_t) { delete[] static_cast(p); } +}; + +// These functions allow for a null allocator to mean use the default allocator, +// as used by DetachedBuffer and vector_downward below. 
+// This is to avoid having a statically or dynamically allocated default +// allocator, or having to move it between the classes that may own it. +inline uint8_t *Allocate(Allocator *allocator, size_t size) { + return allocator->allocate(size); +} + +inline void Deallocate(Allocator *allocator, uint8_t *p, size_t size) { + allocator->deallocate(p, size); +} + +inline uint8_t *ReallocateDownward(Allocator *allocator, uint8_t *old_p, + size_t old_size, size_t new_size, + size_t in_use_back, size_t in_use_front) { + return allocator->reallocate_downward(old_p, old_size, new_size, in_use_back, + in_use_front); +} + +} // namespace flatbuffers + +#endif // FLATBUFFERS_DEFAULT_ALLOCATOR_H_ diff --git a/third_party_static/flatbuffers/include/flatbuffers/detached_buffer.h b/third_party_static/flatbuffers/include/flatbuffers/detached_buffer.h new file mode 100644 index 00000000000..5e900baeb57 --- /dev/null +++ b/third_party_static/flatbuffers/include/flatbuffers/detached_buffer.h @@ -0,0 +1,114 @@ +/* + * Copyright 2021 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FLATBUFFERS_DETACHED_BUFFER_H_ +#define FLATBUFFERS_DETACHED_BUFFER_H_ + +#include "flatbuffers/allocator.h" +#include "flatbuffers/base.h" +#include "flatbuffers/default_allocator.h" + +namespace flatbuffers { + +// DetachedBuffer is a finished flatbuffer memory region, detached from its +// builder. The original memory region and allocator are also stored so that +// the DetachedBuffer can manage the memory lifetime. 
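Note that in this vendored copy the wrappers above forward directly to the allocator that is passed in, so callers are expected to supply one. A sketch of a custom allocator that could be handed to a builder (a hypothetical `CountingAllocator`; `reallocate_downward` keeps its default implementation from the Allocator base class):

    #include "flatbuffers/allocator.h"

    class CountingAllocator : public flatbuffers::Allocator {
     public:
      uint8_t *allocate(size_t size) FLATBUFFERS_OVERRIDE {
        total_allocated += size;
        return new uint8_t[size];
      }
      void deallocate(uint8_t *p, size_t) FLATBUFFERS_OVERRIDE { delete[] p; }
      size_t total_allocated = 0;
    };

An instance would typically be passed to FlatBufferBuilder's constructor, which stores the pointer and uses it for all buffer growth.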
+class DetachedBuffer { + public: + DetachedBuffer() + : allocator_(nullptr), + own_allocator_(false), + buf_(nullptr), + reserved_(0), + cur_(nullptr), + size_(0) {} + + DetachedBuffer(Allocator *allocator, bool own_allocator, uint8_t *buf, + size_t reserved, uint8_t *cur, size_t sz) + : allocator_(allocator), + own_allocator_(own_allocator), + buf_(buf), + reserved_(reserved), + cur_(cur), + size_(sz) {} + + DetachedBuffer(DetachedBuffer &&other) noexcept + : allocator_(other.allocator_), + own_allocator_(other.own_allocator_), + buf_(other.buf_), + reserved_(other.reserved_), + cur_(other.cur_), + size_(other.size_) { + other.reset(); + } + + DetachedBuffer &operator=(DetachedBuffer &&other) noexcept { + if (this == &other) return *this; + + destroy(); + + allocator_ = other.allocator_; + own_allocator_ = other.own_allocator_; + buf_ = other.buf_; + reserved_ = other.reserved_; + cur_ = other.cur_; + size_ = other.size_; + + other.reset(); + + return *this; + } + + ~DetachedBuffer() { destroy(); } + + const uint8_t *data() const { return cur_; } + + uint8_t *data() { return cur_; } + + size_t size() const { return size_; } + + // These may change access mode, leave these at end of public section + FLATBUFFERS_DELETE_FUNC(DetachedBuffer(const DetachedBuffer &other)); + FLATBUFFERS_DELETE_FUNC( + DetachedBuffer &operator=(const DetachedBuffer &other)); + + protected: + Allocator *allocator_; + bool own_allocator_; + uint8_t *buf_; + size_t reserved_; + uint8_t *cur_; + size_t size_; + + inline void destroy() { + if (buf_) Deallocate(allocator_, buf_, reserved_); + if (own_allocator_ && allocator_) { delete allocator_; } + reset(); + } + + inline void reset() { + allocator_ = nullptr; + own_allocator_ = false; + buf_ = nullptr; + reserved_ = 0; + cur_ = nullptr; + size_ = 0; + } +}; + +} // namespace flatbuffers + +#endif // FLATBUFFERS_DETACHED_BUFFER_H_ diff --git a/third_party_static/flatbuffers/include/flatbuffers/file_manager.h b/third_party_static/flatbuffers/include/flatbuffers/file_manager.h new file mode 100644 index 00000000000..069df5b8842 --- /dev/null +++ b/third_party_static/flatbuffers/include/flatbuffers/file_manager.h @@ -0,0 +1,48 @@ +/* + * Copyright 2023 Google Inc. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FLATBUFFERS_FILE_MANAGER_H_ +#define FLATBUFFERS_FILE_MANAGER_H_ + +#include +#include + +#include "flatbuffers/util.h" + +namespace flatbuffers { + +// A File interface to write data to file by default or +// save only file names +class FileManager { + public: + FileManager() = default; + virtual ~FileManager() = default; + + virtual bool SaveFile(const std::string &absolute_file_name, + const std::string &content) = 0; + + virtual bool LoadFile(const std::string &absolute_file_name, + std::string *buf) = 0; + + private: + // Copying is not supported. 
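DetachedBuffer above is the move-only handle a builder hands out when the caller takes ownership of a finished buffer. A usage sketch (editorial illustration):

    #include "flatbuffers/flatbuffer_builder.h"

    void TakeOwnership(flatbuffers::FlatBufferBuilder &fbb) {
      // Assumes Finish() has already been called on fbb.
      flatbuffers::DetachedBuffer owned = fbb.Release();  // move, not copy
      const uint8_t *bytes = owned.data();
      const size_t length = owned.size();
      (void)bytes;
      (void)length;
      // The memory is freed through the captured allocator when `owned`
      // goes out of scope; the builder itself can be reused after Clear().
    }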
+  FileManager(const FileManager &) = delete;
+  FileManager &operator=(const FileManager &) = delete;
+};
+
+}  // namespace flatbuffers
+
+#endif  // FLATBUFFERS_FILE_MANAGER_H_
diff --git a/third_party_static/flatbuffers/include/flatbuffers/flatbuffer_builder.h b/third_party_static/flatbuffers/include/flatbuffers/flatbuffer_builder.h
new file mode 100644
index 00000000000..0a38b4ac311
--- /dev/null
+++ b/third_party_static/flatbuffers/include/flatbuffers/flatbuffer_builder.h
@@ -0,0 +1,1465 @@
+/*
+ * Copyright 2021 Google Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FLATBUFFERS_FLATBUFFER_BUILDER_H_
+#define FLATBUFFERS_FLATBUFFER_BUILDER_H_
+
+#include <algorithm>
+#include <cstdint>
+#include <functional>
+#include <initializer_list>
+#include <type_traits>
+
+#include "flatbuffers/allocator.h"
+#include "flatbuffers/array.h"
+#include "flatbuffers/base.h"
+#include "flatbuffers/buffer.h"
+#include "flatbuffers/buffer_ref.h"
+#include "flatbuffers/default_allocator.h"
+#include "flatbuffers/detached_buffer.h"
+#include "flatbuffers/stl_emulation.h"
+#include "flatbuffers/string.h"
+#include "flatbuffers/struct.h"
+#include "flatbuffers/table.h"
+#include "flatbuffers/vector.h"
+#include "flatbuffers/vector_downward.h"
+#include "flatbuffers/verifier.h"
+
+namespace flatbuffers {
+
+// Converts a Field ID to a virtual table offset.
+inline voffset_t FieldIndexToOffset(voffset_t field_id) {
+  // Should correspond to what EndTable() below builds up.
+  const voffset_t fixed_fields =
+      2 * sizeof(voffset_t);  // Vtable size and Object Size.
+  return fixed_fields + field_id * sizeof(voffset_t);
+}
+
+template<typename T, typename Alloc = std::allocator<T>>
+const T *data(const std::vector<T, Alloc> &v) {
+  // Eventually the returned pointer gets passed down to memcpy, so
+  // we need it to be non-null to avoid undefined behavior.
+  static uint8_t t;
+  return v.empty() ? reinterpret_cast<const T *>(&t) : &v.front();
+}
+template<typename T, typename Alloc = std::allocator<T>>
+T *data(std::vector<T, Alloc> &v) {
+  // Eventually the returned pointer gets passed down to memcpy, so
+  // we need it to be non-null to avoid undefined behavior.
+  static uint8_t t;
+  return v.empty() ? reinterpret_cast<T *>(&t) : &v.front();
+}
+
+/// @addtogroup flatbuffers_cpp_api
+/// @{
+/// @class FlatBufferBuilder
+/// @brief Helper class to hold data needed in creation of a FlatBuffer.
+/// To serialize data, you typically call one of the `Create*()` functions in
+/// the generated code, which in turn call a sequence of `StartTable`/
+/// `PushElement`/`AddElement`/`EndTable`, or the builtin `CreateString`/
+/// `CreateVector` functions. Do this in depth-first order to build up a tree
+/// to the root. `Finish()` wraps up the buffer ready for transport.
+template<bool Is64Aware = false> class FlatBufferBuilderImpl {
+ public:
+  // This switches the size type of the builder, based on if its 64-bit aware
+  // (uoffset64_t) or not (uoffset_t).
+  typedef
+      typename std::conditional<Is64Aware, uoffset64_t, uoffset_t>::type SizeT;
+
+  /// @brief Default constructor for FlatBufferBuilder.
+  /// @param[in] initial_size The initial size of the buffer, in bytes. Defaults
+  /// to `1024`.
+ /// @param[in] allocator An `Allocator` to use. If null will use + /// `DefaultAllocator`. + /// @param[in] own_allocator Whether the builder/vector should own the + /// allocator. Defaults to / `false`. + /// @param[in] buffer_minalign Force the buffer to be aligned to the given + /// minimum alignment upon reallocation. Only needed if you intend to store + /// types with custom alignment AND you wish to read the buffer in-place + /// directly after creation. + explicit FlatBufferBuilderImpl( + size_t initial_size = 1024, Allocator *allocator = nullptr, + bool own_allocator = false, + size_t buffer_minalign = AlignOf()) + : buf_(initial_size, allocator, own_allocator, buffer_minalign, + static_cast(Is64Aware ? FLATBUFFERS_MAX_64_BUFFER_SIZE + : FLATBUFFERS_MAX_BUFFER_SIZE)), + num_field_loc(0), + max_voffset_(0), + length_of_64_bit_region_(0), + nested(false), + finished(false), + minalign_(1), + force_defaults_(false), + dedup_vtables_(true), + string_pool(nullptr) { + EndianCheck(); + } + + /// @brief Move constructor for FlatBufferBuilder. + FlatBufferBuilderImpl(FlatBufferBuilderImpl &&other) noexcept + : buf_(1024, nullptr, false, AlignOf(), + static_cast(Is64Aware ? FLATBUFFERS_MAX_64_BUFFER_SIZE + : FLATBUFFERS_MAX_BUFFER_SIZE)), + num_field_loc(0), + max_voffset_(0), + length_of_64_bit_region_(0), + nested(false), + finished(false), + minalign_(1), + force_defaults_(false), + dedup_vtables_(true), + string_pool(nullptr) { + EndianCheck(); + // Default construct and swap idiom. + // Lack of delegating constructors in vs2010 makes it more verbose than + // needed. + Swap(other); + } + + /// @brief Move assignment operator for FlatBufferBuilder. + FlatBufferBuilderImpl &operator=(FlatBufferBuilderImpl &&other) noexcept { + // Move construct a temporary and swap idiom + FlatBufferBuilderImpl temp(std::move(other)); + Swap(temp); + return *this; + } + + void Swap(FlatBufferBuilderImpl &other) { + using std::swap; + buf_.swap(other.buf_); + swap(num_field_loc, other.num_field_loc); + swap(max_voffset_, other.max_voffset_); + swap(length_of_64_bit_region_, other.length_of_64_bit_region_); + swap(nested, other.nested); + swap(finished, other.finished); + swap(minalign_, other.minalign_); + swap(force_defaults_, other.force_defaults_); + swap(dedup_vtables_, other.dedup_vtables_); + swap(string_pool, other.string_pool); + } + + ~FlatBufferBuilderImpl() { + if (string_pool) delete string_pool; + } + + void Reset() { + Clear(); // clear builder state + buf_.reset(); // deallocate buffer + } + + /// @brief Reset all the state in this FlatBufferBuilder so it can be reused + /// to construct another buffer. + void Clear() { + ClearOffsets(); + buf_.clear(); + nested = false; + finished = false; + minalign_ = 1; + length_of_64_bit_region_ = 0; + if (string_pool) string_pool->clear(); + } + + /// @brief The current size of the serialized buffer, counting from the end. + /// @return Returns an `SizeT` with the current size of the buffer. + SizeT GetSize() const { return buf_.size(); } + + /// @brief The current size of the serialized buffer relative to the end of + /// the 32-bit region. + /// @return Returns an `uoffset_t` with the current size of the buffer. + template + // Only enable this method for the 64-bit builder, as only that builder is + // concerned with the 32/64-bit boundary, and should be the one to bare any + // run time costs. 
+ typename std::enable_if::type GetSizeRelative32BitRegion() + const { + //[32-bit region][64-bit region] + // [XXXXXXXXXXXXXXXXXXX] GetSize() + // [YYYYYYYYYYYYY] length_of_64_bit_region_ + // [ZZZZ] return size + return static_cast(GetSize() - length_of_64_bit_region_); + } + + template + // Only enable this method for the 32-bit builder. + typename std::enable_if::type GetSizeRelative32BitRegion() + const { + return static_cast(GetSize()); + } + + /// @brief Get the serialized buffer (after you call `Finish()`). + /// @return Returns an `uint8_t` pointer to the FlatBuffer data inside the + /// buffer. + uint8_t *GetBufferPointer() const { + Finished(); + return buf_.data(); + } + + /// @brief Get the serialized buffer (after you call `Finish()`) as a span. + /// @return Returns a constructed flatbuffers::span that is a view over the + /// FlatBuffer data inside the buffer. + flatbuffers::span GetBufferSpan() const { + Finished(); + return flatbuffers::span(buf_.data(), buf_.size()); + } + + /// @brief Get a pointer to an unfinished buffer. + /// @return Returns a `uint8_t` pointer to the unfinished buffer. + uint8_t *GetCurrentBufferPointer() const { return buf_.data(); } + + /// @brief Get the released pointer to the serialized buffer. + /// @warning Do NOT attempt to use this FlatBufferBuilder afterwards! + /// @return A `FlatBuffer` that owns the buffer and its allocator and + /// behaves similar to a `unique_ptr` with a deleter. + FLATBUFFERS_ATTRIBUTE([[deprecated("use Release() instead")]]) + DetachedBuffer ReleaseBufferPointer() { + Finished(); + return buf_.release(); + } + + /// @brief Get the released DetachedBuffer. + /// @return A `DetachedBuffer` that owns the buffer and its allocator. + DetachedBuffer Release() { + Finished(); + return buf_.release(); + } + + /// @brief Get the released pointer to the serialized buffer. + /// @param size The size of the memory block containing + /// the serialized `FlatBuffer`. + /// @param offset The offset from the released pointer where the finished + /// `FlatBuffer` starts. + /// @return A raw pointer to the start of the memory block containing + /// the serialized `FlatBuffer`. + /// @remark If the allocator is owned, it gets deleted when the destructor is + /// called.. + uint8_t *ReleaseRaw(size_t &size, size_t &offset) { + Finished(); + return buf_.release_raw(size, offset); + } + + /// @brief get the minimum alignment this buffer needs to be accessed + /// properly. This is only known once all elements have been written (after + /// you call Finish()). You can use this information if you need to embed + /// a FlatBuffer in some other buffer, such that you can later read it + /// without first having to copy it into its own buffer. + size_t GetBufferMinAlignment() const { + Finished(); + return minalign_; + } + + /// @cond FLATBUFFERS_INTERNAL + void Finished() const { + // If you get this assert, you're attempting to get access a buffer + // which hasn't been finished yet. Be sure to call + // FlatBufferBuilder::Finish with your root table. + // If you really need to access an unfinished buffer, call + // GetCurrentBufferPointer instead. + FLATBUFFERS_ASSERT(finished); + } + /// @endcond + + /// @brief In order to save space, fields that are set to their default value + /// don't get serialized into the buffer. + /// @param[in] fd When set to `true`, always serializes default values that + /// are set. Optional fields which are not set explicitly, will still not be + /// serialized. 
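For example, with the setting documented above left at its default, a field written with its default value occupies no space in the buffer; enabling it trades buffer size for the ability to locate and mutate such fields in place later. A minimal sketch:

    flatbuffers::FlatBufferBuilder fbb;
    fbb.ForceDefaults(true);  // explicitly-set default values are now serialized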
+ void ForceDefaults(bool fd) { force_defaults_ = fd; } + + /// @brief By default vtables are deduped in order to save space. + /// @param[in] dedup When set to `true`, dedup vtables. + void DedupVtables(bool dedup) { dedup_vtables_ = dedup; } + + /// @cond FLATBUFFERS_INTERNAL + void Pad(size_t num_bytes) { buf_.fill(num_bytes); } + + void TrackMinAlign(size_t elem_size) { + if (elem_size > minalign_) minalign_ = elem_size; + } + + void Align(size_t elem_size) { + TrackMinAlign(elem_size); + buf_.fill(PaddingBytes(buf_.size(), elem_size)); + } + + void PushFlatBuffer(const uint8_t *bytes, size_t size) { + PushBytes(bytes, size); + finished = true; + } + + void PushBytes(const uint8_t *bytes, size_t size) { buf_.push(bytes, size); } + + void PopBytes(size_t amount) { buf_.pop(amount); } + + template void AssertScalarT() { + // The code assumes power of 2 sizes and endian-swap-ability. + static_assert(flatbuffers::is_scalar::value, "T must be a scalar type"); + } + + // Write a single aligned scalar to the buffer + template + ReturnT PushElement(T element) { + AssertScalarT(); + Align(sizeof(T)); + buf_.push_small(EndianScalar(element)); + return CalculateOffset(); + } + + template class OffsetT = Offset> + uoffset_t PushElement(OffsetT off) { + // Special case for offsets: see ReferTo below. + return PushElement(ReferTo(off.o)); + } + + // When writing fields, we track where they are, so we can create correct + // vtables later. + void TrackField(voffset_t field, uoffset_t off) { + FieldLoc fl = { off, field }; + buf_.scratch_push_small(fl); + num_field_loc++; + if (field > max_voffset_) { max_voffset_ = field; } + } + + // Like PushElement, but additionally tracks the field this represents. + template void AddElement(voffset_t field, T e, T def) { + // We don't serialize values equal to the default. + if (IsTheSameAs(e, def) && !force_defaults_) return; + TrackField(field, PushElement(e)); + } + + template void AddElement(voffset_t field, T e) { + TrackField(field, PushElement(e)); + } + + template void AddOffset(voffset_t field, Offset off) { + if (off.IsNull()) return; // Don't store. + AddElement(field, ReferTo(off.o), static_cast(0)); + } + + template void AddOffset(voffset_t field, Offset64 off) { + if (off.IsNull()) return; // Don't store. + AddElement(field, ReferTo(off.o), static_cast(0)); + } + + template void AddStruct(voffset_t field, const T *structptr) { + if (!structptr) return; // Default, don't store. + Align(AlignOf()); + buf_.push_small(*structptr); + TrackField(field, CalculateOffset()); + } + + void AddStructOffset(voffset_t field, uoffset_t off) { + TrackField(field, off); + } + + // Offsets initially are relative to the end of the buffer (downwards). + // This function converts them to be relative to the current location + // in the buffer (when stored here), pointing upwards. + uoffset_t ReferTo(uoffset_t off) { + // Align to ensure GetSizeRelative32BitRegion() below is correct. + Align(sizeof(uoffset_t)); + // 32-bit offsets are relative to the tail of the 32-bit region of the + // buffer. For most cases (without 64-bit entities) this is equivalent to + // size of the whole buffer (e.g. GetSize()) + return ReferTo(off, GetSizeRelative32BitRegion()); + } + + uoffset64_t ReferTo(uoffset64_t off) { + // Align to ensure GetSize() below is correct. 
+ Align(sizeof(uoffset64_t)); + // 64-bit offsets are relative to tail of the whole buffer + return ReferTo(off, GetSize()); + } + + template T ReferTo(const T off, const T2 size) { + FLATBUFFERS_ASSERT(off && off <= size); + return size - off + static_cast(sizeof(T)); + } + + template T ReferTo(const T off, const T size) { + FLATBUFFERS_ASSERT(off && off <= size); + return size - off + static_cast(sizeof(T)); + } + + void NotNested() { + // If you hit this, you're trying to construct a Table/Vector/String + // during the construction of its parent table (between the MyTableBuilder + // and table.Finish(). + // Move the creation of these sub-objects to above the MyTableBuilder to + // not get this assert. + // Ignoring this assert may appear to work in simple cases, but the reason + // it is here is that storing objects in-line may cause vtable offsets + // to not fit anymore. It also leads to vtable duplication. + FLATBUFFERS_ASSERT(!nested); + // If you hit this, fields were added outside the scope of a table. + FLATBUFFERS_ASSERT(!num_field_loc); + } + + // From generated code (or from the parser), we call StartTable/EndTable + // with a sequence of AddElement calls in between. + uoffset_t StartTable() { + NotNested(); + nested = true; + return GetSizeRelative32BitRegion(); + } + + // This finishes one serialized object by generating the vtable if it's a + // table, comparing it against existing vtables, and writing the + // resulting vtable offset. + uoffset_t EndTable(uoffset_t start) { + // If you get this assert, a corresponding StartTable wasn't called. + FLATBUFFERS_ASSERT(nested); + // Write the vtable offset, which is the start of any Table. + // We fill its value later. + // This is relative to the end of the 32-bit region. + const uoffset_t vtable_offset_loc = + static_cast(PushElement(0)); + // Write a vtable, which consists entirely of voffset_t elements. + // It starts with the number of offsets, followed by a type id, followed + // by the offsets themselves. In reverse: + // Include space for the last offset and ensure empty tables have a + // minimum size. + max_voffset_ = + (std::max)(static_cast(max_voffset_ + sizeof(voffset_t)), + FieldIndexToOffset(0)); + buf_.fill_big(max_voffset_); + const uoffset_t table_object_size = vtable_offset_loc - start; + // Vtable use 16bit offsets. + FLATBUFFERS_ASSERT(table_object_size < 0x10000); + WriteScalar(buf_.data() + sizeof(voffset_t), + static_cast(table_object_size)); + WriteScalar(buf_.data(), max_voffset_); + // Write the offsets into the table + for (auto it = buf_.scratch_end() - num_field_loc * sizeof(FieldLoc); + it < buf_.scratch_end(); it += sizeof(FieldLoc)) { + auto field_location = reinterpret_cast(it); + const voffset_t pos = + static_cast(vtable_offset_loc - field_location->off); + // If this asserts, it means you've set a field twice. + FLATBUFFERS_ASSERT( + !ReadScalar(buf_.data() + field_location->id)); + WriteScalar(buf_.data() + field_location->id, pos); + } + ClearOffsets(); + auto vt1 = reinterpret_cast(buf_.data()); + auto vt1_size = ReadScalar(vt1); + auto vt_use = GetSizeRelative32BitRegion(); + // See if we already have generated a vtable with this exact same + // layout before. If so, make it point to the old one, remove this one. 
+ if (dedup_vtables_) { + for (auto it = buf_.scratch_data(); it < buf_.scratch_end(); + it += sizeof(uoffset_t)) { + auto vt_offset_ptr = reinterpret_cast(it); + auto vt2 = reinterpret_cast(buf_.data_at(*vt_offset_ptr)); + auto vt2_size = ReadScalar(vt2); + if (vt1_size != vt2_size || 0 != memcmp(vt2, vt1, vt1_size)) continue; + vt_use = *vt_offset_ptr; + buf_.pop(GetSizeRelative32BitRegion() - vtable_offset_loc); + break; + } + } + // If this is a new vtable, remember it. + if (vt_use == GetSizeRelative32BitRegion()) { + buf_.scratch_push_small(vt_use); + } + // Fill the vtable offset we created above. + // The offset points from the beginning of the object to where the vtable is + // stored. + // Offsets default direction is downward in memory for future format + // flexibility (storing all vtables at the start of the file). + WriteScalar(buf_.data_at(vtable_offset_loc + length_of_64_bit_region_), + static_cast(vt_use) - + static_cast(vtable_offset_loc)); + nested = false; + return vtable_offset_loc; + } + + FLATBUFFERS_ATTRIBUTE([[deprecated("call the version above instead")]]) + uoffset_t EndTable(uoffset_t start, voffset_t /*numfields*/) { + return EndTable(start); + } + + // This checks a required field has been set in a given table that has + // just been constructed. + template void Required(Offset table, voffset_t field) { + auto table_ptr = reinterpret_cast(buf_.data_at(table.o)); + bool ok = table_ptr->GetOptionalFieldOffset(field) != 0; + // If this fails, the caller will show what field needs to be set. + FLATBUFFERS_ASSERT(ok); + (void)ok; + } + + uoffset_t StartStruct(size_t alignment) { + Align(alignment); + return GetSizeRelative32BitRegion(); + } + + uoffset_t EndStruct() { return GetSizeRelative32BitRegion(); } + + void ClearOffsets() { + buf_.scratch_pop(num_field_loc * sizeof(FieldLoc)); + num_field_loc = 0; + max_voffset_ = 0; + } + + // Aligns such that when "len" bytes are written, an object can be written + // after it (forward in the buffer) with "alignment" without padding. + void PreAlign(size_t len, size_t alignment) { + if (len == 0) return; + TrackMinAlign(alignment); + buf_.fill(PaddingBytes(GetSize() + len, alignment)); + } + + // Aligns such than when "len" bytes are written, an object of type `AlignT` + // can be written after it (forward in the buffer) without padding. + template void PreAlign(size_t len) { + AssertScalarT(); + PreAlign(len, AlignOf()); + } + /// @endcond + + /// @brief Store a string in the buffer, which can contain any binary data. + /// @param[in] str A const char pointer to the data to be stored as a string. + /// @param[in] len The number of bytes that should be stored from `str`. + /// @return Returns the offset in the buffer where the string starts. + template class OffsetT = Offset> + OffsetT CreateString(const char *str, size_t len) { + CreateStringImpl(str, len); + return OffsetT( + CalculateOffset::offset_type>()); + } + + /// @brief Store a string in the buffer, which is null-terminated. + /// @param[in] str A const char pointer to a C-string to add to the buffer. + /// @return Returns the offset in the buffer where the string starts. + template class OffsetT = Offset> + OffsetT CreateString(const char *str) { + return CreateString(str, strlen(str)); + } + + /// @brief Store a string in the buffer, which is null-terminated. + /// @param[in] str A char pointer to a C-string to add to the buffer. + /// @return Returns the offset in the buffer where the string starts. 
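The various CreateString overloads documented here all funnel into the (str, len) form above. A sketch of typical use while building up a buffer (editorial illustration):

    flatbuffers::FlatBufferBuilder fbb;
    auto s1 = fbb.CreateString("hello");                 // C-string overload
    auto s2 = fbb.CreateString(std::string("world"));    // std::string overload
    // s1 and s2 are Offset<String> values, to be stored into table fields
    // via the generated Add*/Create* helpers before calling fbb.Finish().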
+  template<template<typename> class OffsetT = Offset>
+  OffsetT<String> CreateString(char *str) {
+    return CreateString<OffsetT>(str, strlen(str));
+  }
+
+  /// @brief Store a string in the buffer, which can contain any binary data.
+  /// @param[in] str A const reference to a std::string to store in the buffer.
+  /// @return Returns the offset in the buffer where the string starts.
+  template<template<typename> class OffsetT = Offset>
+  OffsetT<String> CreateString(const std::string &str) {
+    return CreateString<OffsetT>(str.c_str(), str.length());
+  }
+
+  // clang-format off
+  #ifdef FLATBUFFERS_HAS_STRING_VIEW
+  /// @brief Store a string in the buffer, which can contain any binary data.
+  /// @param[in] str A const string_view to copy in to the buffer.
+  /// @return Returns the offset in the buffer where the string starts.
+  template