diff --git a/.gitmodules b/.gitmodules index 17a9a5bd6be18..1126c2fdbf366 100644 --- a/.gitmodules +++ b/.gitmodules @@ -56,9 +56,6 @@ [submodule "cmake/external/mp11"] path = cmake/external/mp11 url = https://github.com/boostorg/mp11.git -[submodule "cmake/external/coremltools"] - path = cmake/external/coremltools - url = https://github.com/apple/coremltools.git [submodule "cmake/external/dlpack"] path = cmake/external/dlpack url = https://github.com/dmlc/dlpack.git diff --git a/CODEOWNERS b/CODEOWNERS index 59e103be91a39..2274b0a848ce0 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -12,3 +12,8 @@ samples/python/training/** @thiagocrepaldi @tlh20 @liqunfu @baijumeswani @Sherlo # Mobile /onnxruntime/test/testdata/kernel_def_hashes/ @skottmckay @gwang-msft @YUNQIUGUO @edgchen1 + +# Contrib Ops +onnxruntime/core/graph/contrib_ops/nhwc_schema_defs.cc @zhanghuanrong @chenfucn @yufenglee @yihonglyu @snnn +onnxruntime/core/graph/contrib_ops/nchwc_schema_defs.cc @zhanghuanrong @chenfucn @yufenglee @yihonglyu @snnn +onnxruntime/core/graph/contrib_ops/quantization_defs.* @zhanghuanrong @chenfucn @yufenglee @yihonglyu @snnn \ No newline at end of file diff --git a/cgmanifests/generated/cgmanifest.json b/cgmanifests/generated/cgmanifest.json index 8acfa446b87f1..b10e0be0e67a7 100644 --- a/cgmanifests/generated/cgmanifest.json +++ b/cgmanifests/generated/cgmanifest.json @@ -110,16 +110,6 @@ "comments": "git submodule at cmake/external/SafeInt/safeint" } }, - { - "component": { - "type": "git", - "git": { - "commitHash": "523d5e03d86c26267ee6bdf17dd20f6ce6bdadd7", - "repositoryUrl": "https://github.com/apple/coremltools.git" - }, - "comments": "git submodule at cmake/external/coremltools" - } - }, { "component": { "type": "git", @@ -174,7 +164,7 @@ "component": { "type": "git", "git": { - "commitHash": "a3d65c80d32c3e584b7aab41d516a0043b2a5e84", + "commitHash": "3acac70a551c321574732e5bfd67930244bb7151", "repositoryUrl": "https://github.com/emscripten-core/emsdk.git" }, "comments": "git submodule at cmake/external/emsdk" @@ -215,7 +205,7 @@ "type": "git", "git": { "commitHash": "db78ac1d7716f56fc9f1b030b715f872f93964e4", - "repositoryUrl": "https://github.com/nlohmann/json" + "repositoryUrl": "https://github.com/nlohmann/json.git" }, "comments": "git submodule at cmake/external/json" } @@ -234,7 +224,7 @@ "component": { "type": "git", "git": { - "commitHash": "2d54553b7a78c7c35620b827e7e5ab2228ecb495", + "commitHash": "f412df7a2b64421e1f1d61fde6055a6ea288e8f5", "repositoryUrl": "https://github.com/microsoft/mimalloc.git" }, "comments": "git submodule at cmake/external/mimalloc" @@ -255,7 +245,7 @@ "type": "git", "git": { "commitHash": "436617053d0f39a1019a371c3a9aa599b3cb2cea", - "repositoryUrl": "https://github.com/google/nsync" + "repositoryUrl": "https://github.com/google/nsync.git" }, "comments": "git submodule at cmake/external/nsync" } @@ -265,7 +255,7 @@ "type": "git", "git": { "commitHash": "be76ca7148396176784ba8733133b9fb1186ea0d", - "repositoryUrl": "https://github.com/onnx/onnx" + "repositoryUrl": "https://github.com/onnx/onnx.git" }, "comments": "git submodule at cmake/external/onnx" } @@ -294,7 +284,7 @@ "component": { "type": "git", "git": { - "commitHash": "1f416bb462689f3ef9e3f1057a113d9c6aba6972", + "commitHash": "e9456d57605c883cdf985e634ab483e2c1500bb1", "repositoryUrl": "https://github.com/onnx/onnx-tensorrt.git" }, "comments": "git submodule at cmake/external/onnx-tensorrt" @@ -304,7 +294,7 @@ "component": { "type": "git", "git": { - "commitHash": 
"553df22c67bee5f0fe6599cff60f1afc6748c635", + "commitHash": "994c6181247d7b419b28889fc57d5817e2089419", "repositoryUrl": "https://github.com/onnx/onnx.git" }, "comments": "git submodule at cmake/external/onnx-tensorrt/third_party/onnx" @@ -324,22 +314,12 @@ "component": { "type": "git", "git": { - "commitHash": "09f082940113661256310e3f4811aa7261a9fa05", + "commitHash": "59a2ac2745d8a57ac94c6accced73620d59fb844", "repositoryUrl": "https://github.com/pybind/pybind11.git" }, "comments": "git submodule at cmake/external/onnx-tensorrt/third_party/onnx/third_party/pybind11" } }, - { - "component": { - "type": "git", - "git": { - "commitHash": "6a00cbc4a9b8e68b71caf7f774b3f9c753ae84d5", - "repositoryUrl": "https://github.com/wjakob/clang-cindex-python3" - }, - "comments": "git submodule at cmake/external/onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang" - } - }, { "component": { "type": "git", @@ -410,62 +390,12 @@ "comments": "git submodule at cmake/external/tensorboard" } }, - { - "component": { - "type": "git", - "git": { - "commitHash": "9ec2b92d180dff8877e402018b97baa574031b8b", - "repositoryUrl": "https://github.com/microsoft/onnxruntime-tvm.git" - }, - "comments": "git submodule at cmake/external/tvm" - } - }, - { - "component": { - "type": "git", - "git": { - "commitHash": "b257a9221ee1e5180d994b3488ddcc259b0ac157", - "repositoryUrl": "https://github.com/dmlc/HalideIR" - }, - "comments": "git submodule at cmake/external/tvm/3rdparty/HalideIR" - } - }, - { - "component": { - "type": "git", - "git": { - "commitHash": "5c792cef3aee54ad8b7000111c9dc1797f327b59", - "repositoryUrl": "https://github.com/dmlc/dlpack" - }, - "comments": "git submodule at cmake/external/tvm/3rdparty/dlpack" - } - }, - { - "component": { - "type": "git", - "git": { - "commitHash": "d07fb7a443b5db8a89d65a15a024af6a425615a5", - "repositoryUrl": "https://github.com/dmlc/dmlc-core" - }, - "comments": "git submodule at cmake/external/tvm/3rdparty/dmlc-core" - } - }, - { - "component": { - "type": "git", - "git": { - "commitHash": "cabe04d6d6b05356fa8f9741704924788f0dd762", - "repositoryUrl": "https://github.com/agauniyal/rang" - }, - "comments": "git submodule at cmake/external/tvm/3rdparty/rang" - } - }, { "component": { "type": "git", "git": { "commitHash": "e8c599bca6c56c44b6730ad93f6abbc9ecd60fc1", - "repositoryUrl": "https://github.com/microsoft/wil" + "repositoryUrl": "https://github.com/microsoft/wil.git" }, "comments": "git submodule at cmake/external/wil" } diff --git a/cmake/CMakeLists.txt b/cmake/CMakeLists.txt index 2011285360629..650caa6652386 100644 --- a/cmake/CMakeLists.txt +++ b/cmake/CMakeLists.txt @@ -120,8 +120,8 @@ option(onnxruntime_EXTENDED_MINIMAL_BUILD "onnxruntime_MINIMAL_BUILD with suppor option(onnxruntime_MINIMAL_BUILD_CUSTOM_OPS "Add custom operator kernels support to a minimal build." OFF) option(onnxruntime_REDUCED_OPS_BUILD "Reduced set of kernels are registered in build via modification of the kernel registration source files." OFF) option(onnxruntime_DISABLE_EXTERNAL_INITIALIZERS "Don't allow models to load external data" OFF) -cmake_dependent_option(onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD - "Enable runtime graph optimization replay for ORT format models in an extended minimal build." +cmake_dependent_option(onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD + "Enable runtime graph optimizations for ORT format models in an extended minimal build." 
OFF "onnxruntime_EXTENDED_MINIMAL_BUILD" OFF) #A special option just for debugging and sanitize check. Please do not enable in option in retail builds. @@ -347,8 +347,8 @@ if (onnxruntime_MINIMAL_BUILD) # enable EPs that compile kernels at runtime add_compile_definitions(ORT_EXTENDED_MINIMAL_BUILD) - if (onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) - add_compile_definitions(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) + if (onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) + add_compile_definitions(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) endif() endif() @@ -491,6 +491,14 @@ if (onnxruntime_CROSS_COMPILING) endif() endif() +if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS 9.0) + check_cxx_compiler_flag(-Wno-error HAS_NOERROR) + if (HAS_NOERROR) + string(APPEND CMAKE_CXX_FLAGS " -Wno-error=attributes") + string(APPEND CMAKE_C_FLAGS " -Wno-error=attributes") + endif() +endif() + # Mark symbols to be invisible, for macOS/iOS target only # Due to many dependencies have different symbol visibility settings, set global compile flags here. if (${CMAKE_SYSTEM_NAME} MATCHES "Darwin|iOS") diff --git a/cmake/external/coremltools b/cmake/external/coremltools deleted file mode 160000 index 523d5e03d86c2..0000000000000 --- a/cmake/external/coremltools +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 523d5e03d86c26267ee6bdf17dd20f6ce6bdadd7 diff --git a/cmake/external/onnx_minimal.cmake b/cmake/external/onnx_minimal.cmake index 33df1f5a31671..c42f431ea9920 100644 --- a/cmake/external/onnx_minimal.cmake +++ b/cmake/external/onnx_minimal.cmake @@ -31,10 +31,10 @@ else() if(HAS_UNUSED_BUT_SET_VARIABLE) target_compile_options(onnx_proto PRIVATE "-Wno-unused-but-set-variable") - endif() + endif() endif() -# For reference, this would be the full ONNX source include. We only need data_type_utils.* in this build. +# For reference, this would be the full ONNX source include. We only need data_type_utils in this build. # file(GLOB_RECURSE onnx_src CONFIGURE_DEPENDS # "${ONNX_SOURCE_ROOT}/onnx/*.h" # "${ONNX_SOURCE_ROOT}/onnx/*.cc" @@ -46,10 +46,11 @@ endif() # "${ONNX_SOURCE_ROOT}/onnx/test/*" # "${ONNX_SOURCE_ROOT}/onnx/cpp2py_export.cc" # ) -# list(REMOVE_ITEM onnx_src ${onnx_exclude_src}) -file(GLOB onnx_src CONFIGURE_DEPENDS -"${ONNX_SOURCE_ROOT}/onnx/common/common.h" -"${ONNX_SOURCE_ROOT}/onnx/defs/data_type_utils.*" +# list(REMOVE_ITEM onnx_src ${onnx_exclude_src}) +set(onnx_src + "${ONNX_SOURCE_ROOT}/onnx/common/common.h" + "${ONNX_SOURCE_ROOT}/onnx/defs/data_type_utils.h" + "${ONNX_SOURCE_ROOT}/onnx/defs/data_type_utils.cc" ) add_library(onnx ${onnx_src}) @@ -75,7 +76,7 @@ if (WIN32) /EHsc # exception handling - C++ may throw, extern "C" will not ) endif() - + target_compile_options(onnx_proto PRIVATE /wd4244 # 'argument' conversion from 'google::protobuf::int64' to 'int', possible loss of data ) diff --git a/cmake/onnxruntime.cmake b/cmake/onnxruntime.cmake index 5f2312216493e..6fe4c6497a9ca 100644 --- a/cmake/onnxruntime.cmake +++ b/cmake/onnxruntime.cmake @@ -7,7 +7,7 @@ if(UNIX) set(OUTPUT_STYLE xcode) else() set(OUTPUT_STYLE gcc) - endif() + endif() else() set(SYMBOL_FILE ${CMAKE_CURRENT_BINARY_DIR}/onnxruntime_dll.def) set(OUTPUT_STYLE vc) @@ -157,6 +157,8 @@ if(CMAKE_SYSTEM_NAME STREQUAL "Android" AND onnxruntime_BUILD_JAVA) endforeach() endif() +# This list is a reversed topological ordering of library dependencies. +# Earlier entries may depend on later ones. Later ones should not depend on earlier ones. 
set(onnxruntime_INTERNAL_LIBRARIES onnxruntime_session ${onnxruntime_libs} @@ -174,10 +176,10 @@ set(onnxruntime_INTERNAL_LIBRARIES ${onnxruntime_winml} onnxruntime_optimizer onnxruntime_providers - onnxruntime_util ${onnxruntime_tvm_libs} onnxruntime_framework onnxruntime_graph + onnxruntime_util ${ONNXRUNTIME_MLAS_LIBS} onnxruntime_common onnxruntime_flatbuffers diff --git a/cmake/onnxruntime_framework.cmake b/cmake/onnxruntime_framework.cmake index 82f1e75dbc3c1..5b6681fd20daa 100644 --- a/cmake/onnxruntime_framework.cmake +++ b/cmake/onnxruntime_framework.cmake @@ -53,7 +53,7 @@ if (onnxruntime_ENABLE_TRAINING OR onnxruntime_ENABLE_TRAINING_OPS) onnxruntime_add_include_to_target(onnxruntime_framework Python::Module) target_include_directories(onnxruntime_framework PRIVATE ${PROJECT_SOURCE_DIR}/external/dlpack/include) endif() - if (onnxruntime_USE_NCCL OR onnxruntime_USE_MPI) + if (onnxruntime_USE_NCCL OR onnxruntime_USE_MPI) target_include_directories(onnxruntime_framework PUBLIC ${MPI_CXX_INCLUDE_DIRS}) endif() endif() @@ -95,4 +95,8 @@ if (onnxruntime_DEBUG_NODE_INPUTS_OUTPUTS_ENABLE_DUMP_TO_SQLDB) target_compile_definitions(onnxruntime_framework PRIVATE DEBUG_NODE_INPUTS_OUTPUTS_ENABLE_DUMP_TO_SQLDB) endif() +if (WIN32) + target_compile_definitions(onnxruntime_framework PRIVATE _SCL_SECURE_NO_WARNINGS) +endif() + install(DIRECTORY ${PROJECT_SOURCE_DIR}/../include/onnxruntime/core/framework DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/onnxruntime/core) diff --git a/cmake/onnxruntime_graph.cmake b/cmake/onnxruntime_graph.cmake index db09ae7a36394..d98cee372c5aa 100644 --- a/cmake/onnxruntime_graph.cmake +++ b/cmake/onnxruntime_graph.cmake @@ -17,6 +17,7 @@ if (onnxruntime_MINIMAL_BUILD) "${ONNXRUNTIME_ROOT}/core/graph/schema_registry.cc" "${ONNXRUNTIME_ROOT}/core/graph/contrib_ops/*defs.h" "${ONNXRUNTIME_ROOT}/core/graph/contrib_ops/*defs.cc" + "${ONNXRUNTIME_ROOT}/core/graph/contrib_ops/onnx_deprecated_operators.cc" "${ONNXRUNTIME_ROOT}/core/graph/contrib_ops/onnx_function_util.h" "${ONNXRUNTIME_ROOT}/core/graph/contrib_ops/onnx_function_util.cc" ) diff --git a/cmake/onnxruntime_optimizer.cmake b/cmake/onnxruntime_optimizer.cmake index 1a3ceaa8095e3..ecf28ffb82bf8 100644 --- a/cmake/onnxruntime_optimizer.cmake +++ b/cmake/onnxruntime_optimizer.cmake @@ -21,12 +21,14 @@ if (onnxruntime_MINIMAL_BUILD) "${ONNXRUNTIME_ROOT}/core/optimizer/qdq_transformer/qdq_util.cc" "${ONNXRUNTIME_ROOT}/core/optimizer/initializer.h" "${ONNXRUNTIME_ROOT}/core/optimizer/initializer.cc" + "${ONNXRUNTIME_ROOT}/core/optimizer/selectors_actions/helpers.h" + "${ONNXRUNTIME_ROOT}/core/optimizer/selectors_actions/helpers.cc" "${ONNXRUNTIME_ROOT}/core/optimizer/utils.h" "${ONNXRUNTIME_ROOT}/core/optimizer/utils.cc" ) endif() - if (onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) + if (onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) list(APPEND onnxruntime_optimizer_src_patterns "${ONNXRUNTIME_INCLUDE_DIR}/core/optimizer/graph_transformer_utils.h" "${ONNXRUNTIME_ROOT}/core/optimizer/graph_transformer_utils.cc" @@ -36,6 +38,9 @@ if (onnxruntime_MINIMAL_BUILD) "${ONNXRUNTIME_ROOT}/core/optimizer/qdq_transformer/selectors_actions/*.cc" "${ONNXRUNTIME_ROOT}/core/optimizer/selectors_actions/*.h" "${ONNXRUNTIME_ROOT}/core/optimizer/selectors_actions/*.cc" + + "${ONNXRUNTIME_ROOT}/core/optimizer/transpose_optimizer/*.h" + "${ONNXRUNTIME_ROOT}/core/optimizer/transpose_optimizer/*.cc" ) endif() else() diff --git a/cmake/onnxruntime_providers.cmake b/cmake/onnxruntime_providers.cmake index 
a05490d1bd132..eab1918de701d 100644 --- a/cmake/onnxruntime_providers.cmake +++ b/cmake/onnxruntime_providers.cmake @@ -784,7 +784,7 @@ if (onnxruntime_USE_COREML) # Compile CoreML proto definition to ${CMAKE_CURRENT_BINARY_DIR}/coreml if (CMAKE_SYSTEM_NAME STREQUAL "Darwin" OR CMAKE_SYSTEM_NAME STREQUAL "iOS") - set(COREML_PROTO_ROOT ${PROJECT_SOURCE_DIR}/external/coremltools/mlmodel/format) + set(COREML_PROTO_ROOT ${PROJECT_SOURCE_DIR}/../onnxruntime/core/providers/coreml/mlmodel_format) file(GLOB coreml_proto_srcs "${COREML_PROTO_ROOT}/*.proto" ) diff --git a/cmake/onnxruntime_python.cmake b/cmake/onnxruntime_python.cmake index 3286024c59610..d8760123b18ca 100644 --- a/cmake/onnxruntime_python.cmake +++ b/cmake/onnxruntime_python.cmake @@ -311,6 +311,9 @@ if (onnxruntime_ENABLE_TRAINING) file(GLOB onnxruntime_python_ortmodule_torch_cpp_ext_fused_ops_srcs CONFIGURE_DEPENDS "${ORTTRAINING_SOURCE_DIR}/python/training/ortmodule/torch_cpp_extensions/cuda/fused_ops/*" ) + file(GLOB onnxruntime_python_utils_data_srcs CONFIGURE_DEPENDS + "${ORTTRAINING_SOURCE_DIR}/python/training/utils/data/*" + ) else() file(GLOB onnxruntime_python_capi_training_srcs CONFIGURE_DEPENDS "${ONNXRUNTIME_ROOT}/python/training/*.py" @@ -541,6 +544,7 @@ if (onnxruntime_ENABLE_TRAINING) COMMAND ${CMAKE_COMMAND} -E make_directory $/onnxruntime/training/ortmodule/torch_cpp_extensions/cpu/torch_interop_utils COMMAND ${CMAKE_COMMAND} -E make_directory $/onnxruntime/training/ortmodule/torch_cpp_extensions/cuda/torch_gpu_allocator COMMAND ${CMAKE_COMMAND} -E make_directory $/onnxruntime/training/ortmodule/torch_cpp_extensions/cuda/fused_ops + COMMAND ${CMAKE_COMMAND} -E make_directory $/onnxruntime/training/utils/data/ COMMAND ${CMAKE_COMMAND} -E copy ${onnxruntime_python_capi_training_srcs} $/onnxruntime/capi/training/ @@ -580,6 +584,9 @@ if (onnxruntime_ENABLE_TRAINING) COMMAND ${CMAKE_COMMAND} -E copy ${onnxruntime_python_ortmodule_torch_cpp_ext_fused_ops_srcs} $/onnxruntime/training/ortmodule/torch_cpp_extensions/cuda/fused_ops/ + COMMAND ${CMAKE_COMMAND} -E copy + ${onnxruntime_python_utils_data_srcs} + $/onnxruntime/training/utils/data/ ) endif() diff --git a/cmake/onnxruntime_unittests.cmake b/cmake/onnxruntime_unittests.cmake index 7c3bcf2c939da..17647c91941dc 100644 --- a/cmake/onnxruntime_unittests.cmake +++ b/cmake/onnxruntime_unittests.cmake @@ -15,7 +15,7 @@ endif() set(disabled_warnings) function(AddTest) - cmake_parse_arguments(_UT "DYN" "TARGET" "LIBS;SOURCES;DEPENDS" ${ARGN}) + cmake_parse_arguments(_UT "DYN" "TARGET" "LIBS;SOURCES;DEPENDS;TEST_ARGS" ${ARGN}) list(REMOVE_DUPLICATES _UT_SOURCES) if (${CMAKE_SYSTEM_NAME} STREQUAL "iOS") @@ -96,7 +96,7 @@ function(AddTest) target_compile_options(${_UT_TARGET} PRIVATE "-Wno-error=uninitialized") endif() - set(TEST_ARGS) + set(TEST_ARGS ${_UT_TEST_ARGS}) if (onnxruntime_GENERATE_TEST_REPORTS) # generate a report file next to the test program if (onnxruntime_BUILD_WEBASSEMBLY) @@ -236,7 +236,7 @@ else() # minimal and/or reduced ops build endif() endif() -if((NOT onnxruntime_MINIMAL_BUILD OR onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +if((NOT onnxruntime_MINIMAL_BUILD OR onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) AND NOT onnxruntime_REDUCED_OPS_BUILD) list(APPEND onnxruntime_test_optimizer_src "${TEST_SRC_DIR}/optimizer/runtime_optimization/graph_runtime_optimization_test.cc") @@ -685,6 +685,17 @@ if (onnxruntime_BUILD_WEBASSEMBLY) endif() endif() +set(test_all_args) +if (onnxruntime_USE_TENSORRT) + # TRT EP CI takes much 
longer time when updating to TRT 8.2 + # So, we only run trt ep and exclude other eps to reduce CI test time. + # + # The test names of model tests were using sequential number in the past. + # This PR https://github.com/microsoft/onnxruntime/pull/10220 (Please see ExpandModelName function in model_tests.cc for more details) + # made test name contain the "ep" and "model path" information, so we can easily filter the tests using cuda ep or other ep with *cpu__* or *xxx__*. + list(APPEND test_all_args "--gtest_filter=-*cpu__*:*cuda__*" ) +endif () + AddTest( TARGET onnxruntime_test_all SOURCES ${all_tests} ${onnxruntime_unittest_main_src} @@ -692,6 +703,7 @@ AddTest( onnx_test_runner_common ${onnxruntime_test_providers_libs} ${onnxruntime_test_common_libs} onnx_test_data_proto nlohmann_json::nlohmann_json DEPENDS ${all_dependencies} + TEST_ARGS ${test_all_args} ) if (MSVC) # The warning means the type of two integral values around a binary operator is narrow than their result. diff --git a/cmake/onnxruntime_util.cmake b/cmake/onnxruntime_util.cmake index 8449cf2e0f4e9..d25bd386ec9f2 100644 --- a/cmake/onnxruntime_util.cmake +++ b/cmake/onnxruntime_util.cmake @@ -4,17 +4,12 @@ file(GLOB_RECURSE onnxruntime_util_srcs CONFIGURE_DEPENDS "${ONNXRUNTIME_ROOT}/core/util/*.h" "${ONNXRUNTIME_ROOT}/core/util/*.cc" - "${ONNXRUNTIME_ROOT}/core/profile/*.h" - "${ONNXRUNTIME_ROOT}/core/profile/*.cc" ) source_group(TREE ${ONNXRUNTIME_ROOT}/core FILES ${onnxruntime_util_srcs}) onnxruntime_add_static_library(onnxruntime_util ${onnxruntime_util_srcs}) target_include_directories(onnxruntime_util PRIVATE ${ONNXRUNTIME_ROOT} PUBLIC ${eigen_INCLUDE_DIRS}) -if (onnxruntime_USE_CUDA) - target_include_directories(onnxruntime_util PRIVATE ${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES}) -endif() onnxruntime_add_include_to_target(onnxruntime_util onnxruntime_common onnxruntime_framework onnx onnx_proto ${PROTOBUF_LIB}) if(UNIX) target_compile_options(onnxruntime_util PUBLIC "-Wno-error=comment") @@ -24,5 +19,4 @@ set_target_properties(onnxruntime_util PROPERTIES FOLDER "ONNXRuntime") add_dependencies(onnxruntime_util ${onnxruntime_EXTERNAL_DEPENDENCIES}) if (WIN32) target_compile_definitions(onnxruntime_util PRIVATE _SCL_SECURE_NO_WARNINGS) - target_compile_definitions(onnxruntime_framework PRIVATE _SCL_SECURE_NO_WARNINGS) endif() diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs index 693f6ea2bd632..027cbfdc788c7 100644 --- a/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs +++ b/csharp/src/Microsoft.ML.OnnxRuntime/InferenceSession.shared.cs @@ -40,7 +40,7 @@ public class InferenceSession : IDisposable /// Dictionary that represents overridableInitializers metadata /// private Dictionary _overridableInitializerMetadata; - + private SessionOptions _builtInSessionOptions = null; private RunOptions _builtInRunOptions = null; private ModelMetadata _modelMetadata = null; @@ -998,9 +998,15 @@ internal static NodeMetadata GetMetadataFromTypeInfo(IntPtr typeInfo) NativeApiStatus.VerifySuccess(NativeMethods.OrtGetTensorElementType(tensorInfo, out el_type)); type = (TensorElementType)el_type; } + Type dotnetType = null; int width = 0; - TensorElementTypeConverter.GetTypeAndWidth(type, out dotnetType, out width); + if (!TensorElementTypeConverter.GetTypeAndWidth(type, out dotnetType, out width)) + { + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, + "Unable to query type information for data type: " + type.ToString()); 
+ } + UIntPtr numDimensions; NativeApiStatus.VerifySuccess(NativeMethods.OrtGetDimensionsCount(tensorInfo, out numDimensions)); diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxTensorMemory.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxTensorMemory.shared.cs index 61ac3324b6b06..b2439c32b0708 100644 --- a/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxTensorMemory.shared.cs +++ b/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxTensorMemory.shared.cs @@ -54,7 +54,7 @@ protected virtual void Dispose(bool disposing) // dispose managed state (managed objects). if (disposing) { - if(_disposables != null) + if (_disposables != null) { _disposables.Dispose(); _disposables = null; @@ -106,10 +106,19 @@ public NativeOnnxTensorMemory(OrtValue ortValue) NativeApiStatus.VerifySuccess(NativeMethods.OrtGetTensorElementType(typeAndShape, out el_type)); elemType = (TensorElementType)el_type; } - TensorElementTypeConverter.GetTypeAndWidth(elemType, out type, out width); + + if (!TensorElementTypeConverter.GetTypeAndWidth(elemType, out type, out width)) + { + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, + "Unable to query type information for data type: " + elemType.ToString()); + } if (typeof(T) != type) - throw new NotSupportedException(nameof(NativeOnnxTensorMemory) + " does not support T = " + nameof(T)); + { + var message = String.Format("The NativeOnnxTensorMemory type being instantiated for T = : {0} while supplied OrtValue contains T = {1}", + typeof(T), type); + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, message); + } ElementType = elemType; ElementWidth = width; @@ -136,7 +145,7 @@ public NativeOnnxTensorMemory(OrtValue ortValue) Dimensions[i] = (int)shape[i]; } - if (typeof(T) != typeof(string)) + if (elemType != TensorElementType.String) { NativeApiStatus.VerifySuccess(NativeMethods.OrtGetTensorMutableData(ortValue.Handle, out _dataBufferPointer)); } diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs index ee57a02b8120b..67781b82f5f1f 100644 --- a/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs +++ b/csharp/src/Microsoft.ML.OnnxRuntime/NativeOnnxValueHelper.shared.cs @@ -108,19 +108,22 @@ internal static IntPtr[] ConvertNamesToUtf8(IReadOnlyCollection names, Nam internal static class TensorElementTypeConverter { - public static void GetTypeAndWidth(TensorElementType elemType, out Type type, out int width) + public static bool GetTypeAndWidth(TensorElementType elemType, out Type type, out int width) { - TensorElementTypeInfo result = TensorBase.GetElementTypeInfo(elemType); - if(result != null) + bool result = true; + TensorElementTypeInfo typeInfo = TensorBase.GetElementTypeInfo(elemType); + if(typeInfo != null) { - type = result.TensorType; - width = result.TypeSize; + type = typeInfo.TensorType; + width = typeInfo.TypeSize; } else { type = null; width = 0; + result = false; } + return result; } } } diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.shared.cs index a396df41ec580..c420b706e28f2 100644 --- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.shared.cs +++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtAllocator.shared.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. 
+using Microsoft.ML.OnnxRuntime.Tensors; using System; using System.Runtime.InteropServices; using System.Text; @@ -61,7 +62,7 @@ internal IntPtr Pointer } #region SafeHandle - + /// /// Overrides SafeHandle.IsInvalid /// @@ -257,7 +258,7 @@ public OrtAllocatorType GetAllocatorType() public override bool Equals(object obj) { var other = obj as OrtMemoryInfo; - if(other == null) + if (other == null) { return false; } @@ -271,7 +272,7 @@ public override bool Equals(object obj) /// true if instances are equal according to OrtCompareMemoryInfo. public bool Equals(OrtMemoryInfo other) { - if(this == other) + if (this == other) { return true; } @@ -310,6 +311,78 @@ protected override bool ReleaseHandle() #endregion } + /// + /// This class represents an arbitrary buffer of memory + /// allocated and owned by the user. It can be either a CPU, GPU or other device memory + /// that can be suitably represented by IntPtr. + /// This is just a composite of the buffer related information. + /// The memory is assumed to be pinned if necessary and usable immediately + /// in the native code. + /// + public class OrtExternalAllocation + { + /// + /// Constructor + /// + /// use to accurately describe a piece of memory that this is wrapping + /// shape of this buffer + /// element type + /// the actual pointer to memory + /// size of the allocation in bytes + public OrtExternalAllocation(OrtMemoryInfo memInfo, long[] shape, Tensors.TensorElementType elementType, IntPtr pointer, long sizeInBytes) + { + Type type; + int width; + if (!TensorElementTypeConverter.GetTypeAndWidth(elementType, out type, out width)) + { + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, + "Unable to query type information for data type: " + elementType.ToString()); + } + + if (elementType == TensorElementType.String) + { + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, + "Strings are not supported by this API"); + } + + var shapeSize = ArrayUtilities.GetSizeForShape(shape); + var requiredBufferSize = shapeSize * width; + if (requiredBufferSize > sizeInBytes) + { + var message = String.Format("Shape of {0} elements requires a buffer of at least {1} bytes. Provided: {2} bytes", + shapeSize, requiredBufferSize, sizeInBytes); + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, message); + } + + Info = memInfo; + Shape = shape; + ElementType = elementType; + Pointer = pointer; + Size = sizeInBytes; + } + + /// + /// OrtMemoryInfo + /// + public OrtMemoryInfo Info { get; private set; } + /// + /// Shape + /// + public long[] Shape { get; private set; } + /// + /// Data type + /// + public Tensors.TensorElementType ElementType { get; private set; } + /// + /// Actual memory ptr + /// + public IntPtr Pointer { get; private set; } + /// + /// Size of the allocation in bytes + /// + public long Size { get; private set; } + } + /// /// This class represents memory allocation made by a specific onnxruntime /// allocator. Use OrtAllocator.Allocate() to obtain an instance of this class. diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.shared.cs index 40549a684856c..382ffe7929de7 100644 --- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.shared.cs +++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtIoBinding.shared.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. 
+using Microsoft.ML.OnnxRuntime.Tensors; using System; using System.Runtime.InteropServices; using System.Text; @@ -55,9 +56,7 @@ internal IntPtr Handle /// Bind a piece of pre-allocated native memory as a OrtValue Tensor with a given shape /// to an input with a given name. The model will read the specified input from that memory /// possibly avoiding the need to copy between devices. OrtMemoryAllocation continues to own - /// the chunk of native memory and should be alive until the end of execution. - /// The size of the allocation can not be less than required. - /// by the Tensor of the given size. + /// the chunk of native memory, and the allocation should be alive until the end of execution. /// /// of the input /// Tensor element type @@ -65,11 +64,20 @@ internal IntPtr Handle /// native memory allocation public void BindInput(string name, Tensors.TensorElementType elementType, long[] shape, OrtMemoryAllocation allocation) { - using (var ortValue = OrtValue.CreateTensorValueWithData(allocation.Info, - elementType, - shape, - allocation.Pointer, allocation.Size)) - BindInputOrOutput(name, ortValue.Handle, true); + BindOrtAllocation(name, elementType, shape, allocation, true); + } + + /// + /// Bind externally (not from OrtAllocator) allocated memory as input. + /// The model will read the specified input from that memory + /// possibly avoiding the need to copy between devices. The user code continues to own + /// the chunk of externally allocated memory, and the allocation should be alive until the end of execution. + /// + /// name + /// non ort allocated memory + public void BindInput(string name, OrtExternalAllocation allocation) + { + BindExternalAllocation(name, allocation, true); } /// @@ -80,7 +88,7 @@ public void BindInput(string name, Tensors.TensorElementType elementType, long[] /// public void BindInput(string name, FixedBufferOnnxValue fixedValue) { - if(fixedValue.OnnxValueType != OnnxValueType.ONNX_TYPE_TENSOR) + if (fixedValue.OnnxValueType != OnnxValueType.ONNX_TYPE_TENSOR) { throw new OnnxRuntimeException(ErrorCode.InvalidArgument, "Binding works only with Tensors"); } @@ -93,13 +101,12 @@ public void BindInput(string name, FixedBufferOnnxValue fixedValue) /// public void SynchronizeBoundInputs() { - NativeMethods.OrtSynchronizeBoundInputs(handle); + NativeApiStatus.VerifySuccess(NativeMethods.OrtSynchronizeBoundInputs(handle)); } /// /// Bind model output to an OrtValue as Tensor with a given type and shape. An instance of OrtMemoryAllocaiton - /// owns the memory and should be alive for the time of execution.The size of the allocation can not be less than required - /// by the Tensor of the given size. + /// owns the memory and should be alive for the time of execution. /// /// of the output /// tensor element type @@ -107,11 +114,20 @@ public void SynchronizeBoundInputs() /// allocated memory public void BindOutput(string name, Tensors.TensorElementType elementType, long[] shape, OrtMemoryAllocation allocation) { - using (var ortValue = OrtValue.CreateTensorValueWithData(allocation.Info, - elementType, - shape, - allocation.Pointer, allocation.Size)) - BindInputOrOutput(name, ortValue.Handle, false); + BindOrtAllocation(name, elementType, shape, allocation, false); + } + + /// + /// Bind externally (not from OrtAllocator) allocated memory as output. + /// The model will read the specified input from that memory + /// possibly avoiding the need to copy between devices. 
The user code continues to own + /// the chunk of externally allocated memory, and the allocation should be alive until the end of execution. + /// + /// name + /// non ort allocated memory + public void BindOutput(string name, OrtExternalAllocation allocation) + { + BindExternalAllocation(name, allocation, false); } /// @@ -139,7 +155,7 @@ public void BindOutputToDevice(string name, OrtMemoryInfo memInfo) { var utf8NamePinned = GCHandle.Alloc(NativeOnnxValueHelper.StringToZeroTerminatedUtf8(name), GCHandleType.Pinned); using (var pinnedName = new PinnedGCHandle(utf8NamePinned)) - NativeApiStatus.VerifySuccess(NativeMethods.OrtBindOutputToDevice(handle, pinnedName.Pointer, memInfo.Pointer)); + NativeApiStatus.VerifySuccess(NativeMethods.OrtBindOutputToDevice(handle, pinnedName.Pointer, memInfo.Pointer)); } /// @@ -148,9 +164,46 @@ public void BindOutputToDevice(string name, OrtMemoryInfo memInfo) /// public void SynchronizeBoundOutputs() { - NativeMethods.OrtSynchronizeBoundOutputs(handle); + NativeApiStatus.VerifySuccess(NativeMethods.OrtSynchronizeBoundOutputs(handle)); } + /// + /// Bind allocation obtained from an Ort allocator + /// + /// name + /// data type + /// tensor shape + /// ort allocation + /// whether this is input or output + private void BindOrtAllocation(string name, Tensors.TensorElementType elementType, long[] shape, + OrtMemoryAllocation allocation, bool isInput) + { + using (var ortValue = OrtValue.CreateTensorValueWithData(allocation.Info, + elementType, + shape, + allocation.Pointer, allocation.Size)) + BindInputOrOutput(name, ortValue.Handle, isInput); + } + + + /// + /// Bind external allocation as input or output. + /// The allocation is owned by the user code. + /// + /// name + /// non ort allocated memory + /// whether this is an input or output + private void BindExternalAllocation(string name, OrtExternalAllocation allocation, bool isInput) + { + using (var ortValue = OrtValue.CreateTensorValueWithData(allocation.Info, + allocation.ElementType, + allocation.Shape, + allocation.Pointer, + allocation.Size)) + BindInputOrOutput(name, ortValue.Handle, isInput); + } + + /// /// Internal helper /// @@ -185,7 +238,7 @@ public string[] GetOutputNames() var allocator = OrtAllocator.DefaultInstance; NativeApiStatus.VerifySuccess(NativeMethods.OrtGetBoundOutputNames(handle, allocator.Pointer, out buffer, out lengths, out count)); - if(count.Equals(UIntPtr.Zero)) + if (count.Equals(UIntPtr.Zero)) { return new string[0]; } @@ -196,9 +249,9 @@ public string[] GetOutputNames() int outputCount = (int)count; var lens = new int[outputCount]; int totalLength = 0; - for(int i = 0; i < outputCount; ++i) + for (int i = 0; i < outputCount; ++i) { - var len =(int)Marshal.ReadIntPtr(lengths, IntPtr.Size * i); + var len = (int)Marshal.ReadIntPtr(lengths, IntPtr.Size * i); lens[i] = len; totalLength += len; } @@ -208,7 +261,7 @@ public string[] GetOutputNames() string[] result = new string[outputCount]; int readOffset = 0; - for(int i = 0; i < outputCount; ++i) + for (int i = 0; i < outputCount; ++i) { var strLen = lens[i]; result[i] = Encoding.UTF8.GetString(stringData, readOffset, strLen); @@ -229,23 +282,24 @@ public IDisposableReadOnlyCollection GetOutputValues() var allocator = OrtAllocator.DefaultInstance; NativeApiStatus.VerifySuccess(NativeMethods.OrtGetBoundOutputValues(handle, allocator.Pointer, out ortValues, out count)); - if(count.Equals(UIntPtr.Zero)) + if (count.Equals(UIntPtr.Zero)) { return new DisposableList(); } - using(var ortValuesAllocation = new 
OrtMemoryAllocation(allocator, ortValues, 0)) + using (var ortValuesAllocation = new OrtMemoryAllocation(allocator, ortValues, 0)) { int outputCount = (int)count; var ortList = new DisposableList(outputCount); try { - for(int i = 0; i < outputCount; ++i) + for (int i = 0; i < outputCount; ++i) { IntPtr ortValue = Marshal.ReadIntPtr(ortValues, IntPtr.Size * i); ortList.Add(new OrtValue(ortValue)); } - } catch(Exception) + } + catch (Exception) { ortList.Dispose(); throw; diff --git a/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.shared.cs b/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.shared.cs index 49f9cb33f0686..08609bb4826a6 100644 --- a/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.shared.cs +++ b/csharp/src/Microsoft.ML.OnnxRuntime/OrtValue.shared.cs @@ -94,16 +94,25 @@ public static OrtValue CreateTensorValueWithData(OrtMemoryInfo memInfo, TensorEl { Type type; int width; - TensorElementTypeConverter.GetTypeAndWidth(elementType, out type, out width); - if(width < 1) + if (!TensorElementTypeConverter.GetTypeAndWidth(elementType, out type, out width)) { - throw new OnnxRuntimeException(ErrorCode.InvalidArgument, "Unsupported data type (such as string)"); + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, + "Unable to query type information for data type: " + elementType.ToString()); + } + + if (elementType == TensorElementType.String) + { + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, + "Cannot map managed strings buffer to native OrtValue"); } var shapeSize = ArrayUtilities.GetSizeForShape(shape); - if((shapeSize * width) > bufferLength) + var requiredBufferSize = shapeSize * width; + if (requiredBufferSize > bufferLength) { - throw new OnnxRuntimeException(ErrorCode.InvalidArgument, "Can not bind the shape to smaller buffer"); + var message = String.Format("Shape of: {0} elements requires a buffer of at least {1} bytes. Provided: {2} bytes", + shapeSize, requiredBufferSize, bufferLength); + throw new OnnxRuntimeException(ErrorCode.InvalidArgument, message); } IntPtr ortValueHandle = IntPtr.Zero; diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs index 7c9fcfe34819c..c6312b65f751b 100644 --- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs +++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/OrtIoBindingAllocationTest.cs @@ -2,8 +2,10 @@ // Licensed under the MIT License. 
using Microsoft.ML.OnnxRuntime.Tensors; +using Microsoft.Win32.SafeHandles; using System; using System.Linq; +using System.Runtime.InteropServices; using Xunit; using static Microsoft.ML.OnnxRuntime.Tests.InferenceTest; @@ -23,15 +25,36 @@ private static void PopulateNativeBufferFloat(OrtMemoryAllocation buffer, float[ Assert.True(false); } + PopulateNativeBuffer(buffer.Pointer, elements); + } + + private static void PopulateNativeBuffer(IntPtr buffer, float[] elements) + { unsafe { - float* p = (float*)buffer.Pointer; + float* p = (float*)buffer; for (int i = 0; i < elements.Length; ++i) { *p++ = elements[i]; } } } + /// + /// Use to free globally allocated memory + /// + class OrtSafeMemoryHandle : SafeHandle + { + public OrtSafeMemoryHandle(IntPtr allocPtr) : base(allocPtr, true) { } + + public override bool IsInvalid => handle == IntPtr.Zero; + + protected override bool ReleaseHandle() + { + Marshal.FreeHGlobal(handle); + handle = IntPtr.Zero; + return true; + } + } [Fact(DisplayName = "TestIOBindingWithOrtAllocation")] public void TestIOBindingWithOrtAllocation() @@ -61,8 +84,17 @@ public void TestIOBindingWithOrtAllocation() var ortAllocationInput = allocator.Allocate((uint)inputData.Length * sizeof(float)); dispList.Add(ortAllocationInput); var inputShape = Array.ConvertAll(inputMeta[inputName].Dimensions, d => d); + var shapeSize = ArrayUtilities.GetSizeForShape(inputShape); + Assert.Equal(shapeSize, inputData.Length); PopulateNativeBufferFloat(ortAllocationInput, inputData); + // Create an external allocation for testing OrtExternalAllocation + var cpuMemInfo = OrtMemoryInfo.DefaultInstance; + var sizeInBytes = shapeSize * sizeof(float); + IntPtr allocPtr = Marshal.AllocHGlobal((int)sizeInBytes); + dispList.Add(new OrtSafeMemoryHandle(allocPtr)); + PopulateNativeBuffer(allocPtr, inputData); + var ortAllocationOutput = allocator.Allocate((uint)outputData.Length * sizeof(float)); dispList.Add(ortAllocationOutput); @@ -102,6 +134,46 @@ public void TestIOBindingWithOrtAllocation() Assert.Equal(outputData, tensor.ToArray(), new FloatComparer()); } } + // 3. Test external allocation + { + var externalInputAllocation = new OrtExternalAllocation(cpuMemInfo, inputShape, + Tensors.TensorElementType.Float, allocPtr, sizeInBytes); + + ioBinding.BindInput(inputName, externalInputAllocation); + ioBinding.BindOutput(outputName, Tensors.TensorElementType.Float, outputShape, ortAllocationOutput); + ioBinding.SynchronizeBoundInputs(); + using (var outputs = session.RunWithBindingAndNames(runOptions, ioBinding)) + { + ioBinding.SynchronizeBoundOutputs(); + Assert.Equal(1, outputs.Count); + var output = outputs.ElementAt(0); + Assert.Equal(outputName, output.Name); + var tensor = output.AsTensor(); + Assert.True(tensor.IsFixedSize); + Assert.Equal(outputData, tensor.ToArray(), new FloatComparer()); + } + } + // 4. 
Some negative tests for external allocation + { + // Small buffer size + Action smallBuffer = delegate () + { + new OrtExternalAllocation(cpuMemInfo, inputShape, + Tensors.TensorElementType.Float, allocPtr, sizeInBytes - 10); + }; + + Assert.Throws(smallBuffer); + + Action stringType = delegate () + { + new OrtExternalAllocation(cpuMemInfo, inputShape, + Tensors.TensorElementType.String, allocPtr, sizeInBytes); + }; + + Assert.Throws(stringType); + + } + } } } diff --git a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/TestDataLoader.cs b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/TestDataLoader.cs index e4ff2bf9c71da..1e44cfdf3699d 100644 --- a/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/TestDataLoader.cs +++ b/csharp/test/Microsoft.ML.OnnxRuntime.Tests.Common/TestDataLoader.cs @@ -56,8 +56,7 @@ internal static void GetTypeAndWidth(Tensors.TensorElementType elemType, out Typ } else { - type = null; - width = 0; + throw new ArgumentException("Unable to get information for type: " + elemType.ToString()); } } diff --git a/dockerfiles/Dockerfile.migraphx b/dockerfiles/Dockerfile.migraphx index 7106735a47b8a..60d8eee6cf174 100644 --- a/dockerfiles/Dockerfile.migraphx +++ b/dockerfiles/Dockerfile.migraphx @@ -21,7 +21,7 @@ ENV LANG C.UTF-8 # Install rocm RUN apt-get update && apt-get install -y gnupg2 --no-install-recommends curl && \ curl -sL http://repo.radeon.com/rocm/rocm.gpg.key | apt-key add - && \ - sh -c 'echo deb [arch=amd64] http://repo.radeon.com/rocm/apt/4.5/ ubuntu main > /etc/apt/sources.list.d/rocm.list' + sh -c 'echo deb [arch=amd64] http://repo.radeon.com/rocm/apt/4.5.2/ ubuntu main > /etc/apt/sources.list.d/rocm.list' RUN apt-get update &&\ apt-get install -y sudo git bash build-essential rocm-dev libpython3.6-dev python3-pip miopen-hip \ @@ -32,24 +32,28 @@ https://github.com/Kitware/CMake/releases/download/v3.21.0/cmake-3.21.0-linux-x8 tar -zxf /tmp/cmake-3.21.0-linux-x86_64.tar.gz --strip=1 -C /usr # Install rbuild -RUN pip3 install https://github.com/RadeonOpenCompute/rbuild/archive/master.tar.gz +RUN pip3 install https://github.com/RadeonOpenCompute/rbuild/archive/master.tar.gz numpy yapf==0.28.0 ENV PATH /opt/miniconda/bin:/code/cmake-3.21.0-linux-x86_64/bin:${PATH} # Install MIGraphX from source RUN mkdir -p /migraphx RUN cd /migraphx && git clone --depth=1 --branch migraphx_for_ort https://github.com/ROCmSoftwarePlatform/AMDMIGraphX src -RUN cd /migraphx && rbuild package --cxx /opt/rocm-4.5.0/llvm/bin/clang++ -d /migraphx/deps -B /migraphx/build -S /migraphx/src/ -DPYTHON_EXECUTABLE=/usr/bin/python3 +RUN cd /migraphx && rbuild package --cxx /opt/rocm-4.5.2/llvm/bin/clang++ -d /migraphx/deps -B /migraphx/build -S /migraphx/src/ -DPYTHON_EXECUTABLE=/usr/bin/python3 RUN dpkg -i /migraphx/build/*.deb RUN rm -rf /migraphx +# Install rocm ep dependencies +RUN apt-get update &&\ + apt-get install -y rocrand rccl hipsparse hipfft hipcub hipblas rocthrust + WORKDIR /code # Prepare onnxruntime repository & build onnxruntime RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXRUNTIME_REPO} onnxruntime &&\ /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh &&\ - cd onnxruntime &&\ - /bin/sh ./build.sh --config Release --build_wheel --update --build --parallel --cmake_extra_defines ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) --use_migraphx &&\ - pip install /code/onnxruntime/build/Linux/Release/dist/*.whl &&\ - cd .. 
&&\ - rm -rf onnxruntime cmake-3.21.0-linux-x86_64 + cd onnxruntime &&\ + /bin/sh ./build.sh --cmake_extra_defines ONNXRUNTIME_VERSION=`cat ./VERSION_NUMBER` --config Release --parallel \ + --skip_tests --build_wheel --use_rocm --rocm_version=4.5.2 --rocm_home /opt/rocm --use_migraphx &&\ + pip install /code/onnxruntime/build/Linux/Release/dist/*.whl + diff --git a/dockerfiles/Dockerfile.rocm b/dockerfiles/Dockerfile.rocm index f323f50945a21..68412b0104c3c 100644 --- a/dockerfiles/Dockerfile.rocm +++ b/dockerfiles/Dockerfile.rocm @@ -20,20 +20,23 @@ ENV LANG C.UTF-8 # Install rocm RUN apt-get update && apt-get install -y gnupg2 --no-install-recommends curl && \ curl -sL http://repo.radeon.com/rocm/rocm.gpg.key | apt-key add - && \ - sh -c 'echo deb [arch=amd64] http://repo.radeon.com/rocm/apt/4.0/ xenial main > /etc/apt/sources.list.d/rocm.list' + sh -c 'echo deb [arch=amd64] http://repo.radeon.com/rocm/apt/4.5.2/ ubuntu main > /etc/apt/sources.list.d/rocm.list' RUN apt-get update &&\ - apt-get install -y --no-install-recommends sudo git bash build-essential cmake libelf1 rocm-dkms libpython3.6-dev python3-pip miopen-hip rocblas\ - libnuma-dev kmod half hipsparse rocfft hipblas + apt-get install -y sudo git bash build-essential rocm-dev libpython3.6-dev python3-pip miopen-hip rocblas half aria2 libnuma-dev + +RUN aria2c -q -d /tmp -o cmake-3.20.0-linux-x86_64.tar.gz \ +https://github.com/Kitware/CMake/releases/download/v3.20.0/cmake-3.20.0-linux-x86_64.tar.gz &&\ +tar -zxf /tmp/cmake-3.20.0-linux-x86_64.tar.gz --strip=1 -C /usr # Install yapf -RUN pip3 install yapf==0.28.0 +RUN pip3 install yapf==0.28.0 numpy -ENV PATH /opt/miniconda/bin:/code/cmake-3.21.0-linux-x86_64/bin:${PATH} +ENV PATH /opt/miniconda/bin:/code/cmake-3.20.0-linux-x86_64/bin:${PATH} # Install dependencies -COPY ./scripts/install_rocm_deps.sh / -RUN chmod +x /install_rocm_deps.sh && /install_rocm_deps.sh && rm /install_rocm_deps.sh +RUN apt-get update &&\ + apt-get install -y rocrand rccl hipsparse hipfft hipcub hipblas rocthrust WORKDIR /code @@ -42,8 +45,7 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR /bin/sh onnxruntime/dockerfiles/scripts/install_common_deps.sh &&\ cd onnxruntime &&\ /bin/sh ./build.sh --config Release --build_wheel --update --build --parallel --cmake_extra_defines\ - ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) --use_rocm --rocm_home=/opt/rocm &&\ + ONNXRUNTIME_VERSION=$(cat ./VERSION_NUMBER) --use_rocm --rocm_home=/opt/rocm &&\ pip install /code/onnxruntime/build/Linux/Release/dist/*.whl &&\ - cd .. &&\ - rm -rf onnxruntime cmake-3.21.0-linux-x86_64 + cd .. diff --git a/dockerfiles/scripts/install_rocm_deps.sh b/dockerfiles/scripts/install_rocm_deps.sh index eed8125b741db..fd445be87479b 100644 --- a/dockerfiles/scripts/install_rocm_deps.sh +++ b/dockerfiles/scripts/install_rocm_deps.sh @@ -12,49 +12,53 @@ apt-get update && apt-get install -y --no-install-recommends \ python3-dev # rocm-cmake -wget --quiet https://github.com/RadeonOpenCompute/rocm-cmake/archive/rocm-3.8.0.tar.gz -tar -xzvf rocm-3.8.0.tar.gz -rm rocm-3.8.0.tar.gz -cd rocm-cmake-rocm-3.8.0 +rocm_cmake_version=4.5.2 +wget --quiet https://github.com/RadeonOpenCompute/rocm-cmake/archive/refs/tags/rocm-${rocm_cmake_version}.tar.gz +tar -xzvf rocm-${rocm_cmake_version}.tar.gz +rm rocm-${rocm_cmake_version}.tar.gz +cd rocm-cmake-rocm-${rocm_cmake_version} mkdir build cd build cmake -DCMAKE_INSTALL_PREFIX=$prefix .. make -j8 make install cd ../.. 
-rm -rf rocm-cmake-rocm-3.8.0 +rm -rf rocm-cmake-rocm-${rocm_cmake_version} # rccl -wget --quiet https://github.com/ROCmSoftwarePlatform/rccl/archive/rocm-4.0.0.tar.gz -tar -xzvf rocm-4.0.0.tar.gz -rm rocm-4.0.0.tar.gz -cd rccl-rocm-4.0.0 +rccl_version=4.5.2 +wget --quiet https://github.com/ROCmSoftwarePlatform/rccl/archive/refs/tags/rocm-${rccl_version}.tar.gz +tar -xzvf rocm-${rccl_version}.tar.gz +rm rocm-${rccl_version}.tar.gz +cd rccl-rocm-${rccl_version} mkdir build cd build CXX=/opt/rocm/bin/hipcc cmake -DCMAKE_INSTALL_PREFIX=$prefix .. make -j8 make install cd ../.. -rm -rf rccl-rocm-4.0.0 +rm -rf rccl-rocm-${rccl_version} #rocrand -wget --quiet https://github.com/ROCmSoftwarePlatform/rocRAND/archive/rocm-4.0.0.tar.gz -tar -xzvf rocm-4.0.0.tar.gz -rm rocm-4.0.0.tar.gz -cd rocRAND-rocm-4.0.0 +rocrand_version=4.5.2 +wget --quiet https://github.com/ROCmSoftwarePlatform/rocRAND/archive/refs/tags/rocm-${rocrand_version}.tar.gz +tar -xzvf rocm-${rocrand_version}.tar.gz +rm rocm-${rocrand_version}.tar.gz +cd rocRAND-rocm-${rocrand_version} mkdir build cd build CXX=/opt/rocm/bin/hipcc cmake -DCMAKE_INSTALL_PREFIX=$prefix .. make -j8 make install cd ../.. -rm -rf rocRAND-rocm-4.0.0 +rm -rf rocRAND-rocm-${rocrand_version} #hipcub -wget --quiet https://github.com/ROCmSoftwarePlatform/hipCUB/archive/rocm-4.0.0.tar.gz -tar -xzvf rocm-4.0.0.tar.gz -rm rocm-4.0.0.tar.gz -cd hipCUB-rocm-4.0.0 +hipcub_version=4.5.2 +wget --quiet https://github.com/ROCmSoftwarePlatform/hipCUB/archive/refs/tags/rocm-${hipcub_version}.tar.gz +tar -xzvf rocm-${hipcub_version}.tar.gz +rm rocm-${hipcub_version}.tar.gz +cd hipCUB-rocm-${hipcub_version} mkdir build cd build CXX=/opt/rocm/bin/hipcc cmake -DCMAKE_INSTALL_PREFIX=$prefix .. @@ -62,18 +66,19 @@ make -j8 make package make install cd ../.. -rm -rf hipCUB-rocm-4.0.0 +rm -rf hipCUB-rocm-${hipcub_version} #rocprim -wget --quiet https://github.com/ROCmSoftwarePlatform/rocPRIM/archive/rocm-4.0.0.tar.gz -tar -xzvf rocm-4.0.0.tar.gz -rm rocm-4.0.0.tar.gz -cd rocPRIM-rocm-4.0.0 +rocprim_version=4.5.2 +wget --quiet https://github.com/ROCmSoftwarePlatform/rocPRIM/archive/refs/tags/rocm-${rocprim_version}.tar.gz +tar -xzvf rocm-${rocprim_version}.tar.gz +rm rocm-${rocprim_version}.tar.gz +cd rocPRIM-rocm-${rocprim_version} mkdir build cd build CXX=/opt/rocm/bin/hipcc cmake -DCMAKE_INSTALL_PREFIX=$prefix .. make -j8 make install cd ../.. -rm -rf rocPRIM-rocm-4.0.0 +rm -rf rocPRIM-rocm-${rocprim_version} diff --git a/docs/ContribOperators.md b/docs/ContribOperators.md index ac2130f36c62e..d9092cd185bb6 100644 --- a/docs/ContribOperators.md +++ b/docs/ContribOperators.md @@ -37,6 +37,7 @@ Do not modify directly.* * com.microsoft.MulInteger * com.microsoft.MurmurHash3 * com.microsoft.NGramRepeatBlock + * com.microsoft.NhwcConv * com.microsoft.NhwcMaxPool * com.microsoft.Pad * com.microsoft.QAttention @@ -361,7 +362,7 @@ This version of the operator has been available since version 1 of the 'com.micr
 <dd>The id of the padding token</dd>
 </dl>
 
-#### Inputs (6 - 9)
+#### Inputs (6 - 10)
 
 <dl>
 <dt><tt>input_ids</tt> : I</dt>
@@ -382,6 +383,8 @@ This version of the operator has been available since version 1 of the 'com.microsoft' operator set.
 <dd>The parameter for repetition penalty. Default value 1.0 means no penalty. Accepts value > 0.0. Shape is (1)</dd>
 <dt><tt>vocab_mask</tt> (optional) : M</dt>
 <dd>Mask of vocabulary. Words that masked with 0 are not allowed to be generated, and 1 is allowed. Shape is (vacab_size)</dd>
+<dt><tt>prefix_vocab_mask</tt> (optional) : M</dt>
+<dd>Mask of vocabulary for first step. Words that masked with 0 are not allowed to be generated, and 1 is allowed. Shape is (batch_size, vocab_size)</dd>
 </dl>
 
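The new optional `prefix_vocab_mask` input constrains only the first generation step: entries set to 0 cannot be produced as the first generated token, with one mask row per batch entry. A minimal sketch of supplying it through the Python API follows; the model file name, the exact graph input names, and the int32 mask dtype are illustrative assumptions rather than anything this change prescribes.

```python
# Sketch: feed the optional prefix_vocab_mask input to a model containing BeamSearch.
# The model path and graph input names below are assumptions for illustration only.
import numpy as np
import onnxruntime as ort

batch_size, vocab_size = 2, 50257
prefix_vocab_mask = np.ones((batch_size, vocab_size), dtype=np.int32)
prefix_vocab_mask[:, :100] = 0  # token ids 0..99 are blocked at the first step only

session = ort.InferenceSession("model_with_beam_search.onnx")
feeds = {
    "input_ids": np.zeros((batch_size, 8), dtype=np.int32),
    "max_length": np.array([32], dtype=np.int32),
    "min_length": np.array([1], dtype=np.int32),
    "num_beams": np.array([4], dtype=np.int32),
    "num_return_sequences": np.array([1], dtype=np.int32),
    "temperature": np.array([1.0], dtype=np.float32),
    "prefix_vocab_mask": prefix_vocab_mask,
}
sequences = session.run(None, feeds)[0]  # first output: generated sequences
```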
#### Outputs (1 - 3) @@ -1821,6 +1824,55 @@ This version of the operator has been available since version 1 of the 'com.micr +### **com.microsoft.NhwcConv** + +#### Version + +This version of the operator has been available since version 1 of the 'com.microsoft' operator set. + +#### Attributes + +
+<dt><tt>auto_pad</tt> : string</dt>
+<dd></dd>
+<dt><tt>dilations</tt> : list of ints</dt>
+<dd>dilation value along each spatial axis of the filter. If not present, the dilation defaults to 1 along each spatial axis.</dd>
+<dt><tt>group</tt> : int</dt>
+<dd>number of groups input channels and output channels are divided into.</dd>
+<dt><tt>kernel_shape</tt> : list of ints</dt>
+<dd>The shape of the convolution kernel. If not present, should be inferred from input W.</dd>
+<dt><tt>pads</tt> : list of ints</dt>
+<dd></dd>
+<dt><tt>strides</tt> : list of ints</dt>
+<dd>Stride along each spatial axis. If not present, the stride defaults to 1 along each spatial axis.</dd>
+</dl>
+
+#### Inputs (2 - 3)
+
+<dl>
+<dt><tt>X</tt> : T</dt>
+<dd>Input data tensor from previous layer; has size (N x C x H x W), where N is the batch size, C is the number of channels, and H and W are the height and width. Note that this is for the 2D image. Otherwise the size is (N x C x D1 x D2 ... x Dn). Optionally, if dimension denotation is in effect, the operation expects input data tensor to arrive with the dimension denotation of [DATA_BATCH, DATA_CHANNEL, DATA_FEATURE, DATA_FEATURE ...].</dd>
+<dt><tt>W</tt> : T</dt>
+<dd>The weight tensor that will be used in the convolutions; has size (M x C/group x kH x kW), where C is the number of channels, and kH and kW are the height and width of the kernel, and M is the number of feature maps. For more than 2 dimensions, the kernel shape will be (M x C/group x k1 x k2 x ... x kn), where (k1 x k2 x ... kn) is the dimension of the kernel. Optionally, if dimension denotation is in effect, the operation expects the weight tensor to arrive with the dimension denotation of [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, FILTER_SPATIAL, FILTER_SPATIAL ...]. Assuming zero based indices for the shape array, X.shape[1] == (W.shape[1] * group) == C and W.shape[0] mod G == 0. Or in other words FILTER_IN_CHANNEL multiplied by the number of groups should be equal to DATA_CHANNEL and the number of feature maps M should be a multiple of the number of groups G.</dd>
+<dt><tt>B</tt> (optional) : T</dt>
+<dd>Optional 1D bias to be added to the convolution, has size of M.</dd>
+</dl>
+
+#### Outputs
+
+<dl>
+<dt><tt>Y</tt> : T</dt>
+<dd>Output data tensor that contains the result of the convolution. The output dimensions are functions of the kernel size, stride size, and pad lengths.</dd>
+</dl>
+
+#### Type Constraints
+
+<dl>
+<dt><tt>T</tt> : tensor(float16), tensor(float), tensor(double)</dt>
+<dd>Constrain input and output types to float tensors.</dd>
+</dl>
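Because NhwcConv reuses the standard Conv attribute set, declaring such a node only requires switching the operator domain to com.microsoft. The sketch below is a schema-level illustration; the tensor names are arbitrary placeholders, and no claim is made here about which execution providers register a kernel for this contrib op.

```python
# Sketch: declare a com.microsoft.NhwcConv node using the attributes listed above.
# Input/output names are arbitrary placeholders.
from onnx import helper

nhwc_conv = helper.make_node(
    "NhwcConv",
    inputs=["X", "W", "B"],   # B is the optional bias
    outputs=["Y"],
    domain="com.microsoft",
    kernel_shape=[3, 3],
    strides=[1, 1],
    pads=[1, 1, 1, 1],
    dilations=[1, 1],
    group=1,
)
```

A model containing such a node also needs an opset import for the com.microsoft domain (version 1, per the entry above) alongside the default ONNX opset import.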
+ + ### **com.microsoft.NhwcMaxPool** #### Version diff --git a/docs/OperatorKernels.md b/docs/OperatorKernels.md index 4d885fdb15530..ad4bab1d09745 100644 --- a/docs/OperatorKernels.md +++ b/docs/OperatorKernels.md @@ -377,7 +377,7 @@ Do not modify directly.* |**Operator Domain:** *com.microsoft*|||| |Attention|*in* input:**T**
*in* weight:**T**
*in* bias:**T**
*in* mask_index:**M**
*in* past:**T**
*in* extra_add:**T**
*out* output:**T**
*out* present:**T**|1+|**T** = tensor(float)| |AttnLSTM|*in* X:**T**
*in* W:**T**
*in* R:**T**
*in* B:**T**
*in* sequence_lens:**T1**
*in* initial_h:**T**
*in* initial_c:**T**
*in* P:**T**
*in* QW:**T**
*in* MW:**T**
*in* V:**T**
*in* M:**T**
*in* memory_seq_lens:**T1**
*in* AW:**T**
*out* Y:**T**
*out* Y_h:**T**
*out* Y_c:**T**|1+|**T** = tensor(double), tensor(float)
**T1** = tensor(int32)| -|BeamSearch|*in* input_ids:**I**
*in* max_length:**I**
*in* min_length:**I**
*in* num_beams:**I**
*in* num_return_sequences:**I**
*in* temperature:**T**
*in* length_penalty:**T**
*in* repetition_penalty:**T**
*in* vocab_mask:**M**
*out* sequences:**I**
*out* sequences_scores:**T**
*out* scores:**T**|1+|**T** = tensor(float)| +|BeamSearch|*in* input_ids:**I**
*in* max_length:**I**
*in* min_length:**I**
*in* num_beams:**I**
*in* num_return_sequences:**I**
*in* temperature:**T**
*in* length_penalty:**T**
*in* repetition_penalty:**T**
*in* vocab_mask:**M**
*in* prefix_vocab_mask:**M**
*out* sequences:**I**
*out* sequences_scores:**T**
*out* scores:**T**|1+|**T** = tensor(float)| |BiasGelu|*in* A:**T**
*in* B:**T**
*out* C:**T**|1+|**T** = tensor(float)| |BifurcationDetector|*in* src_tokens:**T**
*in* cur_tokens:**T**
*in* prev_suffix_match_idx:**T**
*in* pred_tokens:**T**
*out* tokens:**T**
*out* suffix_match_idx:**T**|1+|**T** = tensor(int64)| |CDist|*in* A:**T**
*in* B:**T**
*out* C:**T**|1+|**T** = tensor(double), tensor(float)| @@ -405,7 +405,7 @@ Do not modify directly.* |Pad|*in* data:**T**
*in* pads:**tensor(int64)**
*in* value:**T**
*out* output:**T**|1+|**T** = tensor(float)| |QAttention|*in* input:**T1**
*in* weight:**T2**
*in* bias:**T3**
*in* input_scale:**T3**
*in* weight_scale:**T3**
*in* mask_index:**T4**
*in* input_zero_point:**T1**
*in* weight_zero_point:**T2**
*in* past:**T3**
*out* output:**T3**
*out* present:**T3**|1+|**T1** = tensor(uint8)
**T2** = tensor(int8), tensor(uint8)
**T3** = tensor(float)
**T4** = tensor(int32)| |QEmbedLayerNormalization|*in* input_ids:**T1**
*in* segment_ids:**T1**
*in* word_embedding_quant:**T2**
*in* position_embedding_quant:**T2**
*in* segment_embedding:**T2**
*in* gamma_quant:**T2**
*in* beta_quant:**T2**
*in* mask:**T1**
*in* word_embedding_scale:**T**
*in* position_embedding_scale:**T**
*in* segment_embedding_scale:**T**
*in* gamma_scale:**T**
*in* beta_scale:**T**
*in* word_embedding_zero_point:**T2**
*in* position_embedding_zero_point:**T2**
*in* segment_embedding_zero_point:**T2**
*in* gamma_zero_point:**T2**
*in* beta_zero_point:**T2**
*out* layernorm_out:**T**
*out* mask_index_out:**T1**|1+|**T** = tensor(float)| -|QGemm|*in* A:**TA**
*in* a_scale:**T**
*in* a_zero_point:**TA**
*in* B:**TB**
*in* b_scale:**T**
*in* b_zero_point:**TB**
*in* C:**TC**
*in* y_scale:**T**
*in* y_zero_point:**TYZ**
*out* Y:**TY**|1+|**T** = tensor(float)
**TA** = tensor(uint8)
**TB** = tensor(int8), tensor(uint8)
**TC** = tensor(int32)
**TY** = tensor(float), tensor(uint8)
**TYZ** = tensor(uint8)| +|QGemm|*in* A:**TA**
*in* a_scale:**T**
*in* a_zero_point:**TA**
*in* B:**TB**
*in* b_scale:**T**
*in* b_zero_point:**TB**
*in* C:**TC**
*in* y_scale:**T**
*in* y_zero_point:**TYZ**
*out* Y:**TY**|1+|**T** = tensor(float)
**TA** = tensor(int8), tensor(uint8)
**TB** = tensor(int8), tensor(uint8)
**TC** = tensor(int32)
**TY** = tensor(float), tensor(int8), tensor(uint8)
**TYZ** = tensor(int8), tensor(uint8)| |QLinearAdd|*in* A:**T**
*in* A_scale:**tensor(float)**
*in* A_zero_point:**T**
*in* B:**T**
*in* B_scale:**tensor(float)**
*in* B_zero_point:**T**
*in* C_scale:**tensor(float)**
*in* C_zero_point:**T**
*out* C:**T**|1+|**T** = tensor(int8), tensor(uint8)| |QLinearConv|*in* x:**T1**
*in* x_scale:**tensor(float)**
*in* x_zero_point:**T1**
*in* w:**T2**
*in* w_scale:**tensor(float)**
*in* w_zero_point:**T2**
*in* y_scale:**tensor(float)**
*in* y_zero_point:**T3**
*in* B:**T4**
*out* y:**T3**|1+|**T1** = tensor(int8), tensor(uint8)
**T2** = tensor(int8), tensor(uint8)
**T3** = tensor(int8), tensor(uint8)
**T4** = tensor(int32)| |QLinearLeakyRelu|*in* X:**T**
*in* X_scale:**tensor(float)**
*in* X_zero_point:**T**
*in* Y_scale:**tensor(float)**
*in* Y_zero_point:**T**
*out* Y:**T**|1+|**T** = tensor(int8), tensor(uint8)| diff --git a/include/onnxruntime/core/framework/tensor_shape.h b/include/onnxruntime/core/framework/tensor_shape.h index 4db6b58adc965..ef0cfcfa01d51 100644 --- a/include/onnxruntime/core/framework/tensor_shape.h +++ b/include/onnxruntime/core/framework/tensor_shape.h @@ -43,7 +43,7 @@ using InlinedShapeVector = absl::InlinedVector& span) { TensorShapeVector out; out.reserve(span.size()); - out.assign(span.cbegin(), span.cend()); + out.assign(span.begin(), span.end()); return out; } diff --git a/include/onnxruntime/core/graph/graph.h b/include/onnxruntime/core/graph/graph.h index 49204b54ba773..421d01feddf2a 100644 --- a/include/onnxruntime/core/graph/graph.h +++ b/include/onnxruntime/core/graph/graph.h @@ -53,7 +53,7 @@ struct IndexedSubGraph; class Model; class OpSignature; -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) class RuntimeOptimizationRecordContainer; #endif @@ -357,10 +357,12 @@ class Node { /** Gets the Node's attributes. */ const NodeAttributes& GetAttributes() const noexcept { return attributes_; } -#if !defined(ORT_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) /** Remove the specified attribute from this Node */ bool ClearAttribute(const std::string& attr_name); +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) /** Gets the Node's mutable attributes. */ NodeAttributes& GetMutableAttributes() noexcept { return attributes_; } @@ -831,13 +833,15 @@ class Graph { return *(result.first->second); } -#if !defined(ORT_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) /** Generate a unique name in this Graph for a NodeArg */ std::string GenerateNodeArgName(const std::string& base_name); /** Generate a unique name in this Graph for a Node */ std::string GenerateNodeName(const std::string& base_name); +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) /** Copy a Node and add it to this Graph. @param other Node to copy @returns Reference to the Node that was created and added to this Graph. @@ -1071,6 +1075,9 @@ class Graph { */ void SetOutputs(const std::vector& outputs); +#endif // !defined(ORT_MINIMAL_BUILD) + +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) /** Sets the type of a NodeArg, replacing existing type/shape if any */ void SetNodeArgType(NodeArg& arg, const ONNX_NAMESPACE::TypeProto& type_proto); @@ -1096,6 +1103,18 @@ class Graph { return GetConsumerNodesImpl(*this, node_arg_name); } + // Without removing the existing consumers, add a consumer to the give node arg name. 
+ void AddConsumerNode(const std::string& node_arg_name, Node* consumer) { + node_arg_to_consumer_nodes_[node_arg_name].insert(consumer->Index()); + } + + // Remove a consumer from the set + void RemoveConsumerNode(const std::string& node_arg_name, Node* consumer) { + node_arg_to_consumer_nodes_[node_arg_name].erase(consumer->Index()); + } +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) + +#if !defined(ORT_MINIMAL_BUILD) std::vector GetMutableConsumerNodes(const std::string& node_arg_name) { return GetConsumerNodesImpl(*this, node_arg_name); } @@ -1110,16 +1129,6 @@ class Graph { } } - // Without removing the existing consumers, add a consumer to the give node arg name. - void AddConsumerNode(const std::string& node_arg_name, Node* consumer) { - node_arg_to_consumer_nodes_[node_arg_name].insert(consumer->Index()); - } - - // Remove a consumer from the set - void RemoveConsumerNode(const std::string& node_arg_name, Node* consumer) { - node_arg_to_consumer_nodes_[node_arg_name].erase(consumer->Index()); - } - /** During constant folding it may become possible to infer the shape for a node. To avoid running a full Resolve allow an individual node to have the shape inferencing re-run. */ @@ -1201,7 +1210,7 @@ class Graph { Graph& parent_graph, const Node& parent_node, const logging::Logger& logger, std::unique_ptr& graph); -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) const RuntimeOptimizationRecordContainer& RuntimeOptimizations() const { return runtime_optimizations_; } @@ -1223,7 +1232,7 @@ class Graph { RuntimeOptimizationReplayContext& MutableRuntimeOptimizationReplayCtx() { return runtime_optimization_replay_context_; } -#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) // This friendship relationship should only be used to call Graph::Graph and // Graph::LoadGraph All other access should be via the public API. 
@@ -1375,16 +1384,10 @@ class Graph { void ToGraphProtoInternal(ONNX_NAMESPACE::GraphProto& graph_proto) const; - template - static auto GetProducerNodeImpl( - TInstance& instance, const std::string& node_arg_name) -> decltype(instance.GetNode(0)) { - auto iter = instance.node_arg_to_producer_node_.find(node_arg_name); - if (iter != instance.node_arg_to_producer_node_.end()) { - auto node_index = iter->second; - return instance.GetNode(node_index); - } - return nullptr; - } +#endif // !defined(ORT_MINIMAL_BUILD) + +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) + Status PopulateNodeArgToProducerConsumerLookupsFromNodes(); template static auto GetConsumerNodesImpl( @@ -1400,7 +1403,17 @@ class Graph { return results; } -#endif // !defined(ORT_MINIMAL_BUILD) + template + static auto GetProducerNodeImpl( + TInstance& instance, const std::string& node_arg_name) -> decltype(instance.GetNode(0)) { + auto iter = instance.node_arg_to_producer_node_.find(node_arg_name); + if (iter != instance.node_arg_to_producer_node_.end()) { + auto node_index = iter->second; + return instance.GetNode(node_index); + } + return nullptr; + } +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) #if !defined(ORT_MINIMAL_BUILD) || defined(ORT_EXTENDED_MINIMAL_BUILD) gsl::not_null AllocateNode(); @@ -1440,7 +1453,7 @@ class Graph { std::hash, std::equal_to> sparse_tensor_names_; -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) // Runtime optimization storage. // Note: runtime_optimizations_ == *runtime_optimizations_ptr_ and must be initialized std::unique_ptr runtime_optimizations_ptr_; @@ -1500,7 +1513,7 @@ class Graph { // All node args owned by <*this> graph. Key is node arg name. std::unordered_map> node_args_; -#if !defined(ORT_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) int name_generator_ = 0; // Strings which have been used as node names. @@ -1516,8 +1529,7 @@ class Graph { // node arg to its consumer nodes std::unordered_map> node_arg_to_consumer_nodes_; - -#endif // !defined(ORT_MINIMAL_BUILD) +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) const std::unordered_map domain_to_version_; diff --git a/include/onnxruntime/core/graph/node_arg.h b/include/onnxruntime/core/graph/node_arg.h index 80f5dda2dcfde..7c575fed952f7 100644 --- a/include/onnxruntime/core/graph/node_arg.h +++ b/include/onnxruntime/core/graph/node_arg.h @@ -63,7 +63,8 @@ class NodeArg { @returns true if NodeArg is a normal tensor with a non-empty shape or a scalar with an empty shape. Otherwise, returns false. */ bool HasTensorOrScalarShape() const; -#if !defined(ORT_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) + /** Sets the shape. @remarks Shape can only be set if the TypeProto was provided to the ctor, or #SetType has been called, as the shape information is stored as part of TypeProto. */ @@ -73,6 +74,10 @@ class NodeArg { @remarks If there is a mismatch during shape inferencing that can't be resolved the shape info may be removed. 
*/ void ClearShape(); +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) + +#if !defined(ORT_MINIMAL_BUILD) + /** Override current type from input_type if override_types is set to true, return failure status otherwise. @param input_tensor_elem_type Tensor element type parsed input_type @param current_tensor_elem_type Tensor element type parsed from existing type @@ -112,10 +117,10 @@ class NodeArg { private: ORT_DISALLOW_COPY_AND_ASSIGNMENT(NodeArg); -#if !defined(ORT_MINIMAL_BUILD) void SetType(const std::string* p_type); +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) void SetType(const ONNX_NAMESPACE::TypeProto& type_proto); -#endif +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) // Node arg PType. const std::string* type_; diff --git a/include/onnxruntime/core/optimizer/graph_transformer_utils.h b/include/onnxruntime/core/optimizer/graph_transformer_utils.h index 1c8628fd8fc2e..633e64b31ed74 100644 --- a/include/onnxruntime/core/optimizer/graph_transformer_utils.h +++ b/include/onnxruntime/core/optimizer/graph_transformer_utils.h @@ -15,7 +15,7 @@ #include "core/optimizer/rewrite_rule.h" #endif -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) #include "core/optimizer/selectors_actions/selector_action_transformer_apply_contexts.h" #endif @@ -52,7 +52,7 @@ std::vector> GenerateTransformers( #endif // !defined(ORT_MINIMAL_BUILD) -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) /** Generates all predefined transformers which support runtime optimizations for this level. Any transformers or rewrite rules named in rules_and_transformers_to_disable will be excluded. @@ -66,7 +66,7 @@ std::vector> GenerateTransformersForRuntimeOpt const SatApplyContextVariant& apply_context, const std::unordered_set& rules_and_transformers_to_disable = {}); -#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) } // namespace optimizer_utils } // namespace onnxruntime diff --git a/include/onnxruntime/core/providers/tensorrt/tensorrt_provider_options.h b/include/onnxruntime/core/providers/tensorrt/tensorrt_provider_options.h index 43b0b938f130b..fd4ad0b6d9cd6 100644 --- a/include/onnxruntime/core/providers/tensorrt/tensorrt_provider_options.h +++ b/include/onnxruntime/core/providers/tensorrt/tensorrt_provider_options.h @@ -5,7 +5,9 @@ /// /// Options for the TensorRT provider that are passed to SessionOptionsAppendExecutionProvider_TensorRT_V2. -/// Please note that this struct is identical to OrtTensorRTProviderOptions but only to be used internally. +/// Please note that this struct is *similar* to OrtTensorRTProviderOptions but only to be used internally. +/// Going forward, new trt provider options are to be supported via this struct and usage of the publicly defined +/// OrtTensorRTProviderOptions will be deprecated over time. /// User can only get the instance of OrtTensorRTProviderOptionsV2 via CreateTensorRTProviderOptions. 
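/// A minimal usage sketch (illustrative, not part of this change), assuming a TensorRT-enabled build
/// and the C API pair CreateTensorRTProviderOptions/ReleaseTensorRTProviderOptions, together with the
/// C++ SessionOptions::AppendExecutionProvider_TensorRT_V2 wrapper added in this change:
/// \code
///   OrtTensorRTProviderOptionsV2* tensorrt_options = nullptr;
///   Ort::ThrowOnError(Ort::GetApi().CreateTensorRTProviderOptions(&tensorrt_options));
///   Ort::SessionOptions session_options;
///   session_options.AppendExecutionProvider_TensorRT_V2(*tensorrt_options);
///   Ort::GetApi().ReleaseTensorRTProviderOptions(tensorrt_options);
/// \endcode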
/// struct OrtTensorRTProviderOptionsV2 { diff --git a/include/onnxruntime/core/session/onnxruntime_c_api.h b/include/onnxruntime/core/session/onnxruntime_c_api.h index 8add1cddaa30a..84708c27e8649 100644 --- a/include/onnxruntime/core/session/onnxruntime_c_api.h +++ b/include/onnxruntime/core/session/onnxruntime_c_api.h @@ -482,6 +482,9 @@ typedef struct OrtTensorRTProviderOptions { int trt_engine_decryption_enable; // enable engine decryption. Default 0 = false, nonzero = true const char* trt_engine_decryption_lib_path; // specify engine decryption library path int trt_force_sequential_engine_build; // force building TensorRT engine sequentially. Default 0 = false, nonzero = true + // This is the legacy struct and don't add new fields here. + // For new field that can be represented by string, please add it in include/onnxruntime/core/providers/tensorrt/tensorrt_provider_options.h + // For non-string field, need to create a new separate api to handle it. } OrtTensorRTProviderOptions; /** \brief MIGraphX Provider Options diff --git a/include/onnxruntime/core/session/onnxruntime_cxx_api.h b/include/onnxruntime/core/session/onnxruntime_cxx_api.h index c2463977f101d..0007a9d044e7d 100644 --- a/include/onnxruntime/core/session/onnxruntime_cxx_api.h +++ b/include/onnxruntime/core/session/onnxruntime_cxx_api.h @@ -360,6 +360,7 @@ struct SessionOptions : Base { SessionOptions& AppendExecutionProvider_ROCM(const OrtROCMProviderOptions& provider_options); ///< Wraps OrtApi::SessionOptionsAppendExecutionProvider_ROCM SessionOptions& AppendExecutionProvider_OpenVINO(const OrtOpenVINOProviderOptions& provider_options); ///< Wraps OrtApi::SessionOptionsAppendExecutionProvider_OpenVINO SessionOptions& AppendExecutionProvider_TensorRT(const OrtTensorRTProviderOptions& provider_options); ///< Wraps OrtApi::SessionOptionsAppendExecutionProvider_TensorRT + SessionOptions& AppendExecutionProvider_TensorRT_V2(const OrtTensorRTProviderOptionsV2& provider_options); ///< Wraps OrtApi::SessionOptionsAppendExecutionProvider_TensorRT SessionOptions& AppendExecutionProvider_MIGraphX(const OrtMIGraphXProviderOptions& provider_options); ///< Wraps OrtApi::SessionOptionsAppendExecutionProvider_MIGraphX SessionOptions& SetCustomCreateThreadFn(OrtCustomCreateThreadFn ort_custom_create_thread_fn); ///< Wraps OrtApi::SessionOptionsSetCustomCreateThreadFn diff --git a/include/onnxruntime/core/session/onnxruntime_cxx_inline.h b/include/onnxruntime/core/session/onnxruntime_cxx_inline.h index d281bb5542797..063acb1702a84 100644 --- a/include/onnxruntime/core/session/onnxruntime_cxx_inline.h +++ b/include/onnxruntime/core/session/onnxruntime_cxx_inline.h @@ -518,6 +518,11 @@ inline SessionOptions& SessionOptions::AppendExecutionProvider_TensorRT(const Or return *this; } +inline SessionOptions& SessionOptions::AppendExecutionProvider_TensorRT_V2(const OrtTensorRTProviderOptionsV2& provider_options) { + ThrowOnError(GetApi().SessionOptionsAppendExecutionProvider_TensorRT_V2(p_, &provider_options)); + return *this; +} + inline SessionOptions& SessionOptions::AppendExecutionProvider_MIGraphX(const OrtMIGraphXProviderOptions& provider_options) { ThrowOnError(GetApi().SessionOptionsAppendExecutionProvider_MIGraphX(p_, &provider_options)); return *this; diff --git a/js/.eslintrc.js b/js/.eslintrc.js index a484c73ce4549..dda12c6dff74a 100644 --- a/js/.eslintrc.js +++ b/js/.eslintrc.js @@ -160,7 +160,7 @@ module.exports = { '@typescript-eslint/naming-convention': 'off' } }, { - files: ['react_native/example/src/**/*.ts', 
'react_native/example/src/**/*.tsx'], rules: { + files: ['react_native/e2e/src/**/*.ts', 'react_native/e2e/src/**/*.tsx'], rules: { '@typescript-eslint/no-non-null-assertion': 'off', '@typescript-eslint/no-unnecessary-type-assertion': 'off', 'unicorn/filename-case': 'off', diff --git a/js/README.md b/js/README.md index 88d707bb428d7..f92a5bbbcb2de 100644 --- a/js/README.md +++ b/js/README.md @@ -363,12 +363,10 @@ By default, ONNX Runtime React Native leverages ONNX Runtime Mobile package with 4. Copy `/aar_out/MinSizeRel/com/microsoft/onnxruntime/onnxruntime-mobile//onnxruntime-mobile-.aar` into `/js/react_native/android/libs` directory. - 5. Modify `Onnxruntime_mobileVersion` property in `/js/react_native/android/build.properties` to consume a locally built package or a newly published package from Maven. - - 6. To verify, open Android Emulator and run this command from `/js/react_native/android` + 5. To verify, open Android Emulator and run this command from `/js/react_native/android` ```sh - adb shell am instrument -w ai.onnxruntime.react_native.test/androidx.test.runner.AndroidJUnitRunner + ./gradlew connectedDebugAndroidTest ``` 3. Build iOS ONNX Runtime package @@ -393,38 +391,57 @@ By default, ONNX Runtime React Native leverages ONNX Runtime Mobile package with ```sh pod install - xcodebuild test -workspace OnnxruntimeModule.xcworkspace -scheme OnnxruntimeModuleTest -destination 'platform=iOS Simulator,name=iPhone 11,OS=15.0' + xcodebuild test -workspace OnnxruntimeModule.xcworkspace -scheme OnnxruntimeModuleTest -destination 'platform=iOS Simulator,OS=latest,name=iPhone 13' ``` -4. Test an example for Android and iOS. In Windows, open Android Emulator first. +4. Test Android and iOS apps. In Windows, open Android Emulator first. `debug.keystore` must be generated ahead for Android example. ```sh - keytool -genkey -v -keystore /js/react_native/example/android/app/debug.keystore -alias androiddebugkey -storepass android -keypass android -keyalg RSA -keysize 2048 -validity 999999 -dname "CN=Android Debug,O=Android,C=US" + keytool -genkey -v -keystore /js/react_native/e2e/android/debug.keystore -alias androiddebugkey -storepass android -keypass android -keyalg RSA -keysize 2048 -validity 999999 -dname "CN=Android Debug,O=Android,C=US" ``` From `/js/react_native, ```sh yarn bootstrap - yarn example ios - yarn example android + ``` + + When testing with a custom built ONNX Runtime Android package, copy `/aar_out/MinSizeRel/com/microsoft/onnxruntime/onnxruntime-mobile//onnxruntime-mobile-.aar` into `/js/react_native/e2e/node_modules/onnxruntime-react-native/android/libs` directory. Using a custom built ONNX Runtime iOS package, copy `onnxruntime-mobile-c.zip` into `/js/react_native/local_pods` directory if it's not already done. + + From `/js/react_native/e2e/android`, run e2e Android tests as follows, + + ```sh + ./gradlew :app:connectedDebugAndroidTest + ``` + + From `/js/react_native/e2e/ios`, run e2e iOS tests as follows, + + ```sh + xcrun xcodebuild test -workspace OnnxruntimeModuleExample.xcworkspace -scheme OnnxruntimeModuleExample -destination 'platform=iOS Simulator,OS=latest,name=iPhone 13' + ``` + + ***`yarn bootstrap` changes `packages.json` and `yarn.lock` files. Once testing is done, restore changes to avoid unwanted commit.*** + +5. Run Android and iOS apps. + + ```sh + yarn e2e android + yarn e2e ios ``` ### NPM Packaging 1. Update a version using `npm verison ` from `/js/react_native` folder. If it's for a dev, use `npm version -dev.` -2. 
Modify Onnxruntime_mobileVersion property in `/js/react_native/android/build.properties` to update ONNX Runtime Android package version. - -3. Run `yarn prepack` to change `onnxruntime-common` to point to a published npm package +2. Run `yarn prepack-rel` to change `onnxruntime-common` to point to a published npm package -4. Run `npm pack` and verify NPM package contents +3. Run `npm pack` and verify NPM package contents -5. Run `npm publish --dry-run` to see how it's going to be published +4. Run `npm publish --dry-run` to see how it's going to be published -6. Run `npm publish ` to publish to npmjs. If it's for a dev, add flag `--tag dev`. +5. Run `npm publish ` to publish to npmjs. If it's for a dev, add flag `--tag dev`. ### Distribution diff --git a/js/react_native/.gitignore b/js/react_native/.gitignore index 7bc424adb519c..3ecc1be27e5db 100644 --- a/js/react_native/.gitignore +++ b/js/react_native/.gitignore @@ -36,7 +36,7 @@ android.iml # Cocoapods # ios/Pods/ -example/ios/Pods/ +e2e/ios/Pods/ # node.js # diff --git a/js/react_native/android/build.gradle b/js/react_native/android/build.gradle index fde479d23fa8f..9ea0893649d79 100644 --- a/js/react_native/android/build.gradle +++ b/js/react_native/android/build.gradle @@ -121,8 +121,6 @@ repositories { } dependencies { - def onnxruntimeMobileVersion = project.properties['Onnxruntime_mobileVersion'] - // noinspection GradleDynamicVersion api "com.facebook.react:react-native:+" api "org.mockito:mockito-core:2.28.2" @@ -134,5 +132,5 @@ dependencies { androidTestImplementation "com.linkedin.dexmaker:dexmaker-mockito-inline-extended:2.28.1" - implementation "com.microsoft.onnxruntime:onnxruntime-mobile:$onnxruntimeMobileVersion@aar" + implementation "com.microsoft.onnxruntime:onnxruntime-mobile:latest.integration@aar" } diff --git a/js/react_native/android/gradle.properties b/js/react_native/android/gradle.properties index 1962845810f1f..04f6e699f0c93 100644 --- a/js/react_native/android/gradle.properties +++ b/js/react_native/android/gradle.properties @@ -16,4 +16,3 @@ android.useAndroidX=true OnnxruntimeModule_buildToolsVersion=29.0.2 OnnxruntimeModule_compileSdkVersion=29 OnnxruntimeModule_targetSdkVersion=29 -Onnxruntime_mobileVersion=1.8.2 diff --git a/js/react_native/example/android/gradle/wrapper/gradle-wrapper.jar b/js/react_native/android/gradle/wrapper/gradle-wrapper.jar similarity index 56% rename from js/react_native/example/android/gradle/wrapper/gradle-wrapper.jar rename to js/react_native/android/gradle/wrapper/gradle-wrapper.jar index 5c2d1cf016b38..62d4c053550b9 100644 Binary files a/js/react_native/example/android/gradle/wrapper/gradle-wrapper.jar and b/js/react_native/android/gradle/wrapper/gradle-wrapper.jar differ diff --git a/js/react_native/android/gradle/wrapper/gradle-wrapper.properties b/js/react_native/android/gradle/wrapper/gradle-wrapper.properties index abd655e44b8a2..442d9132ea328 100644 --- a/js/react_native/android/gradle/wrapper/gradle-wrapper.properties +++ b/js/react_native/android/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Tue Jan 26 16:57:19 PST 2021 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.8.3-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip diff --git a/js/react_native/example/android/gradlew b/js/react_native/android/gradlew similarity index 99% rename from js/react_native/example/android/gradlew 
rename to js/react_native/android/gradlew index 2fe81a7d95e4f..fbd7c515832da 100755 --- a/js/react_native/example/android/gradlew +++ b/js/react_native/android/gradlew @@ -82,6 +82,7 @@ esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then @@ -129,6 +130,7 @@ fi if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then APP_HOME=`cygpath --path --mixed "$APP_HOME"` CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` # We build the pattern for arguments to be converted via cygpath diff --git a/js/react_native/example/android/gradlew.bat b/js/react_native/android/gradlew.bat similarity index 98% rename from js/react_native/example/android/gradlew.bat rename to js/react_native/android/gradlew.bat index b742c9917f78e..5093609d512a9 100644 --- a/js/react_native/example/android/gradlew.bat +++ b/js/react_native/android/gradlew.bat @@ -5,7 +5,7 @@ @rem you may not use this file except in compliance with the License. @rem You may obtain a copy of the License at @rem -@rem http://www.apache.org/licenses/LICENSE-2.0 +@rem https://www.apache.org/licenses/LICENSE-2.0 @rem @rem Unless required by applicable law or agreed to in writing, software @rem distributed under the License is distributed on an "AS IS" BASIS, @@ -84,6 +84,7 @@ set CMD_LINE_ARGS=%* set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + @rem Execute Gradle "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% diff --git a/js/react_native/example/android/app/build.gradle b/js/react_native/e2e/android/app/build.gradle similarity index 95% rename from js/react_native/example/android/app/build.gradle rename to js/react_native/e2e/android/app/build.gradle index f132fbc8347a8..3683aa70da648 100644 --- a/js/react_native/example/android/app/build.gradle +++ b/js/react_native/e2e/android/app/build.gradle @@ -133,6 +133,8 @@ android { targetSdkVersion rootProject.ext.targetSdkVersion versionCode 1 versionName "1.0" + + testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner" } splits { abi { @@ -144,16 +146,13 @@ android { } signingConfigs { debug { - storeFile file('debug.keystore') - storePassword 'android' + storeFile rootProject.file('debug.keystore') keyAlias 'androiddebugkey' keyPassword 'android' + storePassword 'android' } } buildTypes { - debug { - signingConfig signingConfigs.debug - } release { // Caution! In production, you need to generate your own keystore file. // see https://reactnative.dev/docs/signed-apk-android. 
@@ -183,7 +182,6 @@ dependencies { //noinspection GradleDynamicVersion implementation "com.facebook.react:react-native:+" // From node_modules - implementation "androidx.swiperefreshlayout:swiperefreshlayout:1.0.0" debugImplementation("com.facebook.flipper:flipper:${FLIPPER_VERSION}") { exclude group:'com.facebook.fbjni' @@ -204,7 +202,11 @@ dependencies { implementation jscFlavor } - implementation project(':onnxruntimereactnative') + androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' + androidTestImplementation 'androidx.test:runner:1.4.0' + androidTestImplementation 'androidx.test:rules:1.4.0' + + implementation project(':onnxruntime-react-native') } // Run this once to be able to run the application with BUCK diff --git a/js/react_native/example/android/app/proguard-rules.pro b/js/react_native/e2e/android/app/proguard-rules.pro similarity index 100% rename from js/react_native/example/android/app/proguard-rules.pro rename to js/react_native/e2e/android/app/proguard-rules.pro diff --git a/js/react_native/e2e/android/app/src/androidTest/java/com/example/reactnativeonnxruntimemodule/OnnxruntimeModuleExampleUITests.java b/js/react_native/e2e/android/app/src/androidTest/java/com/example/reactnativeonnxruntimemodule/OnnxruntimeModuleExampleUITests.java new file mode 100644 index 0000000000000..b937ce193f8a5 --- /dev/null +++ b/js/react_native/e2e/android/app/src/androidTest/java/com/example/reactnativeonnxruntimemodule/OnnxruntimeModuleExampleUITests.java @@ -0,0 +1,79 @@ +package com.example.reactnativeonnxruntimemodule; + +import android.view.View; +import android.widget.TextView; + +import androidx.test.espresso.NoMatchingViewException; +import androidx.test.espresso.UiController; +import androidx.test.espresso.ViewAction; +import androidx.test.espresso.ViewInteraction; +import androidx.test.filters.LargeTest; +import androidx.test.rule.ActivityTestRule; +import androidx.test.runner.AndroidJUnit4; + +import org.hamcrest.Matcher; +import org.junit.Assert; +import org.junit.Rule; +import org.junit.Test; +import org.junit.runner.RunWith; + +import static androidx.test.espresso.Espresso.onView; +import static androidx.test.espresso.matcher.ViewMatchers.isAssignableFrom; +import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed; +import static androidx.test.espresso.matcher.ViewMatchers.withContentDescription; +import static org.hamcrest.Matchers.allOf; + +@RunWith(AndroidJUnit4.class) +@LargeTest +public class OnnxruntimeModuleExampleUITests { + @Rule + public ActivityTestRule activityTestRule = new ActivityTestRule<>(MainActivity.class); + + @Test + public void testExample() { + // Wait for a view displayed + int waitTime = 0; + final int sleepTime = 1000; + do { + try { + ViewInteraction view = onView(allOf(withContentDescription("output"), isDisplayed())); + if (getText(view) != null) { + break; + } + } catch (NoMatchingViewException ne) { + try { + Thread.sleep(sleepTime); + } catch (InterruptedException ie) { + } + waitTime += sleepTime; + } + } while (waitTime < 180000); + + ViewInteraction view = onView(allOf(withContentDescription("output"), isDisplayed())); + Assert.assertEquals(getText(view), "Result: 3"); + } + + private String getText(ViewInteraction matcher) { + final String[] text = {null}; + + matcher.perform(new ViewAction() { + @Override + public Matcher getConstraints() { + return isAssignableFrom(TextView.class); + } + + @Override + public String getDescription() { + return "get a text from a TextView"; + } + + @Override + public void 
perform(UiController uiController, View view) { + TextView textView = (TextView)view; + text[0] = textView.getText().toString(); + } + }); + + return text[0]; + } +} diff --git a/js/react_native/example/android/app/src/debug/AndroidManifest.xml b/js/react_native/e2e/android/app/src/debug/AndroidManifest.xml similarity index 100% rename from js/react_native/example/android/app/src/debug/AndroidManifest.xml rename to js/react_native/e2e/android/app/src/debug/AndroidManifest.xml diff --git a/js/react_native/example/android/app/src/debug/java/com/example/reactnativeonnxruntimemodule/ReactNativeFlipper.java b/js/react_native/e2e/android/app/src/debug/java/com/example/reactnativeonnxruntimemodule/ReactNativeFlipper.java similarity index 100% rename from js/react_native/example/android/app/src/debug/java/com/example/reactnativeonnxruntimemodule/ReactNativeFlipper.java rename to js/react_native/e2e/android/app/src/debug/java/com/example/reactnativeonnxruntimemodule/ReactNativeFlipper.java diff --git a/js/react_native/example/android/app/src/main/AndroidManifest.xml b/js/react_native/e2e/android/app/src/main/AndroidManifest.xml similarity index 100% rename from js/react_native/example/android/app/src/main/AndroidManifest.xml rename to js/react_native/e2e/android/app/src/main/AndroidManifest.xml diff --git a/js/react_native/example/android/app/src/main/assets/3.jpg b/js/react_native/e2e/android/app/src/main/assets/3.jpg similarity index 100% rename from js/react_native/example/android/app/src/main/assets/3.jpg rename to js/react_native/e2e/android/app/src/main/assets/3.jpg diff --git a/js/react_native/e2e/android/app/src/main/assets/index.android.bundle b/js/react_native/e2e/android/app/src/main/assets/index.android.bundle new file mode 100644 index 0000000000000..00d9a7249c151 --- /dev/null +++ b/js/react_native/e2e/android/app/src/main/assets/index.android.bundle @@ -0,0 +1,811 @@ +var __BUNDLE_START_TIME__=this.nativePerformanceNow?nativePerformanceNow():Date.now(),__DEV__=false,process=this.process||{},__METRO_GLOBAL_PREFIX__='';process.env=process.env||{};process.env.NODE_ENV=process.env.NODE_ENV||"production"; +!(function(r){"use strict";r.__r=o,r[__METRO_GLOBAL_PREFIX__+"__d"]=function(r,i,n){if(null!=e[i])return;var o={dependencyMap:n,factory:r,hasError:!1,importedAll:t,importedDefault:t,isInitialized:!1,publicModule:{exports:{}}};e[i]=o},r.__c=n,r.__registerSegment=function(r,t,i){s[r]=t,i&&i.forEach(function(t){e[t]||v.has(t)||v.set(t,r)})};var e=n(),t={},i={}.hasOwnProperty;function n(){return e=Object.create(null)}function o(r){var t=r,i=e[t];return i&&i.isInitialized?i.publicModule.exports:d(t,i)}function l(r){var i=r;if(e[i]&&e[i].importedDefault!==t)return e[i].importedDefault;var n=o(i),l=n&&n.__esModule?n.default:n;return e[i].importedDefault=l}function u(r){var n=r;if(e[n]&&e[n].importedAll!==t)return e[n].importedAll;var l,u=o(n);if(u&&u.__esModule)l=u;else{if(l={},u)for(var a in u)i.call(u,a)&&(l[a]=u[a]);l.default=u}return e[n].importedAll=l}o.importDefault=l,o.importAll=u;var a=!1;function d(e,t){if(!a&&r.ErrorUtils){var i;a=!0;try{i=h(e,t)}catch(e){r.ErrorUtils.reportFatalError(e)}return a=!1,i}return h(e,t)}var f=16,c=65535;function p(r){return{segmentId:r>>>f,localId:r&c}}o.unpackModuleId=p,o.packModuleId=function(r){return(r.segmentId<0){var n,a=null!==(n=v.get(t))&&void 0!==n?n:0,d=s[a];null!=d&&(d(t),i=e[t],v.delete(t))}var f=r.nativeRequire;if(!i&&f){var c=p(t),h=c.segmentId;f(c.localId,h),i=e[t]}if(!i)throw Error('Requiring unknown module "'+t+'".');if(i.hasError)throw 
_(t,i.error);i.isInitialized=!0;var m=i,g=m.factory,I=m.dependencyMap;try{var M=i.publicModule;return M.id=t,g(r,o,l,u,M,M.exports,I),i.factory=void 0,i.dependencyMap=void 0,M.exports}catch(r){throw i.hasError=!0,i.error=r,i.isInitialized=!1,i.publicModule.exports=void 0,r}}function _(r,e){return Error('Requiring module "'+r+'", which threw an exception: '+e)}})('undefined'!=typeof globalThis?globalThis:'undefined'!=typeof global?global:'undefined'!=typeof window?window:this); +!(function(n){var e=(function(){function n(n,e){return n}function e(n){var e={};return n.forEach(function(n,r){e[n]=!0}),e}function r(n,r,u){if(n.formatValueCalls++,n.formatValueCalls>200)return"[TOO BIG formatValueCalls "+n.formatValueCalls+" exceeded limit of 200]";var f=t(n,r);if(f)return f;var c=Object.keys(r),s=e(c);if(d(r)&&(c.indexOf('message')>=0||c.indexOf('description')>=0))return o(r);if(0===c.length){if(v(r)){var g=r.name?': '+r.name:'';return n.stylize('[Function'+g+']','special')}if(p(r))return n.stylize(RegExp.prototype.toString.call(r),'regexp');if(y(r))return n.stylize(Date.prototype.toString.call(r),'date');if(d(r))return o(r)}var h,b,m='',j=!1,O=['{','}'];(h=r,Array.isArray(h)&&(j=!0,O=['[',']']),v(r))&&(m=' [Function'+(r.name?': '+r.name:'')+']');return p(r)&&(m=' '+RegExp.prototype.toString.call(r)),y(r)&&(m=' '+Date.prototype.toUTCString.call(r)),d(r)&&(m=' '+o(r)),0!==c.length||j&&0!=r.length?u<0?p(r)?n.stylize(RegExp.prototype.toString.call(r),'regexp'):n.stylize('[Object]','special'):(n.seen.push(r),b=j?i(n,r,u,s,c):c.map(function(e){return l(n,r,u,s,e,j)}),n.seen.pop(),a(b,m,O)):O[0]+m+O[1]}function t(n,e){if(s(e))return n.stylize('undefined','undefined');if('string'==typeof e){var r="'"+JSON.stringify(e).replace(/^"|"$/g,'').replace(/'/g,"\\'").replace(/\\"/g,'"')+"'";return n.stylize(r,'string')}return c(e)?n.stylize(''+e,'number'):u(e)?n.stylize(''+e,'boolean'):f(e)?n.stylize('null','null'):void 0}function o(n){return'['+Error.prototype.toString.call(n)+']'}function i(n,e,r,t,o){for(var i=[],a=0,u=e.length;a-1&&(u=l?u.split('\n').map(function(n){return' '+n}).join('\n').substr(2):'\n'+u.split('\n').map(function(n){return' '+n}).join('\n')):u=n.stylize('[Circular]','special')),s(a)){if(l&&i.match(/^\d+$/))return u;(a=JSON.stringify(''+i)).match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)?(a=a.substr(1,a.length-2),a=n.stylize(a,'name')):(a=a.replace(/'/g,"\\'").replace(/\\"/g,'"').replace(/(^"|"$)/g,"'"),a=n.stylize(a,'string'))}return a+': '+u}function a(n,e,r){return n.reduce(function(n,e){return 0,e.indexOf('\n')>=0&&0,n+e.replace(/\u001b\[\d\d?m/g,'').length+1},0)>60?r[0]+(''===e?'':e+'\n ')+' '+n.join(',\n ')+' '+r[1]:r[0]+e+' '+n.join(', ')+' '+r[1]}function u(n){return'boolean'==typeof n}function f(n){return null===n}function c(n){return'number'==typeof n}function s(n){return void 0===n}function p(n){return g(n)&&'[object RegExp]'===h(n)}function g(n){return'object'==typeof n&&null!==n}function y(n){return g(n)&&'[object Date]'===h(n)}function d(n){return g(n)&&('[object Error]'===h(n)||n instanceof Error)}function v(n){return'function'==typeof n}function h(n){return Object.prototype.toString.call(n)}function b(n,e){return Object.prototype.hasOwnProperty.call(n,e)}return function(e,t){return r({seen:[],formatValueCalls:0,stylize:n},e,t.depth)}})(),r='(index)',t={trace:0,info:1,warn:2,error:3},o=[];o[t.trace]='debug',o[t.info]='log',o[t.warn]='warning',o[t.error]='error';var i=1;function l(r){return function(){var l;l=1===arguments.length&&'string'==typeof 
arguments[0]?arguments[0]:Array.prototype.map.call(arguments,function(n){return e(n,{depth:10})}).join(', ');var a=arguments[0],u=r;'string'==typeof a&&'Warning: '===a.slice(0,9)&&u>=t.error&&(u=t.warn),n.__inspectorLog&&n.__inspectorLog(o[u],l,[].slice.call(arguments),i),s.length&&(l=p('',l)),n.nativeLoggingHook(l,u)}}function a(n,e){return Array.apply(null,Array(e)).map(function(){return n})}var u="\u2502",f="\u2510",c="\u2518",s=[];function p(n,e){return s.join('')+n+' '+(e||'')}if(n.nativeLoggingHook){n.console;n.console={error:l(t.error),info:l(t.info),log:l(t.info),warn:l(t.warn),trace:l(t.trace),debug:l(t.trace),table:function(e){if(!Array.isArray(e)){var o=e;for(var i in e=[],o)if(o.hasOwnProperty(i)){var l=o[i];l[r]=i,e.push(l)}}if(0!==e.length){var u=Object.keys(e[0]).sort(),f=[],c=[];u.forEach(function(n,r){c[r]=n.length;for(var t=0;t';return function(){for(var r=arguments.length,u=new Array(r),e=0;e1?n-1:0),o=1;o0?l[l.length-1]:null,c=l.length>1?l[l.length-2]:null,v='function'==typeof s,h='function'==typeof c;h&&r(d[1])(v,'Cannot have a non-function arg after a function arg.');var y=v?s:null,C=h?c:null,M=v+h,b=l.slice(0,l.length-M);if('sync'===o)return r(d[3]).callNativeSyncHook(n,t,b,C,y);r(d[3]).enqueueNativeCall(n,t,b,C,y)}).type=o,u}function u(n,t){return-1!==n.indexOf(t)}function l(n,t){return r(d[2])(t,n||{})}g.__fbGenNativeModule=n;var f={};if(g.nativeModuleProxy)f=g.nativeModuleProxy;else if(!g.nativeExtensions){var s=g.__fbBatchedBridgeConfig;r(d[1])(s,'__fbBatchedBridgeConfig is not set, cannot invoke native modules');var c=r(d[4]);(s.remoteModuleConfig||[]).forEach(function(o,u){var l=n(o,u);l&&(l.module?f[l.name]=l.module:c(f,l.name,{get:function(){return t(l.name,u)}}))})}m.exports=f},22,[23,18,29,30,39]); +__d(function(g,r,_i,a,m,e,d){m.exports=function(t,o){return r(d[0])(t)||r(d[1])(t,o)||r(d[2])(t,o)||r(d[3])()},m.exports.__esModule=!0,m.exports.default=m.exports},23,[24,25,26,28]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t){if(Array.isArray(t))return t},m.exports.__esModule=!0,m.exports.default=m.exports},24,[]); +__d(function(g,r,_i2,a,m,e,d){m.exports=function(t,l){var n=null==t?null:"undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(null!=n){var o,u,f=[],i=!0,y=!1;try{for(n=n.call(t);!(i=(o=n.next()).done)&&(f.push(o.value),!l||f.length!==l);i=!0);}catch(t){y=!0,u=t}finally{try{i||null==n.return||n.return()}finally{if(y)throw u}}return f}},m.exports.__esModule=!0,m.exports.default=m.exports},25,[]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t,o){if(t){if("string"==typeof t)return r(d[0])(t,o);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?r(d[0])(t,o):void 0}},m.exports.__esModule=!0,m.exports.default=m.exports},26,[27]); +__d(function(g,r,_i,a,m,e,d){m.exports=function(t,n){(null==n||n>t.length)&&(n=t.length);for(var o=0,l=new Array(n);o=5){var h=this._queue;this._queue=[[],[],[],this._callID],this._lastFlush=o,g.nativeFlushQueueImmediate(h)}r(d[2]).counterEvent('pending_js_to_native_queue',this._queue[0].length),this.__spy&&this.__spy({type:1,module:t+'',method:l,args:s})}},{key:"createDebugLookup",value:function(t,l,s){}},{key:"setReactNativeMicrotasksCallback",value:function(t){this._reactNativeMicrotasksCallback=t}},{key:"__guard",value:function(t){if(this.__shouldPauseOnThrow())t();else 
try{t()}catch(t){r(d[3]).reportFatalError(t)}}},{key:"__shouldPauseOnThrow",value:function(){return'undefined'!=typeof DebuggerInternal&&!0===DebuggerInternal.shouldPauseOnThrow}},{key:"__callReactNativeMicrotasks",value:function(){r(d[2]).beginEvent('JSTimers.callReactNativeMicrotasks()'),null!=this._reactNativeMicrotasksCallback&&this._reactNativeMicrotasksCallback(),r(d[2]).endEvent()}},{key:"__callFunction",value:function(t,l,s){this._lastFlush=Date.now(),this._eventLoopStartTime=this._lastFlush,this.__spy?r(d[2]).beginEvent(t+"."+l+"("+r(d[4]).default(s)+")"):r(d[2]).beginEvent(t+"."+l+"(...)"),this.__spy&&this.__spy({type:0,module:t,method:l,args:s});var u=this.getCallableModule(t);r(d[5])(!!u,"Module "+t+" is not a registered callable module (calling "+l+"). A frequent cause of the error is that the application entry file path is incorrect.\n This can also happen when the JS bundle is corrupt or there is an early initialization error when loading React Native."),r(d[5])(!!u[l],"Method "+l+" does not exist on module "+t),u[l].apply(u,s),r(d[2]).endEvent()}},{key:"__invokeCallback",value:function(t,l){this._lastFlush=Date.now(),this._eventLoopStartTime=this._lastFlush;var s=t>>>1,u=1&t?this._successCallbacks.get(s):this._failureCallbacks.get(s);u&&(this._successCallbacks.delete(s),this._failureCallbacks.delete(s),u.apply(void 0,r(d[6])(l)))}}],[{key:"spy",value:function(l){t.prototype.__spy=!0===l?function(t){console.log((0===t.type?'N->JS':'JS->N')+" : "+(null!=t.module?t.module+'.':'')+t.method+"("+JSON.stringify(t.args)+")")}:!1===l?null:l}}]),t})();m.exports=t},31,[7,8,32,33,34,18,35]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=!1,t=0,c={installReactHook:function(){!0},setEnabled:function(t){n!==t&&(n=t)},isEnabled:function(){return n},beginEvent:function(t,c){if(n){var o='function'==typeof t?t():t;g.nativeTraceBeginSection(131072,o,c)}},endEvent:function(){n&&g.nativeTraceEndSection(131072)},beginAsyncEvent:function(c){var o=t;if(n){t++;var f='function'==typeof c?c():c;g.nativeTraceBeginAsyncSection(131072,f,o)}return o},endAsyncEvent:function(t,c){if(n){var o='function'==typeof t?t():t;g.nativeTraceEndAsyncSection(131072,o,c)}},counterEvent:function(t,c){if(n){var o='function'==typeof t?t():t;g.nativeTraceCounter&&g.nativeTraceCounter(131072,o,c)}}};m.exports=c},32,[]); +__d(function(g,r,i,a,m,e,d){m.exports=g.ErrorUtils},33,[]); +__d(function(g,r,_i,a,m,_e,d){Object.defineProperty(_e,"__esModule",{value:!0}),_e.createStringifySafeWithLimits=o,_e.default=void 0;var t=r(d[0])(r(d[1]));function e(t,e){var i="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(i)return(i=i.call(t)).next.bind(i);if(Array.isArray(t)||(i=n(t))||e&&t&&"number"==typeof t.length){i&&(t=i);var o=0;return function(){return o>=t.length?{done:!0}:{done:!1,value:t[o++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function n(t,e){if(t){if("string"==typeof t)return i(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?i(t,e):void 0}}function i(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,i=new Array(e);nf+"...(truncated)...".length?i.substring(0,f)+"...(truncated)...":i}if('object'!=typeof i||null===i)return i;var u=i;if(Array.isArray(i))h.length>=o?u="[ ... array with "+i.length+" values ... 
]":i.length>c&&(u=i.slice(0,c).concat(["... extra "+(i.length-c)+" values truncated ..."]));else{(0,t.default)('object'==typeof i,'This was already found earlier');var l=Object.keys(i);if(h.length>=o)u="{ ... object with "+l.length+" keys ... }";else if(l.length>s){u={};for(var y,v=e(l.slice(0,s));!(y=v()).done;){var b=y.value;u[b]=i[b]}u['...(truncated keys)...']=l.length-s}}return h.unshift(u),u}return function(t){if(void 0===t)return'undefined';if(null===t)return'null';if('function'==typeof t)try{return t.toString()}catch(t){return'[function unknown]'}else{if(t instanceof Error)return t.name+': '+t.message;try{var e=JSON.stringify(t,v);return void 0===e?'["'+typeof t+'" failed to stringify]':e}catch(e){if('function'==typeof t.toString)try{return t.toString()}catch(t){}}}return'["'+typeof t+'" failed to stringify]'}}var u=o({maxDepth:10,maxStringLimit:100,maxArrayLimit:50,maxObjectKeysLimit:50});_e.default=u},34,[3,18]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t){return r(d[0])(t)||r(d[1])(t)||r(d[2])(t)||r(d[3])()},m.exports.__esModule=!0,m.exports.default=m.exports},35,[36,37,26,38]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t){if(Array.isArray(t))return r(d[0])(t)},m.exports.__esModule=!0,m.exports.default=m.exports},36,[27]); +__d(function(g,r,i,a,m,e,d){m.exports=function(o){if("undefined"!=typeof Symbol&&null!=o[Symbol.iterator]||null!=o["@@iterator"])return Array.from(o)},m.exports.__esModule=!0,m.exports.default=m.exports},37,[]); +__d(function(g,r,i,a,m,e,d){m.exports=function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")},m.exports.__esModule=!0,m.exports.default=m.exports},38,[]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(t,n,u){var b,c=u.get,o=!1!==u.enumerable,f=!1!==u.writable,l=!1;function s(u){b=u,l=!0,Object.defineProperty(t,n,{value:u,configurable:!0,enumerable:o,writable:f})}Object.defineProperty(t,n,{get:function(){return l||(l=!0,s(c())),b},set:s,configurable:!0,enumerable:o})}},39,[]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).get('AccessibilityInfo');e.default=n},40,[21]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return 
f})(r(d[0])).get('AccessibilityManager');e.default=n},41,[21]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1]));m.exports=function(s,c){'focus'===c&&t.default.sendAccessibilityEvent(s,t.default.getConstants().AccessibilityEventTypes.typeViewFocused),'click'===c&&t.default.sendAccessibilityEvent(s,t.default.getConstants().AccessibilityEventTypes.typeViewClicked)}},42,[3,43]); +__d(function(g,r,i,a,m,e,d){var l=r(d[0])(r(d[1])),n=!0===g.RN$Bridgeless?r(d[2]):null==l.default.unstable_UIManager?r(d[3]):l.default.unstable_UIManager;m.exports=n},43,[3,44,45,46]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;e.default={unstable_UIManager:null}},44,[]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports={getViewManagerConfig:function(n){return console.warn('Attempting to get config for view manager: '+n),'RCTVirtualText'===n?{}:null},hasViewManagerConfig:function(n){return'RCTVirtualText'===n||'RCTShimmeringView'===n},getConstants:function(){return{}},getConstantsForViewManager:function(n){},getDefaultEventTypes:function(){return[]},lazilyLoadView:function(n){},createView:function(n,t,o,u){},updateView:function(n,t,o){},focus:function(n){},blur:function(n){},findSubviewIn:function(n,t,o){},dispatchViewManagerCommand:function(n,t,o){},measure:function(n,t){},measureInWindow:function(n,t){},viewIsDescendantOf:function(n,t,o){},measureLayout:function(n,t,o,u){},measureLayoutRelativeToParent:function(n,t,o){},setJSResponder:function(n,t){},clearJSResponder:function(){},configureNextLayoutAnimation:function(n,t,o){},removeSubviewsFromContainerWithID:function(n){},replaceExistingNonRootView:function(n,t){},setChildren:function(n,t){},manageChildren:function(n,t,o,u,c,f){},setLayoutAnimationEnabledExperimental:function(n){},sendAccessibilityEvent:function(n,t){},showPopupMenu:function(n,t,o,u){},dismissPopupMenu:function(){}}},45,[]); +__d(function(g,r,i,a,m,_e,d){var n=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),t={},o=new Set,f={},u=!1;function c(){return u||(f=e.default.getConstants(),u=!0),f}function l(n){if(void 0===t[n]&&g.nativeCallSyncHook&&e.default.getConstantsForViewManager)try{t[n]=e.default.getConstantsForViewManager(n)}catch(e){console.error("NativeUIManager.getConstantsForViewManager('"+n+"') threw an exception.",e),t[n]=null}var f=t[n];if(f)return f;if(!g.nativeCallSyncHook)return f;if(e.default.lazilyLoadView&&!o.has(n)){var u=e.default.lazilyLoadView(n);o.add(n),null!=u&&null!=u.viewConfig&&(c()[n]=u.viewConfig,w(n))}return t[n]}var s=(0,n.default)({},e.default,{createView:function(n,t,o,f){e.default.createView(n,t,o,f)},getConstants:function(){return c()},getViewManagerConfig:function(n){return l(n)},hasViewManagerConfig:function(n){return null!=l(n)}});function w(n){var e=c()[n];t[n]=e,e.Manager&&(r(d[3])(e,'Constants',{get:function(){var n=r(d[4])[e.Manager],t={};return n&&Object.keys(n).forEach(function(e){var o=n[e];'function'!=typeof o&&(t[e]=o)}),t}}),r(d[3])(e,'Commands',{get:function(){var n=r(d[4])[e.Manager],t={},o=0;return n&&Object.keys(n).forEach(function(e){'function'==typeof n[e]&&(t[e]=o++)}),t}}))}e.default.getViewManagerConfig=s.getViewManagerConfig,c().ViewManagerNames&&e.default.getConstants().ViewManagerNames.forEach(function(n){r(d[3])(e.default,n,{get:function(){return e.default.getConstantsForViewManager(n)}})}),g.nativeCallSyncHook||Object.keys(c()).forEach(function(n){r(d[5]).includes(n)||(t[n]||(t[n]=c()[n]),r(d[3])(e.default,n,{get:function(){return console.warn("Accessing view manager configs directly off 
UIManager via UIManager['"+n+"'] is no longer supported. Use UIManager.getViewManagerConfig('"+n+"') instead."),s.getViewManagerConfig(n)}}))}),m.exports=s},46,[3,29,47,39,22,48]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).getEnforcing('UIManager');e.default=n},47,[21]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=['clearJSResponder','configureNextLayoutAnimation','createView','dismissPopupMenu','dispatchViewManagerCommand','findSubviewIn','getConstantsForViewManager','getDefaultEventTypes','manageChildren','measure','measureInWindow','measureLayout','measureLayoutRelativeToParent','removeRootView','removeSubviewsFromContainerWithID','replaceExistingNonRootView','sendAccessibilityEvent','setChildren','setJSResponder','setLayoutAnimationEnabledExperimental','showPopupMenu','updateView','viewIsDescendantOf','PopupMenu','LazyViewManagersEnabled','ViewManagerNames','StyleConstants','AccessibilityEventTypes','UIView','getViewManagerConfig','hasViewManagerConfig','blur','focus','genericBubblingEventTypes','genericDirectEventTypes','lazilyLoadView']},48,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var t;t=r(d[0]),m.exports=t},49,[50]); +__d(function(e,n,t,r,l,a,i){"use strict";n(i[0]);var u=n(i[1]);function o(e,n,t,r,l,a,i,u,o){var s=Array.prototype.slice.call(arguments,3);try{n.apply(t,s)}catch(e){this.onError(e)}}var s=!1,c=null,d=!1,f=null,p={onError:function(e){s=!0,c=e}};function h(e,n,t,r,l,a,i,u,d){s=!1,c=null,o.apply(p,arguments)}function g(e,n,t,r,l,a,i,u,o){if(h.apply(this,arguments),s){if(!s)throw Error("clearCaughtError was called but no error was captured. This error is likely caused by a bug in React. 
Please file an issue.");var p=c;s=!1,c=null,d||(d=!0,f=p)}}var m=Array.isArray,v=null,b=null,y=null;function S(e,n,t){var r=e.type||"unknown-event";e.currentTarget=y(t),g(r,n,void 0,e),e.currentTarget=null}function k(e){var n=e._dispatchListeners,t=e._dispatchInstances;if(m(n))throw Error("executeDirectDispatch(...): Invalid `event`.");return e.currentTarget=n?y(t):null,n=n?n(e):null,e.currentTarget=null,e._dispatchListeners=null,e._dispatchInstances=null,n}function w(){return!0}function _(){return!1}function T(e,n,t,r){for(var l in this.dispatchConfig=e,this._targetInst=n,this.nativeEvent=t,this._dispatchInstances=this._dispatchListeners=null,e=this.constructor.Interface)e.hasOwnProperty(l)&&((n=e[l])?this[l]=n(t):"target"===l?this.target=r:this[l]=t[l]);return this.isDefaultPrevented=(null!=t.defaultPrevented?t.defaultPrevented:!1===t.returnValue)?w:_,this.isPropagationStopped=_,this}function x(e,n,t,r){if(this.eventPool.length){var l=this.eventPool.pop();return this.call(l,e,n,t,r),l}return new this(e,n,t,r)}function P(e){if(!(e instanceof this))throw Error("Trying to release an event instance into a pool of a different type.");e.destructor(),10>this.eventPool.length&&this.eventPool.push(e)}function R(e){e.getPooled=x,e.eventPool=[],e.release=P}n(i[2])(T.prototype,{preventDefault:function(){this.defaultPrevented=!0;var e=this.nativeEvent;e&&(e.preventDefault?e.preventDefault():"unknown"!=typeof e.returnValue&&(e.returnValue=!1),this.isDefaultPrevented=w)},stopPropagation:function(){var e=this.nativeEvent;e&&(e.stopPropagation?e.stopPropagation():"unknown"!=typeof e.cancelBubble&&(e.cancelBubble=!0),this.isPropagationStopped=w)},persist:function(){this.isPersistent=w},isPersistent:_,destructor:function(){var e,n=this.constructor.Interface;for(e in n)this[e]=null;this.nativeEvent=this._targetInst=this.dispatchConfig=null,this.isPropagationStopped=this.isDefaultPrevented=_,this._dispatchInstances=this._dispatchListeners=null}}),T.Interface={type:null,target:null,currentTarget:function(){return null},eventPhase:null,bubbles:null,cancelable:null,timeStamp:function(e){return e.timeStamp||Date.now()},defaultPrevented:null,isTrusted:null},T.extend=function(e){function t(){}function r(){return l.apply(this,arguments)}var l=this;t.prototype=l.prototype;var a=new t;return n(i[2])(a,r.prototype),r.prototype=a,r.prototype.constructor=r,r.Interface=n(i[2])({},l.Interface,e),r.extend=l.extend,R(r),r},R(T);var E=T.extend({touchHistory:function(){return null}});function C(e){return"topTouchStart"===e}function N(e){return"topTouchMove"===e}var z=["topTouchStart"],I=["topTouchMove"],L=["topTouchCancel","topTouchEnd"],U=[],M={touchBank:U,numberActiveTouches:0,indexOfSingleActiveTouch:-1,mostRecentTimeStamp:0};function F(e){return e.timeStamp||e.timestamp}function D(e){if(null==(e=e.identifier))throw Error("Touch object is missing identifier.");return e}function A(e){var n=D(e),t=U[n];t?(t.touchActive=!0,t.startPageX=e.pageX,t.startPageY=e.pageY,t.startTimeStamp=F(e),t.currentPageX=e.pageX,t.currentPageY=e.pageY,t.currentTimeStamp=F(e),t.previousPageX=e.pageX,t.previousPageY=e.pageY,t.previousTimeStamp=F(e)):(t={touchActive:!0,startPageX:e.pageX,startPageY:e.pageY,startTimeStamp:F(e),currentPageX:e.pageX,currentPageY:e.pageY,currentTimeStamp:F(e),previousPageX:e.pageX,previousPageY:e.pageY,previousTimeStamp:F(e)},U[n]=t),M.mostRecentTimeStamp=F(e)}function Q(e){var 
n=U[D(e)];n&&(n.touchActive=!0,n.previousPageX=n.currentPageX,n.previousPageY=n.currentPageY,n.previousTimeStamp=n.currentTimeStamp,n.currentPageX=e.pageX,n.currentPageY=e.pageY,n.currentTimeStamp=F(e),M.mostRecentTimeStamp=F(e))}function H(e){var n=U[D(e)];n&&(n.touchActive=!1,n.previousPageX=n.currentPageX,n.previousPageY=n.currentPageY,n.previousTimeStamp=n.currentTimeStamp,n.currentPageX=e.pageX,n.currentPageY=e.pageY,n.currentTimeStamp=F(e),M.mostRecentTimeStamp=F(e))}var O,j={instrument:function(e){O=e},recordTouchTrack:function(e,n){if(null!=O&&O(e,n),N(e))n.changedTouches.forEach(Q);else if(C(e))n.changedTouches.forEach(A),M.numberActiveTouches=n.touches.length,1===M.numberActiveTouches&&(M.indexOfSingleActiveTouch=n.touches[0].identifier);else if(("topTouchEnd"===e||"topTouchCancel"===e)&&(n.changedTouches.forEach(H),M.numberActiveTouches=n.touches.length,1===M.numberActiveTouches))for(e=0;ei||(a=i),Ne(a,e,l)}}}),v=function(e){return we.get(e._nativeTag)||null},b=_e,y=function(e){var n=(e=e.stateNode)._nativeTag;if(void 0===n&&(n=(e=e.canonical)._nativeTag),!n)throw Error("All native instances should have a tag.");return e},re.injection.injectGlobalResponderHandler({onChange:function(e,t,r){null!==t?n(i[3]).UIManager.setJSResponder(t.stateNode._nativeTag,r):n(i[3]).UIManager.clearJSResponder()}});var ze=u.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED,Ie=60103,Le=60106,Ue=60107,Me=60108,Fe=60114,De=60109,Ae=60110,Qe=60112,He=60113,Oe=60120,je=60115,Be=60116,Ve=60129,We=60130,Ye=60131,qe=60132;if("function"==typeof Symbol&&Symbol.for){var Xe=Symbol.for;Ie=Xe("react.element"),Le=Xe("react.portal"),Ue=Xe("react.fragment"),Me=Xe("react.strict_mode"),Fe=Xe("react.profiler"),De=Xe("react.provider"),Ae=Xe("react.context"),Qe=Xe("react.forward_ref"),He=Xe("react.suspense"),Oe=Xe("react.suspense_list"),je=Xe("react.memo"),Be=Xe("react.lazy"),Xe("react.scope"),Ve=Xe("react.debug_trace_mode"),We=Xe("react.offscreen"),Ye=Xe("react.legacy_hidden"),qe=Xe("react.cache")}var $e="function"==typeof Symbol&&Symbol.iterator;function Ge(e){return null===e||"object"!=typeof e?null:"function"==typeof(e=$e&&e[$e]||e["@@iterator"])?e:null}function Ke(e){if(null==e)return null;if("function"==typeof e)return e.displayName||e.name||null;if("string"==typeof e)return e;switch(e){case Ue:return"Fragment";case Le:return"Portal";case Fe:return"Profiler";case Me:return"StrictMode";case He:return"Suspense";case Oe:return"SuspenseList";case qe:return"Cache"}if("object"==typeof e)switch(e.$$typeof){case Ae:return(e.displayName||"Context")+".Consumer";case De:return(e._context.displayName||"Context")+".Provider";case Qe:var n=e.render;return(e=e.displayName)||(e=""!==(e=n.displayName||n.name||"")?"ForwardRef("+e+")":"ForwardRef"),e;case je:return null!==(n=e.displayName||null)?n:Ke(e.type)||"Memo";case Be:n=e._payload,e=e._init;try{return Ke(e(n))}catch(e){}}return null}function Je(e){var n=e.type;switch(e.tag){case 24:return"Cache";case 9:return(n.displayName||"Context")+".Consumer";case 10:return(n._context.displayName||"Context")+".Provider";case 18:return"DehydratedFragment";case 11:return e=(e=n.render).displayName||e.name||"",n.displayName||(""!==e?"ForwardRef("+e+")":"ForwardRef");case 7:return"Fragment";case 5:return n;case 4:return"Portal";case 3:return"Root";case 6:return"Text";case 16:return Ke(n);case 23:return"LegacyHidden";case 8:return n===Me?"StrictMode":"Mode";case 22:return"Offscreen";case 12:return"Profiler";case 21:return"Scope";case 13:return"Suspense";case 19:return"SuspenseList";case 1:case 
0:case 17:case 2:case 14:case 15:if("function"==typeof n)return n.displayName||n.name||null;if("string"==typeof n)return n}return null}function Ze(e){var n=e,t=e;if(e.alternate)for(;n.return;)n=n.return;else{e=n;do{0!=(2050&(n=e).flags)&&(t=n.return),e=n.return}while(e)}return 3===n.tag?t:null}function en(e){if(Ze(e)!==e)throw Error("Unable to find node on an unmounted component.")}function nn(e){var n=e.alternate;if(!n){if(null===(n=Ze(e)))throw Error("Unable to find node on an unmounted component.");return n!==e?null:e}for(var t=e,r=n;;){var l=t.return;if(null===l)break;var a=l.alternate;if(null===a){if(null!==(r=l.return)){t=r;continue}break}if(l.child===a.child){for(a=l.child;a;){if(a===t)return en(l),e;if(a===r)return en(l),n;a=a.sibling}throw Error("Unable to find node on an unmounted component.")}if(t.return!==r.return)t=l,r=a;else{for(var i=!1,u=l.child;u;){if(u===t){i=!0,t=l,r=a;break}if(u===r){i=!0,r=l,t=a;break}u=u.sibling}if(!i){for(u=a.child;u;){if(u===t){i=!0,t=a,r=l;break}if(u===r){i=!0,r=a,t=l;break}u=u.sibling}if(!i)throw Error("Child was not found in either parent set. This indicates a bug in React related to the return pointer. Please file an issue.")}}if(t.alternate!==r)throw Error("Return fibers should always be each others' alternates. This error is likely caused by a bug in React. Please file an issue.")}if(3!==t.tag)throw Error("Unable to find node on an unmounted component.");return t.stateNode.current===t?e:n}function tn(e){return null!==(e=nn(e))?rn(e):null}function rn(e){if(5===e.tag||6===e.tag)return e;for(e=e.child;null!==e;){var n=rn(e);if(null!==n)return n;e=e.sibling}return null}var ln={},an=null,un=0,on={unsafelyIgnoreFunctions:!0};function sn(e,t){return"object"!=typeof t||null===t||n(i[3]).deepDiffer(e,t,on)}function cn(e,n,t){if(m(n))for(var r=n.length;r--&&0=(a=n&-n)||16===l&&0!=(4194240&a)))return n;if(0!=(4&r)&&(r|=16&t),0!==(n=e.entangledLanes))for(e=e.entanglements,n&=r;0t;t++)n.push(e);return n}function Rn(e,n,t){e.pendingLanes|=n,536870912!==n&&(e.suspendedLanes=0,e.pingedLanes=0),(e=e.eventTimes)[n=31-Nn(n)]=t}function En(e,n){var t=e.pendingLanes&~n;e.pendingLanes=n,e.suspendedLanes=0,e.pingedLanes=0,e.expiredLanes&=n,e.mutableReadLanes&=n,e.entangledLanes&=n,n=e.entanglements;var r=e.eventTimes;for(e=e.expirationTimes;0Xn||(e.current=qn[Xn],qn[Xn]=null,Xn--)}function Kn(e,n){qn[++Xn]=e.current,e.current=n}var Jn={},Zn=$n(Jn),et=$n(!1),nt=Jn;function tt(e,n){var t=e.type.contextTypes;if(!t)return Jn;var r=e.stateNode;if(r&&r.__reactInternalMemoizedUnmaskedChildContext===n)return r.__reactInternalMemoizedMaskedChildContext;var l,a={};for(l in t)a[l]=n[l];return r&&((e=e.stateNode).__reactInternalMemoizedUnmaskedChildContext=n,e.__reactInternalMemoizedMaskedChildContext=a),a}function rt(e){return null!==(e=e.childContextTypes)&&void 0!==e}function lt(){Gn(et),Gn(Zn)}function at(e,n,t){if(Zn.current!==Jn)throw Error("Unexpected context found on stack. This error is likely caused by a bug in React. Please file an issue.");Kn(Zn,n),Kn(et,t)}function it(e,t,r){var l=e.stateNode;if(t=t.childContextTypes,"function"!=typeof l.getChildContext)return r;for(var a in l=l.getChildContext())if(!(a in t))throw Error((Je(e)||"Unknown")+'.getChildContext(): key "'+a+'" is not defined in childContextTypes.');return n(i[2])({},r,l)}function ut(e){return e=(e=e.stateNode)&&e.__reactInternalMemoizedMergedChildContext||Jn,nt=Zn.current,Kn(Zn,e),Kn(et,et.current),!0}function ot(e,n,t){var r=e.stateNode;if(!r)throw Error("Expected to have an instance by this point. 
This error is likely caused by a bug in React. Please file an issue.");t?(e=it(e,n,nt),r.__reactInternalMemoizedMergedChildContext=e,Gn(et),Gn(Zn),Kn(Zn,e)):Gn(et),Kn(et,t)}var st=null,ct=!1,dt=!1;function ft(){if(!dt&&null!==st){dt=!0;var e=0,t=Ln;try{var r=st;for(Ln=1;eg?(m=h,h=null):m=h.sibling;var v=f(l,h,u[g],o);if(null===v){null===h&&(h=m);break}e&&h&&null===v.alternate&&n(l,h),i=a(v,i,g),null===c?s=v:c.sibling=v,c=v,h=m}if(g===u.length)return t(l,h),s;if(null===h){for(;gg?(m=h,h=null):m=h.sibling;var b=f(l,h,v.value,o);if(null===b){null===h&&(h=m);break}e&&h&&null===b.alternate&&n(l,h),i=a(b,i,g),null===c?s=b:c.sibling=b,c=b,h=m}if(v.done)return t(l,h),s;if(null===h){for(;!v.done;g++,v=u.next())null!==(v=d(l,v.value,o))&&(i=a(v,i,g),null===c?s=v:c.sibling=v,c=v);return s}for(h=r(l,h);!v.done;g++,v=u.next())null!==(v=p(h,l,g,v.value,o))&&(e&&null!==v.alternate&&h.delete(null===v.key?g:v.key),i=a(v,i,g),null===c?s=v:c.sibling=v,c=v);return e&&h.forEach(function(e){return n(l,e)}),s}return function(e,r,a,u){var o="object"==typeof a&&null!==a&&a.type===Ue&&null===a.key;if(o&&(a=a.props.children),"object"==typeof a&&null!==a){switch(a.$$typeof){case Ie:e:{var s=a.key;for(o=r;null!==o;){if(o.key===s){if((s=a.type)===Ue){if(7===o.tag){t(e,o.sibling),(r=l(o,a.props.children)).return=e,e=r;break e}}else if(o.elementType===s){t(e,o.sibling),(r=l(o,a.props)).ref=Wt(e,o,a),r.return=e,e=r;break e}t(e,o);break}n(e,o),o=o.sibling}a.type===Ue?((r=Ka(a.props.children,e.mode,u,a.key)).return=e,e=r):((u=Ga(a.type,a.key,a.props,null,e.mode,u)).ref=Wt(e,r,a),u.return=e,e=u)}return i(e);case Le:e:{for(o=a.key;null!==r;){if(r.key===o){if(4===r.tag&&r.stateNode.containerInfo===a.containerInfo&&r.stateNode.implementation===a.implementation){t(e,r.sibling),(r=l(r,a.children||[])).return=e,e=r;break e}t(e,r);break}n(e,r),r=r.sibling}(r=ei(a,e.mode,u)).return=e,e=r}return i(e)}if(m(a))return h(e,r,a,u);if(Ge(a))return g(e,r,a,u);Yt(e,a)}if("string"==typeof a||"number"==typeof a)return a=""+a,null!==r&&6===r.tag?(t(e,r.sibling),(r=l(r,a)).return=e,e=r):(t(e,r),(r=Za(a,e.mode,u)).return=e,e=r),i(e);if(void 0===a&&!o)switch(e.tag){case 1:case 0:case 11:case 15:throw Error((Je(e)||"Component")+"(...): Nothing was returned from render. This usually means a return statement is missing. Or, to render nothing, return null.")}return t(e,r)}}var Xt=qt(!0),$t=qt(!1),Gt={},Kt=$n(Gt),Jt=$n(Gt),Zt=$n(Gt);function er(e){if(e===Gt)throw Error("Expected host context to exist. This error is likely caused by a bug in React. Please file an issue.");return e}function nr(e,n){Kn(Zt,n),Kn(Jt,e),Kn(Kt,Gt),Gn(Kt),Kn(Kt,{isInAParentText:!1})}function tr(){Gn(Kt),Gn(Jt),Gn(Zt)}function rr(e){er(Zt.current);var n=er(Kt.current),t=e.type;t="AndroidTextInput"===t||"RCTMultilineTextInputView"===t||"RCTSinglelineTextInputView"===t||"RCTText"===t||"RCTVirtualText"===t,n!==(t=n.isInAParentText!==t?{isInAParentText:t}:n)&&(Kn(Jt,e),Kn(Kt,t))}function lr(e){Jt.current===e&&(Gn(Kt),Gn(Jt))}var ar=$n(0);function ir(e){for(var n=e;null!==n;){if(13===n.tag){var t=n.memoizedState;if(null!==t&&(null===t.dehydrated||Mn()||Mn()))return n}else if(19===n.tag&&void 0!==n.memoizedProps.revealOrder){if(0!=(128&n.flags))return n}else if(null!==n.child){n.child.return=n,n=n.child;continue}if(n===e)break;for(;null===n.sibling;){if(null===n.return||n.return===e)return null;n=n.return}n.sibling.return=n.return,n=n.sibling}return null}var ur=[];function or(){for(var e=0;ea))throw Error("Too many re-renders. 
React limits the number of renders to prevent an infinite loop.");a+=1,hr=pr=null,n.updateQueue=null,sr.current=Yr,e=t(r,l)}while(mr)}if(sr.current=Br,n=null!==pr&&null!==pr.next,dr=0,hr=pr=fr=null,gr=!1,n)throw Error("Rendered fewer hooks than expected. This may be caused by an accidental early return statement.");return e}function Sr(){var e={memoizedState:null,baseState:null,baseQueue:null,queue:null,next:null};return null===hr?fr.memoizedState=hr=e:hr=hr.next=e,hr}function kr(){if(null===pr){var e=fr.alternate;e=null!==e?e.memoizedState:null}else e=pr.next;var n=null===hr?fr.memoizedState:hr.next;if(null!==n)hr=n,pr=e;else{if(null===e)throw Error("Rendered more hooks than during the previous render.");e={memoizedState:(pr=e).memoizedState,baseState:pr.baseState,baseQueue:pr.baseQueue,queue:pr.queue,next:null},null===hr?fr.memoizedState=hr=e:hr=hr.next=e}return hr}function wr(e,n){return"function"==typeof n?n(e):n}function _r(e){var n=kr(),t=n.queue;if(null===t)throw Error("Should have a queue. This is likely a bug in React. Please file an issue.");t.lastRenderedReducer=e;var r=pr,l=r.baseQueue,a=t.pending;if(null!==a){if(null!==l){var i=l.next;l.next=a.next,a.next=i}r.baseQueue=l=a,t.pending=null}if(null!==l){a=l.next,r=r.baseState;var u=i=null,o=null,s=a;do{var c=s.lane;if((dr&c)===c)null!==o&&(o=o.next={lane:0,action:s.action,eagerReducer:s.eagerReducer,eagerState:s.eagerState,next:null}),r=s.eagerReducer===e?s.eagerState:e(r,s.action);else{var d={lane:c,action:s.action,eagerReducer:s.eagerReducer,eagerState:s.eagerState,next:null};null===o?(u=o=d,i=r):o=o.next=d,fr.lanes|=c,ia|=c}s=s.next}while(null!==s&&s!==a);null===o?i=r:o.next=u,ht(r,n.memoizedState)||(Zr=!0),n.memoizedState=r,n.baseState=i,n.baseQueue=o,t.lastRenderedState=r}if(null!==(e=t.interleaved)){l=e;do{a=l.lane,fr.lanes|=a,ia|=a,l=l.next}while(l!==e)}else null===l&&(t.lanes=0);return[n.memoizedState,t.dispatch]}function Tr(e){var n=kr(),t=n.queue;if(null===t)throw Error("Should have a queue. This is likely a bug in React. Please file an issue.");t.lastRenderedReducer=e;var r=t.dispatch,l=t.pending,a=n.memoizedState;if(null!==l){t.pending=null;var i=l=l.next;do{a=e(a,i.action),i=i.next}while(i!==l);ht(a,n.memoizedState)||(Zr=!0),n.memoizedState=a,null===n.baseQueue&&(n.baseState=a),t.lastRenderedState=a}return[a,r]}function xr(e,n,t){var r=n._getVersion;r=r(n._source);var l=n._workInProgressVersionPrimary;if(null!==l?e=l===r:(e=e.mutableReadLanes,(e=(dr&e)===e)&&(n._workInProgressVersionPrimary=r,ur.push(n))),e)return t(n._source);throw ur.push(n),Error("Cannot read from mutable source during the current render without tearing. This may be a bug in React. Please file an issue.")}function Pr(e,n,t,r){var l=Zl;if(null===l)throw Error("Expected a work-in-progress root. This is a bug in React. 
Please file an issue.");var a=n._getVersion,i=a(n._source),u=sr.current,o=u.useState(function(){return xr(l,n,t)}),s=o[1],c=o[0];o=hr;var d=e.memoizedState,f=d.refs,p=f.getSnapshot,h=d.source;d=d.subscribe;var g=fr;return e.memoizedState={refs:f,source:n,subscribe:r},u.useEffect(function(){f.getSnapshot=t,f.setSnapshot=s;var e=a(n._source);ht(i,e)||(e=t(n._source),ht(c,e)||(s(e),e=wa(g),l.mutableReadLanes|=e&l.pendingLanes),Cn(l,l.mutableReadLanes))},[t,n,r]),u.useEffect(function(){return r(n._source,function(){var e=f.getSnapshot,t=f.setSnapshot;try{t(e(n._source));var r=wa(g);l.mutableReadLanes|=r&l.pendingLanes}catch(e){t(function(){throw e})}})},[n,r]),ht(p,t)&&ht(h,n)&&ht(d,r)||((e={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:wr,lastRenderedState:c}).dispatch=s=jr.bind(null,fr,e),o.queue=e,o.baseQueue=null,c=xr(l,n,t),o.memoizedState=o.baseState=c),c}function Rr(e,n,t){return Pr(kr(),e,n,t)}function Er(e){var n=Sr();return"function"==typeof e&&(e=e()),n.memoizedState=n.baseState=e,e=(e=n.queue={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:wr,lastRenderedState:e}).dispatch=jr.bind(null,fr,e),[n.memoizedState,e]}function Cr(e,n,t,r){return e={tag:e,create:n,destroy:t,deps:r,next:null},null===(n=fr.updateQueue)?(n={lastEffect:null},fr.updateQueue=n,n.lastEffect=e.next=e):null===(t=n.lastEffect)?n.lastEffect=e.next=e:(r=t.next,t.next=e,e.next=r,n.lastEffect=e),e}function Nr(){return kr().memoizedState}function zr(e,n,t,r){var l=Sr();fr.flags|=e,l.memoizedState=Cr(1|n,t,void 0,void 0===r?null:r)}function Ir(e,n,t,r){var l=kr();r=void 0===r?null:r;var a=void 0;if(null!==pr){var i=pr.memoizedState;if(a=i.destroy,null!==r&&br(r,i.deps))return void(l.memoizedState=Cr(n,t,a,r))}fr.flags|=e,l.memoizedState=Cr(1|n,t,a,r)}function Lr(e,n){return zr(1049600,4,e,n)}function Ur(e,n){return Ir(1024,4,e,n)}function Mr(e,n){return Ir(4,2,e,n)}function Fr(e,n){return"function"==typeof n?(e=e(),n(e),function(){n(null)}):null!==n&&void 0!==n?(e=e(),n.current=e,function(){n.current=null}):void 0}function Dr(e,n,t){return t=null!==t&&void 0!==t?t.concat([e]):null,Ir(4,2,Fr.bind(null,n,e),t)}function Ar(){}function Qr(e,n){var t=kr();n=void 0===n?null:n;var r=t.memoizedState;return null!==r&&null!==n&&br(n,r[1])?r[0]:(t.memoizedState=[e,n],e)}function Hr(e,n){var t=kr();n=void 0===n?null:n;var r=t.memoizedState;return null!==r&&null!==n&&br(n,r[1])?r[0]:(e=e(),t.memoizedState=[e,n],e)}function Or(e,n){var t=Ln;Ln=0!==t&&4>t?t:4,e(!0);var r=cr.transition;cr.transition=1;try{e(!1),n()}finally{Ln=t,cr.transition=r}}function jr(e,n,t){var r=ka(),l=wa(e),a={lane:l,action:t,eagerReducer:null,eagerState:null,next:null},i=e.alternate;if(e===fr||null!==i&&i===fr)mr=gr=!0,null===(l=n.pending)?a.next=a:(a.next=l.next,l.next=a),n.pending=a;else{if(null!==Zl&&0!=(1&e.mode)&&0==(8&Jl)){var u=n.interleaved;null===u?(a.next=a,null===Et?Et=[n]:Et.push(n)):(a.next=u.next,u.next=a),n.interleaved=a}else null===(u=n.pending)?a.next=a:(a.next=u.next,u.next=a),n.pending=a;if(0===e.lanes&&(null===i||0===i.lanes)&&null!==(i=n.lastRenderedReducer))try{var o=n.lastRenderedState,s=i(o,t);if(a.eagerReducer=i,a.eagerState=s,ht(s,o))return}catch(e){}a=_a(e,l,r),0!=(4194240&l)&&null!==a&&(e=n.lanes,l|=e&=a.pendingLanes,n.lanes=l,Cn(a,l))}}var 
Br={readContext:Rt,useCallback:vr,useContext:vr,useEffect:vr,useImperativeHandle:vr,useLayoutEffect:vr,useMemo:vr,useReducer:vr,useRef:vr,useState:vr,useDebugValue:vr,useDeferredValue:vr,useTransition:vr,useMutableSource:vr,useOpaqueIdentifier:vr,unstable_isNewReconciler:!1},Vr={readContext:Rt,useCallback:function(e,n){return Sr().memoizedState=[e,void 0===n?null:n],e},useContext:Rt,useEffect:Lr,useImperativeHandle:function(e,n,t){return t=null!==t&&void 0!==t?t.concat([e]):null,zr(4,2,Fr.bind(null,n,e),t)},useLayoutEffect:function(e,n){return zr(4,2,e,n)},useMemo:function(e,n){var t=Sr();return n=void 0===n?null:n,e=e(),t.memoizedState=[e,n],e},useReducer:function(e,n,t){var r=Sr();return n=void 0!==t?t(n):n,r.memoizedState=r.baseState=n,e=(e=r.queue={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:e,lastRenderedState:n}).dispatch=jr.bind(null,fr,e),[r.memoizedState,e]},useRef:function(e){return e={current:e},Sr().memoizedState=e},useState:Er,useDebugValue:Ar,useDeferredValue:function(e){var n=Er(e),t=n[0],r=n[1];return Lr(function(){var n=cr.transition;cr.transition=1;try{r(e)}finally{cr.transition=n}},[e]),t},useTransition:function(){var e=Er(!1),n=e[0];return e=Or.bind(null,e[1]),Sr().memoizedState=e,[n,e]},useMutableSource:function(e,n,t){var r=Sr();return r.memoizedState={refs:{getSnapshot:n,setSnapshot:null},source:e,subscribe:t},Pr(r,e,n,t)},useOpaqueIdentifier:function(){throw Error("Not yet implemented")},unstable_isNewReconciler:!1},Wr={readContext:Rt,useCallback:Qr,useContext:Rt,useEffect:Ur,useImperativeHandle:Dr,useLayoutEffect:Mr,useMemo:Hr,useReducer:_r,useRef:Nr,useState:function(){return _r(wr)},useDebugValue:Ar,useDeferredValue:function(e){var n=_r(wr),t=n[0],r=n[1];return Ur(function(){var n=cr.transition;cr.transition=1;try{r(e)}finally{cr.transition=n}},[e]),t},useTransition:function(){return[_r(wr)[0],kr().memoizedState]},useMutableSource:Rr,useOpaqueIdentifier:function(){return _r(wr)[0]},unstable_isNewReconciler:!1},Yr={readContext:Rt,useCallback:Qr,useContext:Rt,useEffect:Ur,useImperativeHandle:Dr,useLayoutEffect:Mr,useMemo:Hr,useReducer:Tr,useRef:Nr,useState:function(){return Tr(wr)},useDebugValue:Ar,useDeferredValue:function(e){var n=Tr(wr),t=n[0],r=n[1];return Ur(function(){var n=cr.transition;cr.transition=1;try{r(e)}finally{cr.transition=n}},[e]),t},useTransition:function(){return[Tr(wr)[0],kr().memoizedState]},useMutableSource:Rr,useOpaqueIdentifier:function(){return Tr(wr)[0]},unstable_isNewReconciler:!1};function qr(e,n){return{value:e,source:n,stack:vt(n)}}if("function"!=typeof n(i[3]).ReactFiberErrorDialog.showErrorDialog)throw Error("Expected ReactFiberErrorDialog.showErrorDialog to be a function.");function Xr(e,t){try{!1!==n(i[3]).ReactFiberErrorDialog.showErrorDialog({componentStack:null!==t.stack?t.stack:"",error:t.value,errorBoundary:null!==e&&1===e.tag?e.stateNode:null})&&console.error(t.value)}catch(e){setTimeout(function(){throw e})}}var $r="function"==typeof WeakMap?WeakMap:Map;function Gr(e,n,t){(t=It(-1,t)).tag=3,t.payload={element:null};var r=n.value;return t.callback=function(){da||(da=!0,fa=r),Xr(e,n)},t}function Kr(e,n,t){(t=It(-1,t)).tag=3;var r=e.type.getDerivedStateFromError;if("function"==typeof r){var l=n.value;t.payload=function(){return Xr(e,n),r(l)}}var a=e.stateNode;return null!==a&&"function"==typeof a.componentDidCatch&&(t.callback=function(){"function"!=typeof r&&(null===pa?pa=new Set([this]):pa.add(this),Xr(e,n));var t=n.stack;this.componentDidCatch(n.value,{componentStack:null!==t?t:""})}),t}var 
Jr=ze.ReactCurrentOwner,Zr=!1;function el(e,n,t,r){n.child=null===e?$t(n,null,t,r):Xt(n,e.child,t,r)}function nl(e,n,t,r,l){t=t.render;var a=n.ref;return Pt(n,l),r=yr(e,n,t,r,a,l),null===e||Zr?(n.flags|=1,el(e,n,r,l),n.child):(n.updateQueue=e.updateQueue,n.flags&=-1029,e.lanes&=~l,_l(e,n,l))}function tl(e,n,t,r,l,a){if(null===e){var i=t.type;return"function"!=typeof i||qa(i)||void 0!==i.defaultProps||null!==t.compare||void 0!==t.defaultProps?((e=Ga(t.type,null,r,n,n.mode,a)).ref=n.ref,e.return=n,n.child=e):(n.tag=15,n.type=i,rl(e,n,i,r,l,a))}return i=e.child,0==(l&a)&&(l=i.memoizedProps,(t=null!==(t=t.compare)?t:gt)(l,r)&&e.ref===n.ref)?_l(e,n,a):(n.flags|=1,(e=$a(i,r)).ref=n.ref,e.return=n,n.child=e)}function rl(e,n,t,r,l,a){if(null!==e&>(e.memoizedProps,r)&&e.ref===n.ref){if(Zr=!1,0==(a&l))return n.lanes=e.lanes,_l(e,n,a);0!=(32768&e.flags)&&(Zr=!0)}return il(e,n,t,r,a)}function ll(e,n,t){var r=n.pendingProps,l=r.children,a=null!==e?e.memoizedState:null;if("hidden"===r.mode||"unstable-defer-without-hiding"===r.mode)if(0==(1&n.mode))n.memoizedState={baseLanes:0,cachePool:null},Kn(ra,ta),ta|=t;else{if(0==(1073741824&t))return e=null!==a?a.baseLanes|t:t,n.lanes=n.childLanes=1073741824,n.memoizedState={baseLanes:e,cachePool:null},n.updateQueue=null,Kn(ra,ta),ta|=e,null;n.memoizedState={baseLanes:0,cachePool:null},r=null!==a?a.baseLanes:t,Kn(ra,ta),ta|=r}else null!==a?(r=a.baseLanes|t,n.memoizedState=null):r=t,Kn(ra,ta),ta|=r;return el(e,n,l,t),n.child}function al(e,n){var t=n.ref;(null===e&&null!==t||null!==e&&e.ref!==t)&&(n.flags|=256)}function il(e,n,t,r,l){var a=rt(t)?nt:Zn.current;return a=tt(n,a),Pt(n,l),t=yr(e,n,t,r,a,l),null===e||Zr?(n.flags|=1,el(e,n,t,l),n.child):(n.updateQueue=e.updateQueue,n.flags&=-1029,e.lanes&=~l,_l(e,n,l))}function ul(e,n,t,r,l){if(rt(t)){var a=!0;ut(n)}else a=!1;if(Pt(n,l),null===n.stateNode)null!==e&&(e.alternate=null,n.alternate=null,n.flags|=2),jt(n,t,r),Vt(n,t,r,l),r=!0;else if(null===e){var i=n.stateNode,u=n.memoizedProps;i.props=u;var o=i.context,s=t.contextType;"object"==typeof s&&null!==s?s=Rt(s):s=tt(n,s=rt(t)?nt:Zn.current);var c=t.getDerivedStateFromProps,d="function"==typeof c||"function"==typeof i.getSnapshotBeforeUpdate;d||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof i.componentWillReceiveProps||(u!==r||o!==s)&&Bt(n,i,r,s),Ct=!1;var f=n.memoizedState;i.state=f,Ft(n,r,i,l),o=n.memoizedState,u!==r||f!==o||et.current||Ct?("function"==typeof c&&(Qt(n,t,c,r),o=n.memoizedState),(u=Ct||Ot(n,t,u,r,f,o,s))?(d||"function"!=typeof i.UNSAFE_componentWillMount&&"function"!=typeof i.componentWillMount||("function"==typeof i.componentWillMount&&i.componentWillMount(),"function"==typeof i.UNSAFE_componentWillMount&&i.UNSAFE_componentWillMount()),"function"==typeof i.componentDidMount&&(n.flags|=4)):("function"==typeof i.componentDidMount&&(n.flags|=4),n.memoizedProps=r,n.memoizedState=o),i.props=r,i.state=o,i.context=s,r=u):("function"==typeof i.componentDidMount&&(n.flags|=4),r=!1)}else{i=n.stateNode,zt(e,n),u=n.memoizedProps,s=n.type===n.elementType?u:bt(n.type,u),i.props=s,d=n.pendingProps,f=i.context,"object"==typeof(o=t.contextType)&&null!==o?o=Rt(o):o=tt(n,o=rt(t)?nt:Zn.current);var p=t.getDerivedStateFromProps;(c="function"==typeof p||"function"==typeof i.getSnapshotBeforeUpdate)||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof i.componentWillReceiveProps||(u!==d||f!==o)&&Bt(n,i,r,o),Ct=!1,f=n.memoizedState,i.state=f,Ft(n,r,i,l);var h=n.memoizedState;u!==d||f!==h||et.current||Ct?("function"==typeof 
p&&(Qt(n,t,p,r),h=n.memoizedState),(s=Ct||Ot(n,t,s,r,f,h,o)||!1)?(c||"function"!=typeof i.UNSAFE_componentWillUpdate&&"function"!=typeof i.componentWillUpdate||("function"==typeof i.componentWillUpdate&&i.componentWillUpdate(r,h,o),"function"==typeof i.UNSAFE_componentWillUpdate&&i.UNSAFE_componentWillUpdate(r,h,o)),"function"==typeof i.componentDidUpdate&&(n.flags|=4),"function"==typeof i.getSnapshotBeforeUpdate&&(n.flags|=512)):("function"!=typeof i.componentDidUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=512),n.memoizedProps=r,n.memoizedState=h),i.props=r,i.state=h,i.context=o,r=s):("function"!=typeof i.componentDidUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=512),r=!1)}return ol(e,n,t,r,a,l)}function ol(e,n,t,r,l,a){al(e,n);var i=0!=(128&n.flags);if(!r&&!i)return l&&ot(n,t,!1),_l(e,n,a);r=n.stateNode,Jr.current=n;var u=i&&"function"!=typeof t.getDerivedStateFromError?null:r.render();return n.flags|=1,null!==e&&i?(n.child=Xt(n,e.child,null,a),n.child=Xt(n,null,u,a)):el(e,n,u,a),n.memoizedState=r.state,l&&ot(n,t,!0),n.child}function sl(e){var n=e.stateNode;n.pendingContext?at(0,n.pendingContext,n.pendingContext!==n.context):n.context&&at(0,n.context,!1),nr(e,n.containerInfo)}var cl,dl,fl,pl,hl={dehydrated:null,retryLane:0};function gl(e){return{baseLanes:e,cachePool:null}}function ml(e,n,t){var r,l=n.pendingProps,a=ar.current,i=!1;return(r=0!=(128&n.flags))||(r=(null===e||null!==e.memoizedState)&&0!=(2&a)),r?(i=!0,n.flags&=-129):null!==e&&null===e.memoizedState||void 0===l.fallback||!0===l.unstable_avoidThisFallback||(a|=1),Kn(ar,1&a),null===e?(e=l.children,a=l.fallback,i?(e=vl(n,e,a,t),n.child.memoizedState=gl(t),n.memoizedState=hl,e):"number"==typeof l.unstable_expectedLoadTime?(e=vl(n,e,a,t),n.child.memoizedState=gl(t),n.memoizedState=hl,n.lanes=4194304,e):((t=Ja({mode:"visible",children:e},n.mode,t,null)).return=n,n.child=t)):(e.memoizedState,i?(l=yl(e,n,l.children,l.fallback,t),i=n.child,a=e.child.memoizedState,i.memoizedState=null===a?gl(t):{baseLanes:a.baseLanes|t,cachePool:null},i.childLanes=e.childLanes&~t,n.memoizedState=hl,l):(t=bl(e,n,l.children,t),n.memoizedState=null,t))}function vl(e,n,t,r){var l=e.mode,a=e.child;return n={mode:"hidden",children:n},0==(1&l)&&null!==a?(a.childLanes=0,a.pendingProps=n):a=Ja(n,l,0,null),t=Ka(t,l,r,null),a.return=e,t.return=e,a.sibling=t,e.child=a,t}function bl(e,n,t,r){var l=e.child;return e=l.sibling,t=$a(l,{mode:"visible",children:t}),0==(1&n.mode)&&(t.lanes=r),t.return=n,t.sibling=null,null!==e&&(null===(r=n.deletions)?(n.deletions=[e],n.flags|=16):r.push(e)),n.child=t}function yl(e,n,t,r,l){var a=n.mode,i=(e=e.child).sibling,u={mode:"hidden",children:t};return 0==(1&a)&&n.child!==e?((t=n.child).childLanes=0,t.pendingProps=u,n.deletions=null):(t=$a(e,u)).subtreeFlags=1835008&e.subtreeFlags,null!==i?r=$a(i,r):(r=Ka(r,a,l,null)).flags|=2,r.return=n,t.return=n,t.sibling=r,n.child=t,r}function Sl(e,n){e.lanes|=n;var t=e.alternate;null!==t&&(t.lanes|=n),xt(e.return,n)}function kl(e,n,t,r,l){var a=e.memoizedState;null===a?e.memoizedState={isBackwards:n,rendering:null,renderingStartTime:0,last:r,tail:t,tailMode:l}:(a.isBackwards=n,a.rendering=null,a.renderingStartTime=0,a.last=r,a.tail=t,a.tailMode=l)}function wl(e,n,t){var 
r=n.pendingProps,l=r.revealOrder,a=r.tail;if(el(e,n,r.children,t),0!=(2&(r=ar.current)))r=1&r|2,n.flags|=128;else{if(null!==e&&0!=(128&e.flags))e:for(e=n.child;null!==e;){if(13===e.tag)null!==e.memoizedState&&Sl(e,t);else if(19===e.tag)Sl(e,t);else if(null!==e.child){e.child.return=e,e=e.child;continue}if(e===n)break e;for(;null===e.sibling;){if(null===e.return||e.return===n)break e;e=e.return}e.sibling.return=e.return,e=e.sibling}r&=1}if(Kn(ar,r),0==(1&n.mode))n.memoizedState=null;else switch(l){case"forwards":for(t=n.child,l=null;null!==t;)null!==(e=t.alternate)&&null===ir(e)&&(l=t),t=t.sibling;null===(t=l)?(l=n.child,n.child=null):(l=t.sibling,t.sibling=null),kl(n,!1,l,t,a);break;case"backwards":for(t=null,l=n.child,n.child=null;null!==l;){if(null!==(e=l.alternate)&&null===ir(e)){n.child=l;break}e=l.sibling,l.sibling=t,t=l,l=e}kl(n,!0,t,null,a);break;case"together":kl(n,!1,null,null,void 0);break;default:n.memoizedState=null}return n.child}function _l(e,n,t){if(null!==e&&(n.dependencies=e.dependencies),ia|=n.lanes,0==(t&n.childLanes))return null;if(null!==e&&n.child!==e.child)throw Error("Resuming work not yet implemented.");if(null!==n.child){for(t=$a(e=n.child,e.pendingProps),n.child=t,t.return=n;null!==e.sibling;)e=e.sibling,(t=t.sibling=$a(e,e.pendingProps)).return=n;t.sibling=null}return n.child}function Tl(e,n){switch(e.tailMode){case"hidden":n=e.tail;for(var t=null;null!==n;)null!==n.alternate&&(t=n),n=n.sibling;null===t?e.tail=null:t.sibling=null;break;case"collapsed":t=e.tail;for(var r=null;null!==t;)null!==t.alternate&&(r=t),t=t.sibling;null===r?n||null===e.tail?e.tail=null:e.tail.sibling=null:r.sibling=null}}function xl(e){var n=null!==e.alternate&&e.alternate.child===e.child,t=0,r=0;if(n)for(var l=e.child;null!==l;)t|=l.lanes|l.childLanes,r|=1835008&l.subtreeFlags,r|=1835008&l.flags,l.return=e,l=l.sibling;else for(l=e.child;null!==l;)t|=l.lanes|l.childLanes,r|=l.subtreeFlags,r|=l.flags,l.return=e,l=l.sibling;return e.subtreeFlags|=r,e.childLanes=t,n}function Pl(e,t,r){var l=t.pendingProps;switch(t.tag){case 2:case 16:case 15:case 0:case 11:case 7:case 8:case 12:case 9:case 14:return xl(t),null;case 1:return rt(t.type)&<(),xl(t),null;case 3:return l=t.stateNode,tr(),Gn(et),Gn(Zn),or(),l.pendingContext&&(l.context=l.pendingContext,l.pendingContext=null),null!==e&&null!==e.child||l.hydrate||(t.flags|=512),dl(e,t),xl(t),null;case 5:lr(t),r=er(Zt.current);var a=t.type;if(null!==e&&null!=t.stateNode)fl(e,t,a,l,r),e.ref!==t.ref&&(t.flags|=256);else{if(!l){if(null===t.stateNode)throw Error("We must have new props for new mounts. This error is likely caused by a bug in React. Please file an issue.");return xl(t),null}er(Kt.current),e=Qn(),a=Fn(a);var u=hn(null,ln,l,a.validAttributes);n(i[3]).UIManager.createView(e,a.uiViewClassName,r,u),r=new mn(e,a,t),ke.set(e,t),we.set(e,l),cl(r,t,!1,!1),t.stateNode=r,On(r)&&(t.flags|=4),null!==t.ref&&(t.flags|=256)}return xl(t),null;case 6:if(e&&null!=t.stateNode)pl(e,t,e.memoizedProps,l);else{if("string"!=typeof l&&null===t.stateNode)throw Error("We must have new props for new mounts. This error is likely caused by a bug in React. 
Please file an issue.");if(e=er(Zt.current),!er(Kt.current).isInAParentText)throw Error("Text strings must be rendered within a component.");r=Qn(),n(i[3]).UIManager.createView(r,"RCTRawText",e,{text:l}),ke.set(r,t),t.stateNode=r}return xl(t),null;case 13:return Gn(ar),l=t.memoizedState,0!=(128&t.flags)?(t.lanes=r,t):(l=null!==l,r=!1,null!==e&&(r=null!==e.memoizedState),l&&!r&&0!=(1&t.mode)&&(null===e&&!0!==t.memoizedProps.unstable_avoidThisFallback||0!=(1&ar.current)?0===la&&(la=3):(0!==la&&3!==la||(la=4),null===Zl||0==(268435455&ia)&&0==(268435455&ua)||Ra(Zl,na))),(l||r)&&(t.flags|=4),xl(t),null);case 4:return tr(),dl(e,t),xl(t),null;case 10:return Tt(t.type._context),xl(t),null;case 17:return rt(t.type)&<(),xl(t),null;case 19:if(Gn(ar),null===(a=t.memoizedState))return xl(t),null;if(l=0!=(128&t.flags),null===(u=a.rendering))if(l)Tl(a,!1);else{if(0!==la||null!==e&&0!=(128&e.flags))for(e=t.child;null!==e;){if(null!==(u=ir(e))){for(t.flags|=128,Tl(a,!1),null!==(e=u.updateQueue)&&(t.updateQueue=e,t.flags|=4),t.subtreeFlags=0,e=r,l=t.child;null!==l;)a=e,(r=l).flags&=1835010,null===(u=r.alternate)?(r.childLanes=0,r.lanes=a,r.child=null,r.subtreeFlags=0,r.memoizedProps=null,r.memoizedState=null,r.updateQueue=null,r.dependencies=null,r.stateNode=null):(r.childLanes=u.childLanes,r.lanes=u.lanes,r.child=u.child,r.subtreeFlags=0,r.deletions=null,r.memoizedProps=u.memoizedProps,r.memoizedState=u.memoizedState,r.updateQueue=u.updateQueue,r.type=u.type,a=u.dependencies,r.dependencies=null===a?null:{lanes:a.lanes,firstContext:a.firstContext}),l=l.sibling;return Kn(ar,1&ar.current|2),t.child}e=e.sibling}null!==a.tail&&n(i[4]).unstable_now()>ca&&(t.flags|=128,l=!0,Tl(a,!1),t.lanes=4194304)}else{if(!l)if(null!==(e=ir(u))){if(t.flags|=128,l=!0,null!==(e=e.updateQueue)&&(t.updateQueue=e,t.flags|=4),Tl(a,!0),null===a.tail&&"hidden"===a.tailMode&&!u.alternate)return xl(t),null}else 2*n(i[4]).unstable_now()-a.renderingStartTime>ca&&1073741824!==r&&(t.flags|=128,l=!0,Tl(a,!1),t.lanes=4194304);a.isBackwards?(u.sibling=t.child,t.child=u):(null!==(e=a.last)?e.sibling=u:t.child=u,a.last=u)}return null!==a.tail?(t=a.tail,a.rendering=t,a.tail=t.sibling,a.renderingStartTime=n(i[4]).unstable_now(),t.sibling=null,e=ar.current,Kn(ar,l?1&e|2:1&e),t):(xl(t),null);case 22:case 23:return Ca(),r=null!==t.memoizedState,null!==e&&null!==e.memoizedState!==r&&"unstable-defer-without-hiding"!==l.mode&&(t.flags|=4),r&&0==(1073741824&ta)&&0!=(1&t.mode)||xl(t),null}throw Error("Unknown unit of work tag ("+t.tag+"). This error is likely caused by a bug in React. Please file an issue.")}function Rl(e){switch(e.tag){case 1:rt(e.type)&<();var n=e.flags;return 16384&n?(e.flags=-16385&n|128,e):null;case 3:if(tr(),Gn(et),Gn(Zn),or(),0!=(128&(n=e.flags)))throw Error("The root failed to unmount after an error. This is likely a bug in React. 
Please file an issue.");return e.flags=-16385&n|128,e;case 5:return lr(e),null;case 13:return Gn(ar),16384&(n=e.flags)?(e.flags=-16385&n|128,e):null;case 19:return Gn(ar),null;case 4:return tr(),null;case 10:return Tt(e.type._context),null;case 22:case 23:return Ca(),null;case 24:default:return null}}cl=function(e,n){for(var t=n.child;null!==t;){if(5===t.tag||6===t.tag)e._children.push(t.stateNode);else if(4!==t.tag&&null!==t.child){t.child.return=t,t=t.child;continue}if(t===n)break;for(;null===t.sibling;){if(null===t.return||t.return===n)return;t=t.return}t.sibling.return=t.return,t=t.sibling}},dl=function(){},fl=function(e,n,t,r){e.memoizedProps!==r&&(er(Kt.current),n.updateQueue=Dn)&&(n.flags|=4)},pl=function(e,n,t,r){t!==r&&(n.flags|=4)};var El="function"==typeof WeakSet?WeakSet:Set,Cl=null;function Nl(e,n){var t=e.ref;if(null!==t)if("function"==typeof t)try{t(null)}catch(t){ja(e,n,t)}else t.current=null}var zl=!1;function Il(e,n){for(Cl=n;null!==Cl;)if(n=(e=Cl).child,0!=(516&e.subtreeFlags)&&null!==n)n.return=e,Cl=n;else for(;null!==Cl;){e=Cl;try{var t=e.alternate;if(0!=(512&e.flags))switch(e.tag){case 0:case 11:case 15:break;case 1:if(null!==t){var r=t.memoizedProps,l=t.memoizedState,a=e.stateNode,i=a.getSnapshotBeforeUpdate(e.elementType===e.type?r:bt(e.type,r),l);a.__reactInternalSnapshotBeforeUpdate=i}break;case 3:break;case 5:case 6:case 4:case 17:break;default:throw Error("This unit of work tag should not have side-effects. This error is likely caused by a bug in React. Please file an issue.")}}catch(n){ja(e,e.return,n)}if(null!==(n=e.sibling)){n.return=e.return,Cl=n;break}Cl=e.return}return t=zl,zl=!1,t}function Ll(e,n,t){var r=n.updateQueue;if(null!==(r=null!==r?r.lastEffect:null)){var l=r=r.next;do{if((l.tag&e)===e){var a=l.destroy;if(l.destroy=void 0,void 0!==a){var i=n,u=t;try{a()}catch(e){ja(i,u,e)}}}l=l.next}while(l!==r)}}function Ul(e,n){if(null!==(n=null!==(n=n.updateQueue)?n.lastEffect:null)){var t=n=n.next;do{if((t.tag&e)===e){var r=t.create;t.destroy=r()}t=t.next}while(t!==n)}}function Ml(e,t){for(var r=null,l=e;;){if(5===l.tag){if(null===r){r=l;var a=l.stateNode;if(t){var u=a.viewConfig,o=hn(null,ln,{style:{display:"none"}},u.validAttributes);n(i[3]).UIManager.updateView(a._nativeTag,u.uiViewClassName,o)}else{a=l.stateNode,o=l.memoizedProps,u=a.viewConfig,o=hn(null,n(i[2])({},o,{style:[o.style,{display:"none"}]}),o,u.validAttributes),n(i[3]).UIManager.updateView(a._nativeTag,u.uiViewClassName,o)}}}else if(6===l.tag){if(null===r)throw Error("Not yet implemented.")}else if((22!==l.tag&&23!==l.tag||null===l.memoizedState||l===e)&&null!==l.child){l.child.return=l,l=l.child;continue}if(l===e)break;for(;null===l.sibling;){if(null===l.return||l.return===e)return;r===l&&(r=null),l=l.return}r===l&&(r=null),l.sibling.return=l.return,l=l.sibling}}function Fl(e,n,t){if(bn&&"function"==typeof bn.onCommitFiberUnmount)try{bn.onCommitFiberUnmount(vn,n)}catch(e){}switch(n.tag){case 0:case 11:case 14:case 15:if(null!==(e=n.updateQueue)&&null!==(e=e.lastEffect)){var r=e=e.next;do{var l=r,a=l.destroy;if(l=l.tag,void 0!==a&&0!=(2&l)){l=n;var i=t;try{a()}catch(e){ja(l,i,e)}}r=r.next}while(r!==e)}break;case 1:if(Nl(n,t),"function"==typeof(e=n.stateNode).componentWillUnmount)try{e.props=n.memoizedProps,e.state=n.memoizedState,e.componentWillUnmount()}catch(e){ja(n,t,e)}break;case 5:Nl(n,t);break;case 4:jl(e,n,t)}}function Dl(e){var 
n=e.alternate;null!==n&&(e.alternate=null,Dl(n)),e.child=null,e.deletions=null,e.sibling=null,e.stateNode=null,e.return=null,e.dependencies=null,e.memoizedProps=null,e.memoizedState=null,e.pendingProps=null,e.stateNode=null,e.updateQueue=null}function Al(e){return 5===e.tag||3===e.tag||4===e.tag}function Ql(e){e:{for(var n=e.return;null!==n;){if(Al(n))break e;n=n.return}throw Error("Expected to find a host parent. This error is likely caused by a bug in React. Please file an issue.")}var t=n;switch(n=t.stateNode,t.tag){case 5:var r=!1;break;case 3:case 4:n=n.containerInfo,r=!0;break;default:throw Error("Invalid host parent fiber. This error is likely caused by a bug in React. Please file an issue.")}32&t.flags&&(t.flags&=-33);e:n:for(t=e;;){for(;null===t.sibling;){if(null===t.return||Al(t.return)){t=null;break e}t=t.return}for(t.sibling.return=t.return,t=t.sibling;5!==t.tag&&6!==t.tag&&18!==t.tag;){if(2&t.flags)continue n;if(null===t.child||4===t.tag)continue n;t.child.return=t,t=t.child}if(!(2&t.flags)){t=t.stateNode;break e}}r?Hl(e,t,n):Ol(e,t,n)}function Hl(e,t,r){var l=e.tag;if(5===l||6===l)if(e=e.stateNode,t){if("number"==typeof r)throw Error("Container does not support insertBefore operation")}else n(i[3]).UIManager.setChildren(r,["number"==typeof e?e:e._nativeTag]);else if(4!==l&&null!==(e=e.child))for(Hl(e,t,r),e=e.sibling;null!==e;)Hl(e,t,r),e=e.sibling}function Ol(e,t,r){var l=e.tag;if(5===l||6===l)if(e=e.stateNode,t){var a=(l=r._children).indexOf(e);0<=a?(l.splice(a,1),t=l.indexOf(t),l.splice(t,0,e),n(i[3]).UIManager.manageChildren(r._nativeTag,[a],[t],[],[],[])):(t=l.indexOf(t),l.splice(t,0,e),n(i[3]).UIManager.manageChildren(r._nativeTag,[],[],["number"==typeof e?e:e._nativeTag],[t],[]))}else t="number"==typeof e?e:e._nativeTag,0<=(a=(l=r._children).indexOf(e))?(l.splice(a,1),l.push(e),n(i[3]).UIManager.manageChildren(r._nativeTag,[a],[l.length-1],[],[],[])):(l.push(e),n(i[3]).UIManager.manageChildren(r._nativeTag,[],[],[t],[l.length-1],[]));else if(4!==l&&null!==(e=e.child))for(Ol(e,t,r),e=e.sibling;null!==e;)Ol(e,t,r),e=e.sibling}function jl(e,t,r){for(var l,a,u=t,o=!1;;){if(!o){o=u.return;e:for(;;){if(null===o)throw Error("Expected to find a host parent. This error is likely caused by a bug in React. 
Please file an issue.");switch(l=o.stateNode,o.tag){case 5:a=!1;break e;case 3:case 4:l=l.containerInfo,a=!0;break e}o=o.return}o=!0}if(5===u.tag||6===u.tag){e:for(var s=e,c=u,d=r,f=c;;)if(Fl(s,f,d),null!==f.child&&4!==f.tag)f.child.return=f,f=f.child;else{if(f===c)break e;for(;null===f.sibling;){if(null===f.return||f.return===c)break e;f=f.return}f.sibling.return=f.return,f=f.sibling}a?(s=l,Hn(u.stateNode),n(i[3]).UIManager.manageChildren(s,[],[],[],[],[0])):(s=l,Hn(d=u.stateNode),d=(c=s._children).indexOf(d),c.splice(d,1),n(i[3]).UIManager.manageChildren(s._nativeTag,[],[],[],[],[d]))}else if(4===u.tag){if(null!==u.child){l=u.stateNode.containerInfo,a=!0,u.child.return=u,u=u.child;continue}}else if(Fl(e,u,r),null!==u.child){u.child.return=u,u=u.child;continue}if(u===t)break;for(;null===u.sibling;){if(null===u.return||u.return===t)return;4===(u=u.return).tag&&(o=!1)}u.sibling.return=u.return,u=u.sibling}}function Bl(e,t){switch(t.tag){case 0:case 11:case 14:case 15:return void Ll(3,t,t.return);case 1:return;case 5:var r=t.stateNode;if(null!=r){var l=t.memoizedProps;e=null!==e?e.memoizedProps:l;var a=t.updateQueue;t.updateQueue=null,null!==a&&(t=r.viewConfig,we.set(r._nativeTag,l),null!=(l=hn(null,e,l,t.validAttributes))&&n(i[3]).UIManager.updateView(r._nativeTag,t.uiViewClassName,l))}return;case 6:if(null===t.stateNode)throw Error("This should have a text node initialized. This error is likely caused by a bug in React. Please file an issue.");return void n(i[3]).UIManager.updateView(t.stateNode,"RCTRawText",{text:t.memoizedProps});case 3:case 12:return;case 13:return null!==t.memoizedState&&(sa=n(i[4]).unstable_now(),Ml(t.child,!0)),void Vl(t);case 19:return void Vl(t);case 17:return;case 22:case 23:return void Ml(t,null!==t.memoizedState)}throw Error("This unit of work tag should not have side-effects. This error is likely caused by a bug in React. Please file an issue.")}function Vl(e){var n=e.updateQueue;if(null!==n){e.updateQueue=null;var t=e.stateNode;null===t&&(t=e.stateNode=new El),n.forEach(function(n){var r=Va.bind(null,e,n);t.has(n)||(t.add(n),n.then(r,r))})}}function Wl(e,n){for(Cl=n;null!==Cl;){var t=(n=Cl).deletions;if(null!==t)for(var r=0;ra&&(a=o),l&=~u}if(l=a,10<(l=(120>(l=n(i[4]).unstable_now()-l)?120:480>l?480:1080>l?1080:1920>l?1920:3e3>l?3e3:4320>l?4320:1960*Xl(l/1960))-l)){e.timeoutHandle=jn(Aa.bind(null,e),l);break}Aa(e);break;case 5:Aa(e);break;default:throw Error("Unknown root exit status.")}}return xa(e,n(i[4]).unstable_now()),e.callbackNode===r?Pa.bind(null,e):null}function Ra(e,n){for(n&=~oa,n&=~ua,e.suspendedLanes|=n,e.pingedLanes&=~n,e=e.expirationTimes;0 component higher in the tree to provide a loading indicator or placeholder to display.")}5!==la&&(la=2),o=qr(o,u),p=i;do{switch(p.tag){case 3:a=o,p.flags|=16384,n&=-n,p.lanes|=n,Mt(p,Gr(p,a,n));break e;case 1:a=o;var w=p.type,_=p.stateNode;if(0==(128&p.flags)&&("function"==typeof w.getDerivedStateFromError||null!==_&&"function"==typeof _.componentDidCatch&&(null===pa||!pa.has(_)))){p.flags|=16384,n&=-n,p.lanes|=n,Mt(p,Kr(p,a,n));break e}}p=p.return}while(null!==p)}Da(t)}catch(e){n=e,ea===t&&null!==t&&(ea=t=t.return);continue}break}}function Ia(){var e=$l.current;return $l.current=Br,null===e?Br:e}function La(e,n){var t=Jl;Jl|=8;var r=Ia();for(Zl===e&&na===n||Na(e,n);;)try{Ua();break}catch(n){za(e,n)}if(_t(),Jl=t,$l.current=r,null!==ea)throw Error("Cannot commit an incomplete root. This error is likely caused by a bug in React. 
Please file an issue.");return Zl=null,na=0,la}function Ua(){for(;null!==ea;)Fa(ea)}function Ma(){for(;null!==ea&&!n(i[4]).unstable_shouldYield();)Fa(ea)}function Fa(e){var n=ql(e.alternate,e,ta);e.memoizedProps=e.pendingProps,null===n?Da(e):ea=n,Gl.current=null}function Da(e){var n=e;do{var t=n.alternate;if(e=n.return,0==(8192&n.flags)){if(null!==(t=Pl(t,n,ta)))return void(ea=t)}else{if(null!==(t=Rl(n)))return t.flags&=8191,void(ea=t);null!==e&&(e.flags|=8192,e.subtreeFlags=0,e.deletions=null)}if(null!==(n=n.sibling))return void(ea=n);ea=n=e}while(null!==n);0===la&&(la=5)}function Aa(e){var n=Ln,t=Kl.transition;try{Kl.transition=0,Ln=1,Qa(e,n)}finally{Kl.transition=t,Ln=n}return null}function Qa(e,t){do{Ha()}while(null!==ga);if(0!=(24&Jl))throw Error("Should not already be working.");var r=e.finishedWork,l=e.finishedLanes;if(null===r)return null;if(e.finishedWork=null,e.finishedLanes=0,r===e.current)throw Error("Cannot commit the same tree as before. This error is likely caused by a bug in React. Please file an issue.");e.callbackNode=null,e.callbackPriority=0;var a=r.lanes|r.childLanes;if(En(e,a),e===Zl&&(ea=Zl=null,na=0),0==(1040&r.subtreeFlags)&&0==(1040&r.flags)||ha||(ha=!0,n(i[4]).unstable_scheduleCallback(n(i[4]).unstable_NormalPriority,function(){return Ha(),null})),a=0!=(8054&r.flags),0!=(8054&r.subtreeFlags)||a){a=Kl.transition,Kl.transition=0;var u=Ln;Ln=1;var o=Jl;Jl|=16,Gl.current=null,Il(e,r),Wl(e,r),e.current=r,Yl(r),n(i[4]).unstable_requestPaint(),Jl=o,Ln=u,Kl.transition=a}else e.current=r;if(ha&&(ha=!1,ga=e,ma=l),0===(a=e.pendingLanes)&&(pa=null),0!=(1&a)?e===ba?va++:(va=0,ba=e):va=0,yn(r.stateNode),xa(e,n(i[4]).unstable_now()),da)throw da=!1,e=fa,fa=null,e;return 0!=(4&Jl)?null:(0!=(1&ma)&&0!==e.tag&&Ha(),ft(),null)}function Ha(){if(null!==ga){var e=Un(ma),n=Kl.transition,t=Ln;try{if(Kl.transition=0,Ln=16>e?16:e,null===ga)var r=!1;else{if(e=ga,ga=null,ma=0,0!=(24&Jl))throw Error("Cannot flush passive effects while already rendering.");var l=Jl;for(Jl|=16,Cl=e.current;null!==Cl;){var a=Cl,i=a.child;if(0!=(16&Cl.flags)){var u=a.deletions;if(null!==u){for(var o=0;on(i[4]).unstable_now()-sa?Na(e,0):oa|=r),xa(e,t)}function Va(e,n){var t=e.stateNode;null!==t&&t.delete(n),0===(n=0)&&(0==(1&e.mode)?n=1:(n=kn,0==(130023424&(kn<<=1))&&(kn=4194304))),t=ka(),null!==(e=Ta(e,n))&&(Rn(e,n,t),xa(e,t))}function Wa(e,n,t,r){this.tag=e,this.key=t,this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null,this.index=0,this.ref=null,this.pendingProps=n,this.dependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null,this.mode=r,this.subtreeFlags=this.flags=0,this.deletions=null,this.childLanes=this.lanes=0,this.alternate=null}function Ya(e,n,t,r){return new Wa(e,n,t,r)}function qa(e){return!(!(e=e.prototype)||!e.isReactComponent)}function Xa(e){if("function"==typeof e)return qa(e)?1:0;if(void 0!==e&&null!==e){if((e=e.$$typeof)===Qe)return 11;if(e===je)return 14}return 2}function $a(e,n){var t=e.alternate;return null===t?((t=Ya(e.tag,n,e.key,e.mode)).elementType=e.elementType,t.type=e.type,t.stateNode=e.stateNode,t.alternate=e,e.alternate=t):(t.pendingProps=n,t.type=e.type,t.flags=0,t.subtreeFlags=0,t.deletions=null),t.flags=1835008&e.flags,t.childLanes=e.childLanes,t.lanes=e.lanes,t.child=e.child,t.memoizedProps=e.memoizedProps,t.memoizedState=e.memoizedState,t.updateQueue=e.updateQueue,n=e.dependencies,t.dependencies=null===n?null:{lanes:n.lanes,firstContext:n.firstContext},t.sibling=e.sibling,t.index=e.index,t.ref=e.ref,t}function Ga(e,n,t,r,l,a){var 
i=2;if(r=e,"function"==typeof e)qa(e)&&(i=1);else if("string"==typeof e)i=5;else e:switch(e){case Ue:return Ka(t.children,l,a,n);case Ve:i=8,l|=4;break;case Me:i=8,l|=8;break;case Fe:return(e=Ya(12,t,n,2|l)).elementType=Fe,e.lanes=a,e;case He:return(e=Ya(13,t,n,l)).elementType=He,e.lanes=a,e;case Oe:return(e=Ya(19,t,n,l)).elementType=Oe,e.lanes=a,e;case We:return Ja(t,l,a,n);case Ye:return(e=Ya(23,t,n,l)).elementType=Ye,e.lanes=a,e;default:if("object"==typeof e&&null!==e)switch(e.$$typeof){case De:i=10;break e;case Ae:i=9;break e;case Qe:i=11;break e;case je:i=14;break e;case Be:i=16,r=null;break e}throw Error("Element type is invalid: expected a string (for built-in components) or a class/function (for composite components) but got: "+(null==e?e:typeof e)+".")}return(n=Ya(i,t,n,l)).elementType=e,n.type=r,n.lanes=a,n}function Ka(e,n,t,r){return(e=Ya(7,e,r,n)).lanes=t,e}function Ja(e,n,t,r){return(e=Ya(22,e,r,n)).elementType=We,e.lanes=t,e}function Za(e,n,t){return(e=Ya(6,e,null,n)).lanes=t,e}function ei(e,n,t){return(n=Ya(4,null!==e.children?e.children:[],e.key,n)).lanes=t,n.stateNode={containerInfo:e.containerInfo,pendingChildren:null,implementation:e.implementation},n}function ni(e,n,t){this.tag=n,this.containerInfo=e,this.finishedWork=this.pingCache=this.current=this.pendingChildren=null,this.timeoutHandle=-1,this.pendingContext=this.context=null,this.hydrate=t,this.callbackNode=null,this.callbackPriority=0,this.eventTimes=Pn(0),this.expirationTimes=Pn(-1),this.entangledLanes=this.finishedLanes=this.mutableReadLanes=this.expiredLanes=this.pingedLanes=this.suspendedLanes=this.pendingLanes=0,this.entanglements=Pn(0)}function ti(e,n,t){var r=3=t.length?{done:!0}:{done:!1,value:t[i++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function n(t,n){if(t){if("string"==typeof t)return o(t,n);var u=Object.prototype.toString.call(t).slice(8,-1);return"Object"===u&&t.constructor&&(u=t.constructor.name),"Map"===u||"Set"===u?Array.from(t):"Arguments"===u||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(u)?o(t,n):void 0}}function o(t,n){(null==n||n>t.length)&&(n=t.length);for(var o=0,u=new Array(n);o|\/|[a-z]:\\|\\\\).*?)(?::(\d+))?(?::(\d+))?\)?\s*$/i,u=/\((\S*)(?::(\d+))(?::(\d+))\)/;function t(t){var o=l.exec(t);if(!o)return null;var c=o[2]&&0===o[2].indexOf('native'),s=o[2]&&0===o[2].indexOf('eval'),v=u.exec(o[2]);return s&&null!=v&&(o[2]=v[1],o[3]=v[2],o[4]=v[3]),{file:c?null:o[2],methodName:o[1]||n,arguments:c?[o[2]]:[],lineNumber:o[3]?+o[3]:null,column:o[4]?+o[4]:null}}var o=/^\s*at (?:((?:\[object object\])?.+) )?\(?((?:file|ms-appx|https?|webpack|blob):.*?):(\d+)(?::(\d+))?\)?\s*$/i;function c(l){var u=o.exec(l);return u?{file:u[2],methodName:u[1]||n,arguments:[],lineNumber:+u[3],column:u[4]?+u[4]:null}:null}var s=/^\s*(.*?)(?:\((.*?)\))?(?:^|@)((?:file|https?|blob|chrome|webpack|resource|\[native).*?|[^@]*bundle)(?::(\d+))?(?::(\d+))?\s*$/i,v=/(\S+) line (\d+)(?: > eval line \d+)* > eval/i;function f(l){var u=s.exec(l);if(!u)return null;var t=u[3]&&u[3].indexOf(' > eval')>-1,o=v.exec(u[3]);return t&&null!=o&&(u[3]=o[1],u[4]=o[2],u[5]=null),{file:u[3],methodName:u[1]||n,arguments:u[2]?u[2].split(','):[],lineNumber:u[4]?+u[4]:null,column:u[5]?+u[5]:null}}var b=/^\s*(?:([^@]*)(?:\((.*?)\))?@)?(\S.*?):(\d+)(?::(\d+))?\s*$/i;function p(l){var u=b.exec(l);return u?{file:u[3],methodName:u[1]||n,arguments:[],lineNumber:+u[4],column:u[5]?+u[5]:null}:null}var x=/^\s*at (?:((?:\[object 
object\])?[^\\/]+(?: \[as \S+\])?) )?\(?(.*?):(\d+)(?::(\d+))?\)?\s*$/i;function h(l){var u=x.exec(l);return u?{file:u[2],methodName:u[1]||n,arguments:[],lineNumber:+u[3],column:u[4]?+u[4]:null}:null}e.parse=function(n){return n.split('\n').reduce(function(n,l){var u=t(l)||c(l)||f(l)||h(l)||p(l);return u&&n.push(u),n},[])}},64,[]); +__d(function(g,r,_i,a,m,e,d){'use strict';var t=/^ {4}at (.+?)(?: \((native)\)?| \((address at )?(.*?):(\d+):(\d+)\))$/,n=/^ {4}... skipping (\d+) frames$/;function s(s){var i=s.match(t);if(i)return{type:'FRAME',functionName:i[1],location:'native'===i[2]?{type:'NATIVE'}:'address at '===i[3]?{type:'BYTECODE',sourceUrl:i[4],line1Based:Number.parseInt(i[5],10),virtualOffset0Based:Number.parseInt(i[6],10)}:{type:'SOURCE',sourceUrl:i[4],line1Based:Number.parseInt(i[5],10),column1Based:Number.parseInt(i[6],10)}};var u=s.match(n);return u?{type:'SKIPPED',count:Number.parseInt(u[1],10)}:void 0}m.exports=function(t){for(var n=t.split(/\n/),i=[],u=-1,p=0;p-1}m.exports={isNativeFunction:t,hasNativeConstructor:function(n,o){var c=Object.getPrototypeOf(n).constructor;return c.name===o&&t(c)}}},74,[]); +__d(function(g,r,i,a,m,e,d){m.exports=r(d[0])},75,[76]); +__d(function(g,r,_i,a,m,e,d){var t=(function(t){"use strict";var n,o=Object.prototype,i=o.hasOwnProperty,c="function"==typeof Symbol?Symbol:{},u=c.iterator||"@@iterator",h=c.asyncIterator||"@@asyncIterator",f=c.toStringTag||"@@toStringTag";function l(t,n,o){return Object.defineProperty(t,n,{value:o,enumerable:!0,configurable:!0,writable:!0}),t[n]}try{l({},"")}catch(t){l=function(t,n,o){return t[n]=o}}function s(t,n,o,i){var c=n&&n.prototype instanceof b?n:b,u=Object.create(c.prototype),h=new R(i||[]);return u._invoke=F(t,o,h),u}function p(t,n,o){try{return{type:"normal",arg:t.call(n,o)}}catch(t){return{type:"throw",arg:t}}}t.wrap=s;var y="suspendedStart",v="suspendedYield",w="executing",L="completed",x={};function b(){}function E(){}function _(){}var j={};l(j,u,function(){return this});var O=Object.getPrototypeOf,k=O&&O(O(A([])));k&&k!==o&&i.call(k,u)&&(j=k);var G=_.prototype=b.prototype=Object.create(j);function N(t){["next","throw","return"].forEach(function(n){l(t,n,function(t){return this._invoke(n,t)})})}function T(t,n){function o(c,u,h,f){var l=p(t[c],t,u);if("throw"!==l.type){var s=l.arg,y=s.value;return y&&"object"==typeof y&&i.call(y,"__await")?n.resolve(y.__await).then(function(t){o("next",t,h,f)},function(t){o("throw",t,h,f)}):n.resolve(y).then(function(t){s.value=t,h(s)},function(t){return o("throw",t,h,f)})}f(l.arg)}var c;this._invoke=function(t,i){function u(){return new n(function(n,c){o(t,i,n,c)})}return c=c?c.then(u,u):u()}}function F(t,n,o){var i=y;return function(c,u){if(i===w)throw new Error("Generator is already running");if(i===L){if("throw"===c)throw u;return Y()}for(o.method=c,o.arg=u;;){var h=o.delegate;if(h){var f=P(h,o);if(f){if(f===x)continue;return f}}if("next"===o.method)o.sent=o._sent=o.arg;else if("throw"===o.method){if(i===y)throw i=L,o.arg;o.dispatchException(o.arg)}else"return"===o.method&&o.abrupt("return",o.arg);i=w;var l=p(t,n,o);if("normal"===l.type){if(i=o.done?L:v,l.arg===x)continue;return{value:l.arg,done:o.done}}"throw"===l.type&&(i=L,o.method="throw",o.arg=l.arg)}}}function P(t,o){var i=t.iterator[o.method];if(i===n){if(o.delegate=null,"throw"===o.method){if(t.iterator.return&&(o.method="return",o.arg=n,P(t,o),"throw"===o.method))return x;o.method="throw",o.arg=new TypeError("The iterator does not provide a 'throw' method")}return x}var 
c=p(i,t.iterator,o.arg);if("throw"===c.type)return o.method="throw",o.arg=c.arg,o.delegate=null,x;var u=c.arg;return u?u.done?(o[t.resultName]=u.value,o.next=t.nextLoc,"return"!==o.method&&(o.method="next",o.arg=n),o.delegate=null,x):u:(o.method="throw",o.arg=new TypeError("iterator result is not an object"),o.delegate=null,x)}function S(t){var n={tryLoc:t[0]};1 in t&&(n.catchLoc=t[1]),2 in t&&(n.finallyLoc=t[2],n.afterLoc=t[3]),this.tryEntries.push(n)}function I(t){var n=t.completion||{};n.type="normal",delete n.arg,t.completion=n}function R(t){this.tryEntries=[{tryLoc:"root"}],t.forEach(S,this),this.reset(!0)}function A(t){if(t){var o=t[u];if(o)return o.call(t);if("function"==typeof t.next)return t;if(!isNaN(t.length)){var c=-1,h=function o(){for(;++c=0;--u){var h=this.tryEntries[u],f=h.completion;if("root"===h.tryLoc)return c("end");if(h.tryLoc<=this.prev){var l=i.call(h,"catchLoc"),s=i.call(h,"finallyLoc");if(l&&s){if(this.prev=0;--o){var c=this.tryEntries[o];if(c.tryLoc<=this.prev&&i.call(c,"finallyLoc")&&this.prev=0;--n){var o=this.tryEntries[n];if(o.finallyLoc===t)return this.complete(o.completion,o.afterLoc),I(o),x}},catch:function(t){for(var n=this.tryEntries.length-1;n>=0;--n){var o=this.tryEntries[n];if(o.tryLoc===t){var i=o.completion;if("throw"===i.type){var c=i.arg;I(o)}return c}}throw new Error("illegal catch attempt")},delegateYield:function(t,o,i){return this.delegate={iterator:A(t),resultName:o,nextLoc:i},"next"===this.method&&(this.arg=n),x}},t})("object"==typeof m?m.exports:{});try{regeneratorRuntime=t}catch(n){"object"==typeof globalThis?globalThis.regeneratorRuntime=t:Function("r","regeneratorRuntime = r")(t)}},76,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var l,n,t=!0===(null==(l=g.HermesInternal)?void 0:null==l.hasPromise?void 0:l.hasPromise())&&!0===(null==(n=g.HermesInternal)?void 0:null==n.useEngineQueue?void 0:n.useEngineQueue()),u=r(d[0]).isNativeFunction(Promise)||t;if(!g.RN$Bridgeless){var o=function(l){r(d[1]).polyfillGlobal(l,function(){return r(d[2])[l]})};o('setTimeout'),o('clearTimeout'),o('setInterval'),o('clearInterval'),o('requestAnimationFrame'),o('cancelAnimationFrame'),o('requestIdleCallback'),o('cancelIdleCallback')}u?(r(d[1]).polyfillGlobal('setImmediate',function(){return r(d[3]).setImmediate}),r(d[1]).polyfillGlobal('clearImmediate',function(){return r(d[3]).clearImmediate})):g.RN$Bridgeless||(r(d[1]).polyfillGlobal('setImmediate',function(){return r(d[2]).queueReactNativeMicrotask}),r(d[1]).polyfillGlobal('clearImmediate',function(){return r(d[2]).clearReactNativeMicrotask})),t?r(d[1]).polyfillGlobal('queueMicrotask',function(){var l;return null==(l=g.HermesInternal)?void 0:l.enqueueJob}):r(d[1]).polyfillGlobal('queueMicrotask',function(){return r(d[4]).default})},77,[74,67,78,80,81]); +__d(function(g,r,_i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=16.666666666666668,n=[],i=[],l=[],o=[],c=[],u={},f=1,s=null,v=!1;function h(){var e=l.indexOf(null);return-1===e&&(e=l.length),e}function T(e,t){var o=f++,c=h();return l[c]=o,n[c]=e,i[c]=t,o}function k(e,o,c){e>f&&console.warn('Tried to call timer with ID %s but no such timer exists.',e);var u=l.indexOf(e);if(-1!==u){var v=i[u],h=n[u];if(h&&v){'setInterval'!==v&&p(u);try{'setTimeout'===v||'setInterval'===v||'queueReactNativeMicrotask'===v?h():'requestAnimationFrame'===v?h(g.performance.now()):'requestIdleCallback'===v?h({timeRemaining:function(){return Math.max(0,t-(g.performance.now()-o))},didTimeout:!!c}):console.error('Tried to call a callback with invalid type: '+v)}catch(e){s?s.push(e):s=[e]}}else 
[… remainder of the generated Metro (React Native) bundle added by this diff: minified modules ~83–174 — XMLHttpRequest, Blob, FileReader, fetch/Headers/Request/Response, WebSocket, EventTarget, AbortController, URL/URLSearchParams, Alert, JS timers, performance logging, color/transform/style processing, asset resolution, Dimensions, Scheduler, and ActivityIndicator polyfills; machine-generated, minified JavaScript not reproduced here …]
null;var n=new WeakMap,f=new WeakMap;return(o=function(t){return t?f:n})(t)}var l=u.forwardRef(function(o,l){return u.createElement(f.default.Provider,{value:!1},u.createElement(n.default,(0,t.default)({},o,{ref:l})))});l.displayName='View',m.exports=l},174,[3,29,175,176,129]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=e.Commands=void 0;var t=l(r(d[0])),n=r(d[1])(r(d[2])),o=r(d[1])(r(d[3])),u=r(d[1])(r(d[4]));l(r(d[5]));function f(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(f=function(t){return t?o:n})(t)}function l(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=f(n);if(o&&o.has(t))return o.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var p in t)if("default"!==p&&Object.prototype.hasOwnProperty.call(t,p)){var s=l?Object.getOwnPropertyDescriptor(t,p):null;s&&(s.get||s.set)?Object.defineProperty(u,p,s):u[p]=t[p]}return u.default=t,o&&o.set(t,u),u}var p=t.get('RCTView',function(){return'android'===n.default.OS?u.default:{uiViewClassName:'RCTView'}}),s=(0,o.default)({supportedCommands:['hotspotUpdate','setPressed']});e.Commands=s;var c=p;e.default=c},175,[150,3,19,137,140,129]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0]).createContext(!1);m.exports=t},176,[129]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),f=r(d[0])(r(d[3])),l=["styleAttr","indeterminate","animating"],o=r(d[4]),u=o.forwardRef(function(u,v){var s=u.styleAttr,c=void 0===s?'Normal':s,y=u.indeterminate,A=void 0===y||y,_=u.animating,p=void 0===_||_,w=(0,n.default)(u,l);return o.createElement(f.default,(0,t.default)({styleAttr:c,indeterminate:A,animating:p},w,{ref:v}))});m.exports=u},177,[3,29,103,178,129]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=(0,r(d[0])(r(d[1])).default)('AndroidProgressBar',{interfaceOnly:!0});e.default=t},178,[3,179]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2]));var t=function(t,p){var l=p&&null!=p.paperComponentName?p.paperComponentName:t;if(null!=p&&null!=p.paperComponentNameDeprecated)if(o.default.getViewManagerConfig(t))l=t;else{var u;if(null==p.paperComponentNameDeprecated||!o.default.getViewManagerConfig(p.paperComponentNameDeprecated))throw new Error("Failed to find native component for either "+t+" or "+(null!=(u=p.paperComponentNameDeprecated)?u:'(unknown)'));l=p.paperComponentNameDeprecated}return(0,n.default)(l)};e.default=t},179,[3,180,43]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(n){return r(d[0])(n,function(){return r(d[1])(n)})}},180,[181,151]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0]).ReactNativeViewConfigRegistry.register;m.exports=function(n,s){return t(n,s)}},181,[132]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=(0,r(d[0])(r(d[1])).default)('ActivityIndicatorView',{paperComponentName:'RCTActivityIndicatorView'});e.default=t},182,[3,179]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),n=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),l=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=x(e);if(o&&o.has(t))return o.get(t);var 
s={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var l=n?Object.getOwnPropertyDescriptor(t,c):null;l&&(l.get||l.set)?Object.defineProperty(s,c,l):s[c]=t[c]}s.default=t,o&&o.set(t,s);return s})(r(d[7])),u=r(d[0])(r(d[8])),f=r(d[0])(r(d[9])),p=r(d[0])(r(d[10])),b=r(d[0])(r(d[11])),y=r(d[0])(r(d[12])),h=r(d[0])(r(d[13])),v=r(d[0])(r(d[14]));function x(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,o=new WeakMap;return(x=function(t){return t?o:e})(t)}function F(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var D=(function(f){(0,s.default)(w,f);var x,D,S=(x=w,D=F(),function(){var t,e=(0,c.default)(x);if(D){var o=(0,c.default)(this).constructor;t=Reflect.construct(e,arguments,o)}else t=e.apply(this,arguments);return(0,n.default)(this,t)});function w(){return(0,e.default)(this,w),S.apply(this,arguments)}return(0,o.default)(w,[{key:"render",value:function(){var e,o,s=this.props,n=s.accessibilityLabel,c=s.color,f=s.onPress,x=s.touchSoundDisabled,F=s.title,D=s.hasTVPreferredFocus,S=s.nextFocusDown,w=s.nextFocusForward,P=s.nextFocusLeft,R=s.nextFocusRight,A=s.nextFocusUp,k=s.testID,j=s.accessible,C=s.accessibilityActions,L=s.onAccessibilityAction,M=[O.button],W=[O.text];c&&('ios'===u.default.OS?W.push({color:c}):M.push({backgroundColor:c}));var _=null!=this.props.disabled?this.props.disabled:null==(e=this.props.accessibilityState)?void 0:e.disabled,B=_!==(null==(o=this.props.accessibilityState)?void 0:o.disabled)?(0,t.default)({},this.props.accessibilityState,{disabled:_}):this.props.accessibilityState;_&&(M.push(O.buttonDisabled),W.push(O.textDisabled)),(0,v.default)('string'==typeof F,'The title prop of a Button must be a string');var E='android'===u.default.OS?F.toUpperCase():F,T='android'===u.default.OS?b.default:y.default;return l.createElement(T,{accessible:j,accessibilityActions:C,onAccessibilityAction:L,accessibilityLabel:n,accessibilityRole:"button",accessibilityState:B,hasTVPreferredFocus:D,nextFocusDown:S,nextFocusForward:w,nextFocusLeft:P,nextFocusRight:R,nextFocusUp:A,testID:k,disabled:_,onPress:f,touchSoundDisabled:x},l.createElement(h.default,{style:M},l.createElement(p.default,{style:W,disabled:_},E)))}}]),w})(l.Component),O=f.default.create({button:u.default.select({ios:{},android:{elevation:4,backgroundColor:'#2196F3',borderRadius:2}}),text:(0,t.default)({textAlign:'center',margin:8},u.default.select({ios:{color:'#007AFF',fontSize:18},android:{color:'white',fontWeight:'500'}})),buttonDisabled:u.default.select({ios:{},android:{elevation:0,backgroundColor:'#dfdfdf'}}),textDisabled:u.default.select({ios:{color:'#cdcdcd'},android:{color:'#a1a1a1'}})});m.exports=D},183,[3,29,7,8,10,12,15,129,19,173,184,209,210,174,18]); +__d(function(g,r,i,a,m,e,d){var n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),t=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),l=(v(r(d[5])),r(d[0])(r(d[6]))),u=(r(d[0])(r(d[7])),r(d[0])(r(d[8]))),p=r(d[0])(r(d[9])),f=v(r(d[10])),c=(r(d[0])(r(d[11])),["accessible","allowFontScaling","ellipsizeMode","onLongPress","onPress","onPressIn","onPressOut","onResponderGrant","onResponderMove","onResponderRelease","onResponderTerminate","onResponderTerminationRequest","onStartShouldSetResponder","pressRetentionOffset","suppressHighlighting"]);function R(n){if("function"!=typeof WeakMap)return null;var 
o=new WeakMap,t=new WeakMap;return(R=function(n){return n?t:o})(n)}function v(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var t=R(o);if(t&&t.has(n))return t.get(n);var s={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in n)if("default"!==u&&Object.prototype.hasOwnProperty.call(n,u)){var p=l?Object.getOwnPropertyDescriptor(n,u):null;p&&(p.get||p.set)?Object.defineProperty(s,u,p):s[u]=n[u]}return s.default=n,t&&t.set(n,s),s}var P=f.forwardRef(function(s,R){var v=s.accessible,P=s.allowFontScaling,b=s.ellipsizeMode,O=s.onLongPress,T=s.onPress,h=s.onPressIn,y=s.onPressOut,M=s.onResponderGrant,w=s.onResponderMove,E=s.onResponderRelease,C=s.onResponderTerminate,L=s.onResponderTerminationRequest,j=s.onStartShouldSetResponder,x=s.pressRetentionOffset,D=s.suppressHighlighting,_=(0,t.default)(s,c),q=(0,f.useState)(!1),G=(0,o.default)(q,2),H=G[0],k=G[1],z=(null!=T||null!=O||null!=j)&&!0!==_.disabled,F=S(z),I=(0,f.useMemo)(function(){return F?{disabled:!z,pressRectOffset:x,onLongPress:O,onPress:T,onPressIn:function(n){k(!D),null==h||h(n)},onPressOut:function(n){k(!1),null==y||y(n)},onResponderTerminationRequest_DEPRECATED:L,onStartShouldSetResponder_DEPRECATED:j}:null},[F,z,x,O,T,h,y,L,j,D]),N=(0,l.default)(I),W=(0,f.useMemo)(function(){return null==N?null:{onResponderGrant:function(n){N.onResponderGrant(n),null!=M&&M(n)},onResponderMove:function(n){N.onResponderMove(n),null!=w&&w(n)},onResponderRelease:function(n){N.onResponderRelease(n),null!=E&&E(n)},onResponderTerminate:function(n){N.onResponderTerminate(n),null!=C&&C(n)},onResponderTerminationRequest:N.onResponderTerminationRequest,onStartShouldSetResponder:N.onStartShouldSetResponder}},[N,M,w,E,C]),A=null==_.selectionColor?null:(0,u.default)(_.selectionColor),V=_.style,B=_.numberOfLines;return null==B||B>=0||(console.error("'numberOfLines' in must be a non-negative number, received: "+B+". 
The value will be set to 0."),B=0),(0,f.useContext)(p.default)?f.createElement(r(d[12]).NativeVirtualText,(0,n.default)({},_,W,{isHighlighted:H,numberOfLines:B,selectionColor:A,style:V,ref:R})):f.createElement(p.default.Provider,{value:!0},f.createElement(r(d[12]).NativeText,(0,n.default)({},_,W,{accessible:!1!==v,allowFontScaling:!1!==P,ellipsizeMode:null!=b?b:'tail',isHighlighted:H,numberOfLines:B,selectionColor:A,style:V,ref:R})))});function S(n){var t=(0,f.useState)(n),s=(0,o.default)(t,2),l=s[0],u=s[1];return!l&&n&&u(n),l}P.displayName='Text',P.propTypes=s.default,m.exports=P},184,[3,29,23,103,185,199,200,173,141,176,129,18,207]); +__d(function(g,r,i,a,m,e,d){'use strict';var o=r(d[0])(r(d[1]));m.exports={ellipsizeMode:r(d[2]).oneOf(['head','middle','tail','clip']),numberOfLines:r(d[2]).number,textBreakStrategy:r(d[2]).oneOf(['simple','highQuality','balanced']),onLayout:r(d[2]).func,onPress:r(d[2]).func,onLongPress:r(d[2]).func,pressRetentionOffset:r(d[3]),selectable:r(d[2]).bool,selectionColor:r(d[4]),suppressHighlighting:r(d[2]).bool,style:o,testID:r(d[2]).string,nativeID:r(d[2]).string,allowFontScaling:r(d[2]).bool,maxFontSizeMultiplier:r(d[2]).number,accessible:r(d[2]).bool,adjustsFontSizeToFit:r(d[2]).bool,minimumFontScale:r(d[2]).number,disabled:r(d[2]).bool,dataDetectorType:r(d[2]).oneOf(['phoneNumber','link','email','none','all'])}},185,[186,188,191,198,195]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(n){var t=r(d[0])(n);return function(n,o,c,u){var f=n;n[o]&&((f={})[o]=r(d[1])(n[o]));for(var v=arguments.length,p=new Array(v>4?v-4:0),s=4;s5?v-5:0),j=5;j4?s-4:0),p=4;p3?f-3:0),p=3;p0?t._pressDelayTimeout=setTimeout(function(){t._receiveSignal('DELAY',E)},n):t._receiveSignal('DELAY',E);var R=I(t._config.delayLongPress,10,500-n);t._longPressDelayTimeout=setTimeout(function(){t._handleLongPress(E)},R+n)},onResponderMove:function(E){var n=t._config.onPressMove;null!=n&&n(E);var R=t._responderRegion;if(null!=R){var _=A(E);if(null==_)return t._cancelLongPressDelayTimeout(),void t._receiveSignal('LEAVE_PRESS_RECT',E);if(null!=t._touchActivatePosition){var o=t._touchActivatePosition.pageX-_.pageX,l=t._touchActivatePosition.pageY-_.pageY;Math.hypot(o,l)>10&&t._cancelLongPressDelayTimeout()}t._isTouchWithinResponderRegion(_,R)?t._receiveSignal('ENTER_PRESS_RECT',E):(t._cancelLongPressDelayTimeout(),t._receiveSignal('LEAVE_PRESS_RECT',E))}},onResponderRelease:function(E){t._receiveSignal('RESPONDER_RELEASE',E)},onResponderTerminate:function(E){t._receiveSignal('RESPONDER_TERMINATED',E)},onResponderTerminationRequest:function(){var E=t._config.cancelable;if(null==E){var n=t._config.onResponderTerminationRequest_DEPRECATED;return null==n||n()}return E},onClick:function(E){var n=t._config,R=n.onPress,_=n.disabled;null!=R&&!0!==_&&R(E)}},_='ios'===l.default.OS||'android'===l.default.OS?null:{onMouseEnter:function(E){if((0,r(d[10]).isHoverEnabled)()){t._isHovered=!0,t._cancelHoverOutDelayTimeout();var n=t._config.onHoverIn;if(null!=n){var R=I(t._config.delayHoverIn);R>0?(E.persist(),t._hoverInDelayTimeout=setTimeout(function(){n(E)},R)):n(E)}}},onMouseLeave:function(E){if(t._isHovered){t._isHovered=!1,t._cancelHoverInDelayTimeout();var n=t._config.onHoverOut;if(null!=n){var R=I(t._config.delayHoverOut);R>0?(E.persist(),t._hoverInDelayTimeout=setTimeout(function(){n(E)},R)):n(E)}}}};return(0,E.default)({},n,R,_)}},{key:"_receiveSignal",value:function(E,t){var n,_=this._touchState,l=null==(n=S[_])?void 
0:n[E];null==this._responderID&&'RESPONDER_RELEASE'===E||((0,R.default)(null!=l&&'ERROR'!==l,'Pressability: Invalid signal `%s` for state `%s` on responder: %s',E,_,'number'==typeof this._responderID?this._responderID:'<>'),_!==l&&(null!=t.nativeEvent.timestamp&&o.default.emitEvent(function(){return{signal:E,touchDelayMs:Date.now()-t.nativeEvent.timestamp}}),this._performTransitionSideEffects(_,l,E,t),this._touchState=l))}},{key:"_performTransitionSideEffects",value:function(E,t,n,R){c(n)&&(this._touchActivatePosition=null,this._cancelLongPressDelayTimeout());var o='NOT_RESPONDER'===E&&'RESPONDER_INACTIVE_PRESS_IN'===t,u=!P(E)&&P(t);if((o||u)&&this._measureResponderRegion(),O(E)&&'LONG_PRESS_DETECTED'===n){var s=this._config.onLongPress;null!=s&&s(R)}var S=T(E),D=T(t);if(!S&&D?this._activate(R):S&&!D&&this._deactivate(R),O(E)&&'RESPONDER_RELEASE'===n){D||S||(this._activate(R),this._deactivate(R));var N=this._config,h=N.onLongPress,f=N.onPress,v=N.android_disableSound;if(null!=f)null!=h&&'RESPONDER_ACTIVE_LONG_PRESS_IN'===E&&this._shouldLongPressCancelPress()||('android'===l.default.OS&&!0!==v&&_.default.playTouchSound(),f(R))}this._cancelPressDelayTimeout()}},{key:"_activate",value:function(E){var t=this._config.onPressIn,n=A(E),R=n.pageX,_=n.pageY;this._touchActivatePosition={pageX:R,pageY:_},this._touchActivateTime=Date.now(),null!=t&&t(E)}},{key:"_deactivate",value:function(E){var t=this._config.onPressOut;if(null!=t){var n,R=I(this._config.minPressDuration,0,130),_=Date.now()-(null!=(n=this._touchActivateTime)?n:0),o=Math.max(R-_,I(this._config.delayPressOut));o>0?(E.persist(),this._pressOutDelayTimeout=setTimeout(function(){t(E)},o)):t(E)}this._touchActivateTime=null}},{key:"_measureResponderRegion",value:function(){null!=this._responderID&&('number'==typeof this._responderID?u.default.measure(this._responderID,this._measureCallback):this._responderID.measure(this._measureCallback))}},{key:"_isTouchWithinResponderRegion",value:function(E,t){var n,R,_,o,l=(0,r(d[11]).normalizeRect)(this._config.hitSlop),u=(0,r(d[11]).normalizeRect)(this._config.pressRectOffset),s=t.bottom,S=t.left,T=t.right,P=t.top;return null!=l&&(null!=l.bottom&&(s+=l.bottom),null!=l.left&&(S-=l.left),null!=l.right&&(T+=l.right),null!=l.top&&(P-=l.top)),s+=null!=(n=null==u?void 0:u.bottom)?n:D,S-=null!=(R=null==u?void 0:u.left)?R:N,T+=null!=(_=null==u?void 0:u.right)?_:h,P-=null!=(o=null==u?void 0:u.top)?o:f,E.pageX>S&&E.pageXP&&E.pageY1&&void 0!==arguments[1]?arguments[1]:0,n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0;return Math.max(t,null!=E?E:n)}e.default=v;var A=function(E){var t=E.nativeEvent,n=t.changedTouches,R=t.touches;return null!=R&&R.length>0?R[0]:null!=n&&n.length>0?n[0]:E.nativeEvent}},201,[3,29,7,8,18,202,204,19,43,129,205,206]); +__d(function(g,r,i,a,m,e,d){var u=r(d[0])(r(d[1])),o={playTouchSound:function(){u.default&&u.default.playTouchSound()}};m.exports=o},202,[3,203]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var 
p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('SoundManager');e.default=n},203,[21]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),s=new((function(){function s(){(0,t.default)(this,s),this._listeners=[]}return(0,n.default)(s,[{key:"addListener",value:function(t){this._listeners.push(t)}},{key:"removeListener",value:function(t){var n=this._listeners.indexOf(t);n>-1&&this._listeners.splice(n,1)}},{key:"emitEvent",value:function(t){if(0!==this._listeners.length){var n=t();this._listeners.forEach(function(t){return t(n)})}}}]),s})());e.default=s},204,[3,7,8]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.isHoverEnabled=function(){return n};var n=!1;if('web'===r(d[0])(r(d[1])).default.OS&&Boolean('undefined'!=typeof window&&window.document&&window.document.createElement)){var t=0,o=function(){t=Date.now(),n&&(n=!1)};document.addEventListener('touchstart',o,!0),document.addEventListener('touchmove',o,!0),document.addEventListener('mousemove',function(){n||Date.now()-t<1e3||(n=!0)},!0)}},205,[3,19]); +__d(function(g,r,i,a,m,e,d){function t(t){return{bottom:t,left:t,right:t,top:t}}Object.defineProperty(e,"__esModule",{value:!0}),e.createSquare=t,e.normalizeRect=function(n){return'number'==typeof n?t(n):n}},206,[]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.NativeVirtualText=e.NativeText=void 0;var t=r(d[0])(r(d[1])),l=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=(0,o.default)('RCTText',function(){return{validAttributes:(0,t.default)({},l.default.UIView,{isHighlighted:!0,numberOfLines:!0,ellipsizeMode:!0,allowFontScaling:!0,maxFontSizeMultiplier:!0,disabled:!0,selectable:!0,selectionColor:!0,adjustsFontSizeToFit:!0,minimumFontScale:!0,textBreakStrategy:!0,onTextLayout:!0,onInlineViewLayout:!0,dataDetectorType:!0,android_hyphenationFrequency:!0}),directEventTypes:{topTextLayout:{registrationName:'onTextLayout'},topInlineViewLayout:{registrationName:'onInlineViewLayout'}},uiViewClassName:'RCTText'}});e.NativeText=u;var s=g.RN$Bridgeless||n.default.hasViewManagerConfig('RCTVirtualText')?(0,o.default)('RCTVirtualText',function(){return{validAttributes:(0,t.default)({},l.default.UIView,{isHighlighted:!0,maxFontSizeMultiplier:!0}),uiViewClassName:'RCTVirtualText'}}):u;e.NativeVirtualText=s},207,[3,29,208,43,181]); +__d(function(g,r,i,a,m,e,d){'use strict';var s=r(d[0])(r(d[1])),t={pointerEvents:!0,accessible:!0,accessibilityActions:!0,accessibilityLabel:!0,accessibilityLiveRegion:!0,accessibilityRole:!0,accessibilityState:!0,accessibilityValue:!0,accessibilityHint:!0,importantForAccessibility:!0,nativeID:!0,testID:!0,renderToHardwareTextureAndroid:!0,shouldRasterizeIOS:!0,onLayout:!0,onAccessibilityAction:!0,onAccessibilityTap:!0,onMagicTap:!0,onAccessibilityEscape:!0,collapsable:!0,needsOffscreenAlphaCompositing:!0,style:r(d[0])(r(d[2])).default},c={UIView:t,RCTView:(0,s.default)({},t,{removeClippedSubviews:!0})};m.exports=c},208,[3,29,152]); +__d(function(g,r,i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),s=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),n=r(d[0])(r(d[5])),l=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),p=r(d[0])(r(d[8])),u=r(d[0])(r(d[9])),f=r(d[0])(r(d[10])),h=(r(d[0])(r(d[11])),r(d[0])(r(d[12]))),b=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var 
s=P(t);if(s&&s.has(e))return s.get(e);var o={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in e)if("default"!==l&&Object.prototype.hasOwnProperty.call(e,l)){var c=n?Object.getOwnPropertyDescriptor(e,l):null;c&&(c.get||c.set)?Object.defineProperty(o,l,c):o[l]=e[l]}o.default=e,s&&s.set(e,o);return o})(r(d[13])),y=r(d[0])(r(d[14])),v=["onBlur","onFocus"];function P(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,s=new WeakMap;return(P=function(e){return e?s:t})(e)}function F(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var O=(function(h){(0,n.default)(R,h);var y,P,O=(y=R,P=F(),function(){var e,t=(0,c.default)(y);if(P){var s=(0,c.default)(this).constructor;e=Reflect.construct(t,arguments,s)}else e=t.apply(this,arguments);return(0,l.default)(this,e)});function R(){var e;(0,s.default)(this,R);for(var t=arguments.length,o=new Array(t),n=0;n=23};var S='android'===f.default.OS?function(e,t){return t&&O.canUseNativeForeground()?{nativeForegroundAndroid:e}:{nativeBackgroundAndroid:e}}:function(e,t){return null};O.displayName='TouchableNativeFeedback',m.exports=O},209,[3,29,103,7,8,10,12,15,201,49,19,174,141,129,18,175]); +__d(function(g,r,i,a,m,_e,d){var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),s=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),n=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),p=r(d[0])(r(d[8])),u=r(d[0])(r(d[9])),f=r(d[0])(r(d[10])),y=r(d[0])(r(d[11])),h=r(d[0])(r(d[12])),b=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var s=P(e);if(s&&s.has(t))return s.get(t);var o={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var l=n?Object.getOwnPropertyDescriptor(t,c):null;l&&(l.get||l.set)?Object.defineProperty(o,c,l):o[c]=t[c]}o.default=t,s&&s.set(t,o);return o})(r(d[13])),v=["onBlur","onFocus"];function P(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,s=new WeakMap;return(P=function(t){return t?s:e})(t)}function O(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var F=(function(P){(0,n.default)(w,P);var F,_,R=(F=w,_=O(),function(){var t,e=(0,l.default)(F);if(_){var s=(0,l.default)(this).constructor;t=Reflect.construct(e,arguments,s)}else t=e.apply(this,arguments);return(0,c.default)(this,t)});function w(){var t;(0,s.default)(this,w);for(var e=arguments.length,o=new Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{}).iterations;return t},event:r(d[5]).event,createAnimatedComponent:r(d[6]),attachNativeEvent:r(d[7]).attachNativeEvent,forkEvent:r(d[5]).forkEvent,unforkEvent:r(d[5]).unforkEvent,Event:r(d[7]).AnimatedEvent}},212,[29,213,222,217,219,223,239,238]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}function e(t){var e=new Set;!(function t(n){'function'==typeof n.update?e.add(n):n.__getChildren().forEach(t)})(t),e.forEach(function(t){return 
t.update()})}var n=(function(n){r(d[3])(_,n);var s,u,o=(s=_,u=t(),function(){var t,e=r(d[0])(s);if(u){var n=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t){var e;if(r(d[4])(this,_),e=o.call(this),'number'!=typeof t)throw new Error('AnimatedValue: Attempting to set value to undefined');return e._startingValue=e._value=t,e._offset=0,e._animation=null,e}return r(d[5])(_,[{key:"__detach",value:function(){var t=this;this.__isNative&&r(d[2]).API.getValue(this.__getNativeTag(),function(e){t._value=e}),this.stopAnimation(),r(d[6])(r(d[0])(_.prototype),"__detach",this).call(this)}},{key:"__getValue",value:function(){return this._value+this._offset}},{key:"setValue",value:function(t){var e,n,s=this;this._animation&&(this._animation.stop(),this._animation=null),this._updateValue(t,!this.__isNative),this.__isNative&&(e=this.__getNativeTag().toString(),n=function(){r(d[2]).API.setAnimatedNodeValue(s.__getNativeTag(),t)},r(d[2]).API.setWaitingForIdentifier(e),n(),r(d[2]).API.unsetWaitingForIdentifier(e))}},{key:"setOffset",value:function(t){this._offset=t,this.__isNative&&r(d[2]).API.setAnimatedNodeOffset(this.__getNativeTag(),t)}},{key:"flattenOffset",value:function(){this._value+=this._offset,this._offset=0,this.__isNative&&r(d[2]).API.flattenAnimatedNodeOffset(this.__getNativeTag())}},{key:"extractOffset",value:function(){this._offset+=this._value,this._value=0,this.__isNative&&r(d[2]).API.extractAnimatedNodeOffset(this.__getNativeTag())}},{key:"stopAnimation",value:function(t){this.stopTracking(),this._animation&&this._animation.stop(),this._animation=null,t&&t(this.__getValue())}},{key:"resetAnimation",value:function(t){this.stopAnimation(t),this._value=this._startingValue,this.__isNative&&r(d[2]).API.setAnimatedNodeValue(this.__getNativeTag(),this._startingValue)}},{key:"_onAnimatedValueUpdateReceived",value:function(t){this._updateValue(t,!1)}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"animate",value:function(t,e){var n=this,s=null;t.__isInteraction&&(s=r(d[8]).createInteractionHandle());var u=this._animation;this._animation&&this._animation.stop(),this._animation=t,t.start(this._value,function(t){n._updateValue(t,!0)},function(t){n._animation=null,null!==s&&r(d[8]).clearInteractionHandle(s),e&&e(t)},u,this)}},{key:"stopTracking",value:function(){this._tracking&&this._tracking.__detach(),this._tracking=null}},{key:"track",value:function(t){this.stopTracking(),this._tracking=t}},{key:"_updateValue",value:function(t,n){if(void 0===t)throw new Error('AnimatedValue: Attempting to set value to undefined');this._value=t,n&&e(this),r(d[6])(r(d[0])(_.prototype),"__callListeners",this).call(this,this.__getValue())}},{key:"__getNativeConfig",value:function(){return{type:'value',value:this._value,offset:this._offset}}}]),_})(r(d[9]));m.exports=n},213,[15,12,214,10,7,8,84,217,220,218]); +__d(function(g,r,i,a,m,e,d){var t,n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),s=r(d[0])(r(d[5])),f='ios'===l.default.OS&&g.RN$Bridgeless?o.default:n.default,c=1,v=1,p=new Set,N=!1,b=[],A={getValue:function(t,n){(0,s.default)(f,'Native animated module is not available'),A.queueOperation(function(){f.getValue(t,n)})},setWaitingForIdentifier:function(t){p.add(t),N=!0},unsetWaitingForIdentifier:function(t){p.delete(t),0===p.size&&(N=!1,A.disableQueue())},disableQueue:function(){(0,s.default)(f,'Native animated module is not 
available'),'android'===l.default.OS&&f.startOperationBatch();for(var t=0,n=b.length;tn){if('identity'===u)return c;'clamp'===u&&(c=n)}return a===r?a:e===n?t<=e?a:r:(e===-1/0?c=-c:n===1/0?c-=e:c=(c-e)/(n-e),c=i(c),a===-1/0?c=-c:r===1/0?c+=a:c=c*(r-a)+a,c)}function r(t){var e=_r(d[3])(t);return null===e||'number'!=typeof e?t:"rgba("+((4278190080&(e=e||0))>>>24)+", "+((16711680&e)>>>16)+", "+((65280&e)>>>8)+", "+(255&e)/255+")"}var i=/[+-]?(?:\d+\.?\d*|\.\d+)(?:[eE][+-]?\d+)?/g;function o(t){var e=t.outputRange;_r(d[2])(e.length>=2,'Bad output range'),u(e=e.map(r));var a=e[0].match(i).map(function(){return[]});e.forEach(function(t){t.match(i).forEach(function(t,e){a[e].push(+t)})});var o,c=e[0].match(i).map(function(e,r){return n(_r(d[4])({},t,{outputRange:a[r]}))}),l='string'==typeof(o=e[0])&&o.startsWith('rgb');return function(t){var n=0;return e[0].replace(i,function(){var e=+c[n++](t);return l&&(e=n<4?Math.round(e):Math.round(1e3*e)/1e3),String(e)})}}function u(t){for(var e=t[0].replace(i,''),n=1;n=t);++n);return n-1}function l(t){_r(d[2])(t.length>=2,'inputRange must have at least 2 elements');for(var e=1;e=t[e-1],'inputRange must be monotonically non-decreasing '+t)}function p(t,e){_r(d[2])(e.length>=2,t+' must have at least 2 elements'),_r(d[2])(2!==e.length||e[0]!==-1/0||e[1]!==1/0,t+'cannot be ]-infinity;+infinity[ '+e)}var f=(function(e){_r(d[5])(o,e);var a,r,i=(a=o,r=t(),function(){var t,e=_r(d[0])(a);if(r){var n=_r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return _r(d[1])(this,t)});function o(t,e){var a;return _r(d[6])(this,o),(a=i.call(this))._parent=t,a._config=e,a._interpolation=n(e),a}return _r(d[7])(o,[{key:"__makeNative",value:function(){this._parent.__makeNative(),_r(d[8])(_r(d[0])(o.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){var t=this._parent.__getValue();return _r(d[2])('number'==typeof t,'Cannot interpolate an input which is not a number.'),this._interpolation(t)}},{key:"interpolate",value:function(t){return new o(this,t)}},{key:"__attach",value:function(){this._parent.__addChild(this)}},{key:"__detach",value:function(){this._parent.__removeChild(this),_r(d[8])(_r(d[0])(o.prototype),"__detach",this).call(this)}},{key:"__transformDataType",value:function(t){return t.map(_r(d[9]).transformDataType)}},{key:"__getNativeConfig",value:function(){return{inputRange:this._config.inputRange,outputRange:this.__transformDataType(this._config.outputRange),extrapolateLeft:this._config.extrapolateLeft||this._config.extrapolate||'extend',extrapolateRight:this._config.extrapolateRight||this._config.extrapolate||'extend',type:'interpolation'}}}]),o})(_r(d[10]));f.__createInterpolation=n,m.exports=f},217,[15,12,18,142,29,10,7,8,84,214,218]); +__d(function(g,r,_i,a,m,_e,d){'use strict';function t(t,i){var n="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(n)return(n=n.call(t)).next.bind(n);if(Array.isArray(t)||(n=e(t))||i&&t&&"number"==typeof t.length){n&&(t=n);var o=0;return function(){return o>=t.length?{done:!0}:{done:!1,value:t[o++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function e(t,e){if(t){if("string"==typeof t)return i(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?i(t,e):void 0}}function 
i(t,e){(null==e||e>t.length)&&(e=t.length);for(var i=0,n=new Array(e);i0?setTimeout(h,0):setImmediate(h))}function h(){l=0;var f=o.size;c.forEach(function(n){return o.add(n)}),s.forEach(function(n){return o.delete(n)});var h=o.size;if(0!==f&&0===h?n.emit(t.Events.interactionComplete):0===f&&0!==h&&n.emit(t.Events.interactionStart),0===h)for(;u.hasTasksToProcess();)if(u.processNext(),p>0&&r(d[4]).getEventLoopRunningTime()>=p){v();break}c.clear(),s.clear()}m.exports=t},220,[3,5,18,221,30]); +__d(function(g,r,i,a,m,_e,d){'use strict';var e=(function(){function e(t){var u=t.onMoreTasks;r(d[0])(this,e),this._onMoreTasks=u,this._queueStack=[{tasks:[],popable:!1}]}return r(d[1])(e,[{key:"enqueue",value:function(e){this._getCurrentQueue().push(e)}},{key:"enqueueTasks",value:function(e){var t=this;e.forEach(function(e){return t.enqueue(e)})}},{key:"cancelTasks",value:function(e){this._queueStack=this._queueStack.map(function(t){return r(d[2])({},t,{tasks:t.tasks.filter(function(t){return-1===e.indexOf(t)})})}).filter(function(e,t){return e.tasks.length>0||0===t})}},{key:"hasTasksToProcess",value:function(){return this._getCurrentQueue().length>0}},{key:"processNext",value:function(){var e=this._getCurrentQueue();if(e.length){var t=e.shift();try{'object'==typeof t&&t.gen?this._genPromise(t):'object'==typeof t&&t.run?t.run():(r(d[3])('function'==typeof t,'Expected Function, SimpleTask, or PromiseTask, but got:\n'+JSON.stringify(t,null,2)),t())}catch(e){throw e.message='TaskQueue: Error with task '+(t.name||'')+': '+e.message,e}}}},{key:"_getCurrentQueue",value:function(){var e=this._queueStack.length-1,t=this._queueStack[e];return t.popable&&0===t.tasks.length&&this._queueStack.length>1?(this._queueStack.pop(),this._getCurrentQueue()):t.tasks}},{key:"_genPromise",value:function(e){var t=this;this._queueStack.push({tasks:[],popable:!1});var u=this._queueStack.length-1,s=this._queueStack[u];e.gen().then(function(){s.popable=!0,t.hasTasksToProcess()&&t._onMoreTasks()}).catch(function(t){throw t.message="TaskQueue: Error resolving Promise in task "+e.name+": "+t.message,t}).done()}}]),e})();m.exports=e},221,[7,8,29,18]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=1,n=(function(n){r(d[2])(o,n);var s,u,f=(s=o,u=t(),function(){var t,e=r(d[0])(s);if(u){var n=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function o(t){var e;r(d[3])(this,o),e=f.call(this);var n=t||{x:0,y:0};return'number'==typeof n.x&&'number'==typeof n.y?(e.x=new(r(d[4]))(n.x),e.y=new(r(d[4]))(n.y)):(r(d[5])(n.x instanceof r(d[4])&&n.y instanceof r(d[4]),"AnimatedValueXY must be initialized with an object of numbers or AnimatedValues."),e.x=n.x,e.y=n.y),e._listeners={},e}return 
r(d[6])(o,[{key:"setValue",value:function(t){this.x.setValue(t.x),this.y.setValue(t.y)}},{key:"setOffset",value:function(t){this.x.setOffset(t.x),this.y.setOffset(t.y)}},{key:"flattenOffset",value:function(){this.x.flattenOffset(),this.y.flattenOffset()}},{key:"extractOffset",value:function(){this.x.extractOffset(),this.y.extractOffset()}},{key:"__getValue",value:function(){return{x:this.x.__getValue(),y:this.y.__getValue()}}},{key:"resetAnimation",value:function(t){this.x.resetAnimation(),this.y.resetAnimation(),t&&t(this.__getValue())}},{key:"stopAnimation",value:function(t){this.x.stopAnimation(),this.y.stopAnimation(),t&&t(this.__getValue())}},{key:"addListener",value:function(t){var n=this,s=String(e++),u=function(e){e.value;t(n.__getValue())};return this._listeners[s]={x:this.x.addListener(u),y:this.y.addListener(u)},s}},{key:"removeListener",value:function(t){this.x.removeListener(this._listeners[t].x),this.y.removeListener(this._listeners[t].y),delete this._listeners[t]}},{key:"removeAllListeners",value:function(){this.x.removeAllListeners(),this.y.removeAllListeners(),this._listeners={}}},{key:"getLayout",value:function(){return{left:this.x,top:this.y}}},{key:"getTranslateTransform",value:function(){return[{translateX:this.x},{translateY:this.y}]}}]),o})(r(d[7]));m.exports=n},222,[15,12,10,7,213,18,8,218]); +__d(function(g,r,_i,_a,m,e,d){'use strict';var n=function(n,t){return n&&t.onComplete?function(){t.onComplete&&t.onComplete.apply(t,arguments),n&&n.apply(void 0,arguments)}:n||t.onComplete},t=function(n,t,i){if(n instanceof r(d[6])){var o=r(d[7])({},t),u=r(d[7])({},t);for(var s in t){var c=t[s],f=c.x,v=c.y;void 0!==f&&void 0!==v&&(o[s]=f,u[s]=v)}var p=i(n.x,o),l=i(n.y,u);return a([p,l],{stopTogether:!1})}return null},i=function i(o,a){var u=function(t,i,o){o=n(o,i);var a=t,u=i;a.stopTracking(),i.toValue instanceof r(d[8])?a.track(new(r(d[9]))(a,i.toValue,r(d[11]),u,o)):a.animate(new(r(d[11]))(u),o)};return t(o,a,i)||{start:function(n){u(o,a,n)},stop:function(){o.stopAnimation()},reset:function(){o.resetAnimation()},_startNativeLoop:function(n){var t=r(d[7])({},a,{iterations:n});u(o,t)},_isUsingNativeDriver:function(){return a.useNativeDriver||!1}}},o=function(n){var t=0;return{start:function(i){0===n.length?i&&i({finished:!0}):n[t].start(function o(a){a.finished&&++t!==n.length?n[t].start(o):i&&i(a)})},stop:function(){t1&&void 0!==arguments[1]?arguments[1]:{},i=t.iterations,o=void 0===i?-1:i,a=t.resetBeforeIteration,u=void 0===a||a,s=!1,c=0;return{start:function(t){n&&0!==o?n._isUsingNativeDriver()?n._startNativeLoop(o):(function i(){var a=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{finished:!0};s||c===o||!1===a.finished?t&&t(a):(c++,u&&n.reset(),n.start(i))})():t&&t({finished:!0})},stop:function(){s=!0,n.stop()},reset:function(){c=0,s=!1,n.reset()},_startNativeLoop:function(){throw new Error('Loops run using the native driver cannot contain Animated.loop animations')},_isUsingNativeDriver:function(){return n._isUsingNativeDriver()}}},event:function(n,t){var i=new(r(d[14]).AnimatedEvent)(n,t);return i.__isNative?i:i.__getHandler()},createAnimatedComponent:r(d[16]),attachNativeEvent:r(d[14]).attachNativeEvent,forkEvent:function(n,t){return n?n instanceof r(d[14]).AnimatedEvent?(n.__addListener(t),n):function(){'function'==typeof n&&n.apply(void 0,arguments),t.apply(void 0,arguments)}:t},unforkEvent:function(n,t){n&&n instanceof 
r(d[14]).AnimatedEvent&&n.__removeListener(t)},Event:r(d[14]).AnimatedEvent}},223,[224,225,226,227,228,229,222,29,219,230,231,234,237,213,238,217,239]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,a,_=(n=u,a=t(),function(){var t,e=r(d[0])(n);if(a){var _=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,_)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e){var n;return r(d[3])(this,u),(n=_.call(this))._a='number'==typeof t?new(r(d[4]))(t):t,n._b='number'==typeof e?new(r(d[4]))(e):e,n}return r(d[5])(u,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return this._a.__getValue()+this._b.__getValue()}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'addition',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),u})(r(d[8]));m.exports=e},224,[15,12,10,7,213,8,84,217,218]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,a,_=(n=u,a=t(),function(){var t,e=r(d[0])(n);if(a){var _=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,_)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e){var n;return r(d[3])(this,u),(n=_.call(this))._a='number'==typeof t?new(r(d[4]))(t):t,n._b='number'==typeof e?new(r(d[4]))(e):e,n}return r(d[5])(u,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return this._a.__getValue()-this._b.__getValue()}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'subtraction',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),u})(r(d[8]));m.exports=e},225,[15,12,10,7,213,8,84,217,218]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(_,e);var n,o,a=(n=_,o=t(),function(){var t,e=r(d[0])(n);if(o){var a=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,a)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t,e){var n;return r(d[3])(this,_),(n=a.call(this))._warnedAboutDivideByZero=!1,(0===e||e 
instanceof r(d[4])&&0===e.__getValue())&&console.error('Detected potential division by zero in AnimatedDivision'),n._a='number'==typeof t?new(r(d[5]))(t):t,n._b='number'==typeof e?new(r(d[5]))(e):e,n}return r(d[6])(_,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[7])(r(d[0])(_.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){var t=this._a.__getValue(),e=this._b.__getValue();return 0===e?(this._warnedAboutDivideByZero||(console.error('Detected division by zero in AnimatedDivision'),this._warnedAboutDivideByZero=!0),0):(this._warnedAboutDivideByZero=!1,t/e)}},{key:"interpolate",value:function(t){return new(r(d[8]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[7])(r(d[0])(_.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'division',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),_})(r(d[9]));m.exports=e},226,[15,12,10,7,219,213,8,84,217,218]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,a,_=(n=u,a=t(),function(){var t,e=r(d[0])(n);if(a){var _=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,_)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e){var n;return r(d[3])(this,u),(n=_.call(this))._a='number'==typeof t?new(r(d[4]))(t):t,n._b='number'==typeof e?new(r(d[4]))(e):e,n}return r(d[5])(u,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return this._a.__getValue()*this._b.__getValue()}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'multiplication',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),u})(r(d[8]));m.exports=e},227,[15,12,10,7,213,8,84,217,218]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(o,e);var u,n,a=(u=o,n=t(),function(){var t,e=r(d[0])(u);if(n){var a=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,a)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function o(t,e){var u;return r(d[3])(this,o),(u=a.call(this))._a=t,u._modulus=e,u}return r(d[4])(o,[{key:"__makeNative",value:function(){this._a.__makeNative(),r(d[5])(r(d[0])(o.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return(this._a.__getValue()%this._modulus+this._modulus)%this._modulus}},{key:"interpolate",value:function(t){return 
new(r(d[6]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),r(d[5])(r(d[0])(o.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'modulus',input:this._a.__getNativeTag(),modulus:this._modulus}}}]),o})(r(d[7]));m.exports=e},228,[15,12,10,7,8,84,217,218]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(_,e);var a,n,u=(a=_,n=t(),function(){var t,e=r(d[0])(a);if(n){var u=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,u)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t,e,a){var n;return r(d[3])(this,_),(n=u.call(this))._a=t,n._min=e,n._max=a,n._value=n._lastValue=n._a.__getValue(),n}return r(d[4])(_,[{key:"__makeNative",value:function(){this._a.__makeNative(),r(d[5])(r(d[0])(_.prototype),"__makeNative",this).call(this)}},{key:"interpolate",value:function(t){return new(r(d[6]))(this,t)}},{key:"__getValue",value:function(){var t=this._a.__getValue(),e=t-this._lastValue;return this._lastValue=t,this._value=Math.min(Math.max(this._value+e,this._min),this._max),this._value}},{key:"__attach",value:function(){this._a.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),r(d[5])(r(d[0])(_.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'diffclamp',input:this._a.__getNativeTag(),min:this._min,max:this._max}}}]),_})(r(d[7]));m.exports=e},229,[15,12,10,7,8,84,217,218]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,_,o=(n=u,_=t(),function(){var t,e=r(d[0])(n);if(_){var o=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,o)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e,n,_,s){var l;return r(d[3])(this,u),(l=o.call(this))._value=t,l._parent=e,l._animationClass=n,l._animationConfig=_,l._useNativeDriver=r(d[4]).shouldUseNativeDriver(_),l._callback=s,l.__attach(),l}return r(d[5])(u,[{key:"__makeNative",value:function(){this.__isNative=!0,this._parent.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this),this._value.__makeNative()}},{key:"__getValue",value:function(){return this._parent.__getValue()}},{key:"__attach",value:function(){this._parent.__addChild(this),this._useNativeDriver&&this.__makeNative()}},{key:"__detach",value:function(){this._parent.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"update",value:function(){this._value.animate(new this._animationClass(r(d[7])({},this._animationConfig,{toValue:this._animationConfig.toValue.__getValue()})),this._callback)}},{key:"__getNativeConfig",value:function(){var t=new this._animationClass(r(d[7])({},this._animationConfig,{toValue:void 
0})).__getNativeAnimationConfig();return{type:'tracking',animationId:r(d[4]).generateNewAnimationId(),animationConfig:t,toValue:this._parent.__getNativeTag(),value:this._value.__getNativeTag()}}}]),u})(r(d[8]));m.exports=e},230,[15,12,10,7,214,8,84,29,219]); +__d(function(g,r,i,a,_m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var s=(function(s){r(d[2])(l,s);var e,n,o=(e=l,n=t(),function(){var t,s=r(d[0])(e);if(n){var o=r(d[0])(this).constructor;t=Reflect.construct(s,arguments,o)}else t=s.apply(this,arguments);return r(d[1])(this,t)});function l(t){var s,e,n,h,_,u,f,c,m,v,p,y;if(r(d[3])(this,l),(m=o.call(this))._overshootClamping=null!=(s=t.overshootClamping)&&s,m._restDisplacementThreshold=null!=(e=t.restDisplacementThreshold)?e:.001,m._restSpeedThreshold=null!=(n=t.restSpeedThreshold)?n:.001,m._initialVelocity=null!=(h=t.velocity)?h:0,m._lastVelocity=null!=(_=t.velocity)?_:0,m._toValue=t.toValue,m._delay=null!=(u=t.delay)?u:0,m._useNativeDriver=r(d[4]).shouldUseNativeDriver(t),m.__isInteraction=null!=(f=t.isInteraction)?f:!m._useNativeDriver,m.__iterations=null!=(c=t.iterations)?c:1,void 0!==t.stiffness||void 0!==t.damping||void 0!==t.mass)r(d[5])(void 0===t.bounciness&&void 0===t.speed&&void 0===t.tension&&void 0===t.friction,'You can define one of bounciness/speed, tension/friction, or stiffness/damping/mass, but not more than one'),m._stiffness=null!=(v=t.stiffness)?v:100,m._damping=null!=(p=t.damping)?p:10,m._mass=null!=(y=t.mass)?y:1;else if(void 0!==t.bounciness||void 0!==t.speed){var V,T;r(d[5])(void 0===t.tension&&void 0===t.friction&&void 0===t.stiffness&&void 0===t.damping&&void 0===t.mass,'You can define one of bounciness/speed, tension/friction, or stiffness/damping/mass, but not more than one');var b=r(d[6]).fromBouncinessAndSpeed(null!=(V=t.bounciness)?V:8,null!=(T=t.speed)?T:12);m._stiffness=b.stiffness,m._damping=b.damping,m._mass=1}else{var M,D,P=r(d[6]).fromOrigamiTensionAndFriction(null!=(M=t.tension)?M:40,null!=(D=t.friction)?D:7);m._stiffness=P.stiffness,m._damping=P.damping,m._mass=1}return r(d[5])(m._stiffness>0,'Stiffness value must be greater than 0'),r(d[5])(m._damping>0,'Damping value must be greater than 0'),r(d[5])(m._mass>0,'Mass value must be greater than 0'),m}return r(d[7])(l,[{key:"__getNativeAnimationConfig",value:function(){var t;return{type:'spring',overshootClamping:this._overshootClamping,restDisplacementThreshold:this._restDisplacementThreshold,restSpeedThreshold:this._restSpeedThreshold,stiffness:this._stiffness,damping:this._damping,mass:this._mass,initialVelocity:null!=(t=this._initialVelocity)?t:this._lastVelocity,toValue:this._toValue,iterations:this.__iterations}}},{key:"start",value:function(t,s,e,n,o){var h=this;if(this.__active=!0,this._startPosition=t,this._lastPosition=this._startPosition,this._onUpdate=s,this.__onEnd=e,this._lastTime=Date.now(),this._frameTime=0,n instanceof l){var _=n.getInternalState();this._lastPosition=_.lastPosition,this._lastVelocity=_.lastVelocity,this._initialVelocity=this._lastVelocity,this._lastTime=_.lastTime}var 
u=function(){h._useNativeDriver?h.__startNativeAnimation(o):h.onUpdate()};this._delay?this._timeout=setTimeout(u,this._delay):u()}},{key:"getInternalState",value:function(){return{lastPosition:this._lastPosition,lastVelocity:this._lastVelocity,lastTime:this._lastTime}}},{key:"onUpdate",value:function(){var t=Date.now();t>this._lastTime+64&&(t=this._lastTime+64);var s=(t-this._lastTime)/1e3;this._frameTime+=s;var e=this._damping,n=this._mass,o=this._stiffness,l=-this._initialVelocity,h=e/(2*Math.sqrt(o*n)),_=Math.sqrt(o/n),u=_*Math.sqrt(1-h*h),f=this._toValue-this._startPosition,c=0,m=0,v=this._frameTime;if(h<1){var p=Math.exp(-h*_*v);c=this._toValue-p*((l+h*_*f)/u*Math.sin(u*v)+f*Math.cos(u*v)),m=h*_*p*(Math.sin(u*v)*(l+h*_*f)/u+f*Math.cos(u*v))-p*(Math.cos(u*v)*(l+h*_*f)-u*f*Math.sin(u*v))}else{var y=Math.exp(-_*v);c=this._toValue-y*(f+(l+_*f)*v),m=y*(l*(v*_-1)+v*f*(_*_))}if(this._lastTime=t,this._lastPosition=c,this._lastVelocity=m,this._onUpdate(c),this.__active){var V=!1;this._overshootClamping&&0!==this._stiffness&&(V=this._startPositionthis._toValue:c18&&A<=44?p(A):h(A),s(2*M-M*M,v,.01));return{stiffness:n(x),damping:t(B)}}}},232,[]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=1,n=(function(){function n(){r(d[0])(this,n)}return r(d[1])(n,[{key:"start",value:function(t,n,e,o,_){}},{key:"stop",value:function(){this.__nativeId&&r(d[2]).API.stopAnimation(this.__nativeId)}},{key:"__getNativeAnimationConfig",value:function(){throw new Error('This animation type cannot be offloaded to native')}},{key:"__debouncedOnEnd",value:function(t){var n=this.__onEnd;this.__onEnd=null,n&&n(t)}},{key:"__startNativeAnimation",value:function(n){var e=t+":startAnimation";t+=1,r(d[2]).API.setWaitingForIdentifier(e);try{n.__makeNative(),this.__nativeId=r(d[2]).generateNewAnimationId(),r(d[2]).API.startAnimatingNode(this.__nativeId,n.__getNativeTag(),this.__getNativeAnimationConfig(),this.__debouncedOnEnd.bind(this))}catch(t){throw t}finally{r(d[2]).API.unsetWaitingForIdentifier(e)}}}]),n})();m.exports=n},233,[7,8,214]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e;function n(){if(!e){var t=r(d[2]);e=t.inOut(t.ease)}return e}var s=(function(e){r(d[3])(_,e);var s,o,u=(s=_,o=t(),function(){var t,e=r(d[0])(s);if(o){var n=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t){var e,s,o,h,l,c;return r(d[4])(this,_),(c=u.call(this))._toValue=t.toValue,c._easing=null!=(e=t.easing)?e:n(),c._duration=null!=(s=t.duration)?s:500,c._delay=null!=(o=t.delay)?o:0,c.__iterations=null!=(h=t.iterations)?h:1,c._useNativeDriver=r(d[5]).shouldUseNativeDriver(t),c.__isInteraction=null!=(l=t.isInteraction)?l:!c._useNativeDriver,c}return r(d[6])(_,[{key:"__getNativeAnimationConfig",value:function(){for(var t=[],e=Math.round(this._duration/16.666666666666668),n=0;n=this._startTime+this._duration)return 0===this._duration?this._onUpdate(this._toValue):this._onUpdate(this._fromValue+this._easing(1)*(this._toValue-this._fromValue)),void 
this.__debouncedOnEnd({finished:!0});this._onUpdate(this._fromValue+this._easing((t-this._startTime)/this._duration)*(this._toValue-this._fromValue)),this.__active&&(this._animationFrame=requestAnimationFrame(this.onUpdate.bind(this)))}},{key:"stop",value:function(){r(d[7])(r(d[0])(_.prototype),"stop",this).call(this),this.__active=!1,clearTimeout(this._timeout),g.cancelAnimationFrame(this._animationFrame),this.__debouncedOnEnd({finished:!1})}}]),_})(r(d[8]));m.exports=s},234,[15,12,235,10,7,214,8,84,233]); +__d(function(g,r,i,a,m,e,d){'use strict';var n,u=(function(){function u(){r(d[0])(this,u)}return r(d[1])(u,null,[{key:"step0",value:function(n){return n>0?1:0}},{key:"step1",value:function(n){return n>=1?1:0}},{key:"linear",value:function(n){return n}},{key:"ease",value:function(t){return n||(n=u.bezier(.42,0,1,1)),n(t)}},{key:"quad",value:function(n){return n*n}},{key:"cubic",value:function(n){return n*n*n}},{key:"poly",value:function(n){return function(u){return Math.pow(u,n)}}},{key:"sin",value:function(n){return 1-Math.cos(n*Math.PI/2)}},{key:"circle",value:function(n){return 1-Math.sqrt(1-n*n)}},{key:"exp",value:function(n){return Math.pow(2,10*(n-1))}},{key:"elastic",value:function(){var n=(arguments.length>0&&void 0!==arguments[0]?arguments[0]:1)*Math.PI;return function(u){return 1-Math.pow(Math.cos(u*Math.PI/2),3)*Math.cos(u*n)}}},{key:"back",value:function(){var n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1.70158;return function(u){return u*u*((n+1)*u-n)}}},{key:"bounce",value:function(n){if(n<.36363636363636365)return 7.5625*n*n;if(n<.7272727272727273){var u=n-.5454545454545454;return 7.5625*u*u+.75}if(n<.9090909090909091){var t=n-.8181818181818182;return 7.5625*t*t+.9375}var o=n-.9545454545454546;return 7.5625*o*o+.984375}},{key:"bezier",value:function(n,u,t,o){return r(d[2])(n,u,t,o)}},{key:"in",value:function(n){return n}},{key:"out",value:function(n){return function(u){return 1-n(1-u)}}},{key:"inOut",value:function(n){return function(u){return u<.5?n(2*u)/2:1-n(2*(1-u))/2}}}]),u})();m.exports=u},235,[7,8,236]); +__d(function(g,r,_i,a,m,e,d){'use strict';var n=4,t=.001,u=1e-7,o=10,f=.1,i='function'==typeof Float32Array;function c(n,t){return 1-3*t+3*n}function v(n,t){return 3*t-6*n}function s(n){return 3*n}function w(n,t,u){return((c(t,u)*n+v(t,u))*n+s(t))*n}function l(n,t,u){return 3*c(t,u)*n*n+2*v(t,u)*n+s(t)}function y(n,t,f,i,c){var v,s,l=0,y=t,b=f;do{(v=w(s=y+(b-y)/2,i,c)-n)>0?b=s:y=s}while(Math.abs(v)>u&&++l=0&&n<=1&&o>=0&&o<=1))throw new Error('bezier x values must be in [0, 1] range');var v=i?new Float32Array(11):new Array(11);if(n!==u||o!==c)for(var s=0;s<11;++s)v[s]=w(s*f,n,o);function h(u){for(var i=0,c=1;10!==c&&v[c]<=u;++c)i+=f;var s=i+(u-v[--c])/(v[c+1]-v[c])*f,w=l(s,n,o);return w>=t?b(u,s,n,o):0===w?s:y(u,i,i+f,n,o)}return function(t){return n===u&&o===c?t:0===t?0:1===t?1:w(h(t),u,c)}}},236,[]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(c,e);var n,s,o=(n=c,s=t(),function(){var t,e=r(d[0])(n);if(s){var o=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,o)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function c(t){var e,n,s,u;return 
r(d[3])(this,c),(u=o.call(this))._deceleration=null!=(e=t.deceleration)?e:.998,u._velocity=t.velocity,u._useNativeDriver=r(d[4]).shouldUseNativeDriver(t),u.__isInteraction=null!=(n=t.isInteraction)?n:!u._useNativeDriver,u.__iterations=null!=(s=t.iterations)?s:1,u}return r(d[5])(c,[{key:"__getNativeAnimationConfig",value:function(){return{type:'decay',deceleration:this._deceleration,velocity:this._velocity,iterations:this.__iterations}}},{key:"start",value:function(t,e,n,s,o){this.__active=!0,this._lastValue=t,this._fromValue=t,this._onUpdate=e,this.__onEnd=n,this._startTime=Date.now(),this._useNativeDriver?this.__startNativeAnimation(o):this._animationFrame=requestAnimationFrame(this.onUpdate.bind(this))}},{key:"onUpdate",value:function(){var t=Date.now(),e=this._fromValue+this._velocity/(1-this._deceleration)*(1-Math.exp(-(1-this._deceleration)*(t-this._startTime)));this._onUpdate(e),Math.abs(this._lastValue-e)<.1?this.__debouncedOnEnd({finished:!0}):(this._lastValue=e,this.__active&&(this._animationFrame=requestAnimationFrame(this.onUpdate.bind(this))))}},{key:"stop",value:function(){r(d[6])(r(d[0])(c.prototype),"stop",this).call(this),this.__active=!1,g.cancelAnimationFrame(this._animationFrame),this.__debouncedOnEnd({finished:!1})}}]),c})(r(d[7]));m.exports=e},237,[15,12,10,7,214,8,84,233]); +__d(function(g,r,i,a,m,e,d){'use strict';function t(t,n,s){var v=[];r(d[1])(s[0]&&s[0].nativeEvent,'Native driven events only support animated values contained inside `nativeEvent`.'),(function t(n,s){if(n instanceof r(d[0]))n.__makeNative(),v.push({nativeEventPath:s,animatedValueTag:n.__getNativeTag()});else if('object'==typeof n)for(var o in n)t(n[o],s.concat(o))})(s[0].nativeEvent,[]);var o=r(d[2]).findNodeHandle(t);return null!=o&&v.forEach(function(t){r(d[3]).API.addAnimatedEventToView(o,n,t)}),{detach:function(){null!=o&&v.forEach(function(t){r(d[3]).API.removeAnimatedEventFromView(o,n,t.animatedValueTag)})}}}var n=(function(){function n(t,s){r(d[4])(this,n),this._listeners=[],this._argMapping=t,null==s&&(console.warn('Animated.event now requires a second argument for options'),s={useNativeDriver:!1}),s.listener&&this.__addListener(s.listener),this._callListeners=this._callListeners.bind(this),this._attachedEvent=null,this.__isNative=r(d[3]).shouldUseNativeDriver(s)}return r(d[5])(n,[{key:"__addListener",value:function(t){this._listeners.push(t)}},{key:"__removeListener",value:function(t){this._listeners=this._listeners.filter(function(n){return n!==t})}},{key:"__attach",value:function(n,s){r(d[1])(this.__isNative,'Only native driven events need to be attached.'),this._attachedEvent=t(n,s,this._argMapping)}},{key:"__detach",value:function(t,n){r(d[1])(this.__isNative,'Only native driven events need to be detached.'),this._attachedEvent&&this._attachedEvent.detach()}},{key:"__getHandler",value:function(){var t=this;if(this.__isNative)return this._callListeners;return function(){for(var n=arguments.length,s=new Array(n),v=0;v1){for(var l=[],s=0;s1?Math.ceil(e.length/n):e.length}return 0},t._keyExtractor=function(e,n){var o,l=v(t.props.numColumns),s=null!=(o=t.props.keyExtractor)?o:r(d[10]).keyExtractor;return l>1?Array.isArray(e)?e.map(function(e,t){return s(e,n*l+t)}).join(':'):void r(d[11])(Array.isArray(e),"FlatList: Encountered internal consistency error, expected each item to consist of an array with 1-%s columns; instead, received a single item.",l):s(e,n)},t._renderer=function(){var 
e=t.props,o=e.ListItemComponent,l=e.renderItem,s=e.columnWrapperStyle,u=v(t.props.numColumns),c=o?'ListItemComponent':'renderItem',f=function(e){return o?h.createElement(o,e):l?l(e):null};return(0,n.default)({},c,function(e){if(u>1){var t=e.item,n=e.index;return r(d[11])(Array.isArray(t),'Expected array of items with numColumns > 1'),h.createElement(r(d[12]),{style:r(d[13]).compose(y.row,s)},t.map(function(t,o){var l=f({item:t,index:n*u+o,separators:e.separators});return null!=l?h.createElement(h.Fragment,{key:o},l):null}))}return f(e)})},t._checkProps(t.props),t.props.viewabilityConfigCallbackPairs?t._virtualizedListPairs=t.props.viewabilityConfigCallbackPairs.map(function(e){return{viewabilityConfig:e.viewabilityConfig,onViewableItemsChanged:t._createOnViewableItemsChanged(e.onViewableItemsChanged)}}):t.props.onViewableItemsChanged&&t._virtualizedListPairs.push({viewabilityConfig:t.props.viewabilityConfig,onViewableItemsChanged:t._createOnViewableItemsChanged(t.props.onViewableItemsChanged)}),t}return(0,l.default)(k,[{key:"scrollToEnd",value:function(e){this._listRef&&this._listRef.scrollToEnd(e)}},{key:"scrollToIndex",value:function(e){this._listRef&&this._listRef.scrollToIndex(e)}},{key:"scrollToItem",value:function(e){this._listRef&&this._listRef.scrollToItem(e)}},{key:"scrollToOffset",value:function(e){this._listRef&&this._listRef.scrollToOffset(e)}},{key:"recordInteraction",value:function(){this._listRef&&this._listRef.recordInteraction()}},{key:"flashScrollIndicators",value:function(){this._listRef&&this._listRef.flashScrollIndicators()}},{key:"getScrollResponder",value:function(){if(this._listRef)return this._listRef.getScrollResponder()}},{key:"getNativeScrollRef",value:function(){if(this._listRef)return this._listRef.getScrollRef()}},{key:"getScrollableNode",value:function(){if(this._listRef)return this._listRef.getScrollableNode()}},{key:"setNativeProps",value:function(e){this._listRef&&this._listRef.setNativeProps(e)}},{key:"componentDidUpdate",value:function(e){r(d[11])(e.numColumns===this.props.numColumns,"Changing numColumns on the fly is not supported. 
Change the key prop on FlatList when changing the number of columns to force a fresh render of the component."),r(d[11])(e.onViewableItemsChanged===this.props.onViewableItemsChanged,'Changing onViewableItemsChanged on the fly is not supported'),r(d[11])(!r(d[14])(e.viewabilityConfig,this.props.viewabilityConfig),'Changing viewabilityConfig on the fly is not supported'),r(d[11])(e.viewabilityConfigCallbackPairs===this.props.viewabilityConfigCallbackPairs,'Changing viewabilityConfigCallbackPairs on the fly is not supported'),this._checkProps(this.props)}},{key:"_checkProps",value:function(e){var t=e.getItem,n=e.getItemCount,o=e.horizontal,l=e.columnWrapperStyle,s=e.onViewableItemsChanged,u=e.viewabilityConfigCallbackPairs,c=v(this.props.numColumns);r(d[11])(!t&&!n,'FlatList does not support custom data formats.'),c>1?r(d[11])(!o,'numColumns does not support horizontal.'):r(d[11])(!l,'columnWrapperStyle not supported for single column lists'),r(d[11])(!(s&&u),"FlatList does not support setting both onViewableItemsChanged and viewabilityConfigCallbackPairs.")}},{key:"_pushMultiColumnViewable",value:function(e,n){var o,l=v(this.props.numColumns),s=null!=(o=this.props.keyExtractor)?o:r(d[10]).keyExtractor;n.item.forEach(function(o,u){r(d[11])(null!=n.index,'Missing index!');var c=n.index*l+u;e.push((0,t.default)({},n,{item:o,key:s(o,c),index:c}))})}},{key:"_createOnViewableItemsChanged",value:function(e){var t=this;return function(n){var o=v(t.props.numColumns);if(e)if(o>1){var l=[],s=[];n.viewableItems.forEach(function(e){return t._pushMultiColumnViewable(s,e)}),n.changed.forEach(function(e){return t._pushMultiColumnViewable(l,e)}),e({viewableItems:s,changed:l})}else e(n)}}},{key:"render",value:function(){var n,o=this.props,l=(o.numColumns,o.columnWrapperStyle,o.removeClippedSubviews),s=(0,e.default)(o,f);return h.createElement(r(d[15]),(0,t.default)({},s,{getItem:this._getItem,getItemCount:this._getItemCount,keyExtractor:this._keyExtractor,ref:this._captureRef,viewabilityConfigCallbackPairs:this._virtualizedListPairs,removeClippedSubviews:(n=l,null==n||n)},this._renderer()))}}]),k})(h.PureComponent),y=r(d[13]).create({row:{flexDirection:'row'}});m.exports=C},246,[3,103,29,247,7,8,10,12,15,129,248,18,174,173,166,249]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t,n,o){return n in t?Object.defineProperty(t,n,{value:o,enumerable:!0,configurable:!0,writable:!0}):t[n]=o,t},m.exports.__esModule=!0,m.exports.default=m.exports},247,[]); +__d(function(g,r,i,a,m,e,d){'use strict';Object.defineProperty(e,"__esModule",{value:!0}),e.computeWindowedRenderLimits=function(n,s,o,u,c,h,v){var b=s(n);if(0===b)return c;var M=v.offset,y=v.velocity,x=v.visibleLength,p=Math.max(0,M),w=p+x,O=(u-1)*x,k=y>1?'after':y<-1?'before':'none',_=Math.max(0,p-.5*O),j=Math.max(0,w+.5*O);if(h(b-1).offset<_)return{first:Math.max(0,b-1-o),last:b-1};var L=f([_,p,w,j],b,h),S=(0,t.default)(L,4),C=S[0],E=S[1],J=S[2],N=S[3];C=null==C?0:C,E=null==E?Math.max(0,C):E,N=null==N?b-1:N,J=null==J?Math.min(N,E+o-1):J;var R={first:E,last:J},B=l(c,R);for(;!(E<=C&&J>=N);){var F=B>=o,P=E<=c.first||E>c.last,T=E>C&&(!F||!P),W=J>=c.last||J=E&&E>=0&&J=C&&J<=N&&E<=R.first&&J>=R.last))throw new Error('Bad window calculation '+JSON.stringify({first:E,last:J,itemCount:b,overscanFirst:C,overscanLast:N,visible:R}));return{first:E,last:J}},e.elementsThatOverlapOffsets=f,e.keyExtractor=function(t,n){if('object'==typeof t&&null!=(null==t?void 0:t.key))return t.key;if('object'==typeof t&&null!=(null==t?void 0:t.id))return t.id;return 
String(n)},e.newRangeCount=l;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2]));function f(t,f,l){for(var s=[],o=0,u=0;u=t[v]&&(s[v]=u,o++,v===t.length-1))return(0,n.default)(o===t.length,'bad offsets input, should be in increasing order: %s',JSON.stringify(t)),s;return s}function l(t,n){return n.last-n.first+1-Math.max(0,1+Math.min(n.last,t.last)-Math.max(n.first,t.first))}},248,[3,23,18]); +__d(function(g,r,_i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),n=r(d[0])(r(d[4])),s=r(d[0])(r(d[5])),i=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),c=r(d[0])(r(d[8])),h=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var o=u(t);if(o&&o.has(e))return o.get(e);var n={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var i in e)if("default"!==i&&Object.prototype.hasOwnProperty.call(e,i)){var l=s?Object.getOwnPropertyDescriptor(e,i):null;l&&(l.get||l.set)?Object.defineProperty(n,i,l):n[i]=e[i]}n.default=e,o&&o.set(e,n);return n})(r(d[9]));function u(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,o=new WeakMap;return(u=function(e){return e?o:t})(e)}function p(e,t){var o="undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(o)return(o=o.call(e)).next.bind(o);if(Array.isArray(e)||(o=f(e))||t&&e&&"number"==typeof e.length){o&&(e=o);var n=0;return function(){return n>=e.length?{done:!0}:{done:!1,value:e[n++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function f(e,t){if(e){if("string"==typeof e)return _(e,t);var o=Object.prototype.toString.call(e).slice(8,-1);return"Object"===o&&e.constructor&&(o=e.constructor.name),"Map"===o||"Set"===o?Array.from(e):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?_(e,t):void 0}}function _(e,t){(null==t||t>e.length)&&(t=e.length);for(var o=0,n=new Array(t);o0&&t>0&&null!=i.props.initialScrollIndex&&i.props.initialScrollIndex>0&&!i._hasDoneInitialScroll&&(null==i.props.contentOffset&&i.scrollToIndex({animated:!1,index:i.props.initialScrollIndex}),i._hasDoneInitialScroll=!0),i.props.onContentSizeChange&&i.props.onContentSizeChange(e,t),i._scrollMetrics.contentLength=i._selectLength({height:t,width:e}),i._scheduleCellsToRenderUpdate(),i._maybeCallOnEndReached()},i._convertParentScrollMetrics=function(e){var t=e.offset-i._offsetFromParentVirtualizedList,o=e.visibleLength,n=t-i._scrollMetrics.offset;return{visibleLength:o,contentLength:i._scrollMetrics.contentLength,offset:t,dOffset:n}},i._onScroll=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onScroll(e)}),i.props.onScroll&&i.props.onScroll(e);var t=e.timeStamp,o=i._selectLength(e.nativeEvent.layoutMeasurement),n=i._selectLength(e.nativeEvent.contentSize),s=i._selectOffset(e.nativeEvent.contentOffset),l=s-i._scrollMetrics.offset;if(i._isNestedWithSameOrientation()){if(0===i._scrollMetrics.contentLength)return;var c=i._convertParentScrollMetrics({visibleLength:o,offset:s});o=c.visibleLength,n=c.contentLength,s=c.offset,l=c.dOffset}var h=i._scrollMetrics.timestamp?Math.max(1,t-i._scrollMetrics.timestamp):1,u=l/h;h>500&&i._scrollMetrics.dt>500&&n>5*o&&!i._hasWarned.perf&&(r(d[14])("VirtualizedList: You have a large list that is slow to update - make sure your renderItem function renders components that follow React performance best practices like PureComponent, shouldComponentUpdate, 
etc.",{dt:h,prevDt:i._scrollMetrics.dt,contentLength:n}),i._hasWarned.perf=!0),i._scrollMetrics={contentLength:n,dt:h,dOffset:l,offset:s,timestamp:t,velocity:u,visibleLength:o},i._updateViewableItems(i.props.data),i.props&&(i._maybeCallOnEndReached(),0!==u&&i._fillRateHelper.activate(),i._computeBlankness(),i._scheduleCellsToRenderUpdate())},i._onScrollBeginDrag=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onScrollBeginDrag(e)}),i._viewabilityTuples.forEach(function(e){e.viewabilityHelper.recordInteraction()}),i._hasInteracted=!0,i.props.onScrollBeginDrag&&i.props.onScrollBeginDrag(e)},i._onScrollEndDrag=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onScrollEndDrag(e)});var t=e.nativeEvent.velocity;t&&(i._scrollMetrics.velocity=i._selectOffset(t)),i._computeBlankness(),i.props.onScrollEndDrag&&i.props.onScrollEndDrag(e)},i._onMomentumScrollBegin=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onMomentumScrollBegin(e)}),i.props.onMomentumScrollBegin&&i.props.onMomentumScrollBegin(e)},i._onMomentumScrollEnd=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onMomentumScrollEnd(e)}),i._scrollMetrics.velocity=0,i._computeBlankness(),i.props.onMomentumScrollEnd&&i.props.onMomentumScrollEnd(e)},i._updateCellsToRender=function(){var e=i.props,t=e.data,o=e.getItemCount,n=M(e.onEndReachedThreshold),s=i._isVirtualizationDisabled();i._updateViewableItems(t),t&&i.setState(function(e){var l,c=i._scrollMetrics,h=c.contentLength,u=c.offset,f=c.visibleLength;if(s){var _=h-f-u0&&h>0&&(i.props.initialScrollIndex&&!i._scrollMetrics.offset||(l=(0,r(d[15]).computeWindowedRenderLimits)(i.props.data,i.props.getItemCount,I(i.props.maxToRenderPerBatch),R(i.props.windowSize),e,i._getFrameMetricsApprox,i._scrollMetrics)));if(l&&i._nestedChildLists.size>0)for(var y=l.first,v=l.last,C=y;C<=v;C++){var L=i._indicesToKeys.get(C),b=L&&i._cellKeysToChildListKeys.get(L);if(b){for(var S,M=!1,x=p(b);!(S=x()).done;){var w=S.value,k=i._nestedChildLists.get(w);if(k&&k.ref&&k.ref.hasMore()){M=!0;break}}if(M){l.last=C;break}}}return null!=l&&l.first===e.first&&l.last===e.last&&(l=null),l})},i._createViewToken=function(e,t){var o=i.props,n=o.data,s=(0,o.getItem)(n,e);return{index:e,item:s,key:i._keyExtractor(s,e),isViewable:t}},i._getFrameMetricsApprox=function(e){var t=i._getFrameMetrics(e);if(t&&t.index===e)return t;var o=i.props.getItemLayout;return r(d[11])(!o,'Should not have to estimate frames when a measurement metrics function is provided'),{length:i._averageCellLength,offset:i._averageCellLength*e}},i._getFrameMetrics=function(e){var t=i.props,o=t.data,n=t.getItem,s=t.getItemCount,l=t.getItemLayout;r(d[11])(s(o)>e,'Tried to get frame for out of range index '+e);var c=n(o,e),h=c&&i._frames[i._keyExtractor(c,e)];return h&&h.index===e||l&&(h=l(o,e)),h},r(d[11])(!e.onScroll||!e.onScroll.__isNative,"Components based on VirtualizedList must be wrapped with Animated.createAnimatedComponent to support native onScroll events with useNativeDriver"),r(d[11])(R(e.windowSize)>0,'VirtualizedList: The windowSize prop must be present and set to a value greater than 
0.'),i._fillRateHelper=new(r(d[16]))(i._getFrameMetrics),i._updateCellsToRenderBatcher=new(r(d[17]))(i._updateCellsToRender,null!=(n=i.props.updateCellsBatchingPeriod)?n:50),i.props.viewabilityConfigCallbackPairs?i._viewabilityTuples=i.props.viewabilityConfigCallbackPairs.map(function(e){return{viewabilityHelper:new(r(d[18]))(e.viewabilityConfig),onViewableItemsChanged:e.onViewableItemsChanged}}):i.props.onViewableItemsChanged&&i._viewabilityTuples.push({viewabilityHelper:new(r(d[18]))(i.props.viewabilityConfig),onViewableItemsChanged:i.props.onViewableItemsChanged});var l={first:i.props.initialScrollIndex||0,last:Math.min(i.props.getItemCount(i.props.data),(i.props.initialScrollIndex||0)+S(i.props.initialNumToRender))-1};if(i._isNestedWithSameOrientation()){var f=i.context.getNestedChildState(i._getListKey());f&&(l=f,i.state=f,i._frames=f.frames)}return i.state=l,i}return(0,n.default)(u,[{key:"scrollToEnd",value:function(e){var t=!e||e.animated,o=this.props.getItemCount(this.props.data)-1,n=this._getFrameMetricsApprox(o),s=Math.max(0,n.offset+n.length+this._footerLength-this._scrollMetrics.visibleLength);null!=this._scrollRef&&(null!=this._scrollRef.scrollTo?this._scrollRef.scrollTo(b(this.props.horizontal)?{x:s,animated:t}:{y:s,animated:t}):console.warn("No scrollTo method provided. This may be because you have two nested VirtualizedLists with the same orientation, or because you are using a custom component that does not implement scrollTo."))}},{key:"scrollToIndex",value:function(e){var t=this.props,o=t.data,n=t.horizontal,s=t.getItemCount,i=t.getItemLayout,l=t.onScrollToIndexFailed,c=e.animated,h=e.index,u=e.viewOffset,p=e.viewPosition;if(r(d[11])(h>=0,"scrollToIndex out of range: requested index "+h+" but minimum is 0"),r(d[11])(s(o)>=1,"scrollToIndex out of range: item length "+s(o)+" but minimum is 1"),r(d[11])(hthis._highestMeasuredFrameIndex)return r(d[11])(!!l,"scrollToIndex should be used in conjunction with getItemLayout or onScrollToIndexFailed, otherwise there is no way to know the location of offscreen indices or handle failures."),void l({averageItemLength:this._averageCellLength,highestMeasuredFrameIndex:this._highestMeasuredFrameIndex,index:h});var f=this._getFrameMetricsApprox(h),_=Math.max(0,f.offset-(p||0)*(this._scrollMetrics.visibleLength-f.length))-(u||0);null!=this._scrollRef&&(null!=this._scrollRef.scrollTo?this._scrollRef.scrollTo(n?{x:_,animated:c}:{y:_,animated:c}):console.warn("No scrollTo method provided. 
This may be because you have two nested VirtualizedLists with the same orientation, or because you are using a custom component that does not implement scrollTo."))}},{key:"scrollToItem",value:function(e){for(var o=e.item,n=this.props,s=n.data,i=n.getItem,l=(0,n.getItemCount)(s),c=0;c0){C=!1,L='';var x=this._getSpacerKey(!p),w=this.props.initialScrollIndex?-1:S(this.props.initialNumToRender)-1,k=this.state,T=k.first,z=k.last;this._pushCells(y,I,v,0,w,_);var K=Math.max(w+1,T);if(!f&&T>w+1){var O=!1;if(v.size>0)for(var P=l?1:0,F=K-1;F>w;F--)if(v.has(F+P)){var V=this._getFrameMetricsApprox(w),D=this._getFrameMetricsApprox(F),N=D.offset-V.offset-(this.props.initialScrollIndex?0:V.length);y.push(h.createElement(r(d[10]),{key:"$sticky_lead",style:(0,e.default)({},x,N)})),this._pushCells(y,I,v,F,F,_);var A=this._getFrameMetricsApprox(T).offset-(D.offset+D.length);y.push(h.createElement(r(d[10]),{key:"$sticky_trail",style:(0,e.default)({},x,A)})),O=!0;break}if(!O){var B=this._getFrameMetricsApprox(w),H=this._getFrameMetricsApprox(T).offset-(B.offset+B.length);y.push(h.createElement(r(d[10]),{key:"$lead_spacer",style:(0,e.default)({},x,H)}))}}if(this._pushCells(y,I,v,K,z,_),!this._hasWarned.keys&&C&&(console.warn("VirtualizedList: missing keys for items, make sure to specify a key or id property on each item or provide a custom keyExtractor.",L),this._hasWarned.keys=!0),!f&&zu&&(this._sentEndForContentLength=0)}},{key:"_scheduleCellsToRenderUpdate",value:function(){var e=this.state,t=e.first,o=e.last,n=this._scrollMetrics,s=n.offset,i=n.visibleLength,l=n.velocity,c=this.props.getItemCount(this.props.data),h=!1,u=M(this.props.onEndReachedThreshold)*i/2;if(t>0){var p=s-this._getFrameMetricsApprox(t).offset;h=h||p<0||l<-2&&p2&&f0&&(this._scrollAnimatedValueAttachment=p.default.attachNativeEvent(this._scrollViewRef,'onScroll',[{nativeEvent:{contentOffset:{y:this._scrollAnimatedValue}}}]))}},{key:"_setStickyHeaderRef",value:function(e,o){o?this._stickyHeaderRefs.set(e,o):this._stickyHeaderRefs.delete(e)}},{key:"_onStickyHeaderLayout",value:function(e,o,t){var n=this.props.stickyHeaderIndices;if(n){var l=y.Children.toArray(this.props.children);if(t===this._getKeyForIndex(e,l)){var s=o.nativeEvent.layout.y;this._headerLayoutYs.set(t,s);var u=n[n.indexOf(e)-1];if(null!=u){var c=this._stickyHeaderRefs.get(this._getKeyForIndex(u,l));c&&c.setNextHeaderY&&c.setNextHeaderY(s)}}}}},{key:"render",value:function(){var t=this,n=!0===this.props.horizontal?P:F,l=(0,o.default)(n,2),s=l[0],u=l[1],c=[!0===this.props.horizontal&&Y.contentContainerHorizontal,this.props.contentContainerStyle],p=null==this.props.onContentSizeChange?null:{onLayout:this._handleContentOnLayout},f=this.props.stickyHeaderIndices,S=this.props.children;if(null!=f&&f.length>0){var b=y.Children.toArray(this.props.children);S=b.map(function(e,o){var n=e?f.indexOf(o):-1;if(n>-1){var l=e.key,s=f[n+1],u=t.props.StickyHeaderComponent||_.default;return y.createElement(u,{key:l,nativeID:'StickyHeader-'+l,ref:function(e){return t._setStickyHeaderRef(l,e)},nextHeaderLayoutY:t._headerLayoutYs.get(t._getKeyForIndex(s,b)),onLayout:function(e){return t._onStickyHeaderLayout(o,e,l)},scrollAnimatedValue:t._scrollAnimatedValue,inverted:t.props.invertStickyHeaders,hiddenOnScroll:t.props.stickyHeaderHiddenOnScroll,scrollViewHeight:t.state.layoutHeight},e)}return e})}S=y.createElement(D.default.Provider,{value:!0===this.props.horizontal?D.HORIZONTAL:D.VERTICAL},S);var 
R=Array.isArray(f)&&f.length>0,T=y.createElement(u,(0,e.default)({},p,{ref:this._setInnerViewRef,style:c,removeClippedSubviews:('android'!==h.default.OS||!R)&&this.props.removeClippedSubviews,collapsable:!1}),S),w=void 0!==this.props.alwaysBounceHorizontal?this.props.alwaysBounceHorizontal:this.props.horizontal,V=void 0!==this.props.alwaysBounceVertical?this.props.alwaysBounceVertical:!this.props.horizontal,k=!0===this.props.horizontal?Y.baseHorizontal:Y.baseVertical,E=(0,e.default)({},this.props,{alwaysBounceHorizontal:w,alwaysBounceVertical:V,style:v.default.compose(k,this.props.style),onContentSizeChange:null,onLayout:this._handleLayout,onMomentumScrollBegin:this._handleMomentumScrollBegin,onMomentumScrollEnd:this._handleMomentumScrollEnd,onResponderGrant:this._handleResponderGrant,onResponderReject:this._handleResponderReject,onResponderRelease:this._handleResponderRelease,onResponderTerminationRequest:this._handleResponderTerminationRequest,onScrollBeginDrag:this._handleScrollBeginDrag,onScrollEndDrag:this._handleScrollEndDrag,onScrollShouldSetResponder:this._handleScrollShouldSetResponder,onStartShouldSetResponder:this._handleStartShouldSetResponder,onStartShouldSetResponderCapture:this._handleStartShouldSetResponderCapture,onTouchEnd:this._handleTouchEnd,onTouchMove:this._handleTouchMove,onTouchStart:this._handleTouchStart,onTouchCancel:this._handleTouchCancel,onScroll:this._handleScroll,scrollEventThrottle:R?1:this.props.scrollEventThrottle,sendMomentumEvents:!(!this.props.onMomentumScrollBegin&&!this.props.onMomentumScrollEnd),snapToStart:!1!==this.props.snapToStart,snapToEnd:!1!==this.props.snapToEnd,pagingEnabled:h.default.select({ios:!0===this.props.pagingEnabled&&null==this.props.snapToInterval&&null==this.props.snapToOffsets,android:!0===this.props.pagingEnabled||null!=this.props.snapToInterval||null!=this.props.snapToOffsets})}),K=this.props.decelerationRate;null!=K&&(E.decelerationRate=(0,O.default)(K));var A=this.props.refreshControl;if(A){if('ios'===h.default.OS)return y.createElement(s,(0,e.default)({},E,{ref:this._setNativeRef}),A,T);if('android'===h.default.OS){var N=(0,I.default)((0,H.default)(E.style)),M=N.outer,W=N.inner;return y.cloneElement(A,{style:v.default.compose(k,M)},y.createElement(s,(0,e.default)({},E,{style:v.default.compose(k,W),ref:this._setNativeRef}),T))}}return y.createElement(s,(0,e.default)({},E,{ref:this._setNativeRef}),T)}}]),x})(y.Component);G.Context=D.default;var Y=v.default.create({baseVertical:{flexGrow:1,flexShrink:1,flexDirection:'column',overflow:'scroll'},baseHorizontal:{flexGrow:1,flexShrink:1,flexDirection:'row',overflow:'scroll'},contentContainerHorizontal:{flexDirection:'row'}});function U(o,t){return y.createElement(G,(0,e.default)({},o,{scrollViewRef:t}))}U.displayName='ScrollView';var Z=y.forwardRef(U);Z.Context=D.default,Z.displayName='ScrollView',m.exports=Z},250,[3,29,23,7,8,14,10,12,15,223,161,19,129,49,251,173,174,43,252,256,135,254,168,18,258,259,241,260,261,262,263,264,265]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),f=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=s(n);if(u&&u.has(t))return u.get(t);var l={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var 
c=o?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(l,f,c):l[f]=t[f]}l.default=t,u&&u.set(t,l);return l})(r(d[6]));function s(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(s=function(t){return t?u:n})(t)}var c=o.default.View,p=f.forwardRef(function(l,s){var p=l.inverted,h=l.scrollViewHeight,y=l.hiddenOnScroll,R=l.scrollAnimatedValue,O=l.nextHeaderLayoutY,L=f.useState(!1),b=(0,t.default)(L,2),w=b[0],S=b[1],j=f.useState(0),E=(0,t.default)(j,2),P=E[0],_=E[1],x=f.useState(0),M=(0,t.default)(x,2),k=M[0],C=M[1],D=f.useState(null),H=(0,t.default)(D,2),I=H[0],V=H[1],Y=f.useState(O),T=(0,t.default)(Y,2),W=T[0],A=T[1],N=f.useState(!1),z=(0,t.default)(N,2),F=z[0],q=z[1],B=f.useRef(),G=(0,n.default)({getForwardedRef:function(){return s},setLocalRef:function(t){var n,u;(B.current=t,t)&&(t.setNextHeaderY=function(t){A(t)},q(!(null==(n=t._internalInstanceHandle)||null==(u=n.stateNode)||!u.canonical)))}}),J=(0,f.useMemo)(function(){return!0===y?o.default.diffClamp(R.interpolate({extrapolateLeft:'clamp',inputRange:[P,P+1],outputRange:[0,1]}).interpolate({inputRange:[0,1],outputRange:[0,-1]}),-k,0):null},[R,k,P,y]),K=f.useState(function(){var t=R.interpolate({inputRange:[-1,0],outputRange:[0,0]});return null!=J?o.default.add(t,J):t}),Q=(0,t.default)(K,2),U=Q[0],X=Q[1],Z=(0,f.useRef)(!0),$=(0,f.useRef)(null);(0,f.useEffect)(function(){0!==I&&null!=I&&(Z.current=!1)},[I]);var ee=(0,f.useCallback)(function(t){var n=t.value,l='android'===u.default.OS?15:64;0!==n||Z.current?(null!=$.current&&clearTimeout($.current),$.current=setTimeout(function(){n!==I&&V(n)},l)):Z.current=!0},[I]);(0,f.useEffect)(function(){var t=[-1,0],n=[0,0];if(w)if(!0===p){if(null!=h){var u=P+k-h;if(u>0){t.push(u),n.push(0),t.push(u+1),n.push(1);var l=(W||0)-k-h;l>u&&(t.push(l,l+1),n.push(l-u,l-u))}}}else{t.push(P),n.push(0);var f=(W||0)-k;f>=P?(t.push(f,f+1),n.push(f-P,f-P)):(t.push(P+1),n.push(1))}var s,c=R.interpolate({inputRange:t,outputRange:n});return null!=J&&(c=o.default.add(c,J)),F&&(s=c.addListener(ee)),X(c),function(){s&&c.removeListener(s),null!=$.current&&clearTimeout($.current)}},[W,w,k,P,h,R,p,J,ee,F]);var te=f.Children.only(l.children),ne=F&&null!=I?{style:{transform:[{translateY:I}]}}:null;return f.createElement(c,{collapsable:!1,nativeID:l.nativeID,onLayout:function(t){_(t.nativeEvent.layout.y),C(t.nativeEvent.layout.height),S(!0),l.onLayout(t);var n=f.Children.only(l.children);n.props.onLayout&&n.props.onLayout(t)},ref:G,style:[te.props.style,v.header,{transform:[{translateY:U}]}],passthroughAnimatedPropExplicitValues:ne},f.cloneElement(te,{style:v.fill,onLayout:void 0}))}),v=l.default.create({header:{zIndex:10,position:'relative'},fill:{flex:1}}),h=p;e.default=h},251,[3,23,241,19,173,211,129]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),s=r(d[0])(r(d[6])),f=r(d[0])(r(d[7])),v=(function(){function v(){(0,t.default)(this,v),this._emitter=new u.default('ios'!==s.default.OS?null:f.default)}return(0,n.default)(v,[{key:"addListener",value:function(t,n,u){return this._emitter.addListener(t,n)}},{key:"removeListener",value:function(t,n){this._emitter.removeListener(t,n)}},{key:"removeAllListeners",value:function(t){this._emitter.removeAllListeners(t)}},{key:"dismiss",value:function(){(0,o.default)()}},{key:"scheduleLayoutAnimation",value:function(t){var 
n=t.duration,u=t.easing;null!=n&&0!==n&&l.default.configureNext({duration:n,update:{duration:n,type:null!=u&&l.default.Types[u]||'keyboard'}})}}]),v})();m.exports=new v},252,[3,7,8,95,253,254,19,255]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=r(d[0])(r(d[1]));function t(t,u,o){var l,s;if(!n.default.isTesting){var c,p,y=!1,f=function(){y||(y=!0,clearTimeout(b),null==u||u())},b=setTimeout(f,(null!=(l=t.duration)?l:0)+17),I=null==(s=g)?void 0:s.nativeFabricUIManager;if(null!=I&&I.configureNextLayoutAnimation)null==(c=g)||null==(p=c.nativeFabricUIManager)||p.configureNextLayoutAnimation(t,f,null!=o?o:function(){});else null!=r(d[2])&&r(d[2]).configureNextLayoutAnimation&&r(d[2]).configureNextLayoutAnimation(t,null!=f?f:function(){},null!=o?o:function(){})}}function u(n,t,u){return{duration:n,create:{type:t,property:u},update:{type:t},delete:{type:t,property:u}}}var o={easeInEaseOut:u(300,'easeInEaseOut','opacity'),linear:u(500,'linear','opacity'),spring:{duration:700,create:{type:'linear',property:'opacity'},update:{type:'spring',springDamping:.4},delete:{type:'linear',property:'opacity'}}},l={configureNext:t,create:u,Types:Object.freeze({spring:'spring',linear:'linear',easeInEaseOut:'easeInEaseOut',easeIn:'easeIn',easeOut:'easeOut',keyboard:'keyboard'}),Properties:Object.freeze({opacity:'opacity',scaleX:'scaleX',scaleY:'scaleY',scaleXY:'scaleXY'}),checkConfig:function(){console.error('LayoutAnimation.checkConfig(...) has been disabled.')},Presets:o,easeInEaseOut:t.bind(null,o.easeInEaseOut),linear:t.bind(null,o.linear),spring:t.bind(null,o.spring)};m.exports=l},253,[3,19,43]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(){r(d[0]).blurTextInput(r(d[0]).currentlyFocusedInput())}},254,[135]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('KeyboardObserver');e.default=n},255,[21]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),o={setGlobalOptions:function(o){if(void 0!==o.debug&&r(d[2])(t.default,'Trying to debug FrameRateLogger without the native module!'),t.default){var l={debug:!!o.debug,reportStackTraces:!!o.reportStackTraces};t.default.setGlobalOptions(l)}},setContext:function(o){t.default&&t.default.setContext(o)},beginScroll:function(){t.default&&t.default.beginScroll()},endScroll:function(){t.default&&t.default.endScroll()}};m.exports=o},256,[3,257,18]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var 
p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('FrameRateLogger');e.default=n},257,[21]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1]));m.exports=function(n){return'normal'===n?t.default.select({ios:.998,android:.985}):'fast'===n?t.default.select({ios:.99,android:.9}):n}},258,[3,19]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(s){var c=null,t=null;if(null!=s){c={},t={};for(var n=0,l=Object.keys(s);nMath.random(),this._resetData()}return r(d[0])(_,[{key:"activate",value:function(){this._enabled&&null==this._samplesStartTime&&(this._samplesStartTime=g.performance.now())}},{key:"deactivateAndFlush",value:function(){if(this._enabled){var t=this._samplesStartTime;if(null!=t)if(this._info.sample_count0&&(c=Math.min(h,Math.max(0,y.offset-_)));for(var p=0,b=n.last,v=this._getFrameMetrics(b);b>=n.first&&(!v||!v.inLayout);)v=this._getFrameMetrics(b),b--;if(v&&b0?(this._anyBlankStartTime=f,this._info.any_blank_speed_sum+=u,this._info.any_blank_count++,this._info.pixels_blank+=M,T>.5&&(this._mostlyBlankStartTime=f,this._info.mostly_blank_count++)):(u<.01||Math.abs(l)<1)&&this.deactivateAndFlush(),T}},{key:"enabled",value:function(){return this._enabled}},{key:"_resetData",value:function(){this._anyBlankStartTime=null,this._info=new t,this._mostlyBlankStartTime=null,this._samplesStartTime=null}}],[{key:"addListener",value:function(t){return null===l&&console.warn('Call `FillRateHelper.setSampleRate` before `addListener`.'),n.push(t),{remove:function(){n=n.filter(function(n){return t!==n})}}}},{key:"setSampleRate",value:function(t){l=t}},{key:"setMinSampleCount",value:function(t){s=t}}]),_})();m.exports=_},270,[8,7,29]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=(function(){function t(n,l){r(d[0])(this,t),this._delay=l,this._callback=n}return r(d[1])(t,[{key:"dispose",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{abort:!1};this._taskHandle&&(this._taskHandle.cancel(),t.abort||this._callback(),this._taskHandle=null)}},{key:"schedule",value:function(){var t=this;if(!this._taskHandle){var n=setTimeout(function(){t._taskHandle=r(d[2]).runAfterInteractions(function(){t._taskHandle=null,t._callback()})},this._delay);this._taskHandle={cancel:function(){return clearTimeout(n)}}}}}]),t})();m.exports=t},271,[7,8,220]); +__d(function(g,r,_i,a,m,e,d){'use strict';function t(t,i){var o="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(o)return(o=o.call(t)).next.bind(o);if(Array.isArray(t)||(o=n(t))||i&&t&&"number"==typeof t.length){o&&(t=o);var s=0;return function(){return s>=t.length?{done:!0}:{done:!1,value:t[s++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function n(t,n){if(t){if("string"==typeof t)return i(t,n);var o=Object.prototype.toString.call(t).slice(8,-1);return"Object"===o&&t.constructor&&(o=t.constructor.name),"Map"===o||"Set"===o?Array.from(t):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?i(t,n):void 0}}function i(t,n){(null==n||n>t.length)&&(n=t.length);for(var i=0,o=new Array(n);i0&&void 0!==arguments[0]?arguments[0]:{viewAreaCoveragePercentThreshold:0};r(d[0])(this,n),this._hasInteracted=!1,this._timers=new Set,this._viewableIndices=[],this._viewableItems=new Map,this._config=t}return 
r(d[1])(n,[{key:"dispose",value:function(){this._timers.forEach(clearTimeout)}},{key:"computeViewableItems",value:function(t,n,i,o,l){var u=this._config,c=u.itemVisiblePercentThreshold,h=u.viewAreaCoveragePercentThreshold,f=null!=h,v=f?h:c;r(d[2])(null!=v&&null!=c!=(null!=h),'Must set exactly one of itemVisiblePercentThreshold or viewAreaCoveragePercentThreshold');var b=[];if(0===t)return b;var y=-1,w=l||{first:0,last:t-1},_=w.first,p=w.last;if(p>=t)return console.warn('Invalid render range computing viewability '+JSON.stringify({renderRange:l,itemCount:t})),[];for(var I=_;I<=p;I++){var A=o(I);if(A){var S=A.offset-n,T=S+A.length;if(S0)y=I,s(f,v,S,T,i,A.length)&&b.push(I);else if(y>=0)break}}return b}},{key:"onUpdate",value:function(t,n,i,o,s,l,u){var c=this;if((!this._config.waitForInteraction||this._hasInteracted)&&0!==t&&o(0)){var h=[];if(t&&(h=this.computeViewableItems(t,n,i,o,u)),this._viewableIndices.length!==h.length||!this._viewableIndices.every(function(t,n){return t===h[n]}))if(this._viewableIndices=h,this._config.minimumViewTime){var f=setTimeout(function(){c._timers.delete(f),c._onUpdateSync(h,l,s)},this._config.minimumViewTime);this._timers.add(f)}else this._onUpdateSync(h,l,s)}}},{key:"resetViewableIndices",value:function(){this._viewableIndices=[]}},{key:"recordInteraction",value:function(){this._hasInteracted=!0}},{key:"_onUpdateSync",value:function(n,i,o){var s=this;n=n.filter(function(t){return s._viewableIndices.includes(t)});for(var l,u=this._viewableItems,c=new Map(n.map(function(t){var n=o(t,!0);return[n.key,n]})),h=[],f=t(c);!(l=f()).done;){var v=l.value,b=r(d[3])(v,2),y=b[0],w=b[1];u.has(y)||h.push(w)}for(var _,p=t(u);!(_=p()).done;){var I=_.value,A=r(d[3])(I,2),S=A[0],T=A[1];c.has(S)||h.push(r(d[4])({},T,{isViewable:!1}))}h.length>0&&(this._viewableItems=c,i({viewableItems:Array.from(c.values()),changed:h,viewabilityConfig:this._config}))}}]),n})();function s(t,n,i,o,s,c){if(u(i,o,s))return!0;var h=l(i,o,s);return 100*(t?h/s:h/c)>=n}function l(t,n,i){var o=Math.min(n,i)-Math.max(t,0);return Math.max(0,o)}function u(t,n,i){return t>=0&&n<=i&&n>t}m.exports=o},272,[7,8,18,23,29]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.VirtualizedListCellContextProvider=function(l){var u=l.cellKey,s=l.children,c=(0,n.useContext)(o);return n.createElement(o.Provider,{value:null==c?null:(0,t.default)({},c,{cellKey:u})},s)},e.VirtualizedListContext=void 0,e.VirtualizedListContextProvider=function(t){var l=t.children,u=t.value,s=(0,n.useMemo)(function(){return{cellKey:null,getScrollMetrics:u.getScrollMetrics,horizontal:u.horizontal,getOutermostParentListRef:u.getOutermostParentListRef,getNestedChildState:u.getNestedChildState,registerAsNestedChild:u.registerAsNestedChild,unregisterAsNestedChild:u.unregisterAsNestedChild,debugInfo:{cellKey:u.debugInfo.cellKey,horizontal:u.debugInfo.horizontal,listKey:u.debugInfo.listKey,parent:u.debugInfo.parent}}},[u.getScrollMetrics,u.horizontal,u.getOutermostParentListRef,u.getNestedChildState,u.registerAsNestedChild,u.unregisterAsNestedChild,u.debugInfo.cellKey,u.debugInfo.horizontal,u.debugInfo.listKey,u.debugInfo.parent]);return n.createElement(o.Provider,{value:s},l)},e.VirtualizedListContextResetter=function(t){var l=t.children;return n.createElement(o.Provider,{value:null},l)};var t=r(d[0])(r(d[1])),n=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=l(n);if(o&&o.has(t))return o.get(t);var 
u={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var f=s?Object.getOwnPropertyDescriptor(t,c):null;f&&(f.get||f.set)?Object.defineProperty(u,c,f):u[c]=t[c]}u.default=t,o&&o.set(t,u);return u})(r(d[2]));function l(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(l=function(t){return t?o:n})(t)}var o=n.createContext(null);e.VirtualizedListContext=o},273,[3,29,129]); +__d(function(g,r,i,a,m,e,d){!(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},p=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var l=p?Object.getOwnPropertyDescriptor(n,c):null;l&&(l.get||l.set)?Object.defineProperty(u,c,l):u[c]=n[c]}u.default=n,f&&f.set(n,u)})(r(d[0]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}m.exports=r(d[1])(r(d[2]),{collapsable:!1})},274,[129,239,275]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),u=r(d[0])(r(d[4])),l=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=b(n);if(o&&o.has(t))return o.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var f=l?Object.getOwnPropertyDescriptor(t,c):null;f&&(f.get||f.set)?Object.defineProperty(u,c,f):u[c]=t[c]}u.default=t,o&&o.set(t,u);return u})(r(d[5])),c=r(d[0])(r(d[6])),f=r(d[0])(r(d[7])),s=r(d[0])(r(d[8])),h=r(d[0])(r(d[9])),p=r(d[0])(r(d[10])),y=r(d[0])(r(d[11])),w=r(d[0])(r(d[12])),v=r(d[0])(r(d[13]));function b(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(b=function(t){return t?o:n})(t)}var S=1;function I(t,n){var o=S++;return n&&n(o),w.default.prefetchImage(t,o)}var E=function(n,o){var c,s,w,b,S=(0,y.default)(n.source),I=(0,y.default)(n.defaultSource),E=(0,y.default)(n.loadingIndicatorSource);S&&(''===S.uri&&console.warn('source.uri should not be an empty string'));if(n.src&&console.warn('The component requires a `source` property rather than `src`.'),n.children)throw new Error('The component cannot contain children. If you want to render content on top of the image, consider using the component or absolute positioning.');if(n.defaultSource&&n.loadingIndicatorSource)throw new Error('The component cannot have defaultSource and loadingIndicatorSource at the same time. 
Please use either defaultSource or loadingIndicatorSource.');if(!S||S.uri||Array.isArray(S)||(S=null),null!=(null==(c=S)?void 0:c.uri)){var P=S,_=P.width,j=P.height;w=(0,p.default)([{width:_,height:j},O.base,n.style]),b=[{uri:S.uri}]}else w=(0,p.default)([O.base,n.style]),b=S;var z=n.onLoadStart,C=n.onLoad,W=n.onLoadEnd,M=n.onError,T=(0,t.default)({},n,{style:w,shouldNotifyLoadEvents:!!(z||C||W||M),src:b,headers:null==(s=S)?void 0:s.headers,defaultSrc:I?I.uri:null,loadingIndicatorSrc:E?E.uri:null,ref:o});return l.createElement(h.default.Consumer,null,function(n){var o=null!==n?(0,t.default)({},T,{internal_analyticTag:n}):T;return l.createElement(f.default.Consumer,null,function(t){return t?l.createElement(v.default,o):l.createElement(u.default,o)})})};E=l.forwardRef(E),null!=s.default.unstable_createImageComponent&&(E=s.default.unstable_createImageComponent(E)),E.displayName='Image',E.getSize=function(t,n,o){return w.default.getSize(t).then(function(t){n(t.width,t.height)}).catch(o||function(){console.warn('Failed to get size for image: '+t)})},E.getSizeWithHeaders=function(t,n,o,u){return w.default.getSizeWithHeaders(t,n).then(function(t){o(t.width,t.height)}).catch(u||function(){console.warn('Failed to get size for image: '+t)})},E.prefetch=I,E.prefetchWithMetadata=function(t,n,o,u){I(t,u)},E.abortPrefetch=function(t){w.default.abortRequest(t)},E.queryCache=function(t){return n.default.async(function(o){for(;;)switch(o.prev=o.next){case 0:return o.next=2,n.default.awrap(w.default.queryCache(t));case 2:return o.abrupt("return",o.sent);case 3:case"end":return o.stop()}},null,null,null,Promise)},E.resolveAssetSource=y.default,E.propTypes=o.default;var O=c.default.create({base:{overflow:'hidden'}});m.exports=E},275,[3,29,75,276,280,129,173,176,281,283,168,155,284,282]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=r(d[0])({},r(d[1]),{style:r(d[2])(r(d[3])),source:r(d[4]).oneOfType([r(d[4]).shape({uri:r(d[4]).string,headers:r(d[4]).objectOf(r(d[4]).string)}),r(d[4]).number,r(d[4]).arrayOf(r(d[4]).shape({uri:r(d[4]).string,width:r(d[4]).number,height:r(d[4]).number,headers:r(d[4]).objectOf(r(d[4]).string)}))]),blurRadius:r(d[4]).number,defaultSource:r(d[4]).number,loadingIndicatorSource:r(d[4]).oneOfType([r(d[4]).shape({uri:r(d[4]).string}),r(d[4]).number]),progressiveRenderingEnabled:r(d[4]).bool,fadeDuration:r(d[4]).number,internal_analyticTag:r(d[4]).string,onLoadStart:r(d[4]).func,onError:r(d[4]).func,onLoad:r(d[4]).func,onLoadEnd:r(d[4]).func,testID:r(d[4]).string,resizeMethod:r(d[4]).oneOf(['auto','resize','scale']),resizeMode:r(d[4]).oneOf(['cover','contain','stretch','repeat','center'])});m.exports=n},276,[29,277,186,279,191]); +__d(function(g,r,i,a,m,e,d){'use strict';var 
o=r(d[0])(r(d[1]));m.exports={accessible:r(d[2]).bool,accessibilityLabel:r(d[2]).node,accessibilityHint:r(d[2]).string,accessibilityActions:r(d[2]).arrayOf(r(d[2]).string),accessibilityIgnoresInvertColors:r(d[2]).bool,accessibilityRole:r(d[2]).oneOf(r(d[3]).DeprecatedAccessibilityRoles),accessibilityState:r(d[2]).object,accessibilityValue:r(d[2]).object,accessibilityLiveRegion:r(d[2]).oneOf(['none','polite','assertive']),importantForAccessibility:r(d[2]).oneOf(['auto','yes','no','no-hide-descendants']),accessibilityViewIsModal:r(d[2]).bool,accessibilityElementsHidden:r(d[2]).bool,onAccessibilityAction:r(d[2]).func,onAccessibilityTap:r(d[2]).func,onMagicTap:r(d[2]).func,testID:r(d[2]).string,nativeID:r(d[2]).string,onResponderGrant:r(d[2]).func,onResponderMove:r(d[2]).func,onResponderReject:r(d[2]).func,onResponderRelease:r(d[2]).func,onResponderTerminate:r(d[2]).func,onResponderTerminationRequest:r(d[2]).func,onStartShouldSetResponder:r(d[2]).func,onStartShouldSetResponderCapture:r(d[2]).func,onMoveShouldSetResponder:r(d[2]).func,onMoveShouldSetResponderCapture:r(d[2]).func,hitSlop:r(d[4]),onLayout:r(d[2]).func,pointerEvents:r(d[2]).oneOf(['box-none','none','box-only','auto']),style:o,removeClippedSubviews:r(d[2]).bool,renderToHardwareTextureAndroid:r(d[2]).bool,shouldRasterizeIOS:r(d[2]).bool,collapsable:r(d[2]).bool,needsOffscreenAlphaCompositing:r(d[2]).bool}},277,[186,189,191,278,198]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports={DeprecatedAccessibilityRoles:['none','button','togglebutton','link','search','image','keyboardkey','text','adjustable','imagebutton','header','summary','alert','checkbox','combobox','menu','menubar','menuitem','progressbar','radio','radiogroup','scrollbar','spinbutton','switch','tab','tablist','timer','list','toolbar']}},278,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var o=r(d[0])({},r(d[1]),r(d[2]),r(d[3]),{resizeMode:r(d[4]).oneOf(['center','contain','cover','repeat','stretch']),backfaceVisibility:r(d[4]).oneOf(['visible','hidden']),backgroundColor:r(d[5]),borderColor:r(d[5]),borderWidth:r(d[4]).number,borderRadius:r(d[4]).number,overflow:r(d[4]).oneOf(['visible','hidden']),tintColor:r(d[5]),opacity:r(d[4]).number,overlayColor:r(d[4]).string,borderTopLeftRadius:r(d[4]).number,borderTopRightRadius:r(d[4]).number,borderBottomLeftRadius:r(d[4]).number,borderBottomRightRadius:r(d[4]).number});m.exports=o},279,[29,190,194,196,191,195]); +__d(function(g,r,i,a,m,e,d){function t(o){if("function"!=typeof WeakMap)return null;var n=new WeakMap,s=new WeakMap;return(t=function(t){return t?s:n})(o)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var o=(function(o,n){if(!n&&o&&o.__esModule)return o;if(null===o||"object"!=typeof o&&"function"!=typeof o)return{default:o};var s=t(n);if(s&&s.has(o))return s.get(o);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in o)if("default"!==f&&Object.prototype.hasOwnProperty.call(o,f)){var c=l?Object.getOwnPropertyDescriptor(o,f):null;c&&(c.get||c.set)?Object.defineProperty(u,f,c):u[f]=o[f]}u.default=o,s&&s.set(o,u);return 
u})(r(d[0])).get('RCTImageView',function(){return{uiViewClassName:'RCTImageView',bubblingEventTypes:{},directEventTypes:{topLoadStart:{registrationName:'onLoadStart'},topProgress:{registrationName:'onProgress'},topError:{registrationName:'onError'},topPartialLoad:{registrationName:'onPartialLoad'},topLoad:{registrationName:'onLoad'},topLoadEnd:{registrationName:'onLoadEnd'}},validAttributes:{blurRadius:!0,capInsets:{diff:r(d[1])},defaultSource:{process:r(d[2])},defaultSrc:!0,fadeDuration:!0,headers:!0,internal_analyticTag:!0,loadingIndicatorSrc:!0,onError:!0,onLoad:!0,onLoadEnd:!0,onLoadStart:!0,onPartialLoad:!0,onProgress:!0,overlayColor:{process:r(d[3])},progressiveRenderingEnabled:!0,resizeMethod:!0,resizeMode:!0,shouldNotifyLoadEvents:!0,source:!0,src:!0,tintColor:{process:r(d[3])}}}});e.default=o},280,[150,145,155,141]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f)})(r(d[0])),r(d[1])(r(d[2])),r(d[1])(r(d[3]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}e.default={unstable_createImageComponent:null}},281,[129,3,280,282]); +__d(function(g,r,i,a,m,e,d){'use strict';function t(o){if("function"!=typeof WeakMap)return null;var n=new WeakMap,s=new WeakMap;return(t=function(t){return t?s:n})(o)}var o=(function(o,n){if(!n&&o&&o.__esModule)return o;if(null===o||"object"!=typeof o&&"function"!=typeof o)return{default:o};var s=t(n);if(s&&s.has(o))return s.get(o);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in o)if("default"!==c&&Object.prototype.hasOwnProperty.call(o,c)){var p=l?Object.getOwnPropertyDescriptor(o,c):null;p&&(p.get||p.set)?Object.defineProperty(u,c,p):u[c]=o[c]}u.default=o,s&&s.set(o,u);return u})(r(d[0])).get('RCTTextInlineImage',function(){return{uiViewClassName:'RCTImageView',bubblingEventTypes:{},directEventTypes:{topLoadStart:{registrationName:'onLoadStart'},topProgress:{registrationName:'onProgress'},topError:{registrationName:'onError'},topPartialLoad:{registrationName:'onPartialLoad'},topLoad:{registrationName:'onLoad'},topLoadEnd:{registrationName:'onLoadEnd'}},validAttributes:{blurRadius:!0,capInsets:{diff:r(d[1])},defaultSource:{process:r(d[2])},defaultSrc:!0,fadeDuration:!0,headers:!0,internal_analyticTag:!0,loadingIndicatorSrc:!0,onError:!0,onLoad:!0,onLoadEnd:!0,onLoadStart:!0,onPartialLoad:!0,onProgress:!0,overlayColor:{process:r(d[3])},progressiveRenderingEnabled:!0,resizeMethod:!0,resizeMode:!0,shouldNotifyLoadEvents:!0,source:!0,src:!0,tintColor:{process:r(d[3])}}}});m.exports=o},282,[150,145,155,141]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in 
n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).createContext(null);e.default=n},283,[129]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).getEnforcing('ImageLoader');e.default=n},284,[21]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var f=o(n);if(f&&f.has(t))return f.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var p=l?Object.getOwnPropertyDescriptor(t,c):null;p&&(p.get||p.set)?Object.defineProperty(u,c,p):u[c]=t[c]}u.default=t,f&&f.set(t,u);return u})(r(d[2]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,f=new WeakMap;return(o=function(t){return t?f:n})(t)}var f=n.forwardRef(function(o,f){return n.createElement(r(d[3]),(0,t.default)({scrollEventThrottle:1e-4},o,{ref:f}))});m.exports=r(d[4])(f,{collapsable:!1})},285,[3,29,129,250,239]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var f=o(n);if(f&&f.has(t))return f.get(t);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in t)if("default"!==l&&Object.prototype.hasOwnProperty.call(t,l)){var p=c?Object.getOwnPropertyDescriptor(t,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=t[l]}u.default=t,f&&f.set(t,u);return u})(r(d[2])),f=r(d[0])(r(d[3]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,f=new WeakMap;return(o=function(t){return t?f:n})(t)}var u=n.forwardRef(function(o,u){return n.createElement(f.default,(0,t.default)({scrollEventThrottle:1e-4},o,{ref:u}))});m.exports=r(d[4])(u)},286,[3,29,129,287,239]); +__d(function(g,r,i,a,m,_e,d){'use strict';Object.defineProperty(_e,"__esModule",{value:!0}),_e.default=void 0;var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),f=r(d[0])(r(d[5])),u=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=y(t);if(n&&n.has(e))return n.get(e);var o={},f=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in e)if("default"!==u&&Object.prototype.hasOwnProperty.call(e,u)){var c=f?Object.getOwnPropertyDescriptor(e,u):null;c&&(c.get||c.set)?Object.defineProperty(o,u,c):o[u]=e[u]}o.default=e,n&&n.set(e,o);return o})(r(d[9])),p=r(d[0])(r(d[10])),v=["stickySectionHeadersEnabled"];function y(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(y=function(e){return 
e?n:t})(e)}function h(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var R=(function(y){(0,f.default)(w,y);var R,_,L=(R=w,_=h(),function(){var e,t=(0,c.default)(R);if(_){var n=(0,c.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,u.default)(this,e)});function w(){var e;(0,n.default)(this,w);for(var t=arguments.length,o=new Array(t),f=0;f=e.length?{done:!0}:{done:!1,value:e[i++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function I(e,t){if(e){if("string"==typeof e)return _(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?_(e,t):void 0}}function _(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,i=new Array(t);n0&&this.props.stickySectionHeadersEnabled)o+=this._listRef._getFrameMetricsApprox(t-e.itemIndex).length;var l=(0,n.default)({},e,{viewOffset:o,index:t});this._listRef.scrollToIndex(l)}}},{key:"getListRef",value:function(){return this._listRef}},{key:"render",value:function(){for(var e,i=this,o=this.props,l=(o.ItemSeparatorComponent,o.SectionSeparatorComponent,o.renderItem,o.renderSectionFooter,o.renderSectionHeader,o.sections,o.stickySectionHeadersEnabled,(0,t.default)(o,v)),u=this.props.ListHeaderComponent?1:0,c=this.props.stickySectionHeadersEnabled?[]:void 0,s=0,p=S(this.props.sections);!(e=p()).done;){var y=e.value;null!=c&&c.push(s+u),s+=2,s+=this.props.getItemCount(y.data)}var I=this._renderItem(s);return h.createElement(f.VirtualizedList,(0,n.default)({},l,{keyExtractor:this._keyExtractor,stickyHeaderIndices:c,renderItem:I,data:this.props.sections,getItem:function(e,t){return i._getItem(i.props,e,t)},getItemCount:function(){return s},onViewableItemsChanged:this.props.onViewableItemsChanged?this._onViewableItemsChanged:void 0,ref:this._captureRef}))}},{key:"_getItem",value:function(e,t,n){if(!t)return null;for(var i=n-1,o=0;o=o(p)+1)t-=o(p)+1;else return-1===t?{section:s,key:f+':header',index:null,header:!0,trailingSection:u[c+1]}:t===o(p)?{section:s,key:f+':footer',index:null,header:!1,trailingSection:u[c+1]}:{section:s,key:f+':'+(s.keyExtractor||l||r(d[13]).keyExtractor)(i(p,t),t),index:t,leadingItem:i(p,t-1),leadingSection:u[c-1],trailingItem:i(p,t+1),trailingSection:u[c+1]}}}},{key:"_getSeparatorComponent",value:function(e,t,n){if(!(t=t||this._subExtractor(e)))return null;var i=t.section.ItemSeparatorComponent||this.props.ItemSeparatorComponent,o=this.props.SectionSeparatorComponent,l=e===n-1,u=t.index===this.props.getItemCount(t.section.data)-1;return o&&u?o:!i||u||l?null:i}}]),x})(h.PureComponent);function k(t){var 
i=t.LeadingSeparatorComponent,o=t.SeparatorComponent,l=t.cellKey,u=t.prevCellKey,c=t.setSelfHighlightCallback,s=t.updateHighlightFor,p=t.setSelfUpdatePropsCallback,v=t.updatePropsFor,y=t.item,S=t.index,I=t.section,_=t.inverted,b=h.useState(!1),x=(0,e.default)(b,2),k=x[0],C=x[1],E=h.useState(!1),w=(0,e.default)(E,2),H=w[0],P=w[1],F=h.useState({leadingItem:t.leadingItem,leadingSection:t.leadingSection,section:t.section,trailingItem:t.item,trailingSection:t.trailingSection}),R=(0,e.default)(F,2),M=R[0],O=R[1],V=h.useState({leadingItem:t.item,leadingSection:t.leadingSection,section:t.section,trailingItem:t.trailingItem,trailingSection:t.trailingSection}),j=(0,e.default)(V,2),A=j[0],U=j[1];h.useEffect(function(){return c(l,P),p(l,U),function(){p(l,null),c(l,null)}},[l,c,U,p]);var L={highlight:function(){C(!0),P(!0),null!=u&&s(u,!0)},unhighlight:function(){C(!1),P(!1),null!=u&&s(u,!1)},updateProps:function(e,t){'leading'===e?null!=i?O((0,n.default)({},M,t)):null!=u&&v(u,(0,n.default)({},M,t)):'trailing'===e&&null!=o&&U((0,n.default)({},A,t))}},B=t.renderItem({item:y,index:S,section:I,separators:L}),K=null!=i&&h.createElement(i,(0,n.default)({highlighted:k},M)),T=null!=o&&h.createElement(o,(0,n.default)({highlighted:H},A));return K||T?h.createElement(f.View,null,!1===_?K:T,B,!1===_?T:K):B}m.exports=x},288,[3,23,103,29,7,8,14,10,12,15,18,1,129,248]); +__d(function(g,r,i,a,m,e,d){!(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},p=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var l=p?Object.getOwnPropertyDescriptor(n,c):null;l&&(l.get||l.set)?Object.defineProperty(u,c,l):u[c]=n[c]}u.default=n,f&&f.set(n,u)})(r(d[0]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}m.exports=r(d[1])(r(d[2]),{collapsable:!1})},289,[129,239,184]); +__d(function(g,r,i,a,m,e,d){!(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},p=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var l=p?Object.getOwnPropertyDescriptor(n,c):null;l&&(l.get||l.set)?Object.defineProperty(u,c,l):u[c]=n[c]}u.default=n,f&&f.set(n,u)})(r(d[0]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}m.exports=r(d[1])(r(d[2]),{collapsable:!0})},290,[129,239,174]); +__d(function(g,r,i,a,m,e,d){'use strict';var n={};m.exports=function(o,t){n[o]||(console.warn(t),n[o]=!0)}},291,[]); +__d(function(g,r,i,a,m,_e,d){'use strict';var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),c=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=p(t);if(n&&n.has(e))return n.get(e);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in e)if("default"!==c&&Object.prototype.hasOwnProperty.call(e,c)){var f=u?Object.getOwnPropertyDescriptor(e,c):null;f&&(f.get||f.set)?Object.defineProperty(o,c,f):o[c]=e[c]}o.default=e,n&&n.set(e,o);return o})(r(d[6])),f=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=r(d[0])(r(d[9]));function p(e){if("function"!=typeof WeakMap)return null;var t=new 
WeakMap,n=new WeakMap;return(p=function(e){return e?n:t})(e)}function y(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var h=(function(f){(0,n.default)(b,f);var p,h,O=(p=b,h=y(),function(){var e,t=(0,u.default)(p);if(h){var n=(0,u.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,o.default)(this,e)});function b(){return(0,e.default)(this,b),O.apply(this,arguments)}return(0,t.default)(b,[{key:"render",value:function(){return c.createElement(s.default,{style:[v.dummyDatePickerIOS,this.props.style]},c.createElement(l.default,{style:v.datePickerText},"DatePickerIOS is not supported on this platform!"))}}]),b})(c.Component),v=f.default.create({dummyDatePickerIOS:{height:100,width:300,backgroundColor:'#ffbcbc',borderWidth:1,borderColor:'red',alignItems:'center',justifyContent:'center',margin:10},datePickerText:{color:'#333333',margin:20}});m.exports=h},292,[3,7,8,10,12,15,129,173,184,174]); +__d(function(g,r,i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),s=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),f=r(d[0])(r(d[8])),c=k(r(d[9])),p=r(d[0])(r(d[10])),w=r(d[0])(r(d[11])),h=r(d[0])(r(d[12])),v=r(d[0])(r(d[13])),y=r(d[0])(r(d[14])),D=k(r(d[15])),b=["onDrawerStateChanged","renderNavigationView","onDrawerOpen","onDrawerClose"];function C(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(C=function(e){return e?n:t})(e)}function k(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=C(t);if(n&&n.has(e))return n.get(e);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var s in e)if("default"!==s&&Object.prototype.hasOwnProperty.call(e,s)){var l=u?Object.getOwnPropertyDescriptor(e,s):null;l&&(l.get||l.set)?Object.defineProperty(o,s,l):o[s]=e[s]}return o.default=e,n&&n.set(e,o),o}function _(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var S=['Idle','Dragging','Settling'],R=(function(w){(0,u.default)(O,w);var C,k,R=(C=O,k=_(),function(){var e,t=(0,l.default)(C);if(k){var n=(0,l.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,s.default)(this,e)});function O(){var e;(0,n.default)(this,O);for(var t=arguments.length,o=new Array(t),u=0;u=21&&null!=this.props.statusBarBackgroundColor,l=c.createElement(h.default,{style:[B.drawerSubview,{width:this.props.drawerWidth,backgroundColor:this.props.drawerBackgroundColor}],collapsable:!1},o(),s&&c.createElement(h.default,{style:B.drawerStatusBar})),w=c.createElement(h.default,{style:B.mainSubview,collapsable:!1},s&&c.createElement(p.default,{translucent:!0,backgroundColor:this.props.statusBarBackgroundColor}),s&&c.createElement(h.default,{style:[B.statusBar,{backgroundColor:this.props.statusBarBackgroundColor}]}),this.props.children);return 
c.createElement(D.default,(0,e.default)({},u,{ref:this._nativeRef,drawerWidth:this.props.drawerWidth,drawerPosition:this.props.drawerPosition,drawerLockMode:this.props.drawerLockMode,style:[B.base,this.props.style],onDrawerSlide:this._onDrawerSlide,onDrawerOpen:this._onDrawerOpen,onDrawerClose:this._onDrawerClose,onDrawerStateChanged:this._onDrawerStateChanged}),w,l)}},{key:"openDrawer",value:function(){D.Commands.openDrawer((0,y.default)(this._nativeRef.current))}},{key:"closeDrawer",value:function(){D.Commands.closeDrawer((0,y.default)(this._nativeRef.current))}},{key:"blur",value:function(){(0,y.default)(this._nativeRef.current).blur()}},{key:"focus",value:function(){(0,y.default)(this._nativeRef.current).focus()}},{key:"measure",value:function(e){(0,y.default)(this._nativeRef.current).measure(e)}},{key:"measureInWindow",value:function(e){(0,y.default)(this._nativeRef.current).measureInWindow(e)}},{key:"measureLayout",value:function(e,t,n){(0,y.default)(this._nativeRef.current).measureLayout(e,t,n)}},{key:"setNativeProps",value:function(e){(0,y.default)(this._nativeRef.current).setNativeProps(e)}}],[{key:"positions",get:function(){return console.warn('Setting DrawerLayoutAndroid drawerPosition using `DrawerLayoutAndroid.positions` is deprecated. Instead pass the string value "left" or "right"'),{Left:'left',Right:'right'}}}]),O})(c.Component);R.defaultProps={drawerBackgroundColor:'white'};var B=w.default.create({base:{flex:1,elevation:16},mainSubview:{position:'absolute',top:0,left:0,right:0,bottom:0},drawerSubview:{position:'absolute',top:0,bottom:0},statusBar:{height:p.default.currentHeight},drawerStatusBar:{position:'absolute',top:0,left:0,right:0,height:p.default.currentHeight,backgroundColor:'rgba(0, 0, 0, 0.251)'}});m.exports=R},293,[3,29,103,7,8,10,12,15,19,129,294,173,174,254,297,298]); +__d(function(g,r,i,a,m,_e,d){var t,e=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),l=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),s=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var n=S(e);if(n&&n.has(t))return n.get(t);var l={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in t)if("default"!==u&&Object.prototype.hasOwnProperty.call(t,u)){var c=o?Object.getOwnPropertyDescriptor(t,u):null;c&&(c.get||c.set)?Object.defineProperty(l,u,c):l[u]=t[u]}l.default=t,n&&n.set(t,l);return l})(r(d[7])),f=r(d[0])(r(d[8])),p=r(d[0])(r(d[9])),y=r(d[0])(r(d[10])),v=r(d[0])(r(d[11])),k=r(d[0])(r(d[12]));function S(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,n=new WeakMap;return(S=function(t){return t?n:e})(t)}function b(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}function _(t){var e,n,l=null!=(e=t.animated)&&e,o=null!=(n=t.showHideTransition)?n:'fade';return{backgroundColor:null!=t.backgroundColor?{value:t.backgroundColor,animated:l}:null,barStyle:null!=t.barStyle?{value:t.barStyle,animated:l}:null,translucent:t.translucent,hidden:null!=t.hidden?{value:t.hidden,animated:l,transition:o}:null,networkActivityIndicatorVisible:t.networkActivityIndicatorVisible}}var h=(function(t){(0,l.default)(h,t);var c,s,S=(c=h,s=b(),function(){var t,e=(0,u.default)(c);if(s){var n=(0,u.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else 
t=e.apply(this,arguments);return(0,o.default)(this,t)});function h(){var t;(0,e.default)(this,h);for(var n=arguments.length,l=new Array(n),o=0;o is only supported on iOS.'),0===c.Children.count(this.props.children)?null:c.createElement(s.default,{style:[this.props.style,v.container],nativeID:this.props.nativeID,backgroundColor:this.props.backgroundColor},this.props.children)}}]),b})(c.Component),v=l.default.create({container:{position:'absolute'}});m.exports=h},300,[3,7,8,10,12,15,129,19,173,301]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=(0,r(d[0])(r(d[1])).default)('InputAccessory',{interfaceOnly:!0,paperComponentName:'RCTInputAccessoryView',excludedPlatforms:['android']});e.default=t},301,[3,179]); +__d(function(g,r,i,a,m,_e,d){Object.defineProperty(_e,"__esModule",{value:!0}),_e.default=void 0;var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),f=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),s=r(d[0])(r(d[8])),c=r(d[0])(r(d[9])),y=r(d[0])(r(d[10])),h=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=_(t);if(n&&n.has(e))return n.get(e);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in e)if("default"!==f&&Object.prototype.hasOwnProperty.call(e,f)){var l=u?Object.getOwnPropertyDescriptor(e,f):null;l&&(l.get||l.set)?Object.defineProperty(o,f,l):o[f]=e[f]}o.default=e,n&&n.set(e,o);return o})(r(d[11])),p=r(d[0])(r(d[12])),v=r(d[0])(r(d[13])),b=["behavior","children","contentContainerStyle","enabled","keyboardVerticalOffset","style"];function _(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(_=function(e){return e?n:t})(e)}function k(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var O=(function(_){(0,u.default)(E,_);var O,w,L=(O=E,w=k(),function(){var e,t=(0,l.default)(O);if(w){var n=(0,l.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,f.default)(this,e)});function E(e){var t;return(0,n.default)(this,E),(t=L.call(this,e))._frame=null,t._keyboardEvent=null,t._subscriptions=[],t._initialFrameHeight=0,t._onKeyboardChange=function(e){t._keyboardEvent=e,t._updateBottomIfNecesarry()},t._onLayout=function(e){var n=null==t._frame;t._frame=e.nativeEvent.layout,t._initialFrameHeight||(t._initialFrameHeight=t._frame.height),n&&t._updateBottomIfNecesarry()},t._updateBottomIfNecesarry=function(){if(null!=t._keyboardEvent){var e=t._keyboardEvent,n=e.duration,o=e.easing,u=e.endCoordinates,f=t._relativeKeyboardHeight(u);t.state.bottom!==f&&(n&&o&&c.default.configureNext({duration:n>10?n:10,update:{duration:n>10?n:10,type:c.default.Types[o]||'keyboard'}}),t.setState({bottom:f}))}else t.setState({bottom:0})},t.state={bottom:0},t.viewRef=h.createRef(),t}return(0,o.default)(E,[{key:"_relativeKeyboardHeight",value:function(e){var t,n=this._frame;if(!n||!e)return 0;var o=e.screenY-(null!=(t=this.props.keyboardVerticalOffset)?t:0);return 
Math.max(n.y+n.height-o,0)}},{key:"componentDidMount",value:function(){'ios'===y.default.OS?this._subscriptions=[s.default.addListener('keyboardWillChangeFrame',this._onKeyboardChange)]:this._subscriptions=[s.default.addListener('keyboardDidHide',this._onKeyboardChange),s.default.addListener('keyboardDidShow',this._onKeyboardChange)]}},{key:"componentWillUnmount",value:function(){this._subscriptions.forEach(function(e){e.remove()})}},{key:"render",value:function(){var n=this.props,o=n.behavior,u=n.children,f=n.contentContainerStyle,l=n.enabled,s=void 0===l||l,c=(n.keyboardVerticalOffset,n.style),y=(0,t.default)(n,b),_=!0===s?this.state.bottom:0;switch(o){case'height':var k;return null!=this._frame&&this.state.bottom>0&&(k={height:this._initialFrameHeight-_,flex:0}),h.createElement(v.default,(0,e.default)({ref:this.viewRef,style:p.default.compose(c,k),onLayout:this._onLayout},y),u);case'position':return h.createElement(v.default,(0,e.default)({ref:this.viewRef,style:c,onLayout:this._onLayout},y),h.createElement(v.default,{style:p.default.compose(f,{bottom:_})},u));case'padding':return h.createElement(v.default,(0,e.default)({ref:this.viewRef,style:p.default.compose(c,{paddingBottom:_}),onLayout:this._onLayout},y),u);default:return h.createElement(v.default,(0,e.default)({ref:this.viewRef,onLayout:this._onLayout,style:c},y),u)}}}]),E})(h.Component);_e.default=O},302,[3,29,103,7,8,10,12,15,252,253,19,129,173,174]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=r(d[0])},303,[304]); +__d(function(g,r,i,a,m,_e,d){'use strict';var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),u=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),f=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=l(t);if(n&&n.has(e))return n.get(e);var u={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in e)if("default"!==f&&Object.prototype.hasOwnProperty.call(e,f)){var c=o?Object.getOwnPropertyDescriptor(e,f):null;c&&(c.get||c.set)?Object.defineProperty(u,f,c):u[f]=e[f]}u.default=e,n&&n.set(e,u);return u})(r(d[6])),c=r(d[0])(r(d[7]));function l(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(l=function(e){return e?n:t})(e)}function p(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var s=(function(c){(0,n.default)(h,c);var l,s,v=(l=h,s=p(),function(){var e,t=(0,o.default)(l);if(s){var n=(0,o.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,u.default)(this,e)});function h(){return(0,e.default)(this,h),v.apply(this,arguments)}return(0,t.default)(h,[{key:"render",value:function(){var e=r(d[8]);return f.createElement(e,{style:[y.unimplementedView,this.props.style]},this.props.children)}}]),h})(f.Component),y=c.default.create({unimplementedView:{}});m.exports=s},304,[3,7,8,10,12,15,129,173,174]); +__d(function(g,r,i,a,m,_e,d){var t,e,n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),s=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),p=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),f=(r(d[0])(r(d[8])),r(d[0])(r(d[9])),r(d[0])(r(d[10])));function h(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return 
Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var v=r(d[11]),y=0,R=(function(t){(0,l.default)(R,t);var e,n,c=(e=R,n=h(),function(){var t,o=(0,p.default)(e);if(n){var s=(0,p.default)(this).constructor;t=Reflect.construct(o,arguments,s)}else t=o.apply(this,arguments);return(0,u.default)(this,t)});function R(t){var e;return(0,o.default)(this,R),(e=c.call(this,t))._identifier=y++,e}return(0,s.default)(R,[{key:"componentDidMount",value:function(){}},{key:"componentWillUnmount",value:function(){this._eventSubscription&&this._eventSubscription.remove()}},{key:"componentDidUpdate",value:function(){}},{key:"render",value:function(){var t=this;if(!0!==this.props.visible)return null;var e={backgroundColor:!0===this.props.transparent?'transparent':'white'},n=this.props.animationType||'none',o=this.props.presentationStyle;o||(o='fullScreen',!0===this.props.transparent&&(o='overFullScreen'));var s=this.props.children;return v.createElement(f.default,{animationType:n,presentationStyle:o,transparent:this.props.transparent,hardwareAccelerated:this.props.hardwareAccelerated,onRequestClose:this.props.onRequestClose,onShow:this.props.onShow,onDismiss:function(){t.props.onDismiss&&t.props.onDismiss()},visible:this.props.visible,statusBarTranslucent:this.props.statusBarTranslucent,identifier:this._identifier,style:b.modal,onStartShouldSetResponder:this._shouldSetResponder,supportedOrientations:this.props.supportedOrientations,onOrientationChange:this.props.onOrientationChange},v.createElement(r(d[12]).VirtualizedListContextResetter,null,v.createElement(r(d[13]).Context.Provider,{value:null},v.createElement(r(d[14]),{style:[b.container,e],collapsable:!1},s))))}},{key:"_shouldSetResponder",value:function(){return!0}}]),R})(v.Component);R.defaultProps={visible:!0,hardwareAccelerated:!1},R.contextType=r(d[15]).RootTagContext;var S=r(d[16]).getConstants().isRTL?'right':'left',b=r(d[17]).create({modal:{position:'absolute'},container:(t={},(0,n.default)(t,S,0),(0,n.default)(t,"top",0),(0,n.default)(t,"flex",1),t)}),C=null!=(e=c.default.unstable_Modal)?e:R;m.exports=C},305,[3,247,7,8,10,12,15,306,95,307,308,129,273,250,174,309,310,173]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;e.default={unstable_Modal:null}},306,[]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('ModalManager');e.default=n},307,[21]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var o=(0,r(d[0])(r(d[1])).default)('ModalHostView',{interfaceOnly:!0,paperComponentName:'RCTModalHostView'});e.default=o},308,[3,179]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.RootTagContext=void 
0,e.createRootTag=function(t){return t};var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var p in n)if("default"!==p&&Object.prototype.hasOwnProperty.call(n,p)){var l=c?Object.getOwnPropertyDescriptor(n,p):null;l&&(l.get||l.set)?Object.defineProperty(f,p,l):f[p]=n[p]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).createContext(0);e.RootTagContext=n},309,[129]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=(function(){if(t.default){var n=t.default.getConstants(),f=n.isRTL,L=n.doLeftAndRightSwapInRTL,R=n.localeIdentifier;return{isRTL:f,doLeftAndRightSwapInRTL:L,localeIdentifier:R}}return{isRTL:!1,doLeftAndRightSwapInRTL:!0}})();m.exports={getConstants:function(){return n},allowRTL:function(n){t.default&&t.default.allowRTL(n)},forceRTL:function(n){t.default&&t.default.forceRTL(n)},swapLeftAndRightInRTL:function(n){t.default&&t.default.swapLeftAndRightInRTL(n)},isRTL:n.isRTL,doLeftAndRightSwapInRTL:n.doLeftAndRightSwapInRTL}},310,[3,311]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('I18nManager');e.default=n},311,[21]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=r(d[0])(r(d[1])),s=r(d[0])(r(d[2])),t=r(d[0])(r(d[3])),l=(function(n,s){if(!s&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var t=p(s);if(t&&t.has(n))return t.get(n);var l={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in n)if("default"!==u&&Object.prototype.hasOwnProperty.call(n,u)){var f=o?Object.getOwnPropertyDescriptor(n,u):null;f&&(f.get||f.set)?Object.defineProperty(l,u,f):l[u]=n[u]}l.default=n,t&&t.set(n,l);return l})(r(d[4])),o=r(d[0])(r(d[5])),u=r(d[0])(r(d[6])),f=r(d[0])(r(d[7])),c=["accessible","android_disableSound","android_ripple","cancelable","children","delayLongPress","disabled","focusable","onLongPress","onPress","onPressIn","onPressOut","pressRetentionOffset","style","testOnly_pressed","unstable_pressDelay"];function p(n){if("function"!=typeof WeakMap)return null;var s=new WeakMap,t=new WeakMap;return(p=function(n){return n?t:s})(n)}function b(n){var t=(0,l.useState)(!1),o=(0,s.default)(t,2),u=o[0],f=o[1];return[u||n,f]}var P=l.memo(l.forwardRef(function(p,P){var y=p.accessible,v=p.android_disableSound,O=p.android_ripple,_=p.cancelable,S=p.children,M=p.delayLongPress,h=p.disabled,j=p.focusable,w=p.onLongPress,I=p.onPress,L=p.onPressIn,R=p.onPressOut,D=p.pressRetentionOffset,k=p.style,W=p.testOnly_pressed,z=p.unstable_pressDelay,E=(0,t.default)(p,c),H=(0,l.useRef)(null);(0,l.useImperativeHandle)(P,function(){return H.current});var 
N=(0,o.default)(O,H),q=b(!0===W),x=(0,s.default)(q,2),A=x[0],B=x[1],C=(0,r(d[8]).normalizeRect)(p.hitSlop),F=null!=h?(0,n.default)({},p.accessibilityState,{disabled:h}):p.accessibilityState,G=(0,n.default)({},E,null==N?void 0:N.viewProps,{accessible:!1!==y,accessibilityState:F,focusable:!1!==j,hitSlop:C}),J=(0,l.useMemo)(function(){return{cancelable:_,disabled:h,hitSlop:C,pressRectOffset:D,android_disableSound:v,delayLongPress:M,delayPressIn:z,onLongPress:w,onPress:I,onPressIn:function(n){null!=N&&N.onPressIn(n),B(!0),null!=L&&L(n)},onPressMove:null==N?void 0:N.onPressMove,onPressOut:function(n){null!=N&&N.onPressOut(n),B(!1),null!=R&&R(n)}}},[v,N,_,M,h,C,w,I,L,R,D,B,z]),K=(0,u.default)(J);return l.createElement(f.default,(0,n.default)({},G,K,{ref:H,style:'function'==typeof k?k({pressed:A}):k,collapsable:!1}),'function'==typeof S?S({pressed:A}):S,null)}));P.displayName='Pressable';var y=P;e.default=y},312,[3,29,23,103,129,313,200,174,206]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(l,u){var s=null!=l?l:{},c=s.color,f=s.borderless,p=s.radius,v=s.foreground;return(0,o.useMemo)(function(){if('android'===t.Platform.OS&&t.Platform.Version>=21&&(null!=c||null!=f||null!=p)){var o=(0,t.processColor)(c);(0,n.default)(null==o||'number'==typeof o,'Unexpected color given for Ripple color');var l={type:'RippleAndroid',color:o,borderless:!0===f,rippleRadius:p};return{viewProps:!0===v?{nativeForegroundAndroid:l}:{nativeBackgroundAndroid:l},onPressIn:function(n){var t,o,l=u.current;null!=l&&(r(d[4]).Commands.hotspotUpdate(l,null!=(t=n.nativeEvent.locationX)?t:0,null!=(o=n.nativeEvent.locationY)?o:0),r(d[4]).Commands.setPressed(l,!0))},onPressMove:function(n){var t,o,l=u.current;null!=l&&r(d[4]).Commands.hotspotUpdate(l,null!=(t=n.nativeEvent.locationX)?t:0,null!=(o=n.nativeEvent.locationY)?o:0)},onPressOut:function(n){var t=u.current;null!=t&&r(d[4]).Commands.setPressed(t,!1)}}}return null},[f,c,v,p,u])};var n=r(d[0])(r(d[1])),t=r(d[2]),o=(function(n,t){if(!t&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var o=l(t);if(o&&o.has(n))return o.get(n);var u={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var f=s?Object.getOwnPropertyDescriptor(n,c):null;f&&(f.get||f.set)?Object.defineProperty(u,c,f):u[c]=n[c]}u.default=n,o&&o.set(n,u);return u})(r(d[3]));function l(n){if("function"!=typeof WeakMap)return null;var t=new WeakMap,o=new WeakMap;return(l=function(n){return n?o:t})(n)}},313,[3,18,1,129,175]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),f=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var n=p(e);if(n&&n.has(t))return n.get(t);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var c=u?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(o,f,c):o[f]=t[f]}o.default=t,n&&n.set(t,o);return o})(r(d[6])),c=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=r(d[0])(r(d[9]));function p(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,n=new WeakMap;return(p=function(t){return t?n:e})(t)}function y(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof 
Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var h=(function(c){(0,n.default)(O,c);var p,h,b=(p=O,h=y(),function(){var t,e=(0,u.default)(p);if(h){var n=(0,u.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return(0,o.default)(this,t)});function O(){return(0,t.default)(this,O),b.apply(this,arguments)}return(0,e.default)(O,[{key:"render",value:function(){return f.createElement(s.default,{style:[v.dummy,this.props.style]},f.createElement(l.default,{style:v.text},"ProgressViewIOS is not supported on this platform!"))}}]),O})(f.Component),v=c.default.create({dummy:{width:120,height:20,backgroundColor:'#ffbcbc',borderWidth:1,borderColor:'red',alignItems:'center',justifyContent:'center'},text:{color:'#333333',margin:5,fontSize:10}});m.exports=h},314,[3,7,8,10,12,15,129,173,184,174]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t,n=r(d[0])(r(d[1])),u=r(d[0])(r(d[2])),f=r(d[0])(r(d[3])),o=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=c(n);if(u&&u.has(t))return u.get(t);var f={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in t)if("default"!==l&&Object.prototype.hasOwnProperty.call(t,l)){var p=o?Object.getOwnPropertyDescriptor(t,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=t[l]}f.default=t,u&&u.set(t,f);return f})(r(d[4])),l=r(d[0])(r(d[5])),p=["emulateUnlessSupported"];function c(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(c=function(t){return t?u:n})(t)}if('android'===f.default.OS)t=o.forwardRef(function(t,f){t.emulateUnlessSupported;var c=(0,u.default)(t,p);return o.createElement(l.default,(0,n.default)({},c,{ref:f}))});else{var s=r(d[6]).default;t=o.forwardRef(function(t,u){return o.createElement(s,(0,n.default)({emulateUnlessSupported:!0},t,{ref:u}))})}var v=t;e.default=v},315,[3,29,103,19,129,174,316]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var f=(0,r(d[0])(r(d[1])).default)('SafeAreaView',{paperComponentName:'RCTSafeAreaView',interfaceOnly:!0});e.default=f},316,[3,179]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),f=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var n=p(e);if(n&&n.has(t))return n.get(t);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var c=u?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(o,f,c):o[f]=t[f]}o.default=t,n&&n.set(t,o);return o})(r(d[6])),c=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=r(d[0])(r(d[9]));function p(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,n=new WeakMap;return(p=function(t){return t?n:e})(t)}function y(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var h=(function(c){(0,n.default)(O,c);var p,h,b=(p=O,h=y(),function(){var t,e=(0,u.default)(p);if(h){var n=(0,u.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else 
t=e.apply(this,arguments);return(0,o.default)(this,t)});function O(){return(0,t.default)(this,O),b.apply(this,arguments)}return(0,e.default)(O,[{key:"render",value:function(){return f.createElement(s.default,{style:[v.dummy,this.props.style]},f.createElement(l.default,{style:v.text},"SegmentedControlIOS is not supported on this platform!"))}}]),O})(f.Component),v=c.default.create({dummy:{width:120,height:50,backgroundColor:'#ffbcbc',borderWidth:1,borderColor:'red',alignItems:'center',justifyContent:'center'},text:{color:'#333333',margin:5,fontSize:10}});m.exports=h},317,[3,7,8,10,12,15,129,173,184,174]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),l=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var l=c(n);if(l&&l.has(t))return l.get(t);var u={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var s=o?Object.getOwnPropertyDescriptor(t,f):null;s&&(s.get||s.set)?Object.defineProperty(u,f,s):u[f]=t[f]}u.default=t,l&&l.set(t,u);return u})(r(d[3])),u=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),f=r(d[0])(r(d[6])),s=["value","minimumValue","maximumValue","step","onValueChange","onSlidingComplete"];function c(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,l=new WeakMap;return(c=function(t){return t?l:n})(t)}var v,p=l.forwardRef(function(c,p){var b,y=f.default.compose(v.slider,c.style),S=c.value,O=void 0===S?.5:S,h=c.minimumValue,V=void 0===h?0:h,C=c.maximumValue,j=void 0===C?1:C,w=c.step,E=void 0===w?0:w,P=c.onValueChange,x=c.onSlidingComplete,M=(0,n.default)(c,s),R=P?function(t){var n=!0;'android'===u.default.OS&&(n=null!=t.nativeEvent.fromUser&&t.nativeEvent.fromUser),n&&P(t.nativeEvent.value)}:null,_=R,k=x?function(t){x(t.nativeEvent.value)}:null,W=!0===c.disabled||!0===(null==(b=c.accessibilityState)?void 0:b.disabled),D=W?(0,t.default)({},c.accessibilityState,{disabled:!0}):c.accessibilityState;return l.createElement(o.default,(0,t.default)({},M,{accessibilityState:D,enabled:!W,disabled:W,maximumValue:j,minimumValue:V,onChange:_,onResponderTerminationRequest:function(){return!1},onSlidingComplete:k,onStartShouldSetResponder:function(){return!0},onValueChange:R,ref:p,step:E,style:y,value:O}))});v='ios'===u.default.OS?f.default.create({slider:{height:40}}):f.default.create({slider:{}}),m.exports=p},318,[3,29,103,129,19,319,173]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var l=(0,r(d[0])(r(d[1])).default)('Slider',{interfaceOnly:!0,paperComponentName:'RCTSlider'});e.default=l},319,[3,179]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),u=C(r(d[5])),f=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),s=C(r(d[8])),v=C(r(d[9])),b=["disabled","ios_backgroundColor","onChange","onValueChange","style","thumbColor","trackColor","value"];function p(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(p=function(t){return t?o:n})(t)}function C(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=p(n);if(o&&o.has(t))return o.get(t);var l={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var 
c=u?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(l,f,c):l[f]=t[f]}return l.default=t,o&&o.set(t,l),l}var h=function(){return!1},y=function(){return!0},R=u.forwardRef(function(p,C){var R=p.disabled,k=p.ios_backgroundColor,O=p.onChange,w=p.onValueChange,S=p.style,_=p.thumbColor,j=p.trackColor,P=p.value,T=(0,o.default)(p,b),E=null==j?void 0:j.false,M=null==j?void 0:j.true,V=u.useRef(null),F=(0,c.default)(V,C),W=u.useState({value:null}),q=(0,n.default)(W,2),D=q[0],N=q[1],L=function(t){null==O||O(t),null==w||w(t.nativeEvent.value),N({value:t.nativeEvent.value})};if(u.useLayoutEffect(function(){var t,n=!0===P;D.value!==n&&null!=(null==(t=V.current)?void 0:t.setNativeProps)&&('android'===l.default.OS?s.Commands.setNativeValue(V.current,n):v.Commands.setValue(V.current,n))},[P,D]),'android'===l.default.OS){var x,z={enabled:!0!==R,on:!0===P,style:S,thumbTintColor:_,trackColorForFalse:E,trackColorForTrue:M,trackTintColor:!0===P?M:E};return u.createElement(s.default,(0,t.default)({},T,z,{accessibilityRole:null!=(x=p.accessibilityRole)?x:'switch',onChange:L,onResponderTerminationRequest:h,onStartShouldSetResponder:y,ref:F}))}var A,B={disabled:R,onTintColor:M,style:f.default.compose({height:31,width:51},f.default.compose(S,null==k?null:{backgroundColor:k,borderRadius:16})),thumbTintColor:_,tintColor:E,value:!0===P};return u.createElement(v.default,(0,t.default)({},T,B,{accessibilityRole:null!=(A=p.accessibilityRole)?A:'switch',onChange:L,onResponderTerminationRequest:h,onStartShouldSetResponder:y,ref:F}))});e.default=R},320,[3,29,23,103,19,129,173,321,322,323]); +__d(function(g,r,_i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(){for(var o=arguments.length,u=new Array(o),i=0;i=t.length?{done:!0}:{done:!1,value:t[i++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function o(t,n){if(t){if("string"==typeof t)return u(t,n);var o=Object.prototype.toString.call(t).slice(8,-1);return"Object"===o&&t.constructor&&(o=t.constructor.name),"Map"===o||"Set"===o?Array.from(t):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?u(t,n):void 0}}function u(t,n){(null==n||n>t.length)&&(n=t.length);for(var o=0,u=new Array(n);o1&&(be=u.createElement(f.default,null,be)),oe=u.createElement(I,(0,t.default)({ref:ee},o,pe,{accessible:ce,autoCapitalize:ye,blurOnSubmit:ae,caretHidden:se,children:be,disableFullscreenUI:o.disableFullscreenUI,focusable:ie,mostRecentEventCount:H,onBlur:ue,onChange:te,onFocus:le,onScroll:re,onSelectionChange:ne,placeholder:Se,selection:M,style:Ce,text:X,textBreakStrategy:o.textBreakStrategy}))}return u.createElement(p.default.Provider,{value:!0},oe)}var M=u.forwardRef(function(l,o){var c=l.allowFontScaling,s=void 0===c||c,f=l.rejectResponderTermination,p=void 0===f||f,v=l.underlineColorAndroid,C=void 0===v?'transparent':v,y=(0,n.default)(l,F);return u.createElement(A,(0,t.default)({allowFontScaling:s,rejectResponderTermination:p,underlineColorAndroid:C},y,{forwardedRef:o}))});M.propTypes=o.default,M.State={currentlyFocusedInput:v.default.currentlyFocusedInput,currentlyFocusedField:v.default.currentlyFocusedField,focusTextInput:v.default.focusTextInput,blurTextInput:v.default.blurTextInput};var z=s.default.create({multilineInput:{paddingTop:5}});m.exports=M},324,[3,29,103,23,129,325,19,173,184,176,135,18,297,241,200,136,326,328]); +__d(function(g,r,i,a,m,e,d){'use strict';var 
n=['phoneNumber','link','address','calendarEvent','none','all'];m.exports=r(d[0])({},r(d[1]),{autoCapitalize:r(d[2]).oneOf(['none','sentences','words','characters']),autoComplete:r(d[2]).oneOf(['cc-csc','cc-exp','cc-exp-month','cc-exp-year','cc-number','email','name','password','postal-code','street-address','tel','username','off']),autoCorrect:r(d[2]).bool,spellCheck:r(d[2]).bool,autoFocus:r(d[2]).bool,allowFontScaling:r(d[2]).bool,maxFontSizeMultiplier:r(d[2]).number,editable:r(d[2]).bool,keyboardType:r(d[2]).oneOf(['default','email-address','numeric','phone-pad','number-pad','url','ascii-capable','numbers-and-punctuation','name-phone-pad','decimal-pad','twitter','web-search','ascii-capable-number-pad','visible-password']),keyboardAppearance:r(d[2]).oneOf(['default','light','dark']),returnKeyType:r(d[2]).oneOf(['done','go','next','search','send','none','previous','default','emergency-call','google','join','route','yahoo']),returnKeyLabel:r(d[2]).string,maxLength:r(d[2]).number,numberOfLines:r(d[2]).number,disableFullscreenUI:r(d[2]).bool,enablesReturnKeyAutomatically:r(d[2]).bool,multiline:r(d[2]).bool,textBreakStrategy:r(d[2]).oneOf(['simple','highQuality','balanced']),onBlur:r(d[2]).func,onFocus:r(d[2]).func,onChange:r(d[2]).func,onChangeText:r(d[2]).func,onContentSizeChange:r(d[2]).func,onTextInput:r(d[2]).func,onEndEditing:r(d[2]).func,onSelectionChange:r(d[2]).func,onSubmitEditing:r(d[2]).func,onKeyPress:r(d[2]).func,onLayout:r(d[2]).func,onScroll:r(d[2]).func,placeholder:r(d[2]).string,placeholderTextColor:r(d[3]),scrollEnabled:r(d[2]).bool,secureTextEntry:r(d[2]).bool,selectionColor:r(d[3]),selection:r(d[2]).shape({start:r(d[2]).number.isRequired,end:r(d[2]).number}),value:r(d[2]).string,defaultValue:r(d[2]).string,clearButtonMode:r(d[2]).oneOf(['never','while-editing','unless-editing','always']),clearTextOnFocus:r(d[2]).bool,selectTextOnFocus:r(d[2]).bool,blurOnSubmit:r(d[2]).bool,style:r(d[4]).style,underlineColorAndroid:r(d[3]),inlineImageLeft:r(d[2]).string,inlineImagePadding:r(d[2]).number,rejectResponderTermination:r(d[2]).bool,dataDetectorTypes:r(d[2]).oneOfType([r(d[2]).oneOf(n),r(d[2]).arrayOf(r(d[2]).oneOf(n))]),caretHidden:r(d[2]).bool,contextMenuHidden:r(d[2]).bool,inputAccessoryViewID:r(d[2]).string,textContentType:r(d[2]).oneOf(['none','URL','addressCity','addressCityAndState','addressState','countryName','creditCardNumber','emailAddress','familyName','fullStreetAddress','givenName','jobTitle','location','middleName','name','namePrefix','nameSuffix','nickname','organizationName','postalCode','streetAddressLine1','streetAddressLine2','sublocality','telephoneNumber','username','password','newPassword','oneTimeCode']),showSoftInputOnFocus:r(d[2]).bool})},325,[29,277,191,195,185]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=e.Commands=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=o(n);if(u&&u.has(t))return u.get(t);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var p=l?Object.getOwnPropertyDescriptor(t,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=t[c]}f.default=t,u&&u.set(t,f);return f})(r(d[3]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(o=function(t){return t?u:n})(t)}var 
f=(0,t.default)({supportedCommands:['focus','blur','setTextAndSelection']});e.Commands=f;var l=u.get('RCTSinglelineTextInputView',function(){return n.default});e.default=l},326,[3,137,327,150]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),n={uiViewClassName:'RCTSinglelineTextInputView',bubblingEventTypes:{topBlur:{phasedRegistrationNames:{bubbled:'onBlur',captured:'onBlurCapture'}},topChange:{phasedRegistrationNames:{bubbled:'onChange',captured:'onChangeCapture'}},topEndEditing:{phasedRegistrationNames:{bubbled:'onEndEditing',captured:'onEndEditingCapture'}},topFocus:{phasedRegistrationNames:{bubbled:'onFocus',captured:'onFocusCapture'}},topKeyPress:{phasedRegistrationNames:{bubbled:'onKeyPress',captured:'onKeyPressCapture'}},topSubmitEditing:{phasedRegistrationNames:{bubbled:'onSubmitEditing',captured:'onSubmitEditingCapture'}},topTouchCancel:{phasedRegistrationNames:{bubbled:'onTouchCancel',captured:'onTouchCancelCapture'}},topTouchEnd:{phasedRegistrationNames:{bubbled:'onTouchEnd',captured:'onTouchEndCapture'}},topTouchMove:{phasedRegistrationNames:{bubbled:'onTouchMove',captured:'onTouchMoveCapture'}}},directEventTypes:{},validAttributes:(0,t.default)({},o.default.validAttributes,{fontSize:!0,fontWeight:!0,fontVariant:!0,textShadowOffset:{diff:r(d[3])},allowFontScaling:!0,fontStyle:!0,textTransform:!0,textAlign:!0,fontFamily:!0,lineHeight:!0,isHighlighted:!0,writingDirection:!0,textDecorationLine:!0,textShadowRadius:!0,letterSpacing:!0,textDecorationStyle:!0,textDecorationColor:{process:r(d[4])},color:{process:r(d[4])},maxFontSizeMultiplier:!0,textShadowColor:{process:r(d[4])},editable:!0,inputAccessoryViewID:!0,caretHidden:!0,enablesReturnKeyAutomatically:!0,placeholderTextColor:{process:r(d[4])},onSelectionChange:!0,clearButtonMode:!0,onContentSizeChange:!0,keyboardType:!0,selection:!0,returnKeyType:!0,blurOnSubmit:!0,mostRecentEventCount:!0,onChange:!0,scrollEnabled:!0,selectionColor:{process:r(d[4])},contextMenuHidden:!0,secureTextEntry:!0,onTextInput:!0,placeholder:!0,autoCorrect:!0,onScroll:!0,multiline:!0,textContentType:!0,maxLength:!0,autoCapitalize:!0,keyboardAppearance:!0,passwordRules:!0,spellCheck:!0,selectTextOnFocus:!0,text:!0,clearTextOnFocus:!0})};m.exports=n},327,[3,29,139,146,141]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=e.Commands=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=o(n);if(u&&u.has(t))return u.get(t);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var p=l?Object.getOwnPropertyDescriptor(t,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=t[c]}f.default=t,u&&u.set(t,f);return f})(r(d[3]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(o=function(t){return t?u:n})(t)}var f=(0,t.default)({supportedCommands:['focus','blur','setTextAndSelection']});e.Commands=f;var l=u.get('RCTMultilineTextInputView',function(){return n.default});e.default=l},328,[3,137,327,150]); +__d(function(g,r,i,a,m,_e,d){var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),o=((function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=u(e);if(o&&o.has(t))return o.get(t);var E={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var n in 
t)if("default"!==n&&Object.prototype.hasOwnProperty.call(t,n)){var l=s?Object.getOwnPropertyDescriptor(t,n):null;l&&(l.get||l.set)?Object.defineProperty(E,n,l):E[n]=t[n]}E.default=t,o&&o.set(t,E)})(r(d[3])),r(d[0])(r(d[4]))),E=r(d[0])(r(d[5])),s=r(d[0])(r(d[6])),n=r(d[0])(r(d[7])),l=r(d[0])(r(d[8]));function u(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,o=new WeakMap;return(u=function(t){return t?o:e})(t)}var h=function(t){var e=t.touches,o=t.changedTouches,E=e&&e.length>0,s=o&&o.length>0;return!E&&s?o[0]:E?e[0]:t},R='NOT_RESPONDER',_='RESPONDER_INACTIVE_PRESS_IN',c='RESPONDER_INACTIVE_PRESS_OUT',S='RESPONDER_ACTIVE_PRESS_IN',T='RESPONDER_ACTIVE_PRESS_OUT',P='RESPONDER_ACTIVE_LONG_PRESS_IN',D='RESPONDER_ACTIVE_LONG_PRESS_OUT',N='ERROR',O={NOT_RESPONDER:!1,RESPONDER_INACTIVE_PRESS_IN:!1,RESPONDER_INACTIVE_PRESS_OUT:!1,RESPONDER_ACTIVE_PRESS_IN:!1,RESPONDER_ACTIVE_PRESS_OUT:!1,RESPONDER_ACTIVE_LONG_PRESS_IN:!1,RESPONDER_ACTIVE_LONG_PRESS_OUT:!1,ERROR:!1},p=(0,e.default)({},O,{RESPONDER_ACTIVE_PRESS_OUT:!0,RESPONDER_ACTIVE_PRESS_IN:!0}),f=(0,e.default)({},O,{RESPONDER_INACTIVE_PRESS_IN:!0,RESPONDER_ACTIVE_PRESS_IN:!0,RESPONDER_ACTIVE_LONG_PRESS_IN:!0}),A=(0,e.default)({},O,{RESPONDER_ACTIVE_LONG_PRESS_IN:!0}),b='DELAY',I='RESPONDER_GRANT',L='RESPONDER_RELEASE',v='RESPONDER_TERMINATED',y='ENTER_PRESS_RECT',C='LEAVE_PRESS_RECT',G='LONG_PRESS_DETECTED',V={NOT_RESPONDER:{DELAY:N,RESPONDER_GRANT:_,RESPONDER_RELEASE:N,RESPONDER_TERMINATED:N,ENTER_PRESS_RECT:N,LEAVE_PRESS_RECT:N,LONG_PRESS_DETECTED:N},RESPONDER_INACTIVE_PRESS_IN:{DELAY:S,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:_,LEAVE_PRESS_RECT:c,LONG_PRESS_DETECTED:N},RESPONDER_INACTIVE_PRESS_OUT:{DELAY:T,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:_,LEAVE_PRESS_RECT:c,LONG_PRESS_DETECTED:N},RESPONDER_ACTIVE_PRESS_IN:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:S,LEAVE_PRESS_RECT:T,LONG_PRESS_DETECTED:P},RESPONDER_ACTIVE_PRESS_OUT:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:S,LEAVE_PRESS_RECT:T,LONG_PRESS_DETECTED:N},RESPONDER_ACTIVE_LONG_PRESS_IN:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:P,LEAVE_PRESS_RECT:D,LONG_PRESS_DETECTED:P},RESPONDER_ACTIVE_LONG_PRESS_OUT:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:P,LEAVE_PRESS_RECT:D,LONG_PRESS_DETECTED:N},error:{DELAY:R,RESPONDER_GRANT:_,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:R,LEAVE_PRESS_RECT:R,LONG_PRESS_DETECTED:R}},H={componentDidMount:function(){E.default.isTV},componentWillUnmount:function(){this.touchableDelayTimeout&&clearTimeout(this.touchableDelayTimeout),this.longPressDelayTimeout&&clearTimeout(this.longPressDelayTimeout),this.pressOutDelayTimeout&&clearTimeout(this.pressOutDelayTimeout)},touchableGetInitialState:function(){return{touchable:{touchState:void 0,responderID:null}}},touchableHandleResponderTerminationRequest:function(){return!this.props.rejectResponderTermination},touchableHandleStartShouldSetResponder:function(){return!this.props.disabled},touchableLongPressCancelsPress:function(){return!0},touchableHandleResponderGrant:function(t){var e=t.currentTarget;t.persist(),this.pressOutDelayTimeout&&clearTimeout(this.pressOutDelayTimeout),this.pressOutDelayTimeout=null,this.state.touchable.touchState=R,this.state.touchable.responderID=e,this._receiveSignal(I,t);var o=void 
+[Pre-built, minified React Native (Metro) bundle added by this diff — apparently the onnxruntime-react-native e2e test app. The extracted text is truncated mid-module throughout; the recoverable contents are the bundled React Native core modules (Touchable mixin, BackHandler, PermissionsAndroid, PushNotificationIOS, Settings/Share/ToastAndroid/Vibration, useColorScheme, useWindowDimensions), React reconciler internals, and the test app's inference routine, which decodes base64-encoded input tensors into Float32Arrays, wraps them in Tensor objects, and calls session.run(). Minified bundle text omitted.]
p=P.sent,h={},p)Object.hasOwnProperty.call(p,y)&&(v=p[y].data.buffer,b={data:r(d[10]).Buffer.from(v,0,v.byteLength).toString('base64')},h[y]=b);return P.next=14,e.default.awrap(f.default.postprocess(h));case 14:w=P.sent,u.setState({output:w.result}),P.next=21;break;case 18:P.prev=18,P.t0=P.catch(0),console.log(P.t0.message);case 21:case"end":return P.stop()}},null,null,[[0,18]],Promise)},u.state={session:null,output:null,imagePath:null},u}return(0,n.default)(w,[{key:"componentDidMount",value:function(){var t,n,u;return e.default.async(function(s){for(;;)switch(s.prev=s.next){case 0:if(this.state.session){s.next=19;break}return s.prev=1,s.next=4,e.default.awrap(f.default.getImagePath());case 4:return t=s.sent,this.setState({imagePath:t}),s.next=8,e.default.awrap(f.default.getLocalModelPath());case 8:return n=s.sent,s.next=11,e.default.awrap(r(d[11]).InferenceSession.create(n));case 11:u=s.sent,this.setState({session:u}),this.infer(),s.next=19;break;case 16:s.prev=16,s.t0=s.catch(1),console.log(s.t0.message);case 19:case"end":return s.stop()}},null,this,[[1,16]],Promise)}},{key:"render",value:function(){var e=this.state,t=e.output,n=e.imagePath;return c.createElement(l.View,null,c.createElement(l.Text,null,'\n'),n&&c.createElement(l.Image,{source:{uri:n},style:{height:200,width:200,resizeMode:'stretch'}}),t&&c.createElement(l.TextInput,{accessibilityLabel:"output"},"Result: ",t))}}]),w})(c.PureComponent);_e.default=y},396,[3,75,7,8,10,12,15,129,1,397,398,400]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0]).NativeModules.MNISTDataHandler;e.default=t},397,[1]); +__d(function(g,r,_i,_a,_m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e='function'==typeof Symbol&&'function'==typeof Symbol.for?Symbol.for('nodejs.util.inspect.custom'):null;_e.Buffer=o,_e.SlowBuffer=function(t){+t!=t&&(t=0);return o.alloc(+t)},_e.INSPECT_MAX_BYTES=50;var n=2147483647;function i(t){if(t>n)throw new RangeError('The value "'+t+'" is invalid for option "size"');var e=new Uint8Array(t);return Object.setPrototypeOf(e,o.prototype),e}function o(t,e,n){if('number'==typeof t){if('string'==typeof e)throw new TypeError('The "string" argument must be of type string. Received type number');return h(t)}return f(t,e,n)}function f(t,e,n){if('string'==typeof t)return a(t,e);if(ArrayBuffer.isView(t))return c(t);if(null==t)throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type "+typeof t);if(nt(t,ArrayBuffer)||t&&nt(t.buffer,ArrayBuffer))return l(t,e,n);if('undefined'!=typeof SharedArrayBuffer&&(nt(t,SharedArrayBuffer)||t&&nt(t.buffer,SharedArrayBuffer)))return l(t,e,n);if('number'==typeof t)throw new TypeError('The "value" argument must not be of type number. Received type number');var i=t.valueOf&&t.valueOf();if(null!=i&&i!==t)return o.from(i,e,n);var f=y(t);if(f)return f;if('undefined'!=typeof Symbol&&null!=Symbol.toPrimitive&&'function'==typeof t[Symbol.toPrimitive])return o.from(t[Symbol.toPrimitive]('string'),e,n);throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. 
Received type "+typeof t)}function u(t){if('number'!=typeof t)throw new TypeError('"size" argument must be of type number');if(t<0)throw new RangeError('The value "'+t+'" is invalid for option "size"')}function s(t,e,n){return u(t),t<=0?i(t):void 0!==e?'string'==typeof n?i(t).fill(e,n):i(t).fill(e):i(t)}function h(t){return u(t),i(t<0?0:0|w(t))}function a(t,e){if('string'==typeof e&&''!==e||(e='utf8'),!o.isEncoding(e))throw new TypeError('Unknown encoding: '+e);var n=0|v(t,e),f=i(n),u=f.write(t,e);return u!==n&&(f=f.slice(0,u)),f}function p(t){for(var e=t.length<0?0:0|w(t.length),n=i(e),o=0;o=n)throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+n.toString(16)+' bytes');return 0|t}function v(t,e){if(o.isBuffer(t))return t.length;if(ArrayBuffer.isView(t)||nt(t,ArrayBuffer))return t.byteLength;if('string'!=typeof t)throw new TypeError("The \"string\" argument must be one of type string, Buffer, or ArrayBuffer. Received type "+typeof t);var n=t.length,i=arguments.length>2&&!0===arguments[2];if(!i&&0===n)return 0;for(var f=!1;;)switch(e){case'ascii':case'latin1':case'binary':return n;case'utf8':case'utf-8':return K(t).length;case'ucs2':case'ucs-2':case'utf16le':case'utf-16le':return 2*n;case'hex':return n>>>1;case'base64':return rt(t).length;default:if(f)return i?-1:K(t).length;e=(''+e).toLowerCase(),f=!0}}function b(t,e,n){var i=!1;if((void 0===e||e<0)&&(e=0),e>this.length)return'';if((void 0===n||n>this.length)&&(n=this.length),n<=0)return'';if((n>>>=0)<=(e>>>=0))return'';for(t||(t='utf8');;)switch(t){case'hex':return P(this,e,n);case'utf8':case'utf-8':return L(this,e,n);case'ascii':return S(this,e,n);case'latin1':case'binary':return x(this,e,n);case'base64':return O(this,e,n);case'ucs2':case'ucs-2':case'utf16le':case'utf-16le':return k(this,e,n);default:if(i)throw new TypeError('Unknown encoding: '+t);t=(t+'').toLowerCase(),i=!0}}function B(t,e,n){var i=t[e];t[e]=t[n],t[n]=i}function E(t,e,n,i,f){if(0===t.length)return-1;if('string'==typeof n?(i=n,n=0):n>2147483647?n=2147483647:n<-2147483648&&(n=-2147483648),it(n=+n)&&(n=f?0:t.length-1),n<0&&(n=t.length+n),n>=t.length){if(f)return-1;n=t.length-1}else if(n<0){if(!f)return-1;n=0}if('string'==typeof e&&(e=o.from(e,i)),o.isBuffer(e))return 0===e.length?-1:m(t,e,n,i,f);if('number'==typeof e)return e&=255,'function'==typeof Uint8Array.prototype.indexOf?f?Uint8Array.prototype.indexOf.call(t,e,n):Uint8Array.prototype.lastIndexOf.call(t,e,n):m(t,[e],n,i,f);throw new TypeError('val must be string, number or Buffer')}function m(t,e,n,i,o){var f,u=1,s=t.length,h=e.length;if(void 0!==i&&('ucs2'===(i=String(i).toLowerCase())||'ucs-2'===i||'utf16le'===i||'utf-16le'===i)){if(t.length<2||e.length<2)return-1;u=2,s/=2,h/=2,n/=2}function a(t,e){return 1===u?t[e]:t.readUInt16BE(e*u)}if(o){var p=-1;for(f=n;fs&&(n=s-h),f=n;f>=0;f--){for(var c=!0,l=0;lo&&(i=o):i=o;var f,u=e.length;for(i>u/2&&(i=u/2),f=0;f239?4:f>223?3:f>191?2:1;if(o+s<=n){var h=void 0,a=void 0,p=void 0,c=void 0;switch(s){case 1:f<128&&(u=f);break;case 2:128==(192&(h=t[o+1]))&&(c=(31&f)<<6|63&h)>127&&(u=c);break;case 3:h=t[o+1],a=t[o+2],128==(192&h)&&128==(192&a)&&(c=(15&f)<<12|(63&h)<<6|63&a)>2047&&(c<55296||c>57343)&&(u=c);break;case 4:h=t[o+1],a=t[o+2],p=t[o+3],128==(192&h)&&128==(192&a)&&128==(192&p)&&(c=(15&f)<<18|(63&h)<<12|(63&a)<<6|63&p)>65535&&c<1114112&&(u=c)}}null===u?(u=65533,s=1):u>65535&&(u-=65536,i.push(u>>>10&1023|55296),u=56320|1023&u),i.push(u),o+=s}return _(i)}_e.kMaxLength=n,o.TYPED_ARRAY_SUPPORT=(function(){try{var t=new 
Uint8Array(1),e={foo:function(){return 42}};return Object.setPrototypeOf(e,Uint8Array.prototype),Object.setPrototypeOf(t,e),42===t.foo()}catch(t){return!1}})(),o.TYPED_ARRAY_SUPPORT||'undefined'==typeof console||'function'!=typeof console.error||console.error("This browser lacks typed array (Uint8Array) support which is required by `buffer` v5.x. Use `buffer` v4.x if you require old browser support."),Object.defineProperty(o.prototype,'parent',{enumerable:!0,get:function(){if(o.isBuffer(this))return this.buffer}}),Object.defineProperty(o.prototype,'offset',{enumerable:!0,get:function(){if(o.isBuffer(this))return this.byteOffset}}),o.poolSize=8192,o.from=function(t,e,n){return f(t,e,n)},Object.setPrototypeOf(o.prototype,Uint8Array.prototype),Object.setPrototypeOf(o,Uint8Array),o.alloc=function(t,e,n){return s(t,e,n)},o.allocUnsafe=function(t){return h(t)},o.allocUnsafeSlow=function(t){return h(t)},o.isBuffer=function(t){return null!=t&&!0===t._isBuffer&&t!==o.prototype},o.compare=function(t,e){if(nt(t,Uint8Array)&&(t=o.from(t,t.offset,t.byteLength)),nt(e,Uint8Array)&&(e=o.from(e,e.offset,e.byteLength)),!o.isBuffer(t)||!o.isBuffer(e))throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');if(t===e)return 0;for(var n=t.length,i=e.length,f=0,u=Math.min(n,i);fi.length?(o.isBuffer(u)||(u=o.from(u)),u.copy(i,f)):Uint8Array.prototype.set.call(i,u,f);else{if(!o.isBuffer(u))throw new TypeError('"list" argument must be an Array of Buffers');u.copy(i,f)}f+=u.length}return i},o.byteLength=v,o.prototype._isBuffer=!0,o.prototype.swap16=function(){var t=this.length;if(t%2!=0)throw new RangeError('Buffer size must be a multiple of 16-bits');for(var e=0;ee&&(t+=' ... '),''},e&&(o.prototype[e]=o.prototype.inspect),o.prototype.compare=function(t,e,n,i,f){if(nt(t,Uint8Array)&&(t=o.from(t,t.offset,t.byteLength)),!o.isBuffer(t))throw new TypeError("The \"target\" argument must be one of type Buffer or Uint8Array. 
Received type "+typeof t);if(void 0===e&&(e=0),void 0===n&&(n=t?t.length:0),void 0===i&&(i=0),void 0===f&&(f=this.length),e<0||n>t.length||i<0||f>this.length)throw new RangeError('out of range index');if(i>=f&&e>=n)return 0;if(i>=f)return-1;if(e>=n)return 1;if(e>>>=0,n>>>=0,i>>>=0,f>>>=0,this===t)return 0;for(var u=f-i,s=n-e,h=Math.min(u,s),a=this.slice(i,f),p=t.slice(e,n),c=0;c>>=0,isFinite(n)?(n>>>=0,void 0===i&&(i='utf8')):(i=n,n=void 0)}var o=this.length-e;if((void 0===n||n>o)&&(n=o),t.length>0&&(n<0||e<0)||e>this.length)throw new RangeError('Attempt to write outside buffer bounds');i||(i='utf8');for(var f=!1;;)switch(i){case'hex':return I(this,t,e,n);case'utf8':case'utf-8':return U(this,t,e,n);case'ascii':case'latin1':case'binary':return A(this,t,e,n);case'base64':return R(this,t,e,n);case'ucs2':case'ucs-2':case'utf16le':case'utf-16le':return T(this,t,e,n);default:if(f)throw new TypeError('Unknown encoding: '+i);i=(''+i).toLowerCase(),f=!0}},o.prototype.toJSON=function(){return{type:'Buffer',data:Array.prototype.slice.call(this._arr||this,0)}};var M=4096;function _(t){var e=t.length;if(e<=M)return String.fromCharCode.apply(String,t);for(var n='',i=0;ii)&&(n=i);for(var o='',f=e;fn)throw new RangeError('Trying to access beyond buffer length')}function N(t,e,n,i,f,u){if(!o.isBuffer(t))throw new TypeError('"buffer" argument must be a Buffer instance');if(e>f||et.length)throw new RangeError('Index out of range')}function F(t,e,n,i,o){X(e,i,o,t,n,7);var f=Number(e&BigInt(4294967295));t[n++]=f,f>>=8,t[n++]=f,f>>=8,t[n++]=f,f>>=8,t[n++]=f;var u=Number(e>>BigInt(32)&BigInt(4294967295));return t[n++]=u,u>>=8,t[n++]=u,u>>=8,t[n++]=u,u>>=8,t[n++]=u,n}function j(t,e,n,i,o){X(e,i,o,t,n,7);var f=Number(e&BigInt(4294967295));t[n+7]=f,f>>=8,t[n+6]=f,f>>=8,t[n+5]=f,f>>=8,t[n+4]=f;var u=Number(e>>BigInt(32)&BigInt(4294967295));return t[n+3]=u,u>>=8,t[n+2]=u,u>>=8,t[n+1]=u,u>>=8,t[n]=u,n+8}function D(t,e,n,i,o,f){if(n+i>t.length)throw new RangeError('Index out of range');if(n<0)throw new RangeError('Index out of range')}function z(t,e,n,i,o){return e=+e,n>>>=0,o||D(t,0,n,4),r(d[3]).write(t,e,n,i,23,4),n+4}function Y(t,e,n,i,o){return e=+e,n>>>=0,o||D(t,0,n,8),r(d[3]).write(t,e,n,i,52,8),n+8}o.prototype.slice=function(t,e){var n=this.length;t=~~t,e=void 0===e?n:~~e,t<0?(t+=n)<0&&(t=0):t>n&&(t=n),e<0?(e+=n)<0&&(e=0):e>n&&(e=n),e>>=0,e>>>=0,n||C(t,e,this.length);for(var i=this[t],o=1,f=0;++f>>=0,e>>>=0,n||C(t,e,this.length);for(var i=this[t+--e],o=1;e>0&&(o*=256);)i+=this[t+--e]*o;return i},o.prototype.readUint8=o.prototype.readUInt8=function(t,e){return t>>>=0,e||C(t,1,this.length),this[t]},o.prototype.readUint16LE=o.prototype.readUInt16LE=function(t,e){return t>>>=0,e||C(t,2,this.length),this[t]|this[t+1]<<8},o.prototype.readUint16BE=o.prototype.readUInt16BE=function(t,e){return t>>>=0,e||C(t,2,this.length),this[t]<<8|this[t+1]},o.prototype.readUint32LE=o.prototype.readUInt32LE=function(t,e){return t>>>=0,e||C(t,4,this.length),(this[t]|this[t+1]<<8|this[t+2]<<16)+16777216*this[t+3]},o.prototype.readUint32BE=o.prototype.readUInt32BE=function(t,e){return t>>>=0,e||C(t,4,this.length),16777216*this[t]+(this[t+1]<<16|this[t+2]<<8|this[t+3])},o.prototype.readBigUInt64LE=ft(function(t){J(t>>>=0,'offset');var e=this[t],n=this[t+7];void 0!==e&&void 0!==n||Z(t,this.length-8);var i=e+this[++t]*Math.pow(2,8)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,24),o=this[++t]+this[++t]*Math.pow(2,8)+this[++t]*Math.pow(2,16)+n*Math.pow(2,24);return BigInt(i)+(BigInt(o)<>>=0,'offset');var e=this[t],n=this[t+7];void 
0!==e&&void 0!==n||Z(t,this.length-8);var i=e*Math.pow(2,24)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,8)+this[++t],o=this[++t]*Math.pow(2,24)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,8)+n;return(BigInt(i)<>>=0,e>>>=0,n||C(t,e,this.length);for(var i=this[t],o=1,f=0;++f=(o*=128)&&(i-=Math.pow(2,8*e)),i},o.prototype.readIntBE=function(t,e,n){t>>>=0,e>>>=0,n||C(t,e,this.length);for(var i=e,o=1,f=this[t+--i];i>0&&(o*=256);)f+=this[t+--i]*o;return f>=(o*=128)&&(f-=Math.pow(2,8*e)),f},o.prototype.readInt8=function(t,e){return t>>>=0,e||C(t,1,this.length),128&this[t]?-1*(255-this[t]+1):this[t]},o.prototype.readInt16LE=function(t,e){t>>>=0,e||C(t,2,this.length);var n=this[t]|this[t+1]<<8;return 32768&n?4294901760|n:n},o.prototype.readInt16BE=function(t,e){t>>>=0,e||C(t,2,this.length);var n=this[t+1]|this[t]<<8;return 32768&n?4294901760|n:n},o.prototype.readInt32LE=function(t,e){return t>>>=0,e||C(t,4,this.length),this[t]|this[t+1]<<8|this[t+2]<<16|this[t+3]<<24},o.prototype.readInt32BE=function(t,e){return t>>>=0,e||C(t,4,this.length),this[t]<<24|this[t+1]<<16|this[t+2]<<8|this[t+3]},o.prototype.readBigInt64LE=ft(function(t){J(t>>>=0,'offset');var e=this[t],n=this[t+7];void 0!==e&&void 0!==n||Z(t,this.length-8);var i=this[t+4]+this[t+5]*Math.pow(2,8)+this[t+6]*Math.pow(2,16)+(n<<24);return(BigInt(i)<>>=0,'offset');var e=this[t],n=this[t+7];void 0!==e&&void 0!==n||Z(t,this.length-8);var i=(e<<24)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,8)+this[++t];return(BigInt(i)<>>=0,e||C(t,4,this.length),r(d[3]).read(this,t,!0,23,4)},o.prototype.readFloatBE=function(t,e){return t>>>=0,e||C(t,4,this.length),r(d[3]).read(this,t,!1,23,4)},o.prototype.readDoubleLE=function(t,e){return t>>>=0,e||C(t,8,this.length),r(d[3]).read(this,t,!0,52,8)},o.prototype.readDoubleBE=function(t,e){return t>>>=0,e||C(t,8,this.length),r(d[3]).read(this,t,!1,52,8)},o.prototype.writeUintLE=o.prototype.writeUIntLE=function(t,e,n,i){(t=+t,e>>>=0,n>>>=0,i)||N(this,t,e,n,Math.pow(2,8*n)-1,0);var o=1,f=0;for(this[e]=255&t;++f>>=0,n>>>=0,i)||N(this,t,e,n,Math.pow(2,8*n)-1,0);var o=n-1,f=1;for(this[e+o]=255&t;--o>=0&&(f*=256);)this[e+o]=t/f&255;return e+n},o.prototype.writeUint8=o.prototype.writeUInt8=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,1,255,0),this[e]=255&t,e+1},o.prototype.writeUint16LE=o.prototype.writeUInt16LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,65535,0),this[e]=255&t,this[e+1]=t>>>8,e+2},o.prototype.writeUint16BE=o.prototype.writeUInt16BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,65535,0),this[e]=t>>>8,this[e+1]=255&t,e+2},o.prototype.writeUint32LE=o.prototype.writeUInt32LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,4294967295,0),this[e+3]=t>>>24,this[e+2]=t>>>16,this[e+1]=t>>>8,this[e]=255&t,e+4},o.prototype.writeUint32BE=o.prototype.writeUInt32BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,4294967295,0),this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t,e+4},o.prototype.writeBigUInt64LE=ft(function(t){return F(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,BigInt(0),BigInt('0xffffffffffffffff'))}),o.prototype.writeBigUInt64BE=ft(function(t){return j(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,BigInt(0),BigInt('0xffffffffffffffff'))}),o.prototype.writeIntLE=function(t,e,n,i){if(t=+t,e>>>=0,!i){var o=Math.pow(2,8*n-1);N(this,t,e,n,o-1,-o)}var f=0,u=1,s=0;for(this[e]=255&t;++f>0)-s&255;return e+n},o.prototype.writeIntBE=function(t,e,n,i){if(t=+t,e>>>=0,!i){var o=Math.pow(2,8*n-1);N(this,t,e,n,o-1,-o)}var 
f=n-1,u=1,s=0;for(this[e+f]=255&t;--f>=0&&(u*=256);)t<0&&0===s&&0!==this[e+f+1]&&(s=1),this[e+f]=(t/u>>0)-s&255;return e+n},o.prototype.writeInt8=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,1,127,-128),t<0&&(t=255+t+1),this[e]=255&t,e+1},o.prototype.writeInt16LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,32767,-32768),this[e]=255&t,this[e+1]=t>>>8,e+2},o.prototype.writeInt16BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,32767,-32768),this[e]=t>>>8,this[e+1]=255&t,e+2},o.prototype.writeInt32LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,2147483647,-2147483648),this[e]=255&t,this[e+1]=t>>>8,this[e+2]=t>>>16,this[e+3]=t>>>24,e+4},o.prototype.writeInt32BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,2147483647,-2147483648),t<0&&(t=4294967295+t+1),this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t,e+4},o.prototype.writeBigInt64LE=ft(function(t){return F(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,-BigInt('0x8000000000000000'),BigInt('0x7fffffffffffffff'))}),o.prototype.writeBigInt64BE=ft(function(t){return j(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,-BigInt('0x8000000000000000'),BigInt('0x7fffffffffffffff'))}),o.prototype.writeFloatLE=function(t,e,n){return z(this,t,e,!0,n)},o.prototype.writeFloatBE=function(t,e,n){return z(this,t,e,!1,n)},o.prototype.writeDoubleLE=function(t,e,n){return Y(this,t,e,!0,n)},o.prototype.writeDoubleBE=function(t,e,n){return Y(this,t,e,!1,n)},o.prototype.copy=function(t,e,n,i){if(!o.isBuffer(t))throw new TypeError('argument should be a Buffer');if(n||(n=0),i||0===i||(i=this.length),e>=t.length&&(e=t.length),e||(e=0),i>0&&i=this.length)throw new RangeError('Index out of range');if(i<0)throw new RangeError('sourceEnd out of bounds');i>this.length&&(i=this.length),t.length-e>>=0,n=void 0===n?this.length:n>>>0,t||(t=0),'number'==typeof t)for(u=e;u=i+4;n-=3)e="_"+t.slice(n-3,n)+e;return""+t.slice(0,n)+e}function W(t,e,n){J(e,'offset'),void 0!==t[e]&&void 0!==t[e+n]||Z(e,t.length-(n+1))}function X(t,e,n,i,o,f){if(t>n||t3?0===e||e===BigInt(0)?">= 0"+s+" and < 2"+s+" ** "+8*(f+1)+s:">= -(2"+s+" ** "+(8*(f+1)-1)+s+") and < 2 ** "+(8*(f+1)-1)+s:">= "+e+s+" and <= "+n+s,new G.ERR_OUT_OF_RANGE('value',u,t)}W(i,o,f)}function J(t,e){if('number'!=typeof t)throw new G.ERR_INVALID_ARG_TYPE(e,'number',t)}function Z(t,e,n){if(Math.floor(t)!==t)throw J(t,n),new G.ERR_OUT_OF_RANGE(n||'offset','an integer',t);if(e<0)throw new G.ERR_BUFFER_OUT_OF_BOUNDS;throw new G.ERR_OUT_OF_RANGE(n||'offset',">= "+(n?1:0)+" and <= "+e,t)}V('ERR_BUFFER_OUT_OF_BOUNDS',function(t){return t?t+" is outside of buffer bounds":'Attempt to access memory outside buffer bounds'},RangeError),V('ERR_INVALID_ARG_TYPE',function(t,e){return"The \""+t+"\" argument must be of type number. Received type "+typeof e},TypeError),V('ERR_OUT_OF_RANGE',function(t,e,n){var i="The value of \""+t+"\" is out of range.",o=n;return Number.isInteger(n)&&Math.abs(n)>Math.pow(2,32)?o=q(String(n)):'bigint'==typeof n&&(o=String(n),(n>Math.pow(BigInt(2),BigInt(32))||n<-Math.pow(BigInt(2),BigInt(32)))&&(o=q(o)),o+='n'),i+=" It must be "+e+". 
Received "+o},RangeError);var $=/[^+/0-9A-Za-z-_]/g;function H(t){if((t=(t=t.split('=')[0]).trim().replace($,'')).length<2)return'';for(;t.length%4!=0;)t+='=';return t}function K(t,e){var n;e=e||1/0;for(var i=t.length,o=null,f=[],u=0;u55295&&n<57344){if(!o){if(n>56319){(e-=3)>-1&&f.push(239,191,189);continue}if(u+1===i){(e-=3)>-1&&f.push(239,191,189);continue}o=n;continue}if(n<56320){(e-=3)>-1&&f.push(239,191,189),o=n;continue}n=65536+(o-55296<<10|n-56320)}else o&&(e-=3)>-1&&f.push(239,191,189);if(o=null,n<128){if((e-=1)<0)break;f.push(n)}else if(n<2048){if((e-=2)<0)break;f.push(n>>6|192,63&n|128)}else if(n<65536){if((e-=3)<0)break;f.push(n>>12|224,n>>6&63|128,63&n|128)}else{if(!(n<1114112))throw new Error('Invalid code point');if((e-=4)<0)break;f.push(n>>18|240,n>>12&63|128,n>>6&63|128,63&n|128)}}return f}function Q(t){for(var e=[],n=0;n>8,o=n%256,f.push(o),f.push(i);return f}function rt(t){return r(d[2]).toByteArray(H(t))}function et(t,e,n,i){var o;for(o=0;o=e.length||o>=t.length);++o)e[o+n]=t[o];return o}function nt(t,e){return t instanceof e||null!=t&&null!=t.constructor&&null!=t.constructor.name&&t.constructor.name===e.name}function it(t){return t!=t}var ot=(function(){for(var t=new Array(256),e=0;e<16;++e)for(var n=16*e,i=0;i<16;++i)t[n+i]="0123456789abcdef"[e]+"0123456789abcdef"[i];return t})();function ft(t){return'undefined'==typeof BigInt?ut:t}function ut(){throw new Error('BigInt not supported')}},398,[15,12,93,399,10,7,14,8]); +__d(function(g,r,_i,a,_m,_e,_d){_e.read=function(o,t,h,M,f){var w,p,i=8*f-M-1,n=(1<>1,e=-7,u=h?f-1:0,s=h?-1:1,c=o[t+u];for(u+=s,w=c&(1<<-e)-1,c>>=-e,e+=i;e>0;w=256*w+o[t+u],u+=s,e-=8);for(p=w&(1<<-e)-1,w>>=-e,e+=M;e>0;p=256*p+o[t+u],u+=s,e-=8);if(0===w)w=1-N;else{if(w===n)return p?NaN:1/0*(c?-1:1);p+=Math.pow(2,M),w-=N}return(c?-1:1)*p*Math.pow(2,w-M)},_e.write=function(o,t,h,M,f,w){var p,i,n,N=8*w-f-1,e=(1<>1,s=23===f?Math.pow(2,-24)-Math.pow(2,-77):0,c=M?0:w-1,l=M?1:-1,d=t<0||0===t&&1/t<0?1:0;for(t=Math.abs(t),isNaN(t)||t===1/0?(i=isNaN(t)?1:0,p=e):(p=Math.floor(Math.log(t)/Math.LN2),t*(n=Math.pow(2,-p))<1&&(p--,n*=2),(t+=p+u>=1?s/n:s*Math.pow(2,1-u))*n>=2&&(p++,n/=2),p+u>=e?(i=0,p=e):p+u>=1?(i=(t*n-1)*Math.pow(2,f),p+=u):(i=t*Math.pow(2,u-1)*Math.pow(2,f),p=0));f>=8;o[h+c]=255&i,c+=l,i/=256,f-=8);for(p=p<0;o[h+c]=255&p,c+=l,p/=256,N-=8);o[h+c-l]|=128*d}},399,[]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),Object.keys(r(d[0])).forEach(function(n){"default"!==n&&"__esModule"!==n&&(n in e&&e[n]===r(d[0])[n]||Object.defineProperty(e,n,{enumerable:!0,get:function(){return r(d[0])[n]}}))}),(0,r(d[0]).registerBackend)('cpu',r(d[1]).onnxruntimeBackend,1)},400,[401,406]); +__d(function(g,_r5,_i2,_a2,m,_e6,_d){function e(e,t){var n="undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(n)return(n=n.call(e)).next.bind(n);if(Array.isArray(e)||(n=r(e))||t&&e&&"number"==typeof e.length){n&&(e=n);var a=0;return function(){return a>=e.length?{done:!0}:{done:!1,value:e[a++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function r(e,r){if(e){if("string"==typeof e)return t(e,r);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?t(e,r):void 0}}function t(e,r){(null==r||r>e.length)&&(r=e.length);for(var t=0,n=new Array(r);t=0){for(var 
o=0;o=c.byteLength)){h.next=32;break}throw new RangeError("'byteOffset' is out of range [0, "+c.byteLength+").");case 32:if(y=t.byteLength-b,"number"!=typeof o){h.next=45;break}if(y=o,Number.isSafeInteger(y)){h.next=35;break}throw new RangeError("'byteLength' must be an integer.");case 35:if(!(y<=0||b+y>c.byteLength)){h.next=37;break}throw new RangeError("'byteLength' is out of range (0, "+(c.byteLength-b)+"].");case 37:if("object"!=typeof s||null===s){h.next=41;break}f=s,h.next=43;break;case 41:if(void 0===s){h.next=43;break}throw new TypeError("'options' must be an object.");case 43:h.next=47;break;case 45:if(void 0===o){h.next=47;break}throw new TypeError("'byteLength' must be a number.");case 47:h.next=51;break;case 49:if(void 0===i){h.next=51;break}throw new TypeError("'options' must be an object.");case 51:u=new Uint8Array(c,b,y);case 52:return l=(f.executionProviders||[]).map(function(e){return"string"==typeof e?e:e.name}),h.next=55,_r5(_d[2]).awrap((function(r){var t,i,o,s,u,f;return _r5(_d[2]).async(function(c){for(;;)switch(c.prev=c.next){case 0:t=0===r.length?a:r,i=[],o=e(t);case 2:if((s=o()).done){c.next=28;break}if(u=s.value,!(f=n[u])){c.next=26;break}if(!f.initialized){c.next=8;break}return c.abrupt("return",f.backend);case 8:if(!f.initializing){c.next=10;break}throw new Error("backend \""+u+"\" is being initialized; cannot initialize multiple times.");case 10:if(!f.aborted){c.next=12;break}return c.abrupt("continue",26);case 12:return c.prev=12,f.initializing=!0,c.next=16,_r5(_d[2]).awrap(f.backend.init());case 16:return f.initialized=!0,c.abrupt("return",f.backend);case 20:c.prev=20,c.t0=c.catch(12),i.push({name:u,err:c.t0}),f.aborted=!0;case 23:return c.prev=23,f.initializing=!1,c.finish(23);case 26:c.next=2;break;case 28:throw new Error("no available backend found. 
ERR: "+i.map(function(e){return"["+e.name+"] "+e.err}).join(", "));case 29:case"end":return c.stop()}},null,null,[[12,20,23,26]],Promise)})(l));case 55:return p=h.sent,h.next=58,_r5(_d[2]).awrap(p.createSessionHandler(u,f));case 58:return d=h.sent,h.abrupt("return",new r(d));case 60:case"end":return h.stop()}},null,null,null,Promise)}}]),r})(),l=_e6;for(var p in t)l[p]=t[p];t.__esModule&&Object.defineProperty(l,"__esModule",{value:!0})})()},401,[402,403,404]); +__d(function(g,r,i,a,m,e,d){m.exports=function(o,n){if(!(o instanceof n))throw new TypeError("Cannot call a class as a function")},m.exports.__esModule=!0,m.exports.default=m.exports},402,[]); +__d(function(g,r,_i,a,m,e,d){function t(t,o){for(var n=0;n=0;--u){var h=this.tryEntries[u],f=h.completion;if("root"===h.tryLoc)return c("end");if(h.tryLoc<=this.prev){var l=i.call(h,"catchLoc"),s=i.call(h,"finallyLoc");if(l&&s){if(this.prev=0;--o){var c=this.tryEntries[o];if(c.tryLoc<=this.prev&&i.call(c,"finallyLoc")&&this.prev=0;--n){var o=this.tryEntries[n];if(o.finallyLoc===t)return this.complete(o.completion,o.afterLoc),I(o),x}},catch:function(t){for(var n=this.tryEntries.length-1;n>=0;--n){var o=this.tryEntries[n];if(o.tryLoc===t){var i=o.completion;if("throw"===i.type){var c=i.arg;I(o)}return c}}throw new Error("illegal catch attempt")},delegateYield:function(t,o,i){return this.delegate={iterator:A(t),resultName:o,nextLoc:i},"next"===this.method&&(this.arg=n),x}},t})("object"==typeof m?m.exports:{});try{regeneratorRuntime=t}catch(n){"object"==typeof globalThis?globalThis.regeneratorRuntime=t:Function("r","regeneratorRuntime = r")(t)}},405,[]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.onnxruntimeBackend=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),l=function(t){switch(t){case'float32':return Float32Array;case'int8':return Int8Array;case'int16':return Int16Array;case'int32':return Int32Array;case'bool':return Int8Array;case'float64':return Float64Array;case'int64':return BigInt64Array;default:throw new Error("unsupported type: "+t)}},c=(0,o.default)("inferenceSession"),f=(0,o.default)("key"),p=(function(){function o(t){(0,n.default)(this,o),Object.defineProperty(this,c,{writable:!0,value:void 0}),Object.defineProperty(this,f,{writable:!0,value:void 0}),(0,s.default)(this,c)[c]=r(d[6]).binding,(0,s.default)(this,f)[f]=t,this.inputNames=[],this.outputNames=[]}return(0,u.default)(o,[{key:"loadModel",value:function(n){var u;return t.default.async(function(o){for(;;)switch(o.prev=o.next){case 0:return o.prev=0,o.next=3,t.default.awrap((0,s.default)(this,c)[c].loadModel((0,s.default)(this,f)[f],n));case 3:if((u=o.sent).key===(0,s.default)(this,f)[f]){o.next=6;break}throw new Error('Session key is invalid');case 6:this.inputNames=u.inputNames,this.outputNames=u.outputNames,o.next=13;break;case 10:throw o.prev=10,o.t0=o.catch(0),new Error("Can't load a model: "+o.t0.message);case 13:case"end":return o.stop()}},null,this,[[0,10]],Promise)}},{key:"dispose",value:function(){return t.default.async(function(t){for(;;)switch(t.prev=t.next){case 0:return t.abrupt("return",Promise.resolve());case 1:case"end":return t.stop()}},null,null,null,Promise)}},{key:"startProfiling",value:function(){}},{key:"endProfiling",value:function(){}},{key:"run",value:function(n,u,o){var l,p,y,v,h;return t.default.async(function(w){for(;;)switch(w.prev=w.next){case 0:l=[],w.t0=t.default.keys(u);case 
2:if((w.t1=w.t0()).done){w.next=10;break}if(p=w.t1.value,!Object.prototype.hasOwnProperty.call(u,p)){w.next=8;break}if(!u[p]){w.next=7;break}throw new Error('Preallocated output is not supported and only names as string array is allowed as parameter');case 7:l.push(p);case 8:w.next=2;break;case 10:return y=this.encodeFeedsType(n),w.next=13,t.default.awrap((0,s.default)(this,c)[c].run((0,s.default)(this,f)[f],y,l,o));case 13:return v=w.sent,h=this.decodeReturnType(v),w.abrupt("return",h);case 16:case"end":return w.stop()}},null,this,null,Promise)}},{key:"encodeFeedsType",value:function(t){var n={};for(var u in t)if(Object.hasOwnProperty.call(t,u)){var s=void 0;if(Array.isArray(t[u].data))s=t[u].data;else{var o=t[u].data.buffer;s=r(d[7]).Buffer.from(o,0,o.byteLength).toString('base64')}n[u]={dims:t[u].dims,type:t[u].type,data:s}}return n}},{key:"decodeReturnType",value:function(t){var n={};for(var u in t)if(Object.hasOwnProperty.call(t,u)){var s=void 0;if(Array.isArray(t[u].data))s=t[u].data;else{var o=r(d[7]).Buffer.from(t[u].data,'base64'),c=l(t[u].type);s=new c(o.buffer,o.byteOffset,o.length/c.BYTES_PER_ELEMENT)}n[u]=new(r(d[8]).Tensor)(t[u].type,s,t[u].dims)}return n}}]),o})(),y=new((function(){function s(){(0,n.default)(this,s)}return(0,u.default)(s,[{key:"init",value:function(){return t.default.async(function(t){for(;;)switch(t.prev=t.next){case 0:return t.abrupt("return",Promise.resolve());case 1:case"end":return t.stop()}},null,null,null,Promise)}},{key:"createSessionHandler",value:function(n,u){var s;return t.default.async(function(o){for(;;)switch(o.prev=o.next){case 0:if('string'==typeof n){o.next=2;break}throw new Error('Uint8Array is not supported');case 2:return s=new p(n),o.next=5,t.default.awrap(s.loadModel(u||{}));case 5:return o.abrupt("return",s);case 6:case"end":return o.stop()}},null,null,null,Promise)}}]),s})());e.onnxruntimeBackend=y},406,[407,404,402,403,408,409,410,801,401]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t){return t&&t.__esModule?t:{default:t}},m.exports.__esModule=!0,m.exports.default=m.exports},407,[]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t,o){if(!Object.prototype.hasOwnProperty.call(t,o))throw new TypeError("attempted to use private field on non-instance");return t},m.exports.__esModule=!0,m.exports.default=m.exports},408,[]); +__d(function(g,r,i,a,m,e,d){var t=0;m.exports=function(_){return"__private_"+t+++"_"+_},m.exports.__esModule=!0,m.exports.default=m.exports},409,[]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.binding=void 0;var n=r(d[0]).NativeModules.Onnxruntime;e.binding=n},410,[411]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports={get AccessibilityInfo(){return r(d[0]).default},get ActivityIndicator(){return r(d[1])},get Button(){return r(d[2])},get DatePickerIOS(){return r(d[3])('DatePickerIOS-merged',"DatePickerIOS has been merged with DatePickerAndroid and will be removed in a future release. It can now be installed and imported from '@react-native-community/datetimepicker' instead of 'react-native'. See https://github.com/react-native-datetimepicker/datetimepicker"),r(d[4])},get DrawerLayoutAndroid(){return r(d[5])},get FlatList(){return r(d[6])},get Image(){return r(d[7])},get ImageBackground(){return r(d[8])},get InputAccessoryView(){return r(d[9])},get KeyboardAvoidingView(){return r(d[10]).default},get MaskedViewIOS(){return r(d[3])('maskedviewios-moved',"MaskedViewIOS has been extracted from react-native core and will be removed in a future release. 
It can now be installed and imported from '@react-native-masked-view/masked-view' instead of 'react-native'. See https://github.com/react-native-masked-view/masked-view"),r(d[11])},get Modal(){return r(d[12])},get Pressable(){return r(d[13]).default},get ProgressBarAndroid(){return r(d[3])('progress-bar-android-moved',"ProgressBarAndroid has been extracted from react-native core and will be removed in a future release. It can now be installed and imported from '@react-native-community/progress-bar-android' instead of 'react-native'. See https://github.com/react-native-progress-view/progress-bar-android"),r(d[14])},get ProgressViewIOS(){return r(d[3])('progress-view-ios-moved',"ProgressViewIOS has been extracted from react-native core and will be removed in a future release. It can now be installed and imported from '@react-native-community/progress-view' instead of 'react-native'. See https://github.com/react-native-progress-view/progress-view"),r(d[15])},get RefreshControl(){return r(d[16])},get SafeAreaView(){return r(d[17]).default},get ScrollView(){return r(d[18])},get SectionList(){return r(d[19]).default},get SegmentedControlIOS(){return r(d[3])('segmented-control-ios-moved',"SegmentedControlIOS has been extracted from react-native core and will be removed in a future release. It can now be installed and imported from '@react-native-segmented-control/segmented-control' instead of 'react-native'. See https://github.com/react-native-segmented-control/segmented-control"),r(d[20])},get Slider(){return r(d[3])('slider-moved',"Slider has been extracted from react-native core and will be removed in a future release. It can now be installed and imported from '@react-native-community/slider' instead of 'react-native'. See https://github.com/callstack/react-native-slider"),r(d[21])},get StatusBar(){return r(d[22])},get Switch(){return r(d[23]).default},get Text(){return r(d[24])},get TextInput(){return r(d[25])},get Touchable(){return r(d[26])},get TouchableHighlight(){return r(d[27])},get TouchableNativeFeedback(){return r(d[28])},get TouchableOpacity(){return r(d[29])},get TouchableWithoutFeedback(){return r(d[30])},get View(){return r(d[31])},get VirtualizedList(){return r(d[32])},get VirtualizedSectionList(){return r(d[33])},get ActionSheetIOS(){return r(d[34])},get Alert(){return r(d[35])},get Animated(){return r(d[36])},get Appearance(){return r(d[37])},get AppRegistry(){return r(d[38])},get AppState(){return r(d[39])},get AsyncStorage(){return r(d[3])('async-storage-moved',"AsyncStorage has been extracted from react-native core and will be removed in a future release. It can now be installed and imported from '@react-native-async-storage/async-storage' instead of 'react-native'. See https://github.com/react-native-async-storage/async-storage"),r(d[40])},get BackHandler(){return r(d[41])},get Clipboard(){return r(d[3])('clipboard-moved',"Clipboard has been extracted from react-native core and will be removed in a future release. It can now be installed and imported from '@react-native-clipboard/clipboard' instead of 'react-native'. See https://github.com/react-native-clipboard/clipboard"),r(d[42])},get DatePickerAndroid(){return r(d[3])('DatePickerAndroid-merged',"DatePickerAndroid has been merged with DatePickerIOS and will be removed in a future release. It can now be installed and imported from '@react-native-community/datetimepicker' instead of 'react-native'. 
See https://github.com/react-native-datetimepicker/datetimepicker"),r(d[43])},get DeviceInfo(){return r(d[44])},get DevSettings(){return r(d[45])},get Dimensions(){return r(d[46])},get Easing(){return r(d[47])},get findNodeHandle(){return r(d[48]).findNodeHandle},get I18nManager(){return r(d[49])},get ImagePickerIOS(){return r(d[3])('imagePickerIOS-moved',"ImagePickerIOS has been extracted from react-native core and will be removed in a future release. Please upgrade to use either '@react-native-community/react-native-image-picker' or 'expo-image-picker'. If you cannot upgrade to a different library, please install the deprecated '@react-native-community/image-picker-ios' package. See https://github.com/rnc-archive/react-native-image-picker-ios"),r(d[50])},get InteractionManager(){return r(d[51])},get Keyboard(){return r(d[52])},get LayoutAnimation(){return r(d[53])},get Linking(){return r(d[54])},get LogBox(){return r(d[55])},get NativeDialogManagerAndroid(){return r(d[56]).default},get NativeEventEmitter(){return r(d[57]).default},get Networking(){return r(d[58])},get PanResponder(){return r(d[59])},get PermissionsAndroid(){return r(d[60])},get PixelRatio(){return r(d[61])},get PushNotificationIOS(){return r(d[3])('pushNotificationIOS-moved',"PushNotificationIOS has been extracted from react-native core and will be removed in a future release. It can now be installed and imported from '@react-native-community/push-notification-ios' instead of 'react-native'. See https://github.com/react-native-push-notification-ios/push-notification-ios"),r(d[62])},get Settings(){return r(d[63])},get Share(){return r(d[64])},get StyleSheet(){return r(d[65])},get Systrace(){return r(d[66])},get ToastAndroid(){return r(d[67])},get TurboModuleRegistry(){return r(d[68])},get UIManager(){return r(d[69])},get unstable_batchedUpdates(){return r(d[48]).unstable_batchedUpdates},get useColorScheme(){return r(d[70]).default},get useWindowDimensions(){return r(d[71]).default},get UTFSequence(){return r(d[72])},get Vibration(){return r(d[73])},get YellowBox(){return r(d[74])},get DeviceEventEmitter(){return r(d[75]).default},get DynamicColorIOS(){return r(d[76]).DynamicColorIOS},get NativeAppEventEmitter(){return r(d[77])},get NativeModules(){return r(d[78])},get Platform(){return r(d[79])},get PlatformColor(){return r(d[80]).PlatformColor},get processColor(){return r(d[81])},get requireNativeComponent(){return r(d[82])},get RootTagContext(){return r(d[83]).RootTagContext},get unstable_enableLogBox(){return function(){return console.warn('LogBox is enabled by default so there is no need to call unstable_enableLogBox() anymore. 
This is a no op and will be removed in the next version.')}},get ColorPropType(){return r(d[84])},get EdgeInsetsPropType(){return r(d[85])},get PointPropType(){return r(d[86])},get ViewPropTypes(){return r(d[87])}}},411,[412,577,588,696,697,698,651,680,704,705,707,708,710,717,582,719,671,720,655,692,722,723,699,725,589,729,734,738,614,615,739,579,654,693,740,518,616,742,744,761,764,750,767,769,771,772,566,640,456,715,774,625,657,658,776,779,520,500,499,780,782,565,784,786,787,578,439,789,428,450,791,794,795,796,798,413,799,528,429,426,549,546,585,714,600,603,800,682]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),l=r(d[0])(r(d[3])),u=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),c='android'===t.default.OS?new Map([['change','touchExplorationDidChange'],['reduceMotionChanged','reduceMotionDidChange'],['screenReaderChanged','touchExplorationDidChange']]):new Map([['announcementFinished','announcementFinished'],['boldTextChanged','boldTextChanged'],['change','screenReaderChanged'],['grayscaleChanged','grayscaleChanged'],['invertColorsChanged','invertColorsChanged'],['reduceMotionChanged','reduceMotionChanged'],['reduceTransparencyChanged','reduceTransparencyChanged'],['screenReaderChanged','screenReaderChanged']]),s={isBoldTextEnabled:function(){return'android'===t.default.OS?Promise.resolve(!1):new Promise(function(n,t){null!=u.default?u.default.getCurrentBoldTextState(n,t):t(null)})},isGrayscaleEnabled:function(){return'android'===t.default.OS?Promise.resolve(!1):new Promise(function(n,t){null!=u.default?u.default.getCurrentGrayscaleState(n,t):t(null)})},isInvertColorsEnabled:function(){return'android'===t.default.OS?Promise.resolve(!1):new Promise(function(n,t){null!=u.default?u.default.getCurrentInvertColorsState(n,t):t(null)})},isReduceMotionEnabled:function(){return new Promise(function(n,o){'android'===t.default.OS?null!=l.default?l.default.isReduceMotionEnabled(n):o(null):null!=u.default?u.default.getCurrentReduceMotionState(n,o):o(null)})},isReduceTransparencyEnabled:function(){return'android'===t.default.OS?Promise.resolve(!1):new Promise(function(n,t){null!=u.default?u.default.getCurrentReduceTransparencyState(n,t):t(null)})},isScreenReaderEnabled:function(){return new Promise(function(n,o){'android'===t.default.OS?null!=l.default?l.default.isTouchExplorationEnabled(n):o(null):null!=u.default?u.default.getCurrentVoiceOverState(n,o):o(null)})},addEventListener:function(t,l){var u=c.get(t);return null==u?{remove:function(){}}:n.default.addListener(u,l)},setAccessibilityFocus:function(n){(0,o.default)(n,'focus')},sendAccessibilityEvent_unstable:function(n,l){'ios'===t.default.OS&&'click'===l||(0,r(d[6]).sendAccessibilityEvent)(n,l)},announceForAccessibility:function(n){'android'===t.default.OS?null==l.default||l.default.announceForAccessibility(n):null==u.default||u.default.announceForAccessibility(n)},removeEventListener:function(t,l){null!=c.get(t)&&n.default.removeListener('deviceEventName',l)},getRecommendedTimeoutMillis:function(n){return'android'===t.default.OS?new Promise(function(t,u){null!=l.default&&l.default.getRecommendedTimeoutMillis?l.default.getRecommendedTimeoutMillis(n,t):t(n)}):Promise.resolve(n)}};e.default=s},412,[407,413,426,447,448,449,456]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=new(r(d[0])(r(d[1])).default);e.default=t},413,[407,414]); +__d(function(g,r,i,a,m,e,d){'use 
strict';Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0,e.default=r(d[0])},414,[415]); +__d(function(g,r,_i,a,m,e,d){var t=r(d[0])(r(d[1])),i=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),o=function(){return!0},u=(function(){function u(i){(0,t.default)(this,u),this._subscriber=new s.default,null!=i&&(console.warn('EventEmitter(...): Constructor argument is deprecated.'),this._subscriber=i)}return(0,i.default)(u,[{key:"addListener",value:function(t,i,s){return this._subscriber.addSubscription(t,new n.default(this,this._subscriber,i,s))}},{key:"removeAllListeners",value:function(t){this._subscriber.removeAllSubscriptions(t)}},{key:"removeSubscription",value:function(t){console.warn("EventEmitter.removeSubscription(...): Method has been deprecated. Please instead use `remove()` on the subscription itself."),this.__removeSubscription(t)}},{key:"__removeSubscription",value:function(t){r(d[5])(t.emitter===this,'Subscription does not belong to this emitter.'),this._subscriber.removeSubscription(t)}},{key:"listenerCount",value:function(t){var i=this._subscriber.getSubscriptionsForType(t);return i?i.filter(o).length:0}},{key:"emit",value:function(t){var i=this._subscriber.getSubscriptionsForType(t);if(i){for(var n=arguments.length,s=new Array(n>1?n-1:0),o=1;o0?l[l.length-1]:null,c=l.length>1?l[l.length-2]:null,v='function'==typeof s,h='function'==typeof c;h&&r(d[1])(v,'Cannot have a non-function arg after a function arg.');var y=v?s:null,C=h?c:null,M=v+h,b=l.slice(0,l.length-M);if('sync'===o)return r(d[3]).callNativeSyncHook(n,t,b,C,y);r(d[3]).enqueueNativeCall(n,t,b,C,y)}).type=o,u}function u(n,t){return-1!==n.indexOf(t)}function l(n,t){return r(d[2])(t,n||{})}g.__fbGenNativeModule=n;var f={};if(g.nativeModuleProxy)f=g.nativeModuleProxy;else if(!g.nativeExtensions){var s=g.__fbBatchedBridgeConfig;r(d[1])(s,'__fbBatchedBridgeConfig is not set, cannot invoke native modules');var c=r(d[4]);(s.remoteModuleConfig||[]).forEach(function(o,u){var l=n(o,u);l&&(l.module?f[l.name]=l.module:c(f,l.name,{get:function(){return t(l.name,u)}}))})}m.exports=f},429,[430,425,436,437,446]); +__d(function(g,r,_i,a,m,e,d){m.exports=function(t,o){return r(d[0])(t)||r(d[1])(t,o)||r(d[2])(t,o)||r(d[3])()},m.exports.__esModule=!0,m.exports.default=m.exports},430,[431,432,433,435]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t){if(Array.isArray(t))return t},m.exports.__esModule=!0,m.exports.default=m.exports},431,[]); +__d(function(g,r,_i2,a,m,e,d){m.exports=function(t,l){var n=null==t?null:"undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(null!=n){var o,u,f=[],i=!0,y=!1;try{for(n=n.call(t);!(i=(o=n.next()).done)&&(f.push(o.value),!l||f.length!==l);i=!0);}catch(t){y=!0,u=t}finally{try{i||null==n.return||n.return()}finally{if(y)throw u}}return f}},m.exports.__esModule=!0,m.exports.default=m.exports},432,[]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t,o){if(t){if("string"==typeof t)return r(d[0])(t,o);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?r(d[0])(t,o):void 0}},m.exports.__esModule=!0,m.exports.default=m.exports},433,[434]); +__d(function(g,r,_i,a,m,e,d){m.exports=function(t,n){(null==n||n>t.length)&&(n=t.length);for(var o=0,l=new Array(n);o=5){var 
h=this._queue;this._queue=[[],[],[],this._callID],this._lastFlush=o,g.nativeFlushQueueImmediate(h)}r(d[2]).counterEvent('pending_js_to_native_queue',this._queue[0].length),this.__spy&&this.__spy({type:1,module:t+'',method:l,args:s})}},{key:"createDebugLookup",value:function(t,l,s){}},{key:"setReactNativeMicrotasksCallback",value:function(t){this._reactNativeMicrotasksCallback=t}},{key:"__guard",value:function(t){if(this.__shouldPauseOnThrow())t();else try{t()}catch(t){r(d[3]).reportFatalError(t)}}},{key:"__shouldPauseOnThrow",value:function(){return'undefined'!=typeof DebuggerInternal&&!0===DebuggerInternal.shouldPauseOnThrow}},{key:"__callReactNativeMicrotasks",value:function(){r(d[2]).beginEvent('JSTimers.callReactNativeMicrotasks()'),null!=this._reactNativeMicrotasksCallback&&this._reactNativeMicrotasksCallback(),r(d[2]).endEvent()}},{key:"__callFunction",value:function(t,l,s){this._lastFlush=Date.now(),this._eventLoopStartTime=this._lastFlush,this.__spy?r(d[2]).beginEvent(t+"."+l+"("+r(d[4]).default(s)+")"):r(d[2]).beginEvent(t+"."+l+"(...)"),this.__spy&&this.__spy({type:0,module:t,method:l,args:s});var u=this.getCallableModule(t);r(d[5])(!!u,"Module "+t+" is not a registered callable module (calling "+l+"). A frequent cause of the error is that the application entry file path is incorrect.\n This can also happen when the JS bundle is corrupt or there is an early initialization error when loading React Native."),r(d[5])(!!u[l],"Method "+l+" does not exist on module "+t),u[l].apply(u,s),r(d[2]).endEvent()}},{key:"__invokeCallback",value:function(t,l){this._lastFlush=Date.now(),this._eventLoopStartTime=this._lastFlush;var s=t>>>1,u=1&t?this._successCallbacks.get(s):this._failureCallbacks.get(s);u&&(this._successCallbacks.delete(s),this._failureCallbacks.delete(s),u.apply(void 0,r(d[6])(l)))}}],[{key:"spy",value:function(l){t.prototype.__spy=!0===l?function(t){console.log((0===t.type?'N->JS':'JS->N')+" : "+(null!=t.module?t.module+'.':'')+t.method+"("+JSON.stringify(t.args)+")")}:!1===l?null:l}}]),t})();m.exports=t},438,[402,403,439,440,441,425,442]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=!1,t=0,c={installReactHook:function(){!0},setEnabled:function(t){n!==t&&(n=t)},isEnabled:function(){return n},beginEvent:function(t,c){if(n){var o='function'==typeof t?t():t;g.nativeTraceBeginSection(131072,o,c)}},endEvent:function(){n&&g.nativeTraceEndSection(131072)},beginAsyncEvent:function(c){var o=t;if(n){t++;var f='function'==typeof c?c():c;g.nativeTraceBeginAsyncSection(131072,f,o)}return o},endAsyncEvent:function(t,c){if(n){var o='function'==typeof t?t():t;g.nativeTraceEndAsyncSection(131072,o,c)}},counterEvent:function(t,c){if(n){var o='function'==typeof t?t():t;g.nativeTraceCounter&&g.nativeTraceCounter(131072,o,c)}}};m.exports=c},439,[]); +__d(function(g,r,i,a,m,e,d){m.exports=g.ErrorUtils},440,[]); +__d(function(g,r,_i,a,m,_e,d){Object.defineProperty(_e,"__esModule",{value:!0}),_e.createStringifySafeWithLimits=o,_e.default=void 0;var t=r(d[0])(r(d[1]));function e(t,e){var i="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(i)return(i=i.call(t)).next.bind(i);if(Array.isArray(t)||(i=n(t))||e&&t&&"number"==typeof t.length){i&&(t=i);var o=0;return function(){return o>=t.length?{done:!0}:{done:!1,value:t[o++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function n(t,e){if(t){if("string"==typeof t)return i(t,e);var 
n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?i(t,e):void 0}}function i(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,i=new Array(e);nf+"...(truncated)...".length?i.substring(0,f)+"...(truncated)...":i}if('object'!=typeof i||null===i)return i;var u=i;if(Array.isArray(i))h.length>=o?u="[ ... array with "+i.length+" values ... ]":i.length>c&&(u=i.slice(0,c).concat(["... extra "+(i.length-c)+" values truncated ..."]));else{(0,t.default)('object'==typeof i,'This was already found earlier');var l=Object.keys(i);if(h.length>=o)u="{ ... object with "+l.length+" keys ... }";else if(l.length>s){u={};for(var y,v=e(l.slice(0,s));!(y=v()).done;){var b=y.value;u[b]=i[b]}u['...(truncated keys)...']=l.length-s}}return h.unshift(u),u}return function(t){if(void 0===t)return'undefined';if(null===t)return'null';if('function'==typeof t)try{return t.toString()}catch(t){return'[function unknown]'}else{if(t instanceof Error)return t.name+': '+t.message;try{var e=JSON.stringify(t,v);return void 0===e?'["'+typeof t+'" failed to stringify]':e}catch(e){if('function'==typeof t.toString)try{return t.toString()}catch(t){}}}return'["'+typeof t+'" failed to stringify]'}}var u=o({maxDepth:10,maxStringLimit:100,maxArrayLimit:50,maxObjectKeysLimit:50});_e.default=u},441,[407,425]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t){return r(d[0])(t)||r(d[1])(t)||r(d[2])(t)||r(d[3])()},m.exports.__esModule=!0,m.exports.default=m.exports},442,[443,444,433,445]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t){if(Array.isArray(t))return r(d[0])(t)},m.exports.__esModule=!0,m.exports.default=m.exports},443,[434]); +__d(function(g,r,i,a,m,e,d){m.exports=function(o){if("undefined"!=typeof Symbol&&null!=o[Symbol.iterator]||null!=o["@@iterator"])return Array.from(o)},m.exports.__esModule=!0,m.exports.default=m.exports},444,[]); +__d(function(g,r,i,a,m,e,d){m.exports=function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")},m.exports.__esModule=!0,m.exports.default=m.exports},445,[]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(t,n,u){var b,c=u.get,o=!1!==u.enumerable,f=!1!==u.writable,l=!1;function s(u){b=u,l=!0,Object.defineProperty(t,n,{value:u,configurable:!0,enumerable:o,writable:f})}Object.defineProperty(t,n,{get:function(){return l||(l=!0,s(c())),b},set:s,configurable:!0,enumerable:o})}},446,[]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).get('AccessibilityInfo');e.default=n},447,[428]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 
0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('AccessibilityManager');e.default=n},448,[428]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1]));m.exports=function(s,c){'focus'===c&&t.default.sendAccessibilityEvent(s,t.default.getConstants().AccessibilityEventTypes.typeViewFocused),'click'===c&&t.default.sendAccessibilityEvent(s,t.default.getConstants().AccessibilityEventTypes.typeViewClicked)}},449,[407,450]); +__d(function(g,r,i,a,m,e,d){var l=r(d[0])(r(d[1])),n=!0===g.RN$Bridgeless?r(d[2]):null==l.default.unstable_UIManager?r(d[3]):l.default.unstable_UIManager;m.exports=n},450,[407,451,452,453]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;e.default={unstable_UIManager:null}},451,[]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports={getViewManagerConfig:function(n){return console.warn('Attempting to get config for view manager: '+n),'RCTVirtualText'===n?{}:null},hasViewManagerConfig:function(n){return'RCTVirtualText'===n||'RCTShimmeringView'===n},getConstants:function(){return{}},getConstantsForViewManager:function(n){},getDefaultEventTypes:function(){return[]},lazilyLoadView:function(n){},createView:function(n,t,o,u){},updateView:function(n,t,o){},focus:function(n){},blur:function(n){},findSubviewIn:function(n,t,o){},dispatchViewManagerCommand:function(n,t,o){},measure:function(n,t){},measureInWindow:function(n,t){},viewIsDescendantOf:function(n,t,o){},measureLayout:function(n,t,o,u){},measureLayoutRelativeToParent:function(n,t,o){},setJSResponder:function(n,t){},clearJSResponder:function(){},configureNextLayoutAnimation:function(n,t,o){},removeSubviewsFromContainerWithID:function(n){},replaceExistingNonRootView:function(n,t){},setChildren:function(n,t){},manageChildren:function(n,t,o,u,c,f){},setLayoutAnimationEnabledExperimental:function(n){},sendAccessibilityEvent:function(n,t){},showPopupMenu:function(n,t,o,u){},dismissPopupMenu:function(){}}},452,[]); +__d(function(g,r,i,a,m,_e,d){var n=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),t={},o=new Set,f={},u=!1;function c(){return u||(f=e.default.getConstants(),u=!0),f}function l(n){if(void 0===t[n]&&g.nativeCallSyncHook&&e.default.getConstantsForViewManager)try{t[n]=e.default.getConstantsForViewManager(n)}catch(e){console.error("NativeUIManager.getConstantsForViewManager('"+n+"') threw an exception.",e),t[n]=null}var f=t[n];if(f)return f;if(!g.nativeCallSyncHook)return f;if(e.default.lazilyLoadView&&!o.has(n)){var u=e.default.lazilyLoadView(n);o.add(n),null!=u&&null!=u.viewConfig&&(c()[n]=u.viewConfig,w(n))}return t[n]}var s=(0,n.default)({},e.default,{createView:function(n,t,o,f){e.default.createView(n,t,o,f)},getConstants:function(){return c()},getViewManagerConfig:function(n){return l(n)},hasViewManagerConfig:function(n){return null!=l(n)}});function w(n){var e=c()[n];t[n]=e,e.Manager&&(r(d[3])(e,'Constants',{get:function(){var n=r(d[4])[e.Manager],t={};return n&&Object.keys(n).forEach(function(e){var o=n[e];'function'!=typeof o&&(t[e]=o)}),t}}),r(d[3])(e,'Commands',{get:function(){var n=r(d[4])[e.Manager],t={},o=0;return 
n&&Object.keys(n).forEach(function(e){'function'==typeof n[e]&&(t[e]=o++)}),t}}))}e.default.getViewManagerConfig=s.getViewManagerConfig,c().ViewManagerNames&&e.default.getConstants().ViewManagerNames.forEach(function(n){r(d[3])(e.default,n,{get:function(){return e.default.getConstantsForViewManager(n)}})}),g.nativeCallSyncHook||Object.keys(c()).forEach(function(n){r(d[5]).includes(n)||(t[n]||(t[n]=c()[n]),r(d[3])(e.default,n,{get:function(){return console.warn("Accessing view manager configs directly off UIManager via UIManager['"+n+"'] is no longer supported. Use UIManager.getViewManagerConfig('"+n+"') instead."),s.getViewManagerConfig(n)}}))}),m.exports=s},453,[407,436,454,446,429,455]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).getEnforcing('UIManager');e.default=n},454,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=['clearJSResponder','configureNextLayoutAnimation','createView','dismissPopupMenu','dispatchViewManagerCommand','findSubviewIn','getConstantsForViewManager','getDefaultEventTypes','manageChildren','measure','measureInWindow','measureLayout','measureLayoutRelativeToParent','removeRootView','removeSubviewsFromContainerWithID','replaceExistingNonRootView','sendAccessibilityEvent','setChildren','setJSResponder','setLayoutAnimationEnabledExperimental','showPopupMenu','updateView','viewIsDescendantOf','PopupMenu','LazyViewManagersEnabled','ViewManagerNames','StyleConstants','AccessibilityEventTypes','UIView','getViewManagerConfig','hasViewManagerConfig','blur','focus','genericBubblingEventTypes','genericDirectEventTypes','lazilyLoadView']},455,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var t;t=r(d[0]),m.exports=t},456,[457]); +__d(function(e,n,t,r,l,a,i){"use strict";n(i[0]);var u=n(i[1]);function o(e,n,t,r,l,a,i,u,o){var s=Array.prototype.slice.call(arguments,3);try{n.apply(t,s)}catch(e){this.onError(e)}}var s=!1,c=null,d=!1,f=null,p={onError:function(e){s=!0,c=e}};function h(e,n,t,r,l,a,i,u,d){s=!1,c=null,o.apply(p,arguments)}function g(e,n,t,r,l,a,i,u,o){if(h.apply(this,arguments),s){if(!s)throw Error("clearCaughtError was called but no error was captured. This error is likely caused by a bug in React. 
Please file an issue.");var p=c;s=!1,c=null,d||(d=!0,f=p)}}var m=Array.isArray,v=null,b=null,y=null;function S(e,n,t){var r=e.type||"unknown-event";e.currentTarget=y(t),g(r,n,void 0,e),e.currentTarget=null}function k(e){var n=e._dispatchListeners,t=e._dispatchInstances;if(m(n))throw Error("executeDirectDispatch(...): Invalid `event`.");return e.currentTarget=n?y(t):null,n=n?n(e):null,e.currentTarget=null,e._dispatchListeners=null,e._dispatchInstances=null,n}function w(){return!0}function _(){return!1}function T(e,n,t,r){for(var l in this.dispatchConfig=e,this._targetInst=n,this.nativeEvent=t,this._dispatchInstances=this._dispatchListeners=null,e=this.constructor.Interface)e.hasOwnProperty(l)&&((n=e[l])?this[l]=n(t):"target"===l?this.target=r:this[l]=t[l]);return this.isDefaultPrevented=(null!=t.defaultPrevented?t.defaultPrevented:!1===t.returnValue)?w:_,this.isPropagationStopped=_,this}function x(e,n,t,r){if(this.eventPool.length){var l=this.eventPool.pop();return this.call(l,e,n,t,r),l}return new this(e,n,t,r)}function P(e){if(!(e instanceof this))throw Error("Trying to release an event instance into a pool of a different type.");e.destructor(),10>this.eventPool.length&&this.eventPool.push(e)}function R(e){e.getPooled=x,e.eventPool=[],e.release=P}n(i[2])(T.prototype,{preventDefault:function(){this.defaultPrevented=!0;var e=this.nativeEvent;e&&(e.preventDefault?e.preventDefault():"unknown"!=typeof e.returnValue&&(e.returnValue=!1),this.isDefaultPrevented=w)},stopPropagation:function(){var e=this.nativeEvent;e&&(e.stopPropagation?e.stopPropagation():"unknown"!=typeof e.cancelBubble&&(e.cancelBubble=!0),this.isPropagationStopped=w)},persist:function(){this.isPersistent=w},isPersistent:_,destructor:function(){var e,n=this.constructor.Interface;for(e in n)this[e]=null;this.nativeEvent=this._targetInst=this.dispatchConfig=null,this.isPropagationStopped=this.isDefaultPrevented=_,this._dispatchInstances=this._dispatchListeners=null}}),T.Interface={type:null,target:null,currentTarget:function(){return null},eventPhase:null,bubbles:null,cancelable:null,timeStamp:function(e){return e.timeStamp||Date.now()},defaultPrevented:null,isTrusted:null},T.extend=function(e){function t(){}function r(){return l.apply(this,arguments)}var l=this;t.prototype=l.prototype;var a=new t;return n(i[2])(a,r.prototype),r.prototype=a,r.prototype.constructor=r,r.Interface=n(i[2])({},l.Interface,e),r.extend=l.extend,R(r),r},R(T);var E=T.extend({touchHistory:function(){return null}});function C(e){return"topTouchStart"===e}function N(e){return"topTouchMove"===e}var z=["topTouchStart"],I=["topTouchMove"],L=["topTouchCancel","topTouchEnd"],U=[],M={touchBank:U,numberActiveTouches:0,indexOfSingleActiveTouch:-1,mostRecentTimeStamp:0};function F(e){return e.timeStamp||e.timestamp}function D(e){if(null==(e=e.identifier))throw Error("Touch object is missing identifier.");return e}function A(e){var n=D(e),t=U[n];t?(t.touchActive=!0,t.startPageX=e.pageX,t.startPageY=e.pageY,t.startTimeStamp=F(e),t.currentPageX=e.pageX,t.currentPageY=e.pageY,t.currentTimeStamp=F(e),t.previousPageX=e.pageX,t.previousPageY=e.pageY,t.previousTimeStamp=F(e)):(t={touchActive:!0,startPageX:e.pageX,startPageY:e.pageY,startTimeStamp:F(e),currentPageX:e.pageX,currentPageY:e.pageY,currentTimeStamp:F(e),previousPageX:e.pageX,previousPageY:e.pageY,previousTimeStamp:F(e)},U[n]=t),M.mostRecentTimeStamp=F(e)}function Q(e){var 
n=U[D(e)];n&&(n.touchActive=!0,n.previousPageX=n.currentPageX,n.previousPageY=n.currentPageY,n.previousTimeStamp=n.currentTimeStamp,n.currentPageX=e.pageX,n.currentPageY=e.pageY,n.currentTimeStamp=F(e),M.mostRecentTimeStamp=F(e))}function H(e){var n=U[D(e)];n&&(n.touchActive=!1,n.previousPageX=n.currentPageX,n.previousPageY=n.currentPageY,n.previousTimeStamp=n.currentTimeStamp,n.currentPageX=e.pageX,n.currentPageY=e.pageY,n.currentTimeStamp=F(e),M.mostRecentTimeStamp=F(e))}var O,j={instrument:function(e){O=e},recordTouchTrack:function(e,n){if(null!=O&&O(e,n),N(e))n.changedTouches.forEach(Q);else if(C(e))n.changedTouches.forEach(A),M.numberActiveTouches=n.touches.length,1===M.numberActiveTouches&&(M.indexOfSingleActiveTouch=n.touches[0].identifier);else if(("topTouchEnd"===e||"topTouchCancel"===e)&&(n.changedTouches.forEach(H),M.numberActiveTouches=n.touches.length,1===M.numberActiveTouches))for(e=0;ei||(a=i),Ne(a,e,l)}}}),v=function(e){return we.get(e._nativeTag)||null},b=_e,y=function(e){var n=(e=e.stateNode)._nativeTag;if(void 0===n&&(n=(e=e.canonical)._nativeTag),!n)throw Error("All native instances should have a tag.");return e},re.injection.injectGlobalResponderHandler({onChange:function(e,t,r){null!==t?n(i[3]).UIManager.setJSResponder(t.stateNode._nativeTag,r):n(i[3]).UIManager.clearJSResponder()}});var ze=u.__SECRET_INTERNALS_DO_NOT_USE_OR_YOU_WILL_BE_FIRED,Ie=60103,Le=60106,Ue=60107,Me=60108,Fe=60114,De=60109,Ae=60110,Qe=60112,He=60113,Oe=60120,je=60115,Be=60116,Ve=60129,We=60130,Ye=60131,qe=60132;if("function"==typeof Symbol&&Symbol.for){var Xe=Symbol.for;Ie=Xe("react.element"),Le=Xe("react.portal"),Ue=Xe("react.fragment"),Me=Xe("react.strict_mode"),Fe=Xe("react.profiler"),De=Xe("react.provider"),Ae=Xe("react.context"),Qe=Xe("react.forward_ref"),He=Xe("react.suspense"),Oe=Xe("react.suspense_list"),je=Xe("react.memo"),Be=Xe("react.lazy"),Xe("react.scope"),Ve=Xe("react.debug_trace_mode"),We=Xe("react.offscreen"),Ye=Xe("react.legacy_hidden"),qe=Xe("react.cache")}var $e="function"==typeof Symbol&&Symbol.iterator;function Ge(e){return null===e||"object"!=typeof e?null:"function"==typeof(e=$e&&e[$e]||e["@@iterator"])?e:null}function Ke(e){if(null==e)return null;if("function"==typeof e)return e.displayName||e.name||null;if("string"==typeof e)return e;switch(e){case Ue:return"Fragment";case Le:return"Portal";case Fe:return"Profiler";case Me:return"StrictMode";case He:return"Suspense";case Oe:return"SuspenseList";case qe:return"Cache"}if("object"==typeof e)switch(e.$$typeof){case Ae:return(e.displayName||"Context")+".Consumer";case De:return(e._context.displayName||"Context")+".Provider";case Qe:var n=e.render;return(e=e.displayName)||(e=""!==(e=n.displayName||n.name||"")?"ForwardRef("+e+")":"ForwardRef"),e;case je:return null!==(n=e.displayName||null)?n:Ke(e.type)||"Memo";case Be:n=e._payload,e=e._init;try{return Ke(e(n))}catch(e){}}return null}function Je(e){var n=e.type;switch(e.tag){case 24:return"Cache";case 9:return(n.displayName||"Context")+".Consumer";case 10:return(n._context.displayName||"Context")+".Provider";case 18:return"DehydratedFragment";case 11:return e=(e=n.render).displayName||e.name||"",n.displayName||(""!==e?"ForwardRef("+e+")":"ForwardRef");case 7:return"Fragment";case 5:return n;case 4:return"Portal";case 3:return"Root";case 6:return"Text";case 16:return Ke(n);case 23:return"LegacyHidden";case 8:return n===Me?"StrictMode":"Mode";case 22:return"Offscreen";case 12:return"Profiler";case 21:return"Scope";case 13:return"Suspense";case 19:return"SuspenseList";case 1:case 
0:case 17:case 2:case 14:case 15:if("function"==typeof n)return n.displayName||n.name||null;if("string"==typeof n)return n}return null}function Ze(e){var n=e,t=e;if(e.alternate)for(;n.return;)n=n.return;else{e=n;do{0!=(2050&(n=e).flags)&&(t=n.return),e=n.return}while(e)}return 3===n.tag?t:null}function en(e){if(Ze(e)!==e)throw Error("Unable to find node on an unmounted component.")}function nn(e){var n=e.alternate;if(!n){if(null===(n=Ze(e)))throw Error("Unable to find node on an unmounted component.");return n!==e?null:e}for(var t=e,r=n;;){var l=t.return;if(null===l)break;var a=l.alternate;if(null===a){if(null!==(r=l.return)){t=r;continue}break}if(l.child===a.child){for(a=l.child;a;){if(a===t)return en(l),e;if(a===r)return en(l),n;a=a.sibling}throw Error("Unable to find node on an unmounted component.")}if(t.return!==r.return)t=l,r=a;else{for(var i=!1,u=l.child;u;){if(u===t){i=!0,t=l,r=a;break}if(u===r){i=!0,r=l,t=a;break}u=u.sibling}if(!i){for(u=a.child;u;){if(u===t){i=!0,t=a,r=l;break}if(u===r){i=!0,r=a,t=l;break}u=u.sibling}if(!i)throw Error("Child was not found in either parent set. This indicates a bug in React related to the return pointer. Please file an issue.")}}if(t.alternate!==r)throw Error("Return fibers should always be each others' alternates. This error is likely caused by a bug in React. Please file an issue.")}if(3!==t.tag)throw Error("Unable to find node on an unmounted component.");return t.stateNode.current===t?e:n}function tn(e){return null!==(e=nn(e))?rn(e):null}function rn(e){if(5===e.tag||6===e.tag)return e;for(e=e.child;null!==e;){var n=rn(e);if(null!==n)return n;e=e.sibling}return null}var ln={},an=null,un=0,on={unsafelyIgnoreFunctions:!0};function sn(e,t){return"object"!=typeof t||null===t||n(i[3]).deepDiffer(e,t,on)}function cn(e,n,t){if(m(n))for(var r=n.length;r--&&0=(a=n&-n)||16===l&&0!=(4194240&a)))return n;if(0!=(4&r)&&(r|=16&t),0!==(n=e.entangledLanes))for(e=e.entanglements,n&=r;0t;t++)n.push(e);return n}function Rn(e,n,t){e.pendingLanes|=n,536870912!==n&&(e.suspendedLanes=0,e.pingedLanes=0),(e=e.eventTimes)[n=31-Nn(n)]=t}function En(e,n){var t=e.pendingLanes&~n;e.pendingLanes=n,e.suspendedLanes=0,e.pingedLanes=0,e.expiredLanes&=n,e.mutableReadLanes&=n,e.entangledLanes&=n,n=e.entanglements;var r=e.eventTimes;for(e=e.expirationTimes;0Xn||(e.current=qn[Xn],qn[Xn]=null,Xn--)}function Kn(e,n){qn[++Xn]=e.current,e.current=n}var Jn={},Zn=$n(Jn),et=$n(!1),nt=Jn;function tt(e,n){var t=e.type.contextTypes;if(!t)return Jn;var r=e.stateNode;if(r&&r.__reactInternalMemoizedUnmaskedChildContext===n)return r.__reactInternalMemoizedMaskedChildContext;var l,a={};for(l in t)a[l]=n[l];return r&&((e=e.stateNode).__reactInternalMemoizedUnmaskedChildContext=n,e.__reactInternalMemoizedMaskedChildContext=a),a}function rt(e){return null!==(e=e.childContextTypes)&&void 0!==e}function lt(){Gn(et),Gn(Zn)}function at(e,n,t){if(Zn.current!==Jn)throw Error("Unexpected context found on stack. This error is likely caused by a bug in React. Please file an issue.");Kn(Zn,n),Kn(et,t)}function it(e,t,r){var l=e.stateNode;if(t=t.childContextTypes,"function"!=typeof l.getChildContext)return r;for(var a in l=l.getChildContext())if(!(a in t))throw Error((Je(e)||"Unknown")+'.getChildContext(): key "'+a+'" is not defined in childContextTypes.');return n(i[2])({},r,l)}function ut(e){return e=(e=e.stateNode)&&e.__reactInternalMemoizedMergedChildContext||Jn,nt=Zn.current,Kn(Zn,e),Kn(et,et.current),!0}function ot(e,n,t){var r=e.stateNode;if(!r)throw Error("Expected to have an instance by this point. 
This error is likely caused by a bug in React. Please file an issue.");t?(e=it(e,n,nt),r.__reactInternalMemoizedMergedChildContext=e,Gn(et),Gn(Zn),Kn(Zn,e)):Gn(et),Kn(et,t)}var st=null,ct=!1,dt=!1;function ft(){if(!dt&&null!==st){dt=!0;var e=0,t=Ln;try{var r=st;for(Ln=1;eg?(m=h,h=null):m=h.sibling;var v=f(l,h,u[g],o);if(null===v){null===h&&(h=m);break}e&&h&&null===v.alternate&&n(l,h),i=a(v,i,g),null===c?s=v:c.sibling=v,c=v,h=m}if(g===u.length)return t(l,h),s;if(null===h){for(;gg?(m=h,h=null):m=h.sibling;var b=f(l,h,v.value,o);if(null===b){null===h&&(h=m);break}e&&h&&null===b.alternate&&n(l,h),i=a(b,i,g),null===c?s=b:c.sibling=b,c=b,h=m}if(v.done)return t(l,h),s;if(null===h){for(;!v.done;g++,v=u.next())null!==(v=d(l,v.value,o))&&(i=a(v,i,g),null===c?s=v:c.sibling=v,c=v);return s}for(h=r(l,h);!v.done;g++,v=u.next())null!==(v=p(h,l,g,v.value,o))&&(e&&null!==v.alternate&&h.delete(null===v.key?g:v.key),i=a(v,i,g),null===c?s=v:c.sibling=v,c=v);return e&&h.forEach(function(e){return n(l,e)}),s}return function(e,r,a,u){var o="object"==typeof a&&null!==a&&a.type===Ue&&null===a.key;if(o&&(a=a.props.children),"object"==typeof a&&null!==a){switch(a.$$typeof){case Ie:e:{var s=a.key;for(o=r;null!==o;){if(o.key===s){if((s=a.type)===Ue){if(7===o.tag){t(e,o.sibling),(r=l(o,a.props.children)).return=e,e=r;break e}}else if(o.elementType===s){t(e,o.sibling),(r=l(o,a.props)).ref=Wt(e,o,a),r.return=e,e=r;break e}t(e,o);break}n(e,o),o=o.sibling}a.type===Ue?((r=Ka(a.props.children,e.mode,u,a.key)).return=e,e=r):((u=Ga(a.type,a.key,a.props,null,e.mode,u)).ref=Wt(e,r,a),u.return=e,e=u)}return i(e);case Le:e:{for(o=a.key;null!==r;){if(r.key===o){if(4===r.tag&&r.stateNode.containerInfo===a.containerInfo&&r.stateNode.implementation===a.implementation){t(e,r.sibling),(r=l(r,a.children||[])).return=e,e=r;break e}t(e,r);break}n(e,r),r=r.sibling}(r=ei(a,e.mode,u)).return=e,e=r}return i(e)}if(m(a))return h(e,r,a,u);if(Ge(a))return g(e,r,a,u);Yt(e,a)}if("string"==typeof a||"number"==typeof a)return a=""+a,null!==r&&6===r.tag?(t(e,r.sibling),(r=l(r,a)).return=e,e=r):(t(e,r),(r=Za(a,e.mode,u)).return=e,e=r),i(e);if(void 0===a&&!o)switch(e.tag){case 1:case 0:case 11:case 15:throw Error((Je(e)||"Component")+"(...): Nothing was returned from render. This usually means a return statement is missing. Or, to render nothing, return null.")}return t(e,r)}}var Xt=qt(!0),$t=qt(!1),Gt={},Kt=$n(Gt),Jt=$n(Gt),Zt=$n(Gt);function er(e){if(e===Gt)throw Error("Expected host context to exist. This error is likely caused by a bug in React. Please file an issue.");return e}function nr(e,n){Kn(Zt,n),Kn(Jt,e),Kn(Kt,Gt),Gn(Kt),Kn(Kt,{isInAParentText:!1})}function tr(){Gn(Kt),Gn(Jt),Gn(Zt)}function rr(e){er(Zt.current);var n=er(Kt.current),t=e.type;t="AndroidTextInput"===t||"RCTMultilineTextInputView"===t||"RCTSinglelineTextInputView"===t||"RCTText"===t||"RCTVirtualText"===t,n!==(t=n.isInAParentText!==t?{isInAParentText:t}:n)&&(Kn(Jt,e),Kn(Kt,t))}function lr(e){Jt.current===e&&(Gn(Kt),Gn(Jt))}var ar=$n(0);function ir(e){for(var n=e;null!==n;){if(13===n.tag){var t=n.memoizedState;if(null!==t&&(null===t.dehydrated||Mn()||Mn()))return n}else if(19===n.tag&&void 0!==n.memoizedProps.revealOrder){if(0!=(128&n.flags))return n}else if(null!==n.child){n.child.return=n,n=n.child;continue}if(n===e)break;for(;null===n.sibling;){if(null===n.return||n.return===e)return null;n=n.return}n.sibling.return=n.return,n=n.sibling}return null}var ur=[];function or(){for(var e=0;ea))throw Error("Too many re-renders. 
React limits the number of renders to prevent an infinite loop.");a+=1,hr=pr=null,n.updateQueue=null,sr.current=Yr,e=t(r,l)}while(mr)}if(sr.current=Br,n=null!==pr&&null!==pr.next,dr=0,hr=pr=fr=null,gr=!1,n)throw Error("Rendered fewer hooks than expected. This may be caused by an accidental early return statement.");return e}function Sr(){var e={memoizedState:null,baseState:null,baseQueue:null,queue:null,next:null};return null===hr?fr.memoizedState=hr=e:hr=hr.next=e,hr}function kr(){if(null===pr){var e=fr.alternate;e=null!==e?e.memoizedState:null}else e=pr.next;var n=null===hr?fr.memoizedState:hr.next;if(null!==n)hr=n,pr=e;else{if(null===e)throw Error("Rendered more hooks than during the previous render.");e={memoizedState:(pr=e).memoizedState,baseState:pr.baseState,baseQueue:pr.baseQueue,queue:pr.queue,next:null},null===hr?fr.memoizedState=hr=e:hr=hr.next=e}return hr}function wr(e,n){return"function"==typeof n?n(e):n}function _r(e){var n=kr(),t=n.queue;if(null===t)throw Error("Should have a queue. This is likely a bug in React. Please file an issue.");t.lastRenderedReducer=e;var r=pr,l=r.baseQueue,a=t.pending;if(null!==a){if(null!==l){var i=l.next;l.next=a.next,a.next=i}r.baseQueue=l=a,t.pending=null}if(null!==l){a=l.next,r=r.baseState;var u=i=null,o=null,s=a;do{var c=s.lane;if((dr&c)===c)null!==o&&(o=o.next={lane:0,action:s.action,eagerReducer:s.eagerReducer,eagerState:s.eagerState,next:null}),r=s.eagerReducer===e?s.eagerState:e(r,s.action);else{var d={lane:c,action:s.action,eagerReducer:s.eagerReducer,eagerState:s.eagerState,next:null};null===o?(u=o=d,i=r):o=o.next=d,fr.lanes|=c,ia|=c}s=s.next}while(null!==s&&s!==a);null===o?i=r:o.next=u,ht(r,n.memoizedState)||(Zr=!0),n.memoizedState=r,n.baseState=i,n.baseQueue=o,t.lastRenderedState=r}if(null!==(e=t.interleaved)){l=e;do{a=l.lane,fr.lanes|=a,ia|=a,l=l.next}while(l!==e)}else null===l&&(t.lanes=0);return[n.memoizedState,t.dispatch]}function Tr(e){var n=kr(),t=n.queue;if(null===t)throw Error("Should have a queue. This is likely a bug in React. Please file an issue.");t.lastRenderedReducer=e;var r=t.dispatch,l=t.pending,a=n.memoizedState;if(null!==l){t.pending=null;var i=l=l.next;do{a=e(a,i.action),i=i.next}while(i!==l);ht(a,n.memoizedState)||(Zr=!0),n.memoizedState=a,null===n.baseQueue&&(n.baseState=a),t.lastRenderedState=a}return[a,r]}function xr(e,n,t){var r=n._getVersion;r=r(n._source);var l=n._workInProgressVersionPrimary;if(null!==l?e=l===r:(e=e.mutableReadLanes,(e=(dr&e)===e)&&(n._workInProgressVersionPrimary=r,ur.push(n))),e)return t(n._source);throw ur.push(n),Error("Cannot read from mutable source during the current render without tearing. This may be a bug in React. Please file an issue.")}function Pr(e,n,t,r){var l=Zl;if(null===l)throw Error("Expected a work-in-progress root. This is a bug in React. 
Please file an issue.");var a=n._getVersion,i=a(n._source),u=sr.current,o=u.useState(function(){return xr(l,n,t)}),s=o[1],c=o[0];o=hr;var d=e.memoizedState,f=d.refs,p=f.getSnapshot,h=d.source;d=d.subscribe;var g=fr;return e.memoizedState={refs:f,source:n,subscribe:r},u.useEffect(function(){f.getSnapshot=t,f.setSnapshot=s;var e=a(n._source);ht(i,e)||(e=t(n._source),ht(c,e)||(s(e),e=wa(g),l.mutableReadLanes|=e&l.pendingLanes),Cn(l,l.mutableReadLanes))},[t,n,r]),u.useEffect(function(){return r(n._source,function(){var e=f.getSnapshot,t=f.setSnapshot;try{t(e(n._source));var r=wa(g);l.mutableReadLanes|=r&l.pendingLanes}catch(e){t(function(){throw e})}})},[n,r]),ht(p,t)&&ht(h,n)&&ht(d,r)||((e={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:wr,lastRenderedState:c}).dispatch=s=jr.bind(null,fr,e),o.queue=e,o.baseQueue=null,c=xr(l,n,t),o.memoizedState=o.baseState=c),c}function Rr(e,n,t){return Pr(kr(),e,n,t)}function Er(e){var n=Sr();return"function"==typeof e&&(e=e()),n.memoizedState=n.baseState=e,e=(e=n.queue={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:wr,lastRenderedState:e}).dispatch=jr.bind(null,fr,e),[n.memoizedState,e]}function Cr(e,n,t,r){return e={tag:e,create:n,destroy:t,deps:r,next:null},null===(n=fr.updateQueue)?(n={lastEffect:null},fr.updateQueue=n,n.lastEffect=e.next=e):null===(t=n.lastEffect)?n.lastEffect=e.next=e:(r=t.next,t.next=e,e.next=r,n.lastEffect=e),e}function Nr(){return kr().memoizedState}function zr(e,n,t,r){var l=Sr();fr.flags|=e,l.memoizedState=Cr(1|n,t,void 0,void 0===r?null:r)}function Ir(e,n,t,r){var l=kr();r=void 0===r?null:r;var a=void 0;if(null!==pr){var i=pr.memoizedState;if(a=i.destroy,null!==r&&br(r,i.deps))return void(l.memoizedState=Cr(n,t,a,r))}fr.flags|=e,l.memoizedState=Cr(1|n,t,a,r)}function Lr(e,n){return zr(1049600,4,e,n)}function Ur(e,n){return Ir(1024,4,e,n)}function Mr(e,n){return Ir(4,2,e,n)}function Fr(e,n){return"function"==typeof n?(e=e(),n(e),function(){n(null)}):null!==n&&void 0!==n?(e=e(),n.current=e,function(){n.current=null}):void 0}function Dr(e,n,t){return t=null!==t&&void 0!==t?t.concat([e]):null,Ir(4,2,Fr.bind(null,n,e),t)}function Ar(){}function Qr(e,n){var t=kr();n=void 0===n?null:n;var r=t.memoizedState;return null!==r&&null!==n&&br(n,r[1])?r[0]:(t.memoizedState=[e,n],e)}function Hr(e,n){var t=kr();n=void 0===n?null:n;var r=t.memoizedState;return null!==r&&null!==n&&br(n,r[1])?r[0]:(e=e(),t.memoizedState=[e,n],e)}function Or(e,n){var t=Ln;Ln=0!==t&&4>t?t:4,e(!0);var r=cr.transition;cr.transition=1;try{e(!1),n()}finally{Ln=t,cr.transition=r}}function jr(e,n,t){var r=ka(),l=wa(e),a={lane:l,action:t,eagerReducer:null,eagerState:null,next:null},i=e.alternate;if(e===fr||null!==i&&i===fr)mr=gr=!0,null===(l=n.pending)?a.next=a:(a.next=l.next,l.next=a),n.pending=a;else{if(null!==Zl&&0!=(1&e.mode)&&0==(8&Jl)){var u=n.interleaved;null===u?(a.next=a,null===Et?Et=[n]:Et.push(n)):(a.next=u.next,u.next=a),n.interleaved=a}else null===(u=n.pending)?a.next=a:(a.next=u.next,u.next=a),n.pending=a;if(0===e.lanes&&(null===i||0===i.lanes)&&null!==(i=n.lastRenderedReducer))try{var o=n.lastRenderedState,s=i(o,t);if(a.eagerReducer=i,a.eagerState=s,ht(s,o))return}catch(e){}a=_a(e,l,r),0!=(4194240&l)&&null!==a&&(e=n.lanes,l|=e&=a.pendingLanes,n.lanes=l,Cn(a,l))}}var 
Br={readContext:Rt,useCallback:vr,useContext:vr,useEffect:vr,useImperativeHandle:vr,useLayoutEffect:vr,useMemo:vr,useReducer:vr,useRef:vr,useState:vr,useDebugValue:vr,useDeferredValue:vr,useTransition:vr,useMutableSource:vr,useOpaqueIdentifier:vr,unstable_isNewReconciler:!1},Vr={readContext:Rt,useCallback:function(e,n){return Sr().memoizedState=[e,void 0===n?null:n],e},useContext:Rt,useEffect:Lr,useImperativeHandle:function(e,n,t){return t=null!==t&&void 0!==t?t.concat([e]):null,zr(4,2,Fr.bind(null,n,e),t)},useLayoutEffect:function(e,n){return zr(4,2,e,n)},useMemo:function(e,n){var t=Sr();return n=void 0===n?null:n,e=e(),t.memoizedState=[e,n],e},useReducer:function(e,n,t){var r=Sr();return n=void 0!==t?t(n):n,r.memoizedState=r.baseState=n,e=(e=r.queue={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:e,lastRenderedState:n}).dispatch=jr.bind(null,fr,e),[r.memoizedState,e]},useRef:function(e){return e={current:e},Sr().memoizedState=e},useState:Er,useDebugValue:Ar,useDeferredValue:function(e){var n=Er(e),t=n[0],r=n[1];return Lr(function(){var n=cr.transition;cr.transition=1;try{r(e)}finally{cr.transition=n}},[e]),t},useTransition:function(){var e=Er(!1),n=e[0];return e=Or.bind(null,e[1]),Sr().memoizedState=e,[n,e]},useMutableSource:function(e,n,t){var r=Sr();return r.memoizedState={refs:{getSnapshot:n,setSnapshot:null},source:e,subscribe:t},Pr(r,e,n,t)},useOpaqueIdentifier:function(){throw Error("Not yet implemented")},unstable_isNewReconciler:!1},Wr={readContext:Rt,useCallback:Qr,useContext:Rt,useEffect:Ur,useImperativeHandle:Dr,useLayoutEffect:Mr,useMemo:Hr,useReducer:_r,useRef:Nr,useState:function(){return _r(wr)},useDebugValue:Ar,useDeferredValue:function(e){var n=_r(wr),t=n[0],r=n[1];return Ur(function(){var n=cr.transition;cr.transition=1;try{r(e)}finally{cr.transition=n}},[e]),t},useTransition:function(){return[_r(wr)[0],kr().memoizedState]},useMutableSource:Rr,useOpaqueIdentifier:function(){return _r(wr)[0]},unstable_isNewReconciler:!1},Yr={readContext:Rt,useCallback:Qr,useContext:Rt,useEffect:Ur,useImperativeHandle:Dr,useLayoutEffect:Mr,useMemo:Hr,useReducer:Tr,useRef:Nr,useState:function(){return Tr(wr)},useDebugValue:Ar,useDeferredValue:function(e){var n=Tr(wr),t=n[0],r=n[1];return Ur(function(){var n=cr.transition;cr.transition=1;try{r(e)}finally{cr.transition=n}},[e]),t},useTransition:function(){return[Tr(wr)[0],kr().memoizedState]},useMutableSource:Rr,useOpaqueIdentifier:function(){return Tr(wr)[0]},unstable_isNewReconciler:!1};function qr(e,n){return{value:e,source:n,stack:vt(n)}}if("function"!=typeof n(i[3]).ReactFiberErrorDialog.showErrorDialog)throw Error("Expected ReactFiberErrorDialog.showErrorDialog to be a function.");function Xr(e,t){try{!1!==n(i[3]).ReactFiberErrorDialog.showErrorDialog({componentStack:null!==t.stack?t.stack:"",error:t.value,errorBoundary:null!==e&&1===e.tag?e.stateNode:null})&&console.error(t.value)}catch(e){setTimeout(function(){throw e})}}var $r="function"==typeof WeakMap?WeakMap:Map;function Gr(e,n,t){(t=It(-1,t)).tag=3,t.payload={element:null};var r=n.value;return t.callback=function(){da||(da=!0,fa=r),Xr(e,n)},t}function Kr(e,n,t){(t=It(-1,t)).tag=3;var r=e.type.getDerivedStateFromError;if("function"==typeof r){var l=n.value;t.payload=function(){return Xr(e,n),r(l)}}var a=e.stateNode;return null!==a&&"function"==typeof a.componentDidCatch&&(t.callback=function(){"function"!=typeof r&&(null===pa?pa=new Set([this]):pa.add(this),Xr(e,n));var t=n.stack;this.componentDidCatch(n.value,{componentStack:null!==t?t:""})}),t}var 
Jr=ze.ReactCurrentOwner,Zr=!1;function el(e,n,t,r){n.child=null===e?$t(n,null,t,r):Xt(n,e.child,t,r)}function nl(e,n,t,r,l){t=t.render;var a=n.ref;return Pt(n,l),r=yr(e,n,t,r,a,l),null===e||Zr?(n.flags|=1,el(e,n,r,l),n.child):(n.updateQueue=e.updateQueue,n.flags&=-1029,e.lanes&=~l,_l(e,n,l))}function tl(e,n,t,r,l,a){if(null===e){var i=t.type;return"function"!=typeof i||qa(i)||void 0!==i.defaultProps||null!==t.compare||void 0!==t.defaultProps?((e=Ga(t.type,null,r,n,n.mode,a)).ref=n.ref,e.return=n,n.child=e):(n.tag=15,n.type=i,rl(e,n,i,r,l,a))}return i=e.child,0==(l&a)&&(l=i.memoizedProps,(t=null!==(t=t.compare)?t:gt)(l,r)&&e.ref===n.ref)?_l(e,n,a):(n.flags|=1,(e=$a(i,r)).ref=n.ref,e.return=n,n.child=e)}function rl(e,n,t,r,l,a){if(null!==e&>(e.memoizedProps,r)&&e.ref===n.ref){if(Zr=!1,0==(a&l))return n.lanes=e.lanes,_l(e,n,a);0!=(32768&e.flags)&&(Zr=!0)}return il(e,n,t,r,a)}function ll(e,n,t){var r=n.pendingProps,l=r.children,a=null!==e?e.memoizedState:null;if("hidden"===r.mode||"unstable-defer-without-hiding"===r.mode)if(0==(1&n.mode))n.memoizedState={baseLanes:0,cachePool:null},Kn(ra,ta),ta|=t;else{if(0==(1073741824&t))return e=null!==a?a.baseLanes|t:t,n.lanes=n.childLanes=1073741824,n.memoizedState={baseLanes:e,cachePool:null},n.updateQueue=null,Kn(ra,ta),ta|=e,null;n.memoizedState={baseLanes:0,cachePool:null},r=null!==a?a.baseLanes:t,Kn(ra,ta),ta|=r}else null!==a?(r=a.baseLanes|t,n.memoizedState=null):r=t,Kn(ra,ta),ta|=r;return el(e,n,l,t),n.child}function al(e,n){var t=n.ref;(null===e&&null!==t||null!==e&&e.ref!==t)&&(n.flags|=256)}function il(e,n,t,r,l){var a=rt(t)?nt:Zn.current;return a=tt(n,a),Pt(n,l),t=yr(e,n,t,r,a,l),null===e||Zr?(n.flags|=1,el(e,n,t,l),n.child):(n.updateQueue=e.updateQueue,n.flags&=-1029,e.lanes&=~l,_l(e,n,l))}function ul(e,n,t,r,l){if(rt(t)){var a=!0;ut(n)}else a=!1;if(Pt(n,l),null===n.stateNode)null!==e&&(e.alternate=null,n.alternate=null,n.flags|=2),jt(n,t,r),Vt(n,t,r,l),r=!0;else if(null===e){var i=n.stateNode,u=n.memoizedProps;i.props=u;var o=i.context,s=t.contextType;"object"==typeof s&&null!==s?s=Rt(s):s=tt(n,s=rt(t)?nt:Zn.current);var c=t.getDerivedStateFromProps,d="function"==typeof c||"function"==typeof i.getSnapshotBeforeUpdate;d||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof i.componentWillReceiveProps||(u!==r||o!==s)&&Bt(n,i,r,s),Ct=!1;var f=n.memoizedState;i.state=f,Ft(n,r,i,l),o=n.memoizedState,u!==r||f!==o||et.current||Ct?("function"==typeof c&&(Qt(n,t,c,r),o=n.memoizedState),(u=Ct||Ot(n,t,u,r,f,o,s))?(d||"function"!=typeof i.UNSAFE_componentWillMount&&"function"!=typeof i.componentWillMount||("function"==typeof i.componentWillMount&&i.componentWillMount(),"function"==typeof i.UNSAFE_componentWillMount&&i.UNSAFE_componentWillMount()),"function"==typeof i.componentDidMount&&(n.flags|=4)):("function"==typeof i.componentDidMount&&(n.flags|=4),n.memoizedProps=r,n.memoizedState=o),i.props=r,i.state=o,i.context=s,r=u):("function"==typeof i.componentDidMount&&(n.flags|=4),r=!1)}else{i=n.stateNode,zt(e,n),u=n.memoizedProps,s=n.type===n.elementType?u:bt(n.type,u),i.props=s,d=n.pendingProps,f=i.context,"object"==typeof(o=t.contextType)&&null!==o?o=Rt(o):o=tt(n,o=rt(t)?nt:Zn.current);var p=t.getDerivedStateFromProps;(c="function"==typeof p||"function"==typeof i.getSnapshotBeforeUpdate)||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof i.componentWillReceiveProps||(u!==d||f!==o)&&Bt(n,i,r,o),Ct=!1,f=n.memoizedState,i.state=f,Ft(n,r,i,l);var h=n.memoizedState;u!==d||f!==h||et.current||Ct?("function"==typeof 
p&&(Qt(n,t,p,r),h=n.memoizedState),(s=Ct||Ot(n,t,s,r,f,h,o)||!1)?(c||"function"!=typeof i.UNSAFE_componentWillUpdate&&"function"!=typeof i.componentWillUpdate||("function"==typeof i.componentWillUpdate&&i.componentWillUpdate(r,h,o),"function"==typeof i.UNSAFE_componentWillUpdate&&i.UNSAFE_componentWillUpdate(r,h,o)),"function"==typeof i.componentDidUpdate&&(n.flags|=4),"function"==typeof i.getSnapshotBeforeUpdate&&(n.flags|=512)):("function"!=typeof i.componentDidUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=512),n.memoizedProps=r,n.memoizedState=h),i.props=r,i.state=h,i.context=o,r=s):("function"!=typeof i.componentDidUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=512),r=!1)}return ol(e,n,t,r,a,l)}function ol(e,n,t,r,l,a){al(e,n);var i=0!=(128&n.flags);if(!r&&!i)return l&&ot(n,t,!1),_l(e,n,a);r=n.stateNode,Jr.current=n;var u=i&&"function"!=typeof t.getDerivedStateFromError?null:r.render();return n.flags|=1,null!==e&&i?(n.child=Xt(n,e.child,null,a),n.child=Xt(n,null,u,a)):el(e,n,u,a),n.memoizedState=r.state,l&&ot(n,t,!0),n.child}function sl(e){var n=e.stateNode;n.pendingContext?at(0,n.pendingContext,n.pendingContext!==n.context):n.context&&at(0,n.context,!1),nr(e,n.containerInfo)}var cl,dl,fl,pl,hl={dehydrated:null,retryLane:0};function gl(e){return{baseLanes:e,cachePool:null}}function ml(e,n,t){var r,l=n.pendingProps,a=ar.current,i=!1;return(r=0!=(128&n.flags))||(r=(null===e||null!==e.memoizedState)&&0!=(2&a)),r?(i=!0,n.flags&=-129):null!==e&&null===e.memoizedState||void 0===l.fallback||!0===l.unstable_avoidThisFallback||(a|=1),Kn(ar,1&a),null===e?(e=l.children,a=l.fallback,i?(e=vl(n,e,a,t),n.child.memoizedState=gl(t),n.memoizedState=hl,e):"number"==typeof l.unstable_expectedLoadTime?(e=vl(n,e,a,t),n.child.memoizedState=gl(t),n.memoizedState=hl,n.lanes=4194304,e):((t=Ja({mode:"visible",children:e},n.mode,t,null)).return=n,n.child=t)):(e.memoizedState,i?(l=yl(e,n,l.children,l.fallback,t),i=n.child,a=e.child.memoizedState,i.memoizedState=null===a?gl(t):{baseLanes:a.baseLanes|t,cachePool:null},i.childLanes=e.childLanes&~t,n.memoizedState=hl,l):(t=bl(e,n,l.children,t),n.memoizedState=null,t))}function vl(e,n,t,r){var l=e.mode,a=e.child;return n={mode:"hidden",children:n},0==(1&l)&&null!==a?(a.childLanes=0,a.pendingProps=n):a=Ja(n,l,0,null),t=Ka(t,l,r,null),a.return=e,t.return=e,a.sibling=t,e.child=a,t}function bl(e,n,t,r){var l=e.child;return e=l.sibling,t=$a(l,{mode:"visible",children:t}),0==(1&n.mode)&&(t.lanes=r),t.return=n,t.sibling=null,null!==e&&(null===(r=n.deletions)?(n.deletions=[e],n.flags|=16):r.push(e)),n.child=t}function yl(e,n,t,r,l){var a=n.mode,i=(e=e.child).sibling,u={mode:"hidden",children:t};return 0==(1&a)&&n.child!==e?((t=n.child).childLanes=0,t.pendingProps=u,n.deletions=null):(t=$a(e,u)).subtreeFlags=1835008&e.subtreeFlags,null!==i?r=$a(i,r):(r=Ka(r,a,l,null)).flags|=2,r.return=n,t.return=n,t.sibling=r,n.child=t,r}function Sl(e,n){e.lanes|=n;var t=e.alternate;null!==t&&(t.lanes|=n),xt(e.return,n)}function kl(e,n,t,r,l){var a=e.memoizedState;null===a?e.memoizedState={isBackwards:n,rendering:null,renderingStartTime:0,last:r,tail:t,tailMode:l}:(a.isBackwards=n,a.rendering=null,a.renderingStartTime=0,a.last=r,a.tail=t,a.tailMode=l)}function wl(e,n,t){var 
r=n.pendingProps,l=r.revealOrder,a=r.tail;if(el(e,n,r.children,t),0!=(2&(r=ar.current)))r=1&r|2,n.flags|=128;else{if(null!==e&&0!=(128&e.flags))e:for(e=n.child;null!==e;){if(13===e.tag)null!==e.memoizedState&&Sl(e,t);else if(19===e.tag)Sl(e,t);else if(null!==e.child){e.child.return=e,e=e.child;continue}if(e===n)break e;for(;null===e.sibling;){if(null===e.return||e.return===n)break e;e=e.return}e.sibling.return=e.return,e=e.sibling}r&=1}if(Kn(ar,r),0==(1&n.mode))n.memoizedState=null;else switch(l){case"forwards":for(t=n.child,l=null;null!==t;)null!==(e=t.alternate)&&null===ir(e)&&(l=t),t=t.sibling;null===(t=l)?(l=n.child,n.child=null):(l=t.sibling,t.sibling=null),kl(n,!1,l,t,a);break;case"backwards":for(t=null,l=n.child,n.child=null;null!==l;){if(null!==(e=l.alternate)&&null===ir(e)){n.child=l;break}e=l.sibling,l.sibling=t,t=l,l=e}kl(n,!0,t,null,a);break;case"together":kl(n,!1,null,null,void 0);break;default:n.memoizedState=null}return n.child}function _l(e,n,t){if(null!==e&&(n.dependencies=e.dependencies),ia|=n.lanes,0==(t&n.childLanes))return null;if(null!==e&&n.child!==e.child)throw Error("Resuming work not yet implemented.");if(null!==n.child){for(t=$a(e=n.child,e.pendingProps),n.child=t,t.return=n;null!==e.sibling;)e=e.sibling,(t=t.sibling=$a(e,e.pendingProps)).return=n;t.sibling=null}return n.child}function Tl(e,n){switch(e.tailMode){case"hidden":n=e.tail;for(var t=null;null!==n;)null!==n.alternate&&(t=n),n=n.sibling;null===t?e.tail=null:t.sibling=null;break;case"collapsed":t=e.tail;for(var r=null;null!==t;)null!==t.alternate&&(r=t),t=t.sibling;null===r?n||null===e.tail?e.tail=null:e.tail.sibling=null:r.sibling=null}}function xl(e){var n=null!==e.alternate&&e.alternate.child===e.child,t=0,r=0;if(n)for(var l=e.child;null!==l;)t|=l.lanes|l.childLanes,r|=1835008&l.subtreeFlags,r|=1835008&l.flags,l.return=e,l=l.sibling;else for(l=e.child;null!==l;)t|=l.lanes|l.childLanes,r|=l.subtreeFlags,r|=l.flags,l.return=e,l=l.sibling;return e.subtreeFlags|=r,e.childLanes=t,n}function Pl(e,t,r){var l=t.pendingProps;switch(t.tag){case 2:case 16:case 15:case 0:case 11:case 7:case 8:case 12:case 9:case 14:return xl(t),null;case 1:return rt(t.type)&<(),xl(t),null;case 3:return l=t.stateNode,tr(),Gn(et),Gn(Zn),or(),l.pendingContext&&(l.context=l.pendingContext,l.pendingContext=null),null!==e&&null!==e.child||l.hydrate||(t.flags|=512),dl(e,t),xl(t),null;case 5:lr(t),r=er(Zt.current);var a=t.type;if(null!==e&&null!=t.stateNode)fl(e,t,a,l,r),e.ref!==t.ref&&(t.flags|=256);else{if(!l){if(null===t.stateNode)throw Error("We must have new props for new mounts. This error is likely caused by a bug in React. Please file an issue.");return xl(t),null}er(Kt.current),e=Qn(),a=Fn(a);var u=hn(null,ln,l,a.validAttributes);n(i[3]).UIManager.createView(e,a.uiViewClassName,r,u),r=new mn(e,a,t),ke.set(e,t),we.set(e,l),cl(r,t,!1,!1),t.stateNode=r,On(r)&&(t.flags|=4),null!==t.ref&&(t.flags|=256)}return xl(t),null;case 6:if(e&&null!=t.stateNode)pl(e,t,e.memoizedProps,l);else{if("string"!=typeof l&&null===t.stateNode)throw Error("We must have new props for new mounts. This error is likely caused by a bug in React. 
Please file an issue.");if(e=er(Zt.current),!er(Kt.current).isInAParentText)throw Error("Text strings must be rendered within a component.");r=Qn(),n(i[3]).UIManager.createView(r,"RCTRawText",e,{text:l}),ke.set(r,t),t.stateNode=r}return xl(t),null;case 13:return Gn(ar),l=t.memoizedState,0!=(128&t.flags)?(t.lanes=r,t):(l=null!==l,r=!1,null!==e&&(r=null!==e.memoizedState),l&&!r&&0!=(1&t.mode)&&(null===e&&!0!==t.memoizedProps.unstable_avoidThisFallback||0!=(1&ar.current)?0===la&&(la=3):(0!==la&&3!==la||(la=4),null===Zl||0==(268435455&ia)&&0==(268435455&ua)||Ra(Zl,na))),(l||r)&&(t.flags|=4),xl(t),null);case 4:return tr(),dl(e,t),xl(t),null;case 10:return Tt(t.type._context),xl(t),null;case 17:return rt(t.type)&<(),xl(t),null;case 19:if(Gn(ar),null===(a=t.memoizedState))return xl(t),null;if(l=0!=(128&t.flags),null===(u=a.rendering))if(l)Tl(a,!1);else{if(0!==la||null!==e&&0!=(128&e.flags))for(e=t.child;null!==e;){if(null!==(u=ir(e))){for(t.flags|=128,Tl(a,!1),null!==(e=u.updateQueue)&&(t.updateQueue=e,t.flags|=4),t.subtreeFlags=0,e=r,l=t.child;null!==l;)a=e,(r=l).flags&=1835010,null===(u=r.alternate)?(r.childLanes=0,r.lanes=a,r.child=null,r.subtreeFlags=0,r.memoizedProps=null,r.memoizedState=null,r.updateQueue=null,r.dependencies=null,r.stateNode=null):(r.childLanes=u.childLanes,r.lanes=u.lanes,r.child=u.child,r.subtreeFlags=0,r.deletions=null,r.memoizedProps=u.memoizedProps,r.memoizedState=u.memoizedState,r.updateQueue=u.updateQueue,r.type=u.type,a=u.dependencies,r.dependencies=null===a?null:{lanes:a.lanes,firstContext:a.firstContext}),l=l.sibling;return Kn(ar,1&ar.current|2),t.child}e=e.sibling}null!==a.tail&&n(i[4]).unstable_now()>ca&&(t.flags|=128,l=!0,Tl(a,!1),t.lanes=4194304)}else{if(!l)if(null!==(e=ir(u))){if(t.flags|=128,l=!0,null!==(e=e.updateQueue)&&(t.updateQueue=e,t.flags|=4),Tl(a,!0),null===a.tail&&"hidden"===a.tailMode&&!u.alternate)return xl(t),null}else 2*n(i[4]).unstable_now()-a.renderingStartTime>ca&&1073741824!==r&&(t.flags|=128,l=!0,Tl(a,!1),t.lanes=4194304);a.isBackwards?(u.sibling=t.child,t.child=u):(null!==(e=a.last)?e.sibling=u:t.child=u,a.last=u)}return null!==a.tail?(t=a.tail,a.rendering=t,a.tail=t.sibling,a.renderingStartTime=n(i[4]).unstable_now(),t.sibling=null,e=ar.current,Kn(ar,l?1&e|2:1&e),t):(xl(t),null);case 22:case 23:return Ca(),r=null!==t.memoizedState,null!==e&&null!==e.memoizedState!==r&&"unstable-defer-without-hiding"!==l.mode&&(t.flags|=4),r&&0==(1073741824&ta)&&0!=(1&t.mode)||xl(t),null}throw Error("Unknown unit of work tag ("+t.tag+"). This error is likely caused by a bug in React. Please file an issue.")}function Rl(e){switch(e.tag){case 1:rt(e.type)&<();var n=e.flags;return 16384&n?(e.flags=-16385&n|128,e):null;case 3:if(tr(),Gn(et),Gn(Zn),or(),0!=(128&(n=e.flags)))throw Error("The root failed to unmount after an error. This is likely a bug in React. 
Please file an issue.");return e.flags=-16385&n|128,e;case 5:return lr(e),null;case 13:return Gn(ar),16384&(n=e.flags)?(e.flags=-16385&n|128,e):null;case 19:return Gn(ar),null;case 4:return tr(),null;case 10:return Tt(e.type._context),null;case 22:case 23:return Ca(),null;case 24:default:return null}}cl=function(e,n){for(var t=n.child;null!==t;){if(5===t.tag||6===t.tag)e._children.push(t.stateNode);else if(4!==t.tag&&null!==t.child){t.child.return=t,t=t.child;continue}if(t===n)break;for(;null===t.sibling;){if(null===t.return||t.return===n)return;t=t.return}t.sibling.return=t.return,t=t.sibling}},dl=function(){},fl=function(e,n,t,r){e.memoizedProps!==r&&(er(Kt.current),n.updateQueue=Dn)&&(n.flags|=4)},pl=function(e,n,t,r){t!==r&&(n.flags|=4)};var El="function"==typeof WeakSet?WeakSet:Set,Cl=null;function Nl(e,n){var t=e.ref;if(null!==t)if("function"==typeof t)try{t(null)}catch(t){ja(e,n,t)}else t.current=null}var zl=!1;function Il(e,n){for(Cl=n;null!==Cl;)if(n=(e=Cl).child,0!=(516&e.subtreeFlags)&&null!==n)n.return=e,Cl=n;else for(;null!==Cl;){e=Cl;try{var t=e.alternate;if(0!=(512&e.flags))switch(e.tag){case 0:case 11:case 15:break;case 1:if(null!==t){var r=t.memoizedProps,l=t.memoizedState,a=e.stateNode,i=a.getSnapshotBeforeUpdate(e.elementType===e.type?r:bt(e.type,r),l);a.__reactInternalSnapshotBeforeUpdate=i}break;case 3:break;case 5:case 6:case 4:case 17:break;default:throw Error("This unit of work tag should not have side-effects. This error is likely caused by a bug in React. Please file an issue.")}}catch(n){ja(e,e.return,n)}if(null!==(n=e.sibling)){n.return=e.return,Cl=n;break}Cl=e.return}return t=zl,zl=!1,t}function Ll(e,n,t){var r=n.updateQueue;if(null!==(r=null!==r?r.lastEffect:null)){var l=r=r.next;do{if((l.tag&e)===e){var a=l.destroy;if(l.destroy=void 0,void 0!==a){var i=n,u=t;try{a()}catch(e){ja(i,u,e)}}}l=l.next}while(l!==r)}}function Ul(e,n){if(null!==(n=null!==(n=n.updateQueue)?n.lastEffect:null)){var t=n=n.next;do{if((t.tag&e)===e){var r=t.create;t.destroy=r()}t=t.next}while(t!==n)}}function Ml(e,t){for(var r=null,l=e;;){if(5===l.tag){if(null===r){r=l;var a=l.stateNode;if(t){var u=a.viewConfig,o=hn(null,ln,{style:{display:"none"}},u.validAttributes);n(i[3]).UIManager.updateView(a._nativeTag,u.uiViewClassName,o)}else{a=l.stateNode,o=l.memoizedProps,u=a.viewConfig,o=hn(null,n(i[2])({},o,{style:[o.style,{display:"none"}]}),o,u.validAttributes),n(i[3]).UIManager.updateView(a._nativeTag,u.uiViewClassName,o)}}}else if(6===l.tag){if(null===r)throw Error("Not yet implemented.")}else if((22!==l.tag&&23!==l.tag||null===l.memoizedState||l===e)&&null!==l.child){l.child.return=l,l=l.child;continue}if(l===e)break;for(;null===l.sibling;){if(null===l.return||l.return===e)return;r===l&&(r=null),l=l.return}r===l&&(r=null),l.sibling.return=l.return,l=l.sibling}}function Fl(e,n,t){if(bn&&"function"==typeof bn.onCommitFiberUnmount)try{bn.onCommitFiberUnmount(vn,n)}catch(e){}switch(n.tag){case 0:case 11:case 14:case 15:if(null!==(e=n.updateQueue)&&null!==(e=e.lastEffect)){var r=e=e.next;do{var l=r,a=l.destroy;if(l=l.tag,void 0!==a&&0!=(2&l)){l=n;var i=t;try{a()}catch(e){ja(l,i,e)}}r=r.next}while(r!==e)}break;case 1:if(Nl(n,t),"function"==typeof(e=n.stateNode).componentWillUnmount)try{e.props=n.memoizedProps,e.state=n.memoizedState,e.componentWillUnmount()}catch(e){ja(n,t,e)}break;case 5:Nl(n,t);break;case 4:jl(e,n,t)}}function Dl(e){var 
n=e.alternate;null!==n&&(e.alternate=null,Dl(n)),e.child=null,e.deletions=null,e.sibling=null,e.stateNode=null,e.return=null,e.dependencies=null,e.memoizedProps=null,e.memoizedState=null,e.pendingProps=null,e.stateNode=null,e.updateQueue=null}function Al(e){return 5===e.tag||3===e.tag||4===e.tag}function Ql(e){e:{for(var n=e.return;null!==n;){if(Al(n))break e;n=n.return}throw Error("Expected to find a host parent. This error is likely caused by a bug in React. Please file an issue.")}var t=n;switch(n=t.stateNode,t.tag){case 5:var r=!1;break;case 3:case 4:n=n.containerInfo,r=!0;break;default:throw Error("Invalid host parent fiber. This error is likely caused by a bug in React. Please file an issue.")}32&t.flags&&(t.flags&=-33);e:n:for(t=e;;){for(;null===t.sibling;){if(null===t.return||Al(t.return)){t=null;break e}t=t.return}for(t.sibling.return=t.return,t=t.sibling;5!==t.tag&&6!==t.tag&&18!==t.tag;){if(2&t.flags)continue n;if(null===t.child||4===t.tag)continue n;t.child.return=t,t=t.child}if(!(2&t.flags)){t=t.stateNode;break e}}r?Hl(e,t,n):Ol(e,t,n)}function Hl(e,t,r){var l=e.tag;if(5===l||6===l)if(e=e.stateNode,t){if("number"==typeof r)throw Error("Container does not support insertBefore operation")}else n(i[3]).UIManager.setChildren(r,["number"==typeof e?e:e._nativeTag]);else if(4!==l&&null!==(e=e.child))for(Hl(e,t,r),e=e.sibling;null!==e;)Hl(e,t,r),e=e.sibling}function Ol(e,t,r){var l=e.tag;if(5===l||6===l)if(e=e.stateNode,t){var a=(l=r._children).indexOf(e);0<=a?(l.splice(a,1),t=l.indexOf(t),l.splice(t,0,e),n(i[3]).UIManager.manageChildren(r._nativeTag,[a],[t],[],[],[])):(t=l.indexOf(t),l.splice(t,0,e),n(i[3]).UIManager.manageChildren(r._nativeTag,[],[],["number"==typeof e?e:e._nativeTag],[t],[]))}else t="number"==typeof e?e:e._nativeTag,0<=(a=(l=r._children).indexOf(e))?(l.splice(a,1),l.push(e),n(i[3]).UIManager.manageChildren(r._nativeTag,[a],[l.length-1],[],[],[])):(l.push(e),n(i[3]).UIManager.manageChildren(r._nativeTag,[],[],[t],[l.length-1],[]));else if(4!==l&&null!==(e=e.child))for(Ol(e,t,r),e=e.sibling;null!==e;)Ol(e,t,r),e=e.sibling}function jl(e,t,r){for(var l,a,u=t,o=!1;;){if(!o){o=u.return;e:for(;;){if(null===o)throw Error("Expected to find a host parent. This error is likely caused by a bug in React. 
Please file an issue.");switch(l=o.stateNode,o.tag){case 5:a=!1;break e;case 3:case 4:l=l.containerInfo,a=!0;break e}o=o.return}o=!0}if(5===u.tag||6===u.tag){e:for(var s=e,c=u,d=r,f=c;;)if(Fl(s,f,d),null!==f.child&&4!==f.tag)f.child.return=f,f=f.child;else{if(f===c)break e;for(;null===f.sibling;){if(null===f.return||f.return===c)break e;f=f.return}f.sibling.return=f.return,f=f.sibling}a?(s=l,Hn(u.stateNode),n(i[3]).UIManager.manageChildren(s,[],[],[],[],[0])):(s=l,Hn(d=u.stateNode),d=(c=s._children).indexOf(d),c.splice(d,1),n(i[3]).UIManager.manageChildren(s._nativeTag,[],[],[],[],[d]))}else if(4===u.tag){if(null!==u.child){l=u.stateNode.containerInfo,a=!0,u.child.return=u,u=u.child;continue}}else if(Fl(e,u,r),null!==u.child){u.child.return=u,u=u.child;continue}if(u===t)break;for(;null===u.sibling;){if(null===u.return||u.return===t)return;4===(u=u.return).tag&&(o=!1)}u.sibling.return=u.return,u=u.sibling}}function Bl(e,t){switch(t.tag){case 0:case 11:case 14:case 15:return void Ll(3,t,t.return);case 1:return;case 5:var r=t.stateNode;if(null!=r){var l=t.memoizedProps;e=null!==e?e.memoizedProps:l;var a=t.updateQueue;t.updateQueue=null,null!==a&&(t=r.viewConfig,we.set(r._nativeTag,l),null!=(l=hn(null,e,l,t.validAttributes))&&n(i[3]).UIManager.updateView(r._nativeTag,t.uiViewClassName,l))}return;case 6:if(null===t.stateNode)throw Error("This should have a text node initialized. This error is likely caused by a bug in React. Please file an issue.");return void n(i[3]).UIManager.updateView(t.stateNode,"RCTRawText",{text:t.memoizedProps});case 3:case 12:return;case 13:return null!==t.memoizedState&&(sa=n(i[4]).unstable_now(),Ml(t.child,!0)),void Vl(t);case 19:return void Vl(t);case 17:return;case 22:case 23:return void Ml(t,null!==t.memoizedState)}throw Error("This unit of work tag should not have side-effects. This error is likely caused by a bug in React. Please file an issue.")}function Vl(e){var n=e.updateQueue;if(null!==n){e.updateQueue=null;var t=e.stateNode;null===t&&(t=e.stateNode=new El),n.forEach(function(n){var r=Va.bind(null,e,n);t.has(n)||(t.add(n),n.then(r,r))})}}function Wl(e,n){for(Cl=n;null!==Cl;){var t=(n=Cl).deletions;if(null!==t)for(var r=0;ra&&(a=o),l&=~u}if(l=a,10<(l=(120>(l=n(i[4]).unstable_now()-l)?120:480>l?480:1080>l?1080:1920>l?1920:3e3>l?3e3:4320>l?4320:1960*Xl(l/1960))-l)){e.timeoutHandle=jn(Aa.bind(null,e),l);break}Aa(e);break;case 5:Aa(e);break;default:throw Error("Unknown root exit status.")}}return xa(e,n(i[4]).unstable_now()),e.callbackNode===r?Pa.bind(null,e):null}function Ra(e,n){for(n&=~oa,n&=~ua,e.suspendedLanes|=n,e.pingedLanes&=~n,e=e.expirationTimes;0 component higher in the tree to provide a loading indicator or placeholder to display.")}5!==la&&(la=2),o=qr(o,u),p=i;do{switch(p.tag){case 3:a=o,p.flags|=16384,n&=-n,p.lanes|=n,Mt(p,Gr(p,a,n));break e;case 1:a=o;var w=p.type,_=p.stateNode;if(0==(128&p.flags)&&("function"==typeof w.getDerivedStateFromError||null!==_&&"function"==typeof _.componentDidCatch&&(null===pa||!pa.has(_)))){p.flags|=16384,n&=-n,p.lanes|=n,Mt(p,Kr(p,a,n));break e}}p=p.return}while(null!==p)}Da(t)}catch(e){n=e,ea===t&&null!==t&&(ea=t=t.return);continue}break}}function Ia(){var e=$l.current;return $l.current=Br,null===e?Br:e}function La(e,n){var t=Jl;Jl|=8;var r=Ia();for(Zl===e&&na===n||Na(e,n);;)try{Ua();break}catch(n){za(e,n)}if(_t(),Jl=t,$l.current=r,null!==ea)throw Error("Cannot commit an incomplete root. This error is likely caused by a bug in React. 
Please file an issue.");return Zl=null,na=0,la}function Ua(){for(;null!==ea;)Fa(ea)}function Ma(){for(;null!==ea&&!n(i[4]).unstable_shouldYield();)Fa(ea)}function Fa(e){var n=ql(e.alternate,e,ta);e.memoizedProps=e.pendingProps,null===n?Da(e):ea=n,Gl.current=null}function Da(e){var n=e;do{var t=n.alternate;if(e=n.return,0==(8192&n.flags)){if(null!==(t=Pl(t,n,ta)))return void(ea=t)}else{if(null!==(t=Rl(n)))return t.flags&=8191,void(ea=t);null!==e&&(e.flags|=8192,e.subtreeFlags=0,e.deletions=null)}if(null!==(n=n.sibling))return void(ea=n);ea=n=e}while(null!==n);0===la&&(la=5)}function Aa(e){var n=Ln,t=Kl.transition;try{Kl.transition=0,Ln=1,Qa(e,n)}finally{Kl.transition=t,Ln=n}return null}function Qa(e,t){do{Ha()}while(null!==ga);if(0!=(24&Jl))throw Error("Should not already be working.");var r=e.finishedWork,l=e.finishedLanes;if(null===r)return null;if(e.finishedWork=null,e.finishedLanes=0,r===e.current)throw Error("Cannot commit the same tree as before. This error is likely caused by a bug in React. Please file an issue.");e.callbackNode=null,e.callbackPriority=0;var a=r.lanes|r.childLanes;if(En(e,a),e===Zl&&(ea=Zl=null,na=0),0==(1040&r.subtreeFlags)&&0==(1040&r.flags)||ha||(ha=!0,n(i[4]).unstable_scheduleCallback(n(i[4]).unstable_NormalPriority,function(){return Ha(),null})),a=0!=(8054&r.flags),0!=(8054&r.subtreeFlags)||a){a=Kl.transition,Kl.transition=0;var u=Ln;Ln=1;var o=Jl;Jl|=16,Gl.current=null,Il(e,r),Wl(e,r),e.current=r,Yl(r),n(i[4]).unstable_requestPaint(),Jl=o,Ln=u,Kl.transition=a}else e.current=r;if(ha&&(ha=!1,ga=e,ma=l),0===(a=e.pendingLanes)&&(pa=null),0!=(1&a)?e===ba?va++:(va=0,ba=e):va=0,yn(r.stateNode),xa(e,n(i[4]).unstable_now()),da)throw da=!1,e=fa,fa=null,e;return 0!=(4&Jl)?null:(0!=(1&ma)&&0!==e.tag&&Ha(),ft(),null)}function Ha(){if(null!==ga){var e=Un(ma),n=Kl.transition,t=Ln;try{if(Kl.transition=0,Ln=16>e?16:e,null===ga)var r=!1;else{if(e=ga,ga=null,ma=0,0!=(24&Jl))throw Error("Cannot flush passive effects while already rendering.");var l=Jl;for(Jl|=16,Cl=e.current;null!==Cl;){var a=Cl,i=a.child;if(0!=(16&Cl.flags)){var u=a.deletions;if(null!==u){for(var o=0;on(i[4]).unstable_now()-sa?Na(e,0):oa|=r),xa(e,t)}function Va(e,n){var t=e.stateNode;null!==t&&t.delete(n),0===(n=0)&&(0==(1&e.mode)?n=1:(n=kn,0==(130023424&(kn<<=1))&&(kn=4194304))),t=ka(),null!==(e=Ta(e,n))&&(Rn(e,n,t),xa(e,t))}function Wa(e,n,t,r){this.tag=e,this.key=t,this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null,this.index=0,this.ref=null,this.pendingProps=n,this.dependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null,this.mode=r,this.subtreeFlags=this.flags=0,this.deletions=null,this.childLanes=this.lanes=0,this.alternate=null}function Ya(e,n,t,r){return new Wa(e,n,t,r)}function qa(e){return!(!(e=e.prototype)||!e.isReactComponent)}function Xa(e){if("function"==typeof e)return qa(e)?1:0;if(void 0!==e&&null!==e){if((e=e.$$typeof)===Qe)return 11;if(e===je)return 14}return 2}function $a(e,n){var t=e.alternate;return null===t?((t=Ya(e.tag,n,e.key,e.mode)).elementType=e.elementType,t.type=e.type,t.stateNode=e.stateNode,t.alternate=e,e.alternate=t):(t.pendingProps=n,t.type=e.type,t.flags=0,t.subtreeFlags=0,t.deletions=null),t.flags=1835008&e.flags,t.childLanes=e.childLanes,t.lanes=e.lanes,t.child=e.child,t.memoizedProps=e.memoizedProps,t.memoizedState=e.memoizedState,t.updateQueue=e.updateQueue,n=e.dependencies,t.dependencies=null===n?null:{lanes:n.lanes,firstContext:n.firstContext},t.sibling=e.sibling,t.index=e.index,t.ref=e.ref,t}function Ga(e,n,t,r,l,a){var 
i=2;if(r=e,"function"==typeof e)qa(e)&&(i=1);else if("string"==typeof e)i=5;else e:switch(e){case Ue:return Ka(t.children,l,a,n);case Ve:i=8,l|=4;break;case Me:i=8,l|=8;break;case Fe:return(e=Ya(12,t,n,2|l)).elementType=Fe,e.lanes=a,e;case He:return(e=Ya(13,t,n,l)).elementType=He,e.lanes=a,e;case Oe:return(e=Ya(19,t,n,l)).elementType=Oe,e.lanes=a,e;case We:return Ja(t,l,a,n);case Ye:return(e=Ya(23,t,n,l)).elementType=Ye,e.lanes=a,e;default:if("object"==typeof e&&null!==e)switch(e.$$typeof){case De:i=10;break e;case Ae:i=9;break e;case Qe:i=11;break e;case je:i=14;break e;case Be:i=16,r=null;break e}throw Error("Element type is invalid: expected a string (for built-in components) or a class/function (for composite components) but got: "+(null==e?e:typeof e)+".")}return(n=Ya(i,t,n,l)).elementType=e,n.type=r,n.lanes=a,n}function Ka(e,n,t,r){return(e=Ya(7,e,r,n)).lanes=t,e}function Ja(e,n,t,r){return(e=Ya(22,e,r,n)).elementType=We,e.lanes=t,e}function Za(e,n,t){return(e=Ya(6,e,null,n)).lanes=t,e}function ei(e,n,t){return(n=Ya(4,null!==e.children?e.children:[],e.key,n)).lanes=t,n.stateNode={containerInfo:e.containerInfo,pendingChildren:null,implementation:e.implementation},n}function ni(e,n,t){this.tag=n,this.containerInfo=e,this.finishedWork=this.pingCache=this.current=this.pendingChildren=null,this.timeoutHandle=-1,this.pendingContext=this.context=null,this.hydrate=t,this.callbackNode=null,this.callbackPriority=0,this.eventTimes=Pn(0),this.expirationTimes=Pn(-1),this.entangledLanes=this.finishedLanes=this.mutableReadLanes=this.expiredLanes=this.pingedLanes=this.suspendedLanes=this.pendingLanes=0,this.entanglements=Pn(0)}function ti(e,n,t){var r=3=t.length?{done:!0}:{done:!1,value:t[i++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function n(t,n){if(t){if("string"==typeof t)return o(t,n);var u=Object.prototype.toString.call(t).slice(8,-1);return"Object"===u&&t.constructor&&(u=t.constructor.name),"Map"===u||"Set"===u?Array.from(t):"Arguments"===u||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(u)?o(t,n):void 0}}function o(t,n){(null==n||n>t.length)&&(n=t.length);for(var o=0,u=new Array(n);o|\/|[a-z]:\\|\\\\).*?)(?::(\d+))?(?::(\d+))?\)?\s*$/i,u=/\((\S*)(?::(\d+))(?::(\d+))\)/;function t(t){var o=l.exec(t);if(!o)return null;var c=o[2]&&0===o[2].indexOf('native'),s=o[2]&&0===o[2].indexOf('eval'),v=u.exec(o[2]);return s&&null!=v&&(o[2]=v[1],o[3]=v[2],o[4]=v[3]),{file:c?null:o[2],methodName:o[1]||n,arguments:c?[o[2]]:[],lineNumber:o[3]?+o[3]:null,column:o[4]?+o[4]:null}}var o=/^\s*at (?:((?:\[object object\])?.+) )?\(?((?:file|ms-appx|https?|webpack|blob):.*?):(\d+)(?::(\d+))?\)?\s*$/i;function c(l){var u=o.exec(l);return u?{file:u[2],methodName:u[1]||n,arguments:[],lineNumber:+u[3],column:u[4]?+u[4]:null}:null}var s=/^\s*(.*?)(?:\((.*?)\))?(?:^|@)((?:file|https?|blob|chrome|webpack|resource|\[native).*?|[^@]*bundle)(?::(\d+))?(?::(\d+))?\s*$/i,v=/(\S+) line (\d+)(?: > eval line \d+)* > eval/i;function f(l){var u=s.exec(l);if(!u)return null;var t=u[3]&&u[3].indexOf(' > eval')>-1,o=v.exec(u[3]);return t&&null!=o&&(u[3]=o[1],u[4]=o[2],u[5]=null),{file:u[3],methodName:u[1]||n,arguments:u[2]?u[2].split(','):[],lineNumber:u[4]?+u[4]:null,column:u[5]?+u[5]:null}}var b=/^\s*(?:([^@]*)(?:\((.*?)\))?@)?(\S.*?):(\d+)(?::(\d+))?\s*$/i;function p(l){var u=b.exec(l);return u?{file:u[3],methodName:u[1]||n,arguments:[],lineNumber:+u[4],column:u[5]?+u[5]:null}:null}var x=/^\s*at (?:((?:\[object 
object\])?[^\\/]+(?: \[as \S+\])?) )?\(?(.*?):(\d+)(?::(\d+))?\)?\s*$/i;function h(l){var u=x.exec(l);return u?{file:u[2],methodName:u[1]||n,arguments:[],lineNumber:+u[3],column:u[4]?+u[4]:null}:null}e.parse=function(n){return n.split('\n').reduce(function(n,l){var u=t(l)||c(l)||f(l)||h(l)||p(l);return u&&n.push(u),n},[])}},471,[]); +__d(function(g,r,_i,a,m,e,d){'use strict';var t=/^ {4}at (.+?)(?: \((native)\)?| \((address at )?(.*?):(\d+):(\d+)\))$/,n=/^ {4}... skipping (\d+) frames$/;function s(s){var i=s.match(t);if(i)return{type:'FRAME',functionName:i[1],location:'native'===i[2]?{type:'NATIVE'}:'address at '===i[3]?{type:'BYTECODE',sourceUrl:i[4],line1Based:Number.parseInt(i[5],10),virtualOffset0Based:Number.parseInt(i[6],10)}:{type:'SOURCE',sourceUrl:i[4],line1Based:Number.parseInt(i[5],10),column1Based:Number.parseInt(i[6],10)}};var u=s.match(n);return u?{type:'SKIPPED',count:Number.parseInt(u[1],10)}:void 0}m.exports=function(t){for(var n=t.split(/\n/),i=[],u=-1,p=0;p-1}m.exports={isNativeFunction:t,hasNativeConstructor:function(n,o){var c=Object.getPrototypeOf(n).constructor;return c.name===o&&t(c)}}},481,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var l,n,t=!0===(null==(l=g.HermesInternal)?void 0:null==l.hasPromise?void 0:l.hasPromise())&&!0===(null==(n=g.HermesInternal)?void 0:null==n.useEngineQueue?void 0:n.useEngineQueue()),u=r(d[0]).isNativeFunction(Promise)||t;if(!g.RN$Bridgeless){var o=function(l){r(d[1]).polyfillGlobal(l,function(){return r(d[2])[l]})};o('setTimeout'),o('clearTimeout'),o('setInterval'),o('clearInterval'),o('requestAnimationFrame'),o('cancelAnimationFrame'),o('requestIdleCallback'),o('cancelIdleCallback')}u?(r(d[1]).polyfillGlobal('setImmediate',function(){return r(d[3]).setImmediate}),r(d[1]).polyfillGlobal('clearImmediate',function(){return r(d[3]).clearImmediate})):g.RN$Bridgeless||(r(d[1]).polyfillGlobal('setImmediate',function(){return r(d[2]).queueReactNativeMicrotask}),r(d[1]).polyfillGlobal('clearImmediate',function(){return r(d[2]).clearReactNativeMicrotask})),t?r(d[1]).polyfillGlobal('queueMicrotask',function(){var l;return null==(l=g.HermesInternal)?void 0:l.enqueueJob}):r(d[1]).polyfillGlobal('queueMicrotask',function(){return r(d[4]).default})},482,[481,474,483,485,486]); +__d(function(g,r,_i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=16.666666666666668,n=[],i=[],l=[],o=[],c=[],u={},f=1,s=null,v=!1;function h(){var e=l.indexOf(null);return-1===e&&(e=l.length),e}function T(e,t){var o=f++,c=h();return l[c]=o,n[c]=e,i[c]=t,o}function k(e,o,c){e>f&&console.warn('Tried to call timer with ID %s but no such timer exists.',e);var u=l.indexOf(e);if(-1!==u){var v=i[u],h=n[u];if(h&&v){'setInterval'!==v&&p(u);try{'setTimeout'===v||'setInterval'===v||'queueReactNativeMicrotask'===v?h():'requestAnimationFrame'===v?h(g.performance.now()):'requestIdleCallback'===v?h({timeRemaining:function(){return Math.max(0,t-(g.performance.now()-o))},didTimeout:!!c}):console.error('Tried to call a callback with invalid type: '+v)}catch(e){s?s.push(e):s=[e]}}else console.error('No callback found for timerID '+e)}}function w(){if(0===o.length)return!1;var e=o;o=[];for(var t=0;t0}function p(e){l[e]=null,n[e]=null,i[e]=null}function N(e){if(null!=e){var t=l.indexOf(e);if(-1!==t){var n=i[t];p(t),'queueReactNativeMicrotask'!==n&&'requestIdleCallback'!==n&&M(e)}}}var b,I={setTimeout:function(e,t){for(var n=arguments.length,i=new Array(n>2?n-2:0),l=2;l2?n-2:0),l=2;l1?t-1:0),i=1;i-1&&(c.splice(e,1),k(i,g.performance.now(),!0)),delete u[i],0===c.length&&R(!1)},n);u[i]=l}return 
i},cancelIdleCallback:function(e){N(e);var t=c.indexOf(e);-1!==t&&c.splice(t,1);var n=u[e];n&&(I.clearTimeout(n),delete u[e]),0===c.length&&R(!1)},clearTimeout:function(e){N(e)},clearInterval:function(e){N(e)},clearReactNativeMicrotask:function(e){N(e);var t=o.indexOf(e);-1!==t&&o.splice(t,1)},cancelAnimationFrame:function(e){N(e)},callTimers:function(e){r(d[2])(0!==e.length,'Cannot call `callTimers` with an empty list of IDs.'),s=null;for(var t=0;t1)for(var i=1;i0){var n=c;c=[];for(var i=0;i1?u-1:0),c=1;c=0,loaded:t,total:s})}},{key:"__didCompleteResponse",value:function(e,t,s){e===this._requestId&&(t&&(''!==this._responseType&&'text'!==this._responseType||(this._response=t),this._hasError=!0,s&&(this._timedOut=!0)),this._clearSubscriptions(),this._requestId=null,this.setReadyState(this.DONE),t?E._interceptor&&E._interceptor.loadingFailed(e,t):E._interceptor&&E._interceptor.loadingFinished(e,this._response.length))}},{key:"_clearSubscriptions",value:function(){(this._subscriptions||[]).forEach(function(e){e&&e.remove()}),this._subscriptions=[]}},{key:"getAllResponseHeaders",value:function(){if(!this.responseHeaders)return null;var e=this.responseHeaders||{};return Object.keys(e).map(function(t){return t+': '+e[t]}).join('\r\n')}},{key:"getResponseHeader",value:function(e){var t=this._lowerCaseResponseHeaders[e.toLowerCase()];return void 0!==t?t:null}},{key:"setRequestHeader",value:function(e,t){if(this.readyState!==this.OPENED)throw new Error('Request has not been opened');this._headers[e.toLowerCase()]=String(t)}},{key:"setTrackingName",value:function(e){return this._trackingName=e,this}},{key:"setPerformanceLogger",value:function(e){return this._performanceLogger=e,this}},{key:"open",value:function(e,t,s){if(this.readyState!==this.UNSENT)throw new Error('Cannot open, already sending');if(void 0!==s&&!s)throw new Error('Synchronous http requests are not supported');if(!t)throw new Error('Cannot load an empty url');this._method=e.toUpperCase(),this._url=t,this._aborted=!1,this.setReadyState(this.OPENED)}},{key:"send",value:function(t){var s=this;if(this.readyState!==this.OPENED)throw new Error('Request has not been opened');if(this._sent)throw new Error('Request has already been sent');this._sent=!0;var n=this._incrementalEvents||!!this.onreadystatechange||!!this.onprogress;this._subscriptions.push(r(d[13]).addListener('didSendNetworkData',function(t){return s.__didUploadProgress.apply(s,(0,e.default)(t))})),this._subscriptions.push(r(d[13]).addListener('didReceiveNetworkResponse',function(t){return s.__didReceiveResponse.apply(s,(0,e.default)(t))})),this._subscriptions.push(r(d[13]).addListener('didReceiveNetworkData',function(t){return s.__didReceiveData.apply(s,(0,e.default)(t))})),this._subscriptions.push(r(d[13]).addListener('didReceiveNetworkIncrementalData',function(t){return s.__didReceiveIncrementalData.apply(s,(0,e.default)(t))})),this._subscriptions.push(r(d[13]).addListener('didReceiveNetworkDataProgress',function(t){return s.__didReceiveDataProgress.apply(s,(0,e.default)(t))})),this._subscriptions.push(r(d[13]).addListener('didCompleteNetworkResponse',function(t){return s.__didCompleteResponse.apply(s,(0,e.default)(t))}));var o='text';'arraybuffer'===this._responseType&&(o='base64'),'blob'===this._responseType&&(o='blob');var h;h='unknown'!==s._trackingName?s._trackingName:s._url,s._perfKey='network_XMLHttpRequest_'+String(h),s._performanceLogger.startTimespan(s._perfKey),r(d[11])(s._method,'XMLHttpRequest method needs to be defined (%s).',h),r(d[11])(s._url,'XMLHttpRequest 
URL needs to be defined (%s).',h),r(d[13]).sendRequest(s._method,s._trackingName,s._url,s._headers,t,o,n,s.timeout,s.__didCreateRequest.bind(s),s.withCredentials)}},{key:"abort",value:function(){this._aborted=!0,this._requestId&&r(d[13]).abortRequest(this._requestId),this.readyState===this.UNSENT||this.readyState===this.OPENED&&!this._sent||this.readyState===this.DONE||(this._reset(),this.setReadyState(this.DONE)),this._reset()}},{key:"setResponseHeaders",value:function(e){this.responseHeaders=e||null;var t=e||{};this._lowerCaseResponseHeaders=Object.keys(t).reduce(function(e,s){return e[s.toLowerCase()]=t[s],e},{})}},{key:"setReadyState",value:function(e){this.readyState=e,this.dispatchEvent({type:'readystatechange'}),e===this.DONE&&(this._aborted?this.dispatchEvent({type:'abort'}):this._hasError?this._timedOut?this.dispatchEvent({type:'timeout'}):this.dispatchEvent({type:'error'}):this.dispatchEvent({type:'load'}),this.dispatchEvent({type:'loadend'}))}},{key:"addEventListener",value:function(e,s){'readystatechange'!==e&&'progress'!==e||(this._incrementalEvents=!0),(0,t.default)((0,u.default)(E.prototype),"addEventListener",this).call(this,e,s)}}],[{key:"setInterceptor",value:function(e){E._interceptor=e}}]),E})(r(d[9]).apply(void 0,(0,e.default)(b)));N.UNSENT=l,N.OPENED=_,N.HEADERS_RECEIVED=f,N.LOADING=y,N.DONE=v,N._interceptor=null,m.exports=N},488,[407,442,489,403,402,417,419,422,491,495,496,425,498,499]); +__d(function(g,r,i,a,m,e,d){function t(){return"undefined"!=typeof Reflect&&Reflect.get?(m.exports=t=Reflect.get,m.exports.__esModule=!0,m.exports.default=m.exports):(m.exports=t=function(t,o,p){var s=r(d[0])(t,o);if(s){var l=Object.getOwnPropertyDescriptor(s,o);return l.get?l.get.call(arguments.length<3?t:p):l.value}},m.exports.__esModule=!0,m.exports.default=m.exports),t.apply(this,arguments)}m.exports=t,m.exports.__esModule=!0,m.exports.default=m.exports},489,[490]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t,o){for(;!Object.prototype.hasOwnProperty.call(t,o)&&null!==(t=r(d[0])(t)););return t},m.exports.__esModule=!0,m.exports.default=m.exports},490,[422]); +__d(function(g,_r,i,a,m,e,d){var t=_r(d[0])(_r(d[1])),l=_r(d[0])(_r(d[2])),r=_r(d[0])(_r(d[3])),o=_r(d[0])(_r(d[4])),n=_r(d[0])(_r(d[5]));var u=(function(){function u(){(0,l.default)(this,u)}return(0,r.default)(u,null,[{key:"createFromParts",value:function(t,l){(0,n.default)(o.default,'NativeBlobModule is available.');var r='xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g,function(t){var l=16*Math.random()|0;return('x'==t?l:3&l|8).toString(16)}),f=t.map(function(t){if(t instanceof ArrayBuffer||g.ArrayBufferView&&t instanceof g.ArrayBufferView)throw new Error("Creating blobs from 'ArrayBuffer' and 'ArrayBufferView' are not supported");return t instanceof _r(d[6])?{data:t.data,type:'blob'}:{data:String(t),type:'string'}}),c=f.reduce(function(t,l){return'string'===l.type?t+g.unescape(encodeURI(l.data)).length:t+l.data.size},0);return o.default.createFromParts(f,r),u.createFromOptions({blobId:r,offset:0,size:c,type:l?l.type:'',lastModified:l?l.lastModified:Date.now()})}},{key:"createFromOptions",value:function(l){return _r(d[7]).register(l.blobId),(0,t.default)(Object.create(_r(d[6]).prototype),{data:null==l.__collector?(0,t.default)({},l,{__collector:(r=l.blobId,null==g.__blobCollectorProvider?null:g.__blobCollectorProvider(r))}):l});var r}},{key:"release",value:function(t){(0,n.default)(o.default,'NativeBlobModule is 
available.'),_r(d[7]).unregister(t),_r(d[7]).has(t)||o.default.release(t)}},{key:"addNetworkingHandler",value:function(){(0,n.default)(o.default,'NativeBlobModule is available.'),o.default.addNetworkingHandler()}},{key:"addWebSocketHandler",value:function(t){(0,n.default)(o.default,'NativeBlobModule is available.'),o.default.addWebSocketHandler(t)}},{key:"removeWebSocketHandler",value:function(t){(0,n.default)(o.default,'NativeBlobModule is available.'),o.default.removeWebSocketHandler(t)}},{key:"sendOverSocket",value:function(t,l){(0,n.default)(o.default,'NativeBlobModule is available.'),o.default.sendOverSocket(t.data,l)}}]),u})();u.isAvailable=!!o.default,m.exports=u},491,[407,436,402,403,492,425,493,494]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var l={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in n)if("default"!==f&&Object.prototype.hasOwnProperty.call(n,f)){var s=c?Object.getOwnPropertyDescriptor(n,f):null;s&&(s.get||s.set)?Object.defineProperty(l,f,s):l[f]=n[f]}l.default=n,u&&u.set(n,l);return l})(r(d[0])).get('BlobModule'),o=null,u=null;null!=n&&(u={getConstants:function(){return null==o&&(o=n.getConstants()),o},addNetworkingHandler:function(){n.addNetworkingHandler()},addWebSocketHandler:function(t){n.addWebSocketHandler(t)},removeWebSocketHandler:function(t){n.removeWebSocketHandler(t)},sendOverSocket:function(t,o){n.sendOverSocket(t,o)},createFromParts:function(t,o){n.createFromParts(t,o)},release:function(t){n.release(t)}});var l=u;e.default=l},492,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=(function(){function t(){var n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],s=arguments.length>1?arguments[1]:void 0;r(d[0])(this,t);var o=r(d[1]);this.data=o.createFromParts(n,s).data}return r(d[2])(t,[{key:"data",get:function(){if(!this._data)throw new Error('Blob has been closed and is no longer available');return this._data},set:function(t){this._data=t}},{key:"slice",value:function(t,n){var s=r(d[1]),o=this.data,u=o.offset,l=o.size;return'number'==typeof t&&(t>l&&(t=l),u+=t,l-=t,'number'==typeof n&&(n<0&&(n=this.size+n),l=n-t)),s.createFromOptions({blobId:this.data.blobId,offset:u,size:l})}},{key:"close",value:function(){r(d[1]).release(this.data.blobId),this.data=null}},{key:"size",get:function(){return this.data.size}},{key:"type",get:function(){return this.data.type||''}}]),t})();m.exports=t},493,[402,491,403]); +__d(function(g,r,i,a,m,e,d){var n={};m.exports={register:function(t){n[t]?n[t]++:n[t]=1},unregister:function(t){n[t]&&(n[t]--,n[t]<=0&&delete n[t])},has:function(t){return n[t]&&n[t]>0}}},494,[]); +__d(function(g,r,_i,a,m,e,d){'use strict';Object.defineProperty(e,'__esModule',{value:!0});var t=new WeakMap,n=new WeakMap;function o(n){var o=t.get(n);return console.assert(null!=o,"'this' is expected an Event object, but got",n),o}function i(t){null==t.passiveListener?t.event.cancelable&&(t.canceled=!0,"function"==typeof t.event.preventDefault&&t.event.preventDefault()):"undefined"!=typeof console&&"function"==typeof console.error&&console.error("Unable to preventDefault inside passive event listener invocation.",t.passiveListener)}function 
l(n,o){t.set(this,{eventTarget:n,event:o,eventPhase:2,currentTarget:n,canceled:!1,stopped:!1,immediateStopped:!1,passiveListener:null,timeStamp:o.timeStamp||Date.now()}),Object.defineProperty(this,"isTrusted",{value:!1,enumerable:!0});for(var i=Object.keys(o),l=0;l0){for(var t=new Array(arguments.length),n=0;n1&&void 0!==arguments[1]?arguments[1]:l(),n=arguments.length>2?arguments[2]:void 0;this._closed||null==this._points[t]&&(this._points[t]=s,n&&(this._pointExtras[t]=n))}},{key:"removeExtra",value:function(t){var s=this._extras[t];return delete this._extras[t],s}},{key:"setExtra",value:function(t,s){this._closed||this._extras.hasOwnProperty(t)||(this._extras[t]=s)}},{key:"startTimespan",value:function(t){var s=arguments.length>1&&void 0!==arguments[1]?arguments[1]:l(),n=arguments.length>2?arguments[2]:void 0;this._closed||this._timespans[t]||(this._timespans[t]={startTime:s,startExtras:n},u[t]=r(d[4]).beginAsyncEvent(t))}},{key:"stopTimespan",value:function(t){var s=arguments.length>1&&void 0!==arguments[1]?arguments[1]:l(),n=arguments.length>2?arguments[2]:void 0;if(!this._closed){var o=this._timespans[t];o&&null!=o.startTime&&null==o.endTime&&(o.endExtras=n,o.endTime=s,o.totalTime=o.endTime-(o.startTime||0),null!=u[t]&&(r(d[4]).endAsyncEvent(t,u[t]),delete u[t]))}}}]),t})()},497,[407,436,402,403,439]); +__d(function(g,r,_i,a,m,e,d){'use strict';e.byteLength=function(t){var n=i(t),o=n[0],h=n[1];return 3*(o+h)/4-h},e.toByteArray=function(t){var h,u,c=i(t),A=c[0],C=c[1],y=new o(f(t,A,C)),s=0,v=C>0?A-4:A;for(u=0;u>16&255,y[s++]=h>>8&255,y[s++]=255&h;2===C&&(h=n[t.charCodeAt(u)]<<2|n[t.charCodeAt(u+1)]>>4,y[s++]=255&h);1===C&&(h=n[t.charCodeAt(u)]<<10|n[t.charCodeAt(u+1)]<<4|n[t.charCodeAt(u+2)]>>2,y[s++]=h>>8&255,y[s++]=255&h);return y},e.fromByteArray=function(n){for(var o,h=n.length,u=h%3,c=[],i=0,f=h-u;if?f:i+16383));1===u?(o=n[h-1],c.push(t[o>>2]+t[o<<4&63]+'==')):2===u&&(o=(n[h-2]<<8)+n[h-1],c.push(t[o>>10]+t[o>>4&63]+t[o<<2&63]+'='));return c.join('')};for(var t=[],n=[],o='undefined'!=typeof Uint8Array?Uint8Array:Array,h='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/',u=0,c=h.length;u0)throw new Error('Invalid string. 
Length must be a multiple of 4');var o=t.indexOf('=');return-1===o&&(o=n),[o,o===n?0:4-o%4]}function f(t,n,o){return 3*(n+o)/4-o}function A(n,o,h){for(var u,c,i=[],f=o;f>18&63]+t[c>>12&63]+t[c>>6&63]+t[63&c]);return i.join('')}n['-'.charCodeAt(0)]=62,n['_'.charCodeAt(0)]=63},498,[]); +__d(function(g,r,i,a,m,_e,d){var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),n=r(d[0])(r(d[4])),f=r(d[0])(r(d[5])),o=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),c=r(d[0])(r(d[8])),s=r(d[0])(r(d[9])),v=r(d[0])(r(d[10]));function h(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}function p(t){var e=[];for(var u in t)e.push([u,t[u]]);return e}var y=1;var R=(function(l){(0,n.default)(D,l);var R,k,q=(R=D,k=h(),function(){var t,e=(0,o.default)(R);if(k){var u=(0,o.default)(this).constructor;t=Reflect.construct(e,arguments,u)}else t=e.apply(this,arguments);return(0,f.default)(this,t)});function D(){return(0,e.default)(this,D),q.call(this,'ios'!==v.default.OS?null:c.default)}return(0,u.default)(D,[{key:"sendRequest",value:function(e,u,n,f,o,l,v,h,R,k){var q=(0,s.default)(o);q&&q.formData&&(q.formData=q.formData.map(function(e){return(0,t.default)({},e,{headers:p(e.headers)})}));var D=y++;c.default.sendRequest(e,n,D,p(f),(0,t.default)({},q,{trackingName:u}),l,v,h,k),R(D)}},{key:"abortRequest",value:function(t){c.default.abortRequest(t)}},{key:"clearCookies",value:function(t){c.default.clearCookies(t)}}]),D})(l.default);m.exports=new R},499,[407,436,402,403,417,419,422,500,501,502,426]); +__d(function(g,r,i,a,m,e,d){'use strict';Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),l=r(d[0])(r(d[3])),u=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),s=(function(){function s(n){(0,t.default)(this,s),'ios'===l.default.OS&&(0,o.default)(null!=n,'`new NativeEventEmitter()` requires a non-null argument.');var u=!!n&&'function'==typeof n.addListener,v=!!n&&'function'==typeof n.removeListeners;n&&u&&v?this._nativeModule=n:null!=n&&(u||console.warn('`new NativeEventEmitter()` was called with a non-null argument without the required `addListener` method.'),v||console.warn('`new NativeEventEmitter()` was called with a non-null argument without the required `removeListeners` method.'))}return(0,n.default)(s,[{key:"addListener",value:function(t,n,l){var o,s=this;null==(o=this._nativeModule)||o.addListener(t);var v=u.default.addListener(t,n,l);return{remove:function(){var t;null!=v&&(null==(t=s._nativeModule)||t.removeListeners(1),v.remove(),v=null)}}}},{key:"removeListener",value:function(t,n){var l;null==(l=this._nativeModule)||l.removeListeners(1),u.default.removeListener(t,n)}},{key:"emit",value:function(t){for(var n=arguments.length,l=new Array(n>1?n-1:0),o=1;o-1};function s(t){if('string'!=typeof t&&(t=String(t)),/[^a-z0-9\-#$%&'*+.^_`|~!]/i.test(t)||''===t)throw new TypeError('Invalid character in header field name: "'+t+'"');return t.toLowerCase()}function h(t){return'string'!=typeof t&&(t=String(t)),t}function f(t){var e={next:function(){var e=t.shift();return{done:void 0===e,value:e}}};return o.iterable&&(e[Symbol.iterator]=function(){return e}),e}function u(t){this.map={},t instanceof 
u?t.forEach(function(t,e){this.append(e,t)},this):Array.isArray(t)?t.forEach(function(t){this.append(t[0],t[1])},this):t&&Object.getOwnPropertyNames(t).forEach(function(e){this.append(e,t[e])},this)}function c(t){if(t.bodyUsed)return Promise.reject(new TypeError('Already read'));t.bodyUsed=!0}function y(t){return new Promise(function(e,o){t.onload=function(){e(t.result)},t.onerror=function(){o(t.error)}})}function l(t){var e=new FileReader,o=y(e);return e.readAsArrayBuffer(t),o}function p(t){for(var e=new Uint8Array(t),o=new Array(e.length),n=0;n-1?n:o),this.mode=e.mode||this.mode||null,this.signal=e.signal||this.signal,this.referrer=null,('GET'===this.method||'HEAD'===this.method)&&i)throw new TypeError('Body not allowed for GET or HEAD requests');if(this._initBody(i),!('GET'!==this.method&&'HEAD'!==this.method||'no-store'!==e.cache&&'no-cache'!==e.cache)){var s=/([?&])_=[^&]*/;if(s.test(this.url))this.url=this.url.replace(s,'$1_='+(new Date).getTime());else{this.url+=(/\?/.test(this.url)?'&':'?')+'_='+(new Date).getTime()}}}function E(t){var e=new FormData;return t.trim().split('&').forEach(function(t){if(t){var o=t.split('='),n=o.shift().replace(/\+/g,' '),i=o.join('=').replace(/\+/g,' ');e.append(decodeURIComponent(n),decodeURIComponent(i))}}),e}function T(t,e){if(!(this instanceof T))throw new TypeError('Please use the "new" operator, this DOM object constructor cannot be called as a function.');e||(e={}),this.type='default',this.status=void 0===e.status?200:e.status,this.ok=this.status>=200&&this.status<300,this.statusText=void 0===e.statusText?'':''+e.statusText,this.headers=new u(e.headers),this.url=e.url||'',this._initBody(t)}_.prototype.clone=function(){return new _(this,{body:this._bodyInit})},w.call(_.prototype),w.call(T.prototype),T.prototype.clone=function(){return new T(this._bodyInit,{status:this.status,statusText:this.statusText,headers:new u(this.headers),url:this.url})},T.error=function(){var t=new T(null,{status:0,statusText:''});return t.type='error',t};var A=[301,302,303,307,308];T.redirect=function(t,e){if(-1===A.indexOf(e))throw new RangeError('Invalid status code');return new T(null,{status:e,headers:{location:t}})},t.DOMException=e.DOMException;try{new t.DOMException}catch(e){t.DOMException=function(t,e){this.message=t,this.name=e;var o=Error(t);this.stack=o.stack},t.DOMException.prototype=Object.create(Error.prototype),t.DOMException.prototype.constructor=t.DOMException}function B(n,i){return new Promise(function(s,f){var c=new _(n,i);if(c.signal&&c.signal.aborted)return f(new t.DOMException('Aborted','AbortError'));var y=new XMLHttpRequest;function l(){y.abort()}y.onload=function(){var t,e,o={status:y.status,statusText:y.statusText,headers:(t=y.getAllResponseHeaders()||'',e=new u,t.replace(/\r?\n[\t ]+/g,' ').split('\r').map(function(t){return 0===t.indexOf('\n')?t.substr(1,t.length):t}).forEach(function(t){var o=t.split(':'),n=o.shift().trim();if(n){var i=o.join(':').trim();e.append(n,i)}}),e)};o.url='responseURL'in y?y.responseURL:o.headers.get('X-Request-URL');var n='response'in y?y.response:y.responseText;setTimeout(function(){s(new T(n,o))},0)},y.onerror=function(){setTimeout(function(){f(new TypeError('Network request failed'))},0)},y.ontimeout=function(){setTimeout(function(){f(new TypeError('Network request failed'))},0)},y.onabort=function(){setTimeout(function(){f(new t.DOMException('Aborted','AbortError'))},0)},y.open(c.method,(function(t){try{return''===t&&e.location.href?e.location.href:t}catch(e){return 
t}})(c.url),!0),'include'===c.credentials?y.withCredentials=!0:'omit'===c.credentials&&(y.withCredentials=!1),'responseType'in y&&(o.blob?y.responseType='blob':o.arrayBuffer&&c.headers.get('Content-Type')&&-1!==c.headers.get('Content-Type').indexOf('application/octet-stream')&&(y.responseType='arraybuffer')),!i||'object'!=typeof i.headers||i.headers instanceof u?c.headers.forEach(function(t,e){y.setRequestHeader(e,t)}):Object.getOwnPropertyNames(i.headers).forEach(function(t){y.setRequestHeader(t,h(i.headers[t]))}),c.signal&&(c.signal.addEventListener('abort',l),y.onreadystatechange=function(){4===y.readyState&&c.signal.removeEventListener('abort',l)}),y.send(void 0===c._bodyInit?null:c._bodyInit)})}B.polyfill=!0,e.fetch||(e.fetch=B,e.Headers=u,e.Request=_,e.Response=T),t.Headers=u,t.Request=_,t.Response=T,t.fetch=B,Object.defineProperty(t,'__esModule',{value:!0})},'object'==typeof _e&&void 0!==m?e(_e):'function'==typeof define&&define.amd?define(['exports'],e):e(t.WHATWGFetch={})},506,[]); +__d(function(g,r,i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),u=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),f=r(d[0])(r(d[9])),h=r(d[0])(r(d[10])),y=r(d[0])(r(d[11])),b=r(d[0])(r(d[12])),p=r(d[0])(r(d[13])),v=r(d[0])(r(d[14])),_=r(d[0])(r(d[15])),E=r(d[0])(r(d[16])),k=["headers"];function S(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var I=0,N=1,O=2,w=3,C=0,L=(function(_){(0,s.default)(R,_);var L,T,A=(L=R,T=S(),function(){var e,t=(0,u.default)(L);if(T){var n=(0,u.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,o.default)(this,e)});function R(n,s,o){var u;(0,t.default)(this,R),(u=A.call(this)).CONNECTING=I,u.OPEN=N,u.CLOSING=O,u.CLOSED=w,u.readyState=I,u.url=n,'string'==typeof s&&(s=[s]);var c=o||{},l=c.headers,h=void 0===l?{}:l,p=(0,e.default)(c,k);return p&&'string'==typeof p.origin&&(console.warn('Specifying `origin` as a WebSocket connection option is deprecated. Include it under `headers` instead.'),h.origin=p.origin,delete p.origin),Object.keys(p).length>0&&console.warn('Unrecognized WebSocket connection option(s) `'+Object.keys(p).join('`, `')+"`. 
Did you mean to put these under `headers`?"),Array.isArray(s)||(s=null),u._eventEmitter=new f.default('ios'!==y.default.OS?null:b.default),u._socketId=C++,u._registerEvents(),b.default.connect(n,s,{headers:h},u._socketId),u}return(0,n.default)(R,[{key:"binaryType",get:function(){return this._binaryType},set:function(e){if('blob'!==e&&'arraybuffer'!==e)throw new Error("binaryType must be either 'blob' or 'arraybuffer'");'blob'!==this._binaryType&&'blob'!==e||((0,E.default)(l.default.isAvailable,'Native module BlobModule is required for blob support'),'blob'===e?l.default.addWebSocketHandler(this._socketId):l.default.removeWebSocketHandler(this._socketId)),this._binaryType=e}},{key:"close",value:function(e,t){this.readyState!==this.CLOSING&&this.readyState!==this.CLOSED&&(this.readyState=this.CLOSING,this._close(e,t))}},{key:"send",value:function(e){if(this.readyState===this.CONNECTING)throw new Error('INVALID_STATE_ERR');if(e instanceof c.default)return(0,E.default)(l.default.isAvailable,'Native module BlobModule is required for blob support'),void l.default.sendOverSocket(e,this._socketId);if('string'!=typeof e){if(!(e instanceof ArrayBuffer||ArrayBuffer.isView(e)))throw new Error('Unsupported data type');b.default.sendBinary((0,h.default)(e),this._socketId)}else b.default.send(e,this._socketId)}},{key:"ping",value:function(){if(this.readyState===this.CONNECTING)throw new Error('INVALID_STATE_ERR');b.default.ping(this._socketId)}},{key:"_close",value:function(e,t){var n='number'==typeof e?e:1e3,s='string'==typeof t?t:'';b.default.close(n,s,this._socketId),l.default.isAvailable&&'blob'===this._binaryType&&l.default.removeWebSocketHandler(this._socketId)}},{key:"_unregisterEvents",value:function(){this._subscriptions.forEach(function(e){return e.remove()}),this._subscriptions=[]}},{key:"_registerEvents",value:function(){var e=this;this._subscriptions=[this._eventEmitter.addListener('websocketMessage',function(t){if(t.id===e._socketId){var n=t.data;switch(t.type){case'binary':n=v.default.toByteArray(t.data).buffer;break;case'blob':n=l.default.createFromOptions(t.data)}e.dispatchEvent(new p.default('message',{data:n}))}}),this._eventEmitter.addListener('websocketOpen',function(t){t.id===e._socketId&&(e.readyState=e.OPEN,e.protocol=t.protocol,e.dispatchEvent(new p.default('open')))}),this._eventEmitter.addListener('websocketClosed',function(t){t.id===e._socketId&&(e.readyState=e.CLOSED,e.dispatchEvent(new p.default('close',{code:t.code,reason:t.reason})),e._unregisterEvents(),e.close())}),this._eventEmitter.addListener('websocketFailed',function(t){t.id===e._socketId&&(e.readyState=e.CLOSED,e.dispatchEvent(new p.default('error',{message:t.message})),e.dispatchEvent(new p.default('close',{message:t.message})),e._unregisterEvents(),e.close())})]}}]),R})(_.default.apply(void 0,['close','error','message','open']));L.CONNECTING=I,L.OPEN=N,L.CLOSING=O,L.CLOSED=w,m.exports=L},507,[407,508,402,403,417,419,422,493,491,500,504,426,510,511,498,495,425]); +__d(function(g,r,_i,a,m,e,d){m.exports=function(t,o){if(null==t)return{};var n,l,p=r(d[0])(t,o);if(Object.getOwnPropertySymbols){var s=Object.getOwnPropertySymbols(t);for(l=0;l=0||Object.prototype.propertyIsEnumerable.call(t,n)&&(p[n]=t[n])}return p},m.exports.__esModule=!0,m.exports.default=m.exports},508,[509]); +__d(function(g,r,_i,a,m,e,d){m.exports=function(t,n){if(null==t)return{};var o,u,f={},s=Object.keys(t);for(u=0;u=0||(f[o]=t[o]);return f},m.exports.__esModule=!0,m.exports.default=m.exports},509,[]); +__d(function(g,r,i,a,m,e,d){function 
t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).getEnforcing('WebSocketModule');e.default=n},510,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0])(function t(s,n){r(d[1])(this,t),this.type=s.toString(),r(d[2])(this,n)});m.exports=t},511,[403,402,436]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(o,e);var n,u,c=(n=o,u=t(),function(){var t,e=r(d[0])(n);if(u){var c=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,c)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function o(t,e,n){var u;return r(d[3])(this,o),r(d[4])(null!=t&&null!=e,'Failed to construct `File`: Must pass both `parts` and `name` arguments.'),(u=c.call(this,t,n)).data.name=e,u}return r(d[5])(o,[{key:"name",get:function(){return r(d[4])(null!=this.data.name,'Files must have a name set.'),this.data.name}},{key:"lastModified",get:function(){return this.data.lastModified||0}}]),o})(r(d[6]));m.exports=e},512,[422,419,417,402,425,403,493]); +__d(function(g,r,i,a,m,_e,d){var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),s=r(d[0])(r(d[5])),u=r(d[0])(r(d[6]));function l(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var c=0,f=1,h=2,y=(function(y){(0,n.default)(b,y);var _,p,v=(_=b,p=l(),function(){var t,e=(0,s.default)(_);if(p){var n=(0,s.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return(0,o.default)(this,t)});function b(){var e;return(0,t.default)(this,b),(e=v.call(this)).EMPTY=c,e.LOADING=f,e.DONE=h,e._aborted=!1,e._subscriptions=[],e._reset(),e}return(0,e.default)(b,[{key:"_reset",value:function(){this._readyState=c,this._error=null,this._result=null}},{key:"_clearSubscriptions",value:function(){this._subscriptions.forEach(function(t){return t.remove()}),this._subscriptions=[]}},{key:"_setReadyState",value:function(t){this._readyState=t,this.dispatchEvent({type:'readystatechange'}),t===h&&(this._aborted?this.dispatchEvent({type:'abort'}):this._error?this.dispatchEvent({type:'error'}):this.dispatchEvent({type:'load'}),this.dispatchEvent({type:'loadend'}))}},{key:"readAsArrayBuffer",value:function(){throw new Error('FileReader.readAsArrayBuffer is not implemented')}},{key:"readAsDataURL",value:function(t){var e=this;if(this._aborted=!1,null==t)throw new TypeError("Failed to execute 'readAsDataURL' on 'FileReader': parameter 1 is not of type 
'Blob'");u.default.readAsDataURL(t.data).then(function(t){e._aborted||(e._result=t,e._setReadyState(h))},function(t){e._aborted||(e._error=t,e._setReadyState(h))})}},{key:"readAsText",value:function(t){var e=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:'UTF-8';if(this._aborted=!1,null==t)throw new TypeError("Failed to execute 'readAsText' on 'FileReader': parameter 1 is not of type 'Blob'");u.default.readAsText(t.data,n).then(function(t){e._aborted||(e._result=t,e._setReadyState(h))},function(t){e._aborted||(e._error=t,e._setReadyState(h))})}},{key:"abort",value:function(){this._aborted=!0,this._readyState!==c&&this._readyState!==h&&(this._reset(),this._setReadyState(h)),this._reset()}},{key:"readyState",get:function(){return this._readyState}},{key:"error",get:function(){return this._error}},{key:"result",get:function(){return this._result}}]),b})(r(d[7]).apply(void 0,['abort','error','load','loadstart','loadend','progress']));y.EMPTY=c,y.LOADING=f,y.DONE=h,m.exports=y},513,[407,402,403,417,419,422,514,495]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).getEnforcing('FileReaderModule');e.default=n},514,[428]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.URLSearchParams=e.URL=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),s=null;if(o.default&&'string'==typeof o.default.getConstants().BLOB_URI_SCHEME){var u=o.default.getConstants();s=u.BLOB_URI_SCHEME+':','string'==typeof u.BLOB_URI_HOST&&(s+="//"+u.BLOB_URI_HOST+"/")}var h=(function(o){function s(n){var o=this;(0,t.default)(this,s),this._searchParams=[],'object'==typeof n&&Object.keys(n).forEach(function(t){return o.append(t,n[t])})}return(0,n.default)(s,[{key:"append",value:function(t,n){this._searchParams.push([t,n])}},{key:"delete",value:function(t){throw new Error('URLSearchParams.delete is not implemented')}},{key:"get",value:function(t){throw new Error('URLSearchParams.get is not implemented')}},{key:"getAll",value:function(t){throw new Error('URLSearchParams.getAll is not implemented')}},{key:"has",value:function(t){throw new Error('URLSearchParams.has is not implemented')}},{key:"set",value:function(t,n){throw new Error('URLSearchParams.set is not implemented')}},{key:"sort",value:function(){throw new Error('URLSearchParams.sort is not implemented')}},{key:o,value:function(){return this._searchParams[Symbol.iterator]()}},{key:"toString",value:function(){if(0===this._searchParams.length)return'';var t=this._searchParams.length-1;return this._searchParams.reduce(function(n,o,s){return n+o.join('=')+(s===t?'':'&')},'')}}]),s})(Symbol.iterator);function 
f(t){return/^(?:(?:(?:https?|ftp):)?\/\/)(?:(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])(?:\.(?:1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.(?:[1-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(?:(?:[a-z0-9\u00a1-\uffff][a-z0-9\u00a1-\uffff_-]{0,62})?[a-z0-9\u00a1-\uffff]\.)*(?:[a-z\u00a1-\uffff]{2,}\.?))(?::\d{2,5})?(?:[/?#]\S*)?$/.test(t)}e.URLSearchParams=h;var l=(function(){function o(n,s){(0,t.default)(this,o),this._searchParamsInstance=null;var u=null;if(!s||f(n))this._url=n,this._url.endsWith('/')||(this._url+='/');else{if('string'==typeof s){if(!f(u=s))throw new TypeError("Invalid base URL: "+u)}else'object'==typeof s&&(u=s.toString());u.endsWith('/')&&(u=u.slice(0,u.length-1)),n.startsWith('/')||(n="/"+n),u.endsWith(n)&&(n=''),this._url=""+u+n}}return(0,n.default)(o,[{key:"hash",get:function(){throw new Error('URL.hash is not implemented')}},{key:"host",get:function(){throw new Error('URL.host is not implemented')}},{key:"hostname",get:function(){throw new Error('URL.hostname is not implemented')}},{key:"href",get:function(){return this.toString()}},{key:"origin",get:function(){throw new Error('URL.origin is not implemented')}},{key:"password",get:function(){throw new Error('URL.password is not implemented')}},{key:"pathname",get:function(){throw new Error('URL.pathname not implemented')}},{key:"port",get:function(){throw new Error('URL.port is not implemented')}},{key:"protocol",get:function(){throw new Error('URL.protocol is not implemented')}},{key:"search",get:function(){throw new Error('URL.search is not implemented')}},{key:"searchParams",get:function(){return null==this._searchParamsInstance&&(this._searchParamsInstance=new h),this._searchParamsInstance}},{key:"toJSON",value:function(){return this.toString()}},{key:"toString",value:function(){if(null===this._searchParamsInstance)return this._url;var t=this._url.indexOf('?')>-1?'&':'?';return this._url+t+this._searchParamsInstance.toString()}},{key:"username",get:function(){throw new Error('URL.username is not implemented')}}],[{key:"createObjectURL",value:function(t){if(null===s)throw new Error('Cannot create URL for blob!');return""+s+t.data.blobId+"?offset="+t.data.offset+"&size="+t.size}},{key:"revokeObjectURL",value:function(t){}}]),o})();e.URL=l},515,[407,402,403,492]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}Object.defineProperty(_e,'__esModule',{value:!0});var e=(function(e){r(d[2])(c,e);var n,l,u=(n=c,l=t(),function(){var t,e=r(d[0])(n);if(l){var o=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,o)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function c(){throw r(d[3])(this,c),u.call(this),new TypeError("AbortSignal cannot be constructed directly")}return r(d[4])(c,[{key:"aborted",get:function(){var t=o.get(this);if("boolean"!=typeof t)throw new TypeError("Expected 'this' to be an 'AbortSignal' object, but got "+(null===this?"null":typeof this));return t}}]),c})(r(d[5]).EventTarget);r(d[5]).defineEventAttribute(e.prototype,"abort");var o=new WeakMap;Object.defineProperties(e.prototype,{aborted:{enumerable:!0}}),"function"==typeof Symbol&&"symbol"==typeof Symbol.toStringTag&&Object.defineProperty(e.prototype,Symbol.toStringTag,{configurable:!0,value:"AbortSignal"});var n=(function(){function t(){var 
n;r(d[3])(this,t),l.set(this,(n=Object.create(e.prototype),r(d[5]).EventTarget.call(n),o.set(n,!1),n))}return r(d[4])(t,[{key:"signal",get:function(){return u(this)}},{key:"abort",value:function(){var t;t=u(this),!1===o.get(t)&&(o.set(t,!0),t.dispatchEvent({type:"abort"}))}}]),t})(),l=new WeakMap;function u(t){var e=l.get(t);if(null==e)throw new TypeError("Expected 'this' to be an 'AbortController' object, but got "+(null===t?"null":typeof t));return e}Object.defineProperties(n.prototype,{signal:{enumerable:!0},abort:{enumerable:!0}}),"function"==typeof Symbol&&"symbol"==typeof Symbol.toStringTag&&Object.defineProperty(n.prototype,Symbol.toStringTag,{configurable:!0,value:"AbortController"}),_e.AbortController=n,_e.AbortSignal=e,_e.default=n,m.exports=n,m.exports.AbortController=m.exports.default=n,m.exports.AbortSignal=e},516,[422,419,417,402,403,495]); +__d(function(g,r,i,a,m,e,d){'use strict';g.alert||(g.alert=function(t){r(d[0]).alert('Alert',''+t)})},517,[518]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),l=(function(){function l(){(0,t.default)(this,l)}return(0,n.default)(l,null,[{key:"alert",value:function(t,n,s,u){if('ios'===o.default.OS)l.prompt(t,n,s,'default');else if('android'===o.default.OS){var c=r(d[5]).default;if(!c)return;var f=c.getConstants(),v={title:t||'',message:n||'',cancelable:!1};u&&u.cancelable&&(v.cancelable=u.cancelable);var p=s?s.slice(0,3):[{text:"OK"}],b=p.pop(),y=p.pop(),h=p.pop();h&&(v.buttonNeutral=h.text||''),y&&(v.buttonNegative=y.text||''),b&&(v.buttonPositive=b.text||"OK");c.showAlert(v,function(t){return console.warn(t)},function(t,n){t===f.buttonClicked?n===f.buttonNeutral?h.onPress&&h.onPress():n===f.buttonNegative?y.onPress&&y.onPress():n===f.buttonPositive&&b.onPress&&b.onPress():t===f.dismissed&&u&&u.onDismiss&&u.onDismiss()})}}},{key:"prompt",value:function(t,n,l){var u=arguments.length>3&&void 0!==arguments[3]?arguments[3]:'plain-text',c=arguments.length>4?arguments[4]:void 0,f=arguments.length>5?arguments[5]:void 0;if('ios'===o.default.OS){var v,p,b=[],y=[];'function'==typeof l?b=[l]:Array.isArray(l)&&l.forEach(function(t,n){if(b[n]=t.onPress,'cancel'===t.style?v=String(n):'destructive'===t.style&&(p=String(n)),t.text||n<(l||[]).length-1){var o={};o[n]=t.text||'',y.push(o)}}),s.default.alertWithArgs({title:t||'',message:n||void 0,buttons:y,type:u||void 0,defaultValue:c,cancelButtonKey:v,destructiveButtonKey:p,keyboardType:f},function(t,n){var o=b[t];o&&o(n)})}}}]),l})();m.exports=l},518,[407,402,403,426,519,520]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1]));function n(){}m.exports={alertWithArgs:function(f,o){t.default&&t.default.showAlert(f,n,o||n)}}},519,[407,520]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('DialogManagerAndroid');e.default=n},520,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=g.navigator;void 
0===t&&(g.navigator=t={}),r(d[0]).polyfillObjectProperty(t,'product',function(){return'ReactNative'})},521,[474]); +__d(function(g,r,i,a,m,e,d){'use strict';var n;if(g.RN$Bridgeless&&g.RN$registerCallableModule)n=g.RN$registerCallableModule;else{var t=r(d[0]);n=function(n,u){return t.registerLazyCallableModule(n,u)}}n('Systrace',function(){return r(d[1])}),n('JSTimers',function(){return r(d[2])}),n('HeapCapture',function(){return r(d[3])}),n('SamplingProfiler',function(){return r(d[4])}),n('RCTLog',function(){return r(d[5])}),n('RCTDeviceEventEmitter',function(){return r(d[6]).default}),n('RCTNativeAppEventEmitter',function(){return r(d[7])}),n('GlobalPerformanceLogger',function(){return r(d[8])}),n('JSDevSupportModule',function(){return r(d[9])}),n('HMRClient',function(){return r(d[10])})},522,[437,439,483,523,525,527,413,528,496,529,531]); +__d(function(g,r,i,a,m,_e,d){var e=r(d[0])(r(d[1])),t={captureHeap:function(t){var p=null;try{g.nativeCaptureHeap(t),console.log('HeapCapture.captureHeap succeeded: '+t)}catch(e){console.log('HeapCapture.captureHeap error: '+e.toString()),p=e.toString()}e.default&&e.default.captureComplete(t,p)}};m.exports=t},523,[407,524]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var u=new WeakMap,o=new WeakMap;return(t=function(t){return t?o:u})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,u){if(!u&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var o=t(u);if(o&&o.has(n))return o.get(n);var f={},p=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var l=p?Object.getOwnPropertyDescriptor(n,c):null;l&&(l.get||l.set)?Object.defineProperty(f,c,l):f[c]=n[c]}f.default=n,o&&o.set(n,f);return f})(r(d[0])).get('JSCHeapCapture');e.default=n},524,[428]); +__d(function(g,r,i,a,m,_e,d){'use strict';var o={poke:function(o){var e=null,l=null;try{null===(l=g.pokeSamplingProfiler())?console.log('The JSC Sampling Profiler has started'):console.log('The JSC Sampling Profiler has stopped')}catch(o){console.log('Error occurred when restarting Sampling Profiler: '+o.toString()),e=o.toString()}var n=r(d[0]).default;n&&n.operationComplete(o,l,e)}};m.exports=o},525,[526]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var p in n)if("default"!==p&&Object.prototype.hasOwnProperty.call(n,p)){var c=l?Object.getOwnPropertyDescriptor(n,p):null;c&&(c.get||c.set)?Object.defineProperty(u,p,c):u[p]=n[p]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).get('JSCSamplingProfiler');e.default=n},526,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';var o={log:'log',info:'info',warn:'warn',error:'error',fatal:'error'},n=null,l={logIfNoNativeHook:function(o){for(var t=arguments.length,f=new Array(t>1?t-1:0),c=1;c1?f-1:0),v=1;v1?t-1:0),f=1;f>>8)>>>0,t|=0)}}},546,[547,549]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1]));m.exports=function(n){if('object'==typeof n&&null!=n&&null!=(0,r(d[2]).normalizeColorObject)(n))return n;if('string'==typeof n||'number'==typeof 
n)return(0,t.default)(n)}},547,[407,548,549]); +__d(function(_g,_r,i,a,m,e,d){'use strict';function r(r,l,n){return n<0&&(n+=1),n>1&&(n-=1),n<.16666666666666666?r+6*(l-r)*n:n<.5?l:n<.6666666666666666?r+(l-r)*(.6666666666666666-n)*6:r}function l(l,n,t){var o=t<.5?t*(1+n):t+n-t*n,u=2*t-o,g=r(u,o,l+.3333333333333333),s=r(u,o,l),h=r(u,o,l-.3333333333333333);return Math.round(255*g)<<24|Math.round(255*s)<<16|Math.round(255*h)<<8}var n,t='[-+]?\\d*\\.?\\d+',o="[-+]?\\d*\\.?\\d+%";function u(){for(var r=arguments.length,l=new Array(r),n=0;n255?255:l}function s(r){return(parseFloat(r)%360+360)%360/360}function h(r){var l=parseFloat(r);return l<0?0:l>1?255:Math.round(255*l)}function c(r){var l=parseFloat(r);return l<0?0:l>100?1:l/100}var p={transparent:0,aliceblue:4042850303,antiquewhite:4209760255,aqua:16777215,aquamarine:2147472639,azure:4043309055,beige:4126530815,bisque:4293182719,black:255,blanchedalmond:4293643775,blue:65535,blueviolet:2318131967,brown:2771004159,burlywood:3736635391,burntsienna:3934150143,cadetblue:1604231423,chartreuse:2147418367,chocolate:3530104575,coral:4286533887,cornflowerblue:1687547391,cornsilk:4294499583,crimson:3692313855,cyan:16777215,darkblue:35839,darkcyan:9145343,darkgoldenrod:3095792639,darkgray:2846468607,darkgreen:6553855,darkgrey:2846468607,darkkhaki:3182914559,darkmagenta:2332068863,darkolivegreen:1433087999,darkorange:4287365375,darkorchid:2570243327,darkred:2332033279,darksalmon:3918953215,darkseagreen:2411499519,darkslateblue:1211993087,darkslategray:793726975,darkslategrey:793726975,darkturquoise:13554175,darkviolet:2483082239,deeppink:4279538687,deepskyblue:12582911,dimgray:1768516095,dimgrey:1768516095,dodgerblue:512819199,firebrick:2988581631,floralwhite:4294635775,forestgreen:579543807,fuchsia:4278255615,gainsboro:3705462015,ghostwhite:4177068031,gold:4292280575,goldenrod:3668254975,gray:2155905279,green:8388863,greenyellow:2919182335,grey:2155905279,honeydew:4043305215,hotpink:4285117695,indianred:3445382399,indigo:1258324735,ivory:4294963455,khaki:4041641215,lavender:3873897215,lavenderblush:4293981695,lawngreen:2096890111,lemonchiffon:4294626815,lightblue:2916673279,lightcoral:4034953471,lightcyan:3774873599,lightgoldenrodyellow:4210742015,lightgray:3553874943,lightgreen:2431553791,lightgrey:3553874943,lightpink:4290167295,lightsalmon:4288707327,lightseagreen:548580095,lightskyblue:2278488831,lightslategray:2005441023,lightslategrey:2005441023,lightsteelblue:2965692159,lightyellow:4294959359,lime:16711935,limegreen:852308735,linen:4210091775,magenta:4278255615,maroon:2147483903,mediumaquamarine:1724754687,mediumblue:52735,mediumorchid:3126187007,mediumpurple:2473647103,mediumseagreen:1018393087,mediumslateblue:2070474495,mediumspringgreen:16423679,mediumturquoise:1221709055,mediumvioletred:3340076543,midnightblue:421097727,mintcream:4127193855,mistyrose:4293190143,moccasin:4293178879,navajowhite:4292783615,navy:33023,oldlace:4260751103,olive:2155872511,olivedrab:1804477439,orange:4289003775,orangered:4282712319,orchid:3664828159,palegoldenrod:4008225535,palegreen:2566625535,paleturquoise:2951671551,palevioletred:3681588223,papayawhip:4293907967,peachpuff:4292524543,peru:3448061951,pink:4290825215,plum:3718307327,powderblue:2967529215,purple:2147516671,rebeccapurple:1714657791,red:4278190335,rosybrown:3163525119,royalblue:1097458175,saddlebrown:2336560127,salmon:4202722047,sandybrown:4104413439,seagreen:780883967,seashell:4294307583,sienna:2689740287,silver:3233857791,skyblue:2278484991,slateblue:1784335871,slategray:1887473919,slategrey:188747391
9,snow:4294638335,springgreen:16744447,steelblue:1182971135,tan:3535047935,teal:8421631,thistle:3636451583,tomato:4284696575,turquoise:1088475391,violet:4001558271,wheat:4125012991,white:4294967295,whitesmoke:4126537215,yellow:4294902015,yellowgreen:2597139199};m.exports=function(r){var b,y=(void 0===n&&(n={rgb:new RegExp('rgb'+u(t,t,t)),rgba:new RegExp('rgba'+u(t,t,t,t)),hsl:new RegExp('hsl'+u(t,o,o)),hsla:new RegExp('hsla'+u(t,o,o,t)),hex3:/^#([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/,hex4:/^#([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})([0-9a-fA-F]{1})$/,hex6:/^#([0-9a-fA-F]{6})$/,hex8:/^#([0-9a-fA-F]{8})$/}),n);return'number'==typeof r?r>>>0===r&&r>=0&&r<=4294967295?r:null:'string'!=typeof r?null:(b=y.hex6.exec(r))?parseInt(b[1]+'ff',16)>>>0:p.hasOwnProperty(r)?p[r]:(b=y.rgb.exec(r))?(g(b[1])<<24|g(b[2])<<16|g(b[3])<<8|255)>>>0:(b=y.rgba.exec(r))?(g(b[1])<<24|g(b[2])<<16|g(b[3])<<8|h(b[4]))>>>0:(b=y.hex3.exec(r))?parseInt(b[1]+b[1]+b[2]+b[2]+b[3]+b[3]+'ff',16)>>>0:(b=y.hex8.exec(r))?parseInt(b[1],16)>>>0:(b=y.hex4.exec(r))?parseInt(b[1]+b[1]+b[2]+b[2]+b[3]+b[3]+b[4]+b[4],16)>>>0:(b=y.hsl.exec(r))?(255|l(s(b[1]),c(b[2]),c(b[3])))>>>0:(b=y.hsla.exec(r))?(l(s(b[1]),c(b[2]),c(b[3]))|h(b[4]))>>>0:null}},548,[]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.processColorObject=e.normalizeColorObject=e.PlatformColor=void 0;e.PlatformColor=function(){for(var o=arguments.length,n=new Array(o),t=0;t.49999*l?[0,2*Math.atan2(u,m)*C,90]:M<-.49999*l?[0,-2*Math.atan2(u,m)*C,-90]:[t.roundTo3Places(Math.atan2(2*u*m-2*s*c,1-2*v-2*h)*C),t.roundTo3Places(Math.atan2(2*s*m-2*u*c,1-2*f-2*h)*C),t.roundTo3Places(Math.asin(2*u*s+2*c*m)*C)]},roundTo3Places:function(t){var n=t.toString().split('e');return.001*Math.round(n[0]+'e'+(n[1]?+n[1]-3:3))},decomposeMatrix:function(n){r(d[1])(16===n.length,'Matrix decomposition needs a list of 3d matrix values, received %s',n);var a=[],o=[],i=[],u=[],s=[];if(n[15]){for(var c=[],m=[],v=0;v<4;v++){c.push([]);for(var f=0;f<4;f++){var h=n[4*v+f]/n[15];c[v].push(h),m.push(3===f?0:h)}}if(m[15]=1,t.determinant(m)){if(0!==c[0][3]||0!==c[1][3]||0!==c[2][3]){var M=[c[0][3],c[1][3],c[2][3],c[3][3]],l=t.inverse(m),C=t.transpose(l);a=t.multiplyVectorByMatrix(M,C)}else a[0]=a[1]=a[2]=0,a[3]=1;for(var p=0;p<3;p++)s[p]=c[3][p];for(var x=[],T=0;T<3;T++)x[T]=[c[T][0],c[T][1],c[T][2]];i[0]=t.v3Length(x[0]),x[0]=t.v3Normalize(x[0],i[0]),u[0]=t.v3Dot(x[0],x[1]),x[1]=t.v3Combine(x[1],x[0],1,-u[0]),i[1]=t.v3Length(x[1]),x[1]=t.v3Normalize(x[1],i[1]),u[0]/=i[1],u[1]=t.v3Dot(x[0],x[2]),x[2]=t.v3Combine(x[2],x[0],1,-u[1]),u[2]=t.v3Dot(x[1],x[2]),x[2]=t.v3Combine(x[2],x[1],1,-u[2]),i[2]=t.v3Length(x[2]),x[2]=t.v3Normalize(x[2],i[2]),u[1]/=i[2],u[2]/=i[2];var y,S=t.v3Cross(x[1],x[2]);if(t.v3Dot(x[0],S)<0)for(var P=0;P<3;P++)i[P]*=-1,x[P][0]*=-1,x[P][1]*=-1,x[P][2]*=-1;return o[0]=.5*Math.sqrt(Math.max(1+x[0][0]-x[1][1]-x[2][2],0)),o[1]=.5*Math.sqrt(Math.max(1-x[0][0]+x[1][1]-x[2][2],0)),o[2]=.5*Math.sqrt(Math.max(1-x[0][0]-x[1][1]+x[2][2],0)),o[3]=.5*Math.sqrt(Math.max(1+x[0][0]+x[1][1]+x[2][2],0)),x[2][1]>x[1][2]&&(o[0]=-o[0]),x[0][2]>x[2][0]&&(o[1]=-o[1]),x[1][0]>x[0][1]&&(o[2]=-o[2]),{rotationDegrees:y=o[0]<.001&&o[0]>=0&&o[1]<.001&&o[1]>=0?[0,0,t.roundTo3Places(180*Math.atan2(x[0][1],x[0][0])/Math.PI)]:t.quaternionToDegreesXYZ(o,c,x),perspective:a,quaternion:o,scale:i,skew:u,translation:s,rotate:y[2],rotateX:y[0],rotateY:y[1],scaleX:i[0],scaleY:i[1],translateX:s[0],translateY:s[1]}}}}};_m.exports=t},554,[430,425]); 
+__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.get=p,e.getWithFallback_DEPRECATED=function(n,o){if(null==t){if(v(n))return p(n,o)}else if(null!=t(n))return p(n,o);var u=function(t){return null};return u.displayName="Fallback("+n+")",u},e.setRuntimeConfigProvider=function(n){(0,f.default)(null==t,'NativeComponentRegistry.setRuntimeConfigProvider() called more than once.'),t=n},e.unstable_hasComponent=function(t){var n=s.get(t);if(null==n){if(!g.__nativeComponentRegistry__hasComponent)throw"unstable_hasComponent('"+t+"'): Global function is not registered";n=g.__nativeComponentRegistry__hasComponent(t),s.set(t,n)}return n},e.unstable_hasStaticViewConfig=function(n){var o;return!(null!=(o=null==t?void 0:t(n))?o:{native:!0}).native};var t,n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),f=r(d[0])(r(d[5]));!(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=c(n);if(o&&o.has(t))return o.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var s=l?Object.getOwnPropertyDescriptor(t,f):null;s&&(s.get||s.set)?Object.defineProperty(u,f,s):u[f]=t[f]}u.default=t,o&&o.set(t,u)})(r(d[6]));function c(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(c=function(t){return t?o:n})(t)}var s=new Map;function p(n,f){return o.default.register(n,function(){var o,c=null!=(o=null==t?void 0:t(n))?o:{native:!0,verify:!1},s=c.native,p=c.verify,v=s?(0,u.default)(n):(0,r(d[7]).createViewConfig)(f());return p&&(s?(0,l.default)(v,(0,r(d[7]).createViewConfig)(f())):(0,l.default)((0,u.default)(n),v)),v}),n}function v(o){return(0,f.default)(null==t,'Unexpected invocation!'),null!=n.default.getViewManagerConfig(o)}},555,[407,450,539,556,569,425,534,570]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=!1;function t(n){var t=r(d[0]).getConstants();t.ViewManagerNames||t.LazyViewManagersEnabled?n=s(n,r(d[0]).getDefaultEventTypes()):(n.bubblingEventTypes=s(n.bubblingEventTypes,t.genericBubblingEventTypes),n.directEventTypes=s(n.directEventTypes,t.genericDirectEventTypes))}function s(n,t){if(!t)return n;if(!n)return t;for(var o in t)if(t.hasOwnProperty(o)){var u=t[o];if(n.hasOwnProperty(o)){var c=n[o];'object'==typeof u&&'object'==typeof c&&(u=s(c,u))}n[o]=u}return n}function o(n){switch(n){case'CATransform3D':return r(d[4]);case'CGPoint':return r(d[5]);case'CGSize':return r(d[6]);case'UIEdgeInsets':return r(d[7]);case'Point':return r(d[5])}return null}function u(n){switch(n){case'CGColor':case'UIColor':return r(d[8]);case'CGColorArray':case'UIColorArray':return r(d[9]);case'CGImage':case'UIImage':case'RCTImageSource':return r(d[10]);case'Color':return r(d[8]);case'ColorArray':return r(d[9])}return null}m.exports=function(s){var c=r(d[0]).getViewManagerConfig(s);r(d[1])(null!=c&&null!=c.NativeProps,'requireNativeComponent: "%s" was not found in the UIManager.',s);for(var l=c.baseModuleName,v=c.bubblingEventTypes,b=c.directEventTypes,p=c.NativeProps;l;){var f=r(d[0]).getViewManagerConfig(l);f?(v=r(d[2])({},f.bubblingEventTypes,v),b=r(d[2])({},f.directEventTypes,b),p=r(d[2])({},f.NativeProps,p),l=f.baseModuleName):l=null}var y={};for(var C in p){var E=p[C],T=o(E),w=u(E);y[C]=null==T&&null==w||{diff:T,process:w}}return 
y.style=r(d[3]),r(d[2])(c,{uiViewClassName:s,validAttributes:y,bubblingEventTypes:v,directEventTypes:b}),n||(t(c),n=!0),c}},556,[450,425,436,557,552,558,551,550,546,559,560]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),l={process:t.default},s={alignContent:!0,alignItems:!0,alignSelf:!0,aspectRatio:!0,borderBottomWidth:!0,borderEndWidth:!0,borderLeftWidth:!0,borderRightWidth:!0,borderStartWidth:!0,borderTopWidth:!0,borderWidth:!0,bottom:!0,direction:!0,display:!0,end:!0,flex:!0,flexBasis:!0,flexDirection:!0,flexGrow:!0,flexShrink:!0,flexWrap:!0,height:!0,justifyContent:!0,left:!0,margin:!0,marginBottom:!0,marginEnd:!0,marginHorizontal:!0,marginLeft:!0,marginRight:!0,marginStart:!0,marginTop:!0,marginVertical:!0,maxHeight:!0,maxWidth:!0,minHeight:!0,minWidth:!0,overflow:!0,padding:!0,paddingBottom:!0,paddingEnd:!0,paddingHorizontal:!0,paddingLeft:!0,paddingRight:!0,paddingStart:!0,paddingTop:!0,paddingVertical:!0,position:!0,right:!0,start:!0,top:!0,width:!0,zIndex:!0,elevation:!0,shadowColor:l,shadowOffset:{diff:n.default},shadowOpacity:!0,shadowRadius:!0,decomposedMatrix:!0,rotation:!0,scaleX:!0,scaleY:!0,transform:{process:o.default},transformMatrix:!0,translateX:!0,translateY:!0,backfaceVisibility:!0,backgroundColor:l,borderBottomColor:l,borderBottomEndRadius:!0,borderBottomLeftRadius:!0,borderBottomRightRadius:!0,borderBottomStartRadius:!0,borderColor:l,borderEndColor:l,borderLeftColor:l,borderRadius:!0,borderRightColor:l,borderStartColor:l,borderStyle:!0,borderTopColor:l,borderTopEndRadius:!0,borderTopLeftRadius:!0,borderTopRightRadius:!0,borderTopStartRadius:!0,opacity:!0,color:l,fontFamily:!0,fontSize:!0,fontStyle:!0,fontVariant:!0,fontWeight:!0,includeFontPadding:!0,letterSpacing:!0,lineHeight:!0,textAlign:!0,textAlignVertical:!0,textDecorationColor:l,textDecorationLine:!0,textDecorationStyle:!0,textShadowColor:l,textShadowOffset:!0,textShadowRadius:!0,textTransform:!0,writingDirection:!0,overlayColor:l,resizeMode:!0,tintColor:l};m.exports=s},557,[407,546,553,551]); +__d(function(g,r,i,a,m,e,d){'use strict';var t={x:void 0,y:void 0};m.exports=function(n,o){return(n=n||t)!==(o=o||t)&&(n.x!==o.x||n.y!==o.y)}},558,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=r(d[0])(r(d[1])),l=0;function u(u){var o=(0,n.default)(u);return null==o?(console.error('Invalid value in color array:',u),l):o}m.exports=function(n){return null==n?null:n.map(u)}},559,[407,546]); +__d(function(g,r,i,a,m,e,d){'use strict';var t,n,s,u;function o(){if(u)return u;var t=g.nativeExtensions&&g.nativeExtensions.SourceCode;return t||(t=r(d[0]).default),u=t.getConstants().scriptURL}function f(){if(void 0===n){var t=o(),s=t&&t.match(/^https?:\/\/.*?\//);n=s?s[0]:null}return n}function c(t){if(t){if(t.startsWith('assets://'))return null;(t=t.substring(0,t.lastIndexOf('/')+1)).includes('://')||(t='file://'+t)}return t}m.exports=function(n){if('object'==typeof n)return n;var u=r(d[1]).getAssetByID(n);if(!u)return null;var l=new(r(d[2]))(f(),(void 0===s&&(s=c(o())),s),u);return t?t(l):l.defaultAsset()},m.exports.pickScale=r(d[3]).pickScale,m.exports.setCustomSourceTransformer=function(n){t=n}},560,[561,562,563,564]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var 
u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).getEnforcing('SourceCode'),o=null,u={getConstants:function(){return null==o&&(o=n.getConstants()),o}};e.default=u},561,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=[];m.exports={registerAsset:function(s){return t.push(s)},getAssetByID:function(s){return t[s-1]}}},562,[]); +__d(function(g,r,i,a,m,e,d){'use strict';function t(t){var s=r(d[0]).pickScale(t.scales,r(d[1]).get()),n=1===s?'':'@'+s+'x';return r(d[2]).getBasePath(t)+'/'+t.name+n+'.'+t.type}var s=(function(){function s(t,n,u){r(d[3])(this,s),this.serverUrl=t,this.jsbundleUrl=n,this.asset=u}return r(d[4])(s,[{key:"isLoadedFromServer",value:function(){return!!this.serverUrl}},{key:"isLoadedFromFileSystem",value:function(){return!(!this.jsbundleUrl||!this.jsbundleUrl.startsWith('file://'))}},{key:"defaultAsset",value:function(){return this.isLoadedFromServer()?this.assetServerURL():this.isLoadedFromFileSystem()?this.drawableFolderInBundle():this.resourceIdentifierWithoutScale()}},{key:"assetServerURL",value:function(){return r(d[5])(!!this.serverUrl,'need server to load from'),this.fromSource(this.serverUrl+t(this.asset)+"?platform=android&hash="+this.asset.hash)}},{key:"scaledAssetPath",value:function(){return this.fromSource(t(this.asset))}},{key:"scaledAssetURLNearBundle",value:function(){var s=this.jsbundleUrl||'file://';return this.fromSource(s+t(this.asset).replace(/\.\.\//g,'_'))}},{key:"resourceIdentifierWithoutScale",value:function(){return r(d[5])(!0,'resource identifiers work on Android'),this.fromSource(r(d[2]).getAndroidResourceIdentifier(this.asset))}},{key:"drawableFolderInBundle",value:function(){var t,s,n=this.jsbundleUrl||'file://';return this.fromSource(n+(t=this.asset,s=r(d[0]).pickScale(t.scales,r(d[1]).get()),r(d[2]).getAndroidResourceFolderName(t,s)+'/'+r(d[2]).getAndroidResourceIdentifier(t)+'.'+t.type))}},{key:"fromSource",value:function(t){return{__packager_asset:!0,width:this.asset.width,height:this.asset.height,uri:t,scale:r(d[0]).pickScale(this.asset.scales,r(d[1]).get())}}}]),s})();s.pickScale=r(d[0]).pickScale,m.exports=s},563,[564,565,568,402,403,425]); +__d(function(g,r,_i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.getUrlCacheBreaker=function(){if(null==n)return'';return n},e.pickScale=function(n,u){null==u&&(u=t.default.get());for(var l=0;l=u)return n[l];return n[n.length-1]||1},e.setUrlCacheBreaker=function(t){n=t};var n,t=r(d[0])(r(d[1]))},564,[407,565]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=(function(){function t(){r(d[0])(this,t)}return r(d[1])(t,null,[{key:"get",value:function(){return r(d[2]).get('window').scale}},{key:"getFontScale",value:function(){return r(d[2]).get('window').fontScale||t.get()}},{key:"getPixelSizeForLayoutSize",value:function(n){return Math.round(n*t.get())}},{key:"roundToNearestPixel",value:function(n){var u=t.get();return Math.round(n*u)/u}},{key:"startDetecting",value:function(){}}]),t})();m.exports=t},565,[402,403,566]); +__d(function(g,r,i,a,m,e,d){var n,t=r(d[0])(r(d[1])),s=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),c=r(d[0])(r(d[5])),u=r(d[0])(r(d[6])),f=new o.default,v=!1,h=(function(){function 
o(){(0,t.default)(this,o)}return(0,s.default)(o,null,[{key:"get",value:function(t){return(0,u.default)(n[t],'No dimension set for key '+t),n[t]}},{key:"set",value:function(t){var s=t.screen,o=t.window,l=t.windowPhysicalPixels;l&&(o={width:l.width/l.scale,height:l.height/l.scale,scale:l.scale,fontScale:l.fontScale});var c=t.screenPhysicalPixels;c?s={width:c.width/c.scale,height:c.height/c.scale,scale:c.scale,fontScale:c.fontScale}:null==s&&(s=o),n={window:o,screen:s},v?f.emit('change',n):v=!0}},{key:"addEventListener",value:function(n,t){return(0,u.default)('change'===n,'Trying to subscribe to unknown event: "%s"',n),f.addListener(n,t)}},{key:"removeEventListener",value:function(n,t){(0,u.default)('change'===n,'Trying to remove listener for unknown event: "%s"',n),f.removeListener(n,t)}}]),o})(),w=g.nativeExtensions&&g.nativeExtensions.DeviceInfo&&g.nativeExtensions.DeviceInfo.Dimensions;w||(l.default.addListener('didUpdateDimensions',function(n){h.set(n)}),w=c.default.getConstants().Dimensions),h.set(w),m.exports=h},566,[407,402,403,414,413,567,425]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).getEnforcing('DeviceInfo'),o=null,u={getConstants:function(){return null==o&&(o=n.getConstants()),o}};e.default=u},567,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';var t={.75:'ldpi',1:'mdpi',1.5:'hdpi',2:'xhdpi',3:'xxhdpi',4:'xxxhdpi'};function n(n){if(n.toString()in t)return t[n.toString()];throw new Error('no such scale '+n.toString())}var o=new Set(['gif','jpeg','jpg','png','svg','webp','xml']);function s(t){var n=t.httpServerLocation;return n.startsWith('/')?n.substr(1):n}m.exports={getAndroidResourceFolderName:function(s,u){if(!o.has(s.type))return'raw';var c=n(u);if(!c)throw new Error("Don't know which android drawable suffix to use for scale: "+u+'\nAsset: '+JSON.stringify(s,null,'\t')+'\nPossible scales are:'+JSON.stringify(t,null,'\t'));return'drawable-'+c},getAndroidResourceIdentifier:function(t){return(s(t)+'/'+t.name).toLowerCase().replace(/\//g,'_').replace(/([^a-z0-9_])/g,'').replace(/^assets_/,'')},getBasePath:s}},568,[]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(t,n){for(var o=0,u=['validAttributes','bubblingEventTypes','directEventTypes'];o0){var l,v=null!=(l=n.uiViewClassName)?l:t.uiViewClassName;console.error("'"+v+"' has a view config that does not match native. 
'"+s+"' is missing: "+c.join(', '))}}},e.getConfigWithoutViewProps=function(n,f){if(!n[f])return{};return Object.keys(n[f]).filter(function(n){return!t.default[f][n]}).reduce(function(t,o){return t[o]=n[f][o],t},{})},e.lefthandObjectDiff=f,e.stringifyViewConfig=function(t){return JSON.stringify(t,function(t,n){return'function'==typeof n?"\u0192 "+n.name:n},2)};var t=r(d[0])(r(d[1])),n=['transform','hitSlop'];function f(t,o){var u={};function s(t,n,o){if(typeof t==typeof n||null==t)if('object'!=typeof t)t===n||(u[o]=n);else{var s=f(t,n);Object.keys(s).length>1&&(u[o]=s)}else u[o]=n}for(var c in t)n.includes(c)||(o?t.hasOwnProperty(c)&&s(t[c],o[c],c):u[c]={});return u}},569,[407,544]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.createViewConfig=function(t){return{uiViewClassName:t.uiViewClassName,Commands:{},bubblingEventTypes:u(n.default.bubblingEventTypes,t.bubblingEventTypes),directEventTypes:u(n.default.directEventTypes,t.directEventTypes),validAttributes:u(n.default.validAttributes,t.validAttributes)}};var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2]));function u(n,u){var l;return null==n||null==u?null!=(l=null!=n?n:u)?l:{}:(0,t.default)({},n,u)}},570,[407,436,544]); +__d(function(g,r,i,a,m,e,d){'use strict';var n;m.exports=function t(o,u){var f=arguments.length>2&&void 0!==arguments[2]?arguments[2]:-1,s=arguments.length>3?arguments[3]:void 0,c='number'==typeof f?s:f,l='number'==typeof f?f:-1;if(0===l)return!0;if(o===u)return!1;if('function'==typeof o&&'function'==typeof u){var v=null==c?void 0:c.unsafelyIgnoreFunctions;return null==v&&(!n||!n.onDifferentFunctionsIgnored||c&&'unsafelyIgnoreFunctions'in c||n.onDifferentFunctionsIgnored(o.name,u.name),v=!0),!v}if('object'!=typeof o||null===o)return o!==u;if('object'!=typeof u||null===u)return!0;if(o.constructor!==u.constructor)return!0;if(Array.isArray(o)){var y=o.length;if(u.length!==y)return!0;for(var p=0;p=w},r=function(){},_e.unstable_forceFrameRate=function(e){0>e||125>>1,a=e[r];if(!(void 0!==a&&0x(l,t))void 0!==s&&0>x(s,l)?(e[r]=s,e[u]=t,r=u):(e[r]=l,e[o]=t,r=o);else{if(!(void 0!==s&&0>x(s,t)))break e;e[r]=s,e[u]=t,r=u}}}return n}return null}function x(e,n){var t=e.sortIndex-n.sortIndex;return 0!==t?t:e.id-n.id}var P=[],F=[],I=1,M=null,C=3,j=!1,A=!1,L=!1;function q(e){for(var n=T(F);null!==n;){if(null===n.callback)g(F);else{if(!(n.startTime<=e))break;g(F),n.sortIndex=n.expirationTime,k(P,n)}n=T(F)}}function R(t){if(L=!1,q(t),!A)if(null!==T(P))A=!0,e(Y);else{var r=T(F);null!==r&&n(R,r.startTime-t)}}function Y(e,r){A=!1,L&&(L=!1,t()),j=!0;var a=C;try{for(q(r),M=T(P);null!==M&&(!(M.expirationTime>r)||e&&!_e.unstable_shouldYield());){var o=M.callback;if("function"==typeof o){M.callback=null,C=M.priorityLevel;var l=o(M.expirationTime<=r);r=_e.unstable_now(),"function"==typeof l?M.callback=l:M===T(P)&&g(P),q(r)}else g(P);M=T(P)}if(null!==M)var u=!0;else{var s=T(F);null!==s&&n(R,s.startTime-r),u=!1}return u}finally{M=null,C=a,j=!1}}var E=r;_e.unstable_IdlePriority=5,_e.unstable_ImmediatePriority=1,_e.unstable_LowPriority=4,_e.unstable_NormalPriority=3,_e.unstable_Profiling=null,_e.unstable_UserBlockingPriority=2,_e.unstable_cancelCallback=function(e){e.callback=null},_e.unstable_continueExecution=function(){A||j||(A=!0,e(Y))},_e.unstable_getCurrentPriorityLevel=function(){return C},_e.unstable_getFirstCallbackNode=function(){return T(P)},_e.unstable_next=function(e){switch(C){case 1:case 2:case 3:var n=3;break;default:n=C}var t=C;C=n;try{return 
e()}finally{C=t}},_e.unstable_pauseExecution=function(){},_e.unstable_requestPaint=E,_e.unstable_runWithPriority=function(e,n){switch(e){case 1:case 2:case 3:case 4:case 5:break;default:e=3}var t=C;C=e;try{return n()}finally{C=t}},_e.unstable_scheduleCallback=function(r,a,o){var l=_e.unstable_now();switch("object"==typeof o&&null!==o?o="number"==typeof(o=o.delay)&&0l?(r.sortIndex=o,k(F,r),null===T(P)&&r===T(F)&&(L?t():L=!0,n(R,o-l))):(r.sortIndex=u,k(P,r),A||j||(A=!0,e(Y))),r},_e.unstable_wrapCallback=function(e){var n=C;return function(){var t=C;C=n;try{return e.apply(this,arguments)}finally{C=t}}}},576,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),o=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=c(n);if(o&&o.has(t))return o.get(t);var l={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var s=u?Object.getOwnPropertyDescriptor(t,f):null;s&&(s.get||s.set)?Object.defineProperty(l,f,s):l[f]=t[f]}l.default=t,o&&o.set(t,l);return l})(r(d[3])),l=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),f=r(d[0])(r(d[6])),s=["animating","color","hidesWhenStopped","onLayout","size","style"];function c(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(c=function(t){return t?o:n})(t)}var p='android'===l.default.OS?r(d[7]):r(d[8]).default,y=o.forwardRef(function(c,y){var v,O,w=c.animating,b=void 0===w||w,S=c.color,j=void 0===S?'ios'===l.default.OS?"#999999":null:S,z=c.hidesWhenStopped,W=void 0===z||z,k=c.onLayout,L=c.size,P=void 0===L?'small':L,M=c.style,_=(0,n.default)(c,s);switch(P){case'small':v=h.sizeSmall,O='small';break;case'large':v=h.sizeLarge,O='large';break;default:v={height:P,width:P}}var E=(0,t.default)({animating:b,color:j,hidesWhenStopped:W},_,{ref:y,style:v,size:O});return o.createElement(f.default,{onLayout:k,style:u.default.compose(h.container,M)},'android'===l.default.OS?o.createElement(p,(0,t.default)({},E,{styleAttr:'Normal',indeterminate:!0})):o.createElement(p,E))});y.displayName='ActivityIndicator';var h=u.default.create({container:{alignItems:'center',justifyContent:'center'},sizeSmall:{width:20,height:20},sizeLarge:{width:36,height:36}});m.exports=y},577,[407,436,508,534,426,578,579,582,587]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0]).roundToNearestPixel(.4);0===t&&(t=1/r(d[0]).get());var o={position:'absolute',left:0,right:0,top:0,bottom:0};m.exports={hairlineWidth:t,absoluteFill:o,absoluteFillObject:o,compose:function(t,o){return null!=t&&null!=o?[t,o]:null!=t?t:o},flatten:r(d[1]),setStyleAttributePreprocessor:function(t,o){var l;if(!0===r(d[2])[t])l={process:o};else{if('object'!=typeof r(d[2])[t])return void console.error(t+" is not a valid style attribute");l=r(d[3])({},r(d[2])[t],{process:o})}r(d[2])[t]=l},create:function(t){return t}}},578,[565,573,557,436]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),f=r(d[0])(r(d[3])),u=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var f=o(n);if(f&&f.has(t))return f.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var p=l?Object.getOwnPropertyDescriptor(t,c):null;p&&(p.get||p.set)?Object.defineProperty(u,c,p):u[c]=t[c]}u.default=t,f&&f.set(t,u);return u})(r(d[4]));function o(t){if("function"!=typeof 
WeakMap)return null;var n=new WeakMap,f=new WeakMap;return(o=function(t){return t?f:n})(t)}var l=u.forwardRef(function(o,l){return u.createElement(f.default.Provider,{value:!1},u.createElement(n.default,(0,t.default)({},o,{ref:l})))});l.displayName='View',m.exports=l},579,[407,436,580,581,534]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=e.Commands=void 0;var t=l(r(d[0])),n=r(d[1])(r(d[2])),o=r(d[1])(r(d[3])),u=r(d[1])(r(d[4]));l(r(d[5]));function f(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(f=function(t){return t?o:n})(t)}function l(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=f(n);if(o&&o.has(t))return o.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var p in t)if("default"!==p&&Object.prototype.hasOwnProperty.call(t,p)){var s=l?Object.getOwnPropertyDescriptor(t,p):null;s&&(s.get||s.set)?Object.defineProperty(u,p,s):u[p]=t[p]}return u.default=t,o&&o.set(t,u),u}var p=t.get('RCTView',function(){return'android'===n.default.OS?u.default:{uiViewClassName:'RCTView'}}),s=(0,o.default)({supportedCommands:['hotspotUpdate','setPressed']});e.Commands=s;var c=p;e.default=c},580,[555,407,426,542,545,534]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0]).createContext(!1);m.exports=t},581,[534]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),f=r(d[0])(r(d[3])),l=["styleAttr","indeterminate","animating"],o=r(d[4]),u=o.forwardRef(function(u,v){var s=u.styleAttr,c=void 0===s?'Normal':s,y=u.indeterminate,A=void 0===y||y,_=u.animating,p=void 0===_||_,w=(0,n.default)(u,l);return o.createElement(f.default,(0,t.default)({styleAttr:c,indeterminate:A,animating:p},w,{ref:v}))});m.exports=u},582,[407,436,508,583,534]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=(0,r(d[0])(r(d[1])).default)('AndroidProgressBar',{interfaceOnly:!0});e.default=t},583,[407,584]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2]));var t=function(t,p){var l=p&&null!=p.paperComponentName?p.paperComponentName:t;if(null!=p&&null!=p.paperComponentNameDeprecated)if(o.default.getViewManagerConfig(t))l=t;else{var u;if(null==p.paperComponentNameDeprecated||!o.default.getViewManagerConfig(p.paperComponentNameDeprecated))throw new Error("Failed to find native component for either "+t+" or "+(null!=(u=p.paperComponentNameDeprecated)?u:'(unknown)'));l=p.paperComponentNameDeprecated}return(0,n.default)(l)};e.default=t},584,[407,585,450]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(n){return r(d[0])(n,function(){return r(d[1])(n)})}},585,[586,556]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0]).ReactNativeViewConfigRegistry.register;m.exports=function(n,s){return t(n,s)}},586,[537]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=(0,r(d[0])(r(d[1])).default)('ActivityIndicatorView',{paperComponentName:'RCTActivityIndicatorView'});e.default=t},587,[407,584]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),n=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),l=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=x(e);if(o&&o.has(t))return o.get(t);var 
s={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var l=n?Object.getOwnPropertyDescriptor(t,c):null;l&&(l.get||l.set)?Object.defineProperty(s,c,l):s[c]=t[c]}s.default=t,o&&o.set(t,s);return s})(r(d[7])),u=r(d[0])(r(d[8])),f=r(d[0])(r(d[9])),p=r(d[0])(r(d[10])),b=r(d[0])(r(d[11])),y=r(d[0])(r(d[12])),h=r(d[0])(r(d[13])),v=r(d[0])(r(d[14]));function x(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,o=new WeakMap;return(x=function(t){return t?o:e})(t)}function F(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var D=(function(f){(0,s.default)(w,f);var x,D,S=(x=w,D=F(),function(){var t,e=(0,c.default)(x);if(D){var o=(0,c.default)(this).constructor;t=Reflect.construct(e,arguments,o)}else t=e.apply(this,arguments);return(0,n.default)(this,t)});function w(){return(0,e.default)(this,w),S.apply(this,arguments)}return(0,o.default)(w,[{key:"render",value:function(){var e,o,s=this.props,n=s.accessibilityLabel,c=s.color,f=s.onPress,x=s.touchSoundDisabled,F=s.title,D=s.hasTVPreferredFocus,S=s.nextFocusDown,w=s.nextFocusForward,P=s.nextFocusLeft,R=s.nextFocusRight,A=s.nextFocusUp,k=s.testID,j=s.accessible,C=s.accessibilityActions,L=s.onAccessibilityAction,M=[O.button],W=[O.text];c&&('ios'===u.default.OS?W.push({color:c}):M.push({backgroundColor:c}));var _=null!=this.props.disabled?this.props.disabled:null==(e=this.props.accessibilityState)?void 0:e.disabled,B=_!==(null==(o=this.props.accessibilityState)?void 0:o.disabled)?(0,t.default)({},this.props.accessibilityState,{disabled:_}):this.props.accessibilityState;_&&(M.push(O.buttonDisabled),W.push(O.textDisabled)),(0,v.default)('string'==typeof F,'The title prop of a Button must be a string');var E='android'===u.default.OS?F.toUpperCase():F,T='android'===u.default.OS?b.default:y.default;return l.createElement(T,{accessible:j,accessibilityActions:C,onAccessibilityAction:L,accessibilityLabel:n,accessibilityRole:"button",accessibilityState:B,hasTVPreferredFocus:D,nextFocusDown:S,nextFocusForward:w,nextFocusLeft:P,nextFocusRight:R,nextFocusUp:A,testID:k,disabled:_,onPress:f,touchSoundDisabled:x},l.createElement(h.default,{style:M},l.createElement(p.default,{style:W,disabled:_},E)))}}]),w})(l.Component),O=f.default.create({button:u.default.select({ios:{},android:{elevation:4,backgroundColor:'#2196F3',borderRadius:2}}),text:(0,t.default)({textAlign:'center',margin:8},u.default.select({ios:{color:'#007AFF',fontSize:18},android:{color:'white',fontWeight:'500'}})),buttonDisabled:u.default.select({ios:{},android:{elevation:0,backgroundColor:'#dfdfdf'}}),textDisabled:u.default.select({ios:{color:'#cdcdcd'},android:{color:'#a1a1a1'}})});m.exports=D},588,[407,436,402,403,417,419,422,534,426,578,589,614,615,579,425]); +__d(function(g,r,i,a,m,e,d){var n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),t=r(d[0])(r(d[3])),s=r(d[0])(r(d[4])),l=(v(r(d[5])),r(d[0])(r(d[6]))),u=(r(d[0])(r(d[7])),r(d[0])(r(d[8]))),p=r(d[0])(r(d[9])),f=v(r(d[10])),c=(r(d[0])(r(d[11])),["accessible","allowFontScaling","ellipsizeMode","onLongPress","onPress","onPressIn","onPressOut","onResponderGrant","onResponderMove","onResponderRelease","onResponderTerminate","onResponderTerminationRequest","onStartShouldSetResponder","pressRetentionOffset","suppressHighlighting"]);function R(n){if("function"!=typeof WeakMap)return 
null;var o=new WeakMap,t=new WeakMap;return(R=function(n){return n?t:o})(n)}function v(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var t=R(o);if(t&&t.has(n))return t.get(n);var s={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in n)if("default"!==u&&Object.prototype.hasOwnProperty.call(n,u)){var p=l?Object.getOwnPropertyDescriptor(n,u):null;p&&(p.get||p.set)?Object.defineProperty(s,u,p):s[u]=n[u]}return s.default=n,t&&t.set(n,s),s}var P=f.forwardRef(function(s,R){var v=s.accessible,P=s.allowFontScaling,b=s.ellipsizeMode,O=s.onLongPress,T=s.onPress,h=s.onPressIn,y=s.onPressOut,M=s.onResponderGrant,w=s.onResponderMove,E=s.onResponderRelease,C=s.onResponderTerminate,L=s.onResponderTerminationRequest,j=s.onStartShouldSetResponder,x=s.pressRetentionOffset,D=s.suppressHighlighting,_=(0,t.default)(s,c),q=(0,f.useState)(!1),G=(0,o.default)(q,2),H=G[0],k=G[1],z=(null!=T||null!=O||null!=j)&&!0!==_.disabled,F=S(z),I=(0,f.useMemo)(function(){return F?{disabled:!z,pressRectOffset:x,onLongPress:O,onPress:T,onPressIn:function(n){k(!D),null==h||h(n)},onPressOut:function(n){k(!1),null==y||y(n)},onResponderTerminationRequest_DEPRECATED:L,onStartShouldSetResponder_DEPRECATED:j}:null},[F,z,x,O,T,h,y,L,j,D]),N=(0,l.default)(I),W=(0,f.useMemo)(function(){return null==N?null:{onResponderGrant:function(n){N.onResponderGrant(n),null!=M&&M(n)},onResponderMove:function(n){N.onResponderMove(n),null!=w&&w(n)},onResponderRelease:function(n){N.onResponderRelease(n),null!=E&&E(n)},onResponderTerminate:function(n){N.onResponderTerminate(n),null!=C&&C(n)},onResponderTerminationRequest:N.onResponderTerminationRequest,onStartShouldSetResponder:N.onStartShouldSetResponder}},[N,M,w,E,C]),A=null==_.selectionColor?null:(0,u.default)(_.selectionColor),V=_.style,B=_.numberOfLines;return null==B||B>=0||(console.error("'numberOfLines' in must be a non-negative number, received: "+B+". 
The value will be set to 0."),B=0),(0,f.useContext)(p.default)?f.createElement(r(d[12]).NativeVirtualText,(0,n.default)({},_,W,{isHighlighted:H,numberOfLines:B,selectionColor:A,style:V,ref:R})):f.createElement(p.default.Provider,{value:!0},f.createElement(r(d[12]).NativeText,(0,n.default)({},_,W,{accessible:!1!==v,allowFontScaling:!1!==P,ellipsizeMode:null!=b?b:'tail',isHighlighted:H,numberOfLines:B,selectionColor:A,style:V,ref:R})))});function S(n){var t=(0,f.useState)(n),s=(0,o.default)(t,2),l=s[0],u=s[1];return!l&&n&&u(n),l}P.displayName='Text',P.propTypes=s.default,m.exports=P},589,[407,436,430,508,590,604,605,578,546,581,534,425,612]); +__d(function(g,r,i,a,m,e,d){'use strict';var o=r(d[0])(r(d[1]));m.exports={ellipsizeMode:r(d[2]).oneOf(['head','middle','tail','clip']),numberOfLines:r(d[2]).number,textBreakStrategy:r(d[2]).oneOf(['simple','highQuality','balanced']),onLayout:r(d[2]).func,onPress:r(d[2]).func,onLongPress:r(d[2]).func,pressRetentionOffset:r(d[3]),selectable:r(d[2]).bool,selectionColor:r(d[4]),suppressHighlighting:r(d[2]).bool,style:o,testID:r(d[2]).string,nativeID:r(d[2]).string,allowFontScaling:r(d[2]).bool,maxFontSizeMultiplier:r(d[2]).number,accessible:r(d[2]).bool,adjustsFontSizeToFit:r(d[2]).bool,minimumFontScale:r(d[2]).number,disabled:r(d[2]).bool,dataDetectorType:r(d[2]).oneOf(['phoneNumber','link','email','none','all'])}},590,[591,593,596,603,600]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(n){var t=r(d[0])(n);return function(n,o,c,u){var f=n;n[o]&&((f={})[o]=r(d[1])(n[o]));for(var v=arguments.length,p=new Array(v>4?v-4:0),s=4;s5?v-5:0),j=5;j4?s-4:0),p=4;p3?f-3:0),p=3;p0?t._pressDelayTimeout=setTimeout(function(){t._receiveSignal('DELAY',E)},n):t._receiveSignal('DELAY',E);var R=I(t._config.delayLongPress,10,500-n);t._longPressDelayTimeout=setTimeout(function(){t._handleLongPress(E)},R+n)},onResponderMove:function(E){var n=t._config.onPressMove;null!=n&&n(E);var R=t._responderRegion;if(null!=R){var _=A(E);if(null==_)return t._cancelLongPressDelayTimeout(),void t._receiveSignal('LEAVE_PRESS_RECT',E);if(null!=t._touchActivatePosition){var o=t._touchActivatePosition.pageX-_.pageX,l=t._touchActivatePosition.pageY-_.pageY;Math.hypot(o,l)>10&&t._cancelLongPressDelayTimeout()}t._isTouchWithinResponderRegion(_,R)?t._receiveSignal('ENTER_PRESS_RECT',E):(t._cancelLongPressDelayTimeout(),t._receiveSignal('LEAVE_PRESS_RECT',E))}},onResponderRelease:function(E){t._receiveSignal('RESPONDER_RELEASE',E)},onResponderTerminate:function(E){t._receiveSignal('RESPONDER_TERMINATED',E)},onResponderTerminationRequest:function(){var E=t._config.cancelable;if(null==E){var n=t._config.onResponderTerminationRequest_DEPRECATED;return null==n||n()}return E},onClick:function(E){var n=t._config,R=n.onPress,_=n.disabled;null!=R&&!0!==_&&R(E)}},_='ios'===l.default.OS||'android'===l.default.OS?null:{onMouseEnter:function(E){if((0,r(d[10]).isHoverEnabled)()){t._isHovered=!0,t._cancelHoverOutDelayTimeout();var n=t._config.onHoverIn;if(null!=n){var R=I(t._config.delayHoverIn);R>0?(E.persist(),t._hoverInDelayTimeout=setTimeout(function(){n(E)},R)):n(E)}}},onMouseLeave:function(E){if(t._isHovered){t._isHovered=!1,t._cancelHoverInDelayTimeout();var n=t._config.onHoverOut;if(null!=n){var R=I(t._config.delayHoverOut);R>0?(E.persist(),t._hoverInDelayTimeout=setTimeout(function(){n(E)},R)):n(E)}}}};return(0,E.default)({},n,R,_)}},{key:"_receiveSignal",value:function(E,t){var n,_=this._touchState,l=null==(n=S[_])?void 
0:n[E];null==this._responderID&&'RESPONDER_RELEASE'===E||((0,R.default)(null!=l&&'ERROR'!==l,'Pressability: Invalid signal `%s` for state `%s` on responder: %s',E,_,'number'==typeof this._responderID?this._responderID:'<>'),_!==l&&(null!=t.nativeEvent.timestamp&&o.default.emitEvent(function(){return{signal:E,touchDelayMs:Date.now()-t.nativeEvent.timestamp}}),this._performTransitionSideEffects(_,l,E,t),this._touchState=l))}},{key:"_performTransitionSideEffects",value:function(E,t,n,R){c(n)&&(this._touchActivatePosition=null,this._cancelLongPressDelayTimeout());var o='NOT_RESPONDER'===E&&'RESPONDER_INACTIVE_PRESS_IN'===t,u=!P(E)&&P(t);if((o||u)&&this._measureResponderRegion(),O(E)&&'LONG_PRESS_DETECTED'===n){var s=this._config.onLongPress;null!=s&&s(R)}var S=T(E),D=T(t);if(!S&&D?this._activate(R):S&&!D&&this._deactivate(R),O(E)&&'RESPONDER_RELEASE'===n){D||S||(this._activate(R),this._deactivate(R));var N=this._config,h=N.onLongPress,f=N.onPress,v=N.android_disableSound;if(null!=f)null!=h&&'RESPONDER_ACTIVE_LONG_PRESS_IN'===E&&this._shouldLongPressCancelPress()||('android'===l.default.OS&&!0!==v&&_.default.playTouchSound(),f(R))}this._cancelPressDelayTimeout()}},{key:"_activate",value:function(E){var t=this._config.onPressIn,n=A(E),R=n.pageX,_=n.pageY;this._touchActivatePosition={pageX:R,pageY:_},this._touchActivateTime=Date.now(),null!=t&&t(E)}},{key:"_deactivate",value:function(E){var t=this._config.onPressOut;if(null!=t){var n,R=I(this._config.minPressDuration,0,130),_=Date.now()-(null!=(n=this._touchActivateTime)?n:0),o=Math.max(R-_,I(this._config.delayPressOut));o>0?(E.persist(),this._pressOutDelayTimeout=setTimeout(function(){t(E)},o)):t(E)}this._touchActivateTime=null}},{key:"_measureResponderRegion",value:function(){null!=this._responderID&&('number'==typeof this._responderID?u.default.measure(this._responderID,this._measureCallback):this._responderID.measure(this._measureCallback))}},{key:"_isTouchWithinResponderRegion",value:function(E,t){var n,R,_,o,l=(0,r(d[11]).normalizeRect)(this._config.hitSlop),u=(0,r(d[11]).normalizeRect)(this._config.pressRectOffset),s=t.bottom,S=t.left,T=t.right,P=t.top;return null!=l&&(null!=l.bottom&&(s+=l.bottom),null!=l.left&&(S-=l.left),null!=l.right&&(T+=l.right),null!=l.top&&(P-=l.top)),s+=null!=(n=null==u?void 0:u.bottom)?n:D,S-=null!=(R=null==u?void 0:u.left)?R:N,T+=null!=(_=null==u?void 0:u.right)?_:h,P-=null!=(o=null==u?void 0:u.top)?o:f,E.pageX>S&&E.pageXP&&E.pageY1&&void 0!==arguments[1]?arguments[1]:0,n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0;return Math.max(t,null!=E?E:n)}e.default=v;var A=function(E){var t=E.nativeEvent,n=t.changedTouches,R=t.touches;return null!=R&&R.length>0?R[0]:null!=n&&n.length>0?n[0]:E.nativeEvent}},606,[407,436,402,403,425,607,609,426,450,534,610,611]); +__d(function(g,r,i,a,m,e,d){var u=r(d[0])(r(d[1])),o={playTouchSound:function(){u.default&&u.default.playTouchSound()}};m.exports=o},607,[407,608]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var 
p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('SoundManager');e.default=n},608,[428]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),s=new((function(){function s(){(0,t.default)(this,s),this._listeners=[]}return(0,n.default)(s,[{key:"addListener",value:function(t){this._listeners.push(t)}},{key:"removeListener",value:function(t){var n=this._listeners.indexOf(t);n>-1&&this._listeners.splice(n,1)}},{key:"emitEvent",value:function(t){if(0!==this._listeners.length){var n=t();this._listeners.forEach(function(t){return t(n)})}}}]),s})());e.default=s},609,[407,402,403]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.isHoverEnabled=function(){return n};var n=!1;if('web'===r(d[0])(r(d[1])).default.OS&&Boolean('undefined'!=typeof window&&window.document&&window.document.createElement)){var t=0,o=function(){t=Date.now(),n&&(n=!1)};document.addEventListener('touchstart',o,!0),document.addEventListener('touchmove',o,!0),document.addEventListener('mousemove',function(){n||Date.now()-t<1e3||(n=!0)},!0)}},610,[407,426]); +__d(function(g,r,i,a,m,e,d){function t(t){return{bottom:t,left:t,right:t,top:t}}Object.defineProperty(e,"__esModule",{value:!0}),e.createSquare=t,e.normalizeRect=function(n){return'number'==typeof n?t(n):n}},611,[]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.NativeVirtualText=e.NativeText=void 0;var t=r(d[0])(r(d[1])),l=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=(0,o.default)('RCTText',function(){return{validAttributes:(0,t.default)({},l.default.UIView,{isHighlighted:!0,numberOfLines:!0,ellipsizeMode:!0,allowFontScaling:!0,maxFontSizeMultiplier:!0,disabled:!0,selectable:!0,selectionColor:!0,adjustsFontSizeToFit:!0,minimumFontScale:!0,textBreakStrategy:!0,onTextLayout:!0,onInlineViewLayout:!0,dataDetectorType:!0,android_hyphenationFrequency:!0}),directEventTypes:{topTextLayout:{registrationName:'onTextLayout'},topInlineViewLayout:{registrationName:'onInlineViewLayout'}},uiViewClassName:'RCTText'}});e.NativeText=u;var s=g.RN$Bridgeless||n.default.hasViewManagerConfig('RCTVirtualText')?(0,o.default)('RCTVirtualText',function(){return{validAttributes:(0,t.default)({},l.default.UIView,{isHighlighted:!0,maxFontSizeMultiplier:!0}),uiViewClassName:'RCTVirtualText'}}):u;e.NativeVirtualText=s},612,[407,436,613,450,586]); +__d(function(g,r,i,a,m,e,d){'use strict';var s=r(d[0])(r(d[1])),t={pointerEvents:!0,accessible:!0,accessibilityActions:!0,accessibilityLabel:!0,accessibilityLiveRegion:!0,accessibilityRole:!0,accessibilityState:!0,accessibilityValue:!0,accessibilityHint:!0,importantForAccessibility:!0,nativeID:!0,testID:!0,renderToHardwareTextureAndroid:!0,shouldRasterizeIOS:!0,onLayout:!0,onAccessibilityAction:!0,onAccessibilityTap:!0,onMagicTap:!0,onAccessibilityEscape:!0,collapsable:!0,needsOffscreenAlphaCompositing:!0,style:r(d[0])(r(d[2])).default},c={UIView:t,RCTView:(0,s.default)({},t,{removeClippedSubviews:!0})};m.exports=c},613,[407,436,557]); +__d(function(g,r,i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),s=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),n=r(d[0])(r(d[5])),l=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),p=r(d[0])(r(d[8])),u=r(d[0])(r(d[9])),f=r(d[0])(r(d[10])),h=(r(d[0])(r(d[11])),r(d[0])(r(d[12]))),b=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof 
e)return{default:e};var s=P(t);if(s&&s.has(e))return s.get(e);var o={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in e)if("default"!==l&&Object.prototype.hasOwnProperty.call(e,l)){var c=n?Object.getOwnPropertyDescriptor(e,l):null;c&&(c.get||c.set)?Object.defineProperty(o,l,c):o[l]=e[l]}o.default=e,s&&s.set(e,o);return o})(r(d[13])),y=r(d[0])(r(d[14])),v=["onBlur","onFocus"];function P(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,s=new WeakMap;return(P=function(e){return e?s:t})(e)}function F(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var O=(function(h){(0,n.default)(R,h);var y,P,O=(y=R,P=F(),function(){var e,t=(0,c.default)(y);if(P){var s=(0,c.default)(this).constructor;e=Reflect.construct(t,arguments,s)}else e=t.apply(this,arguments);return(0,l.default)(this,e)});function R(){var e;(0,s.default)(this,R);for(var t=arguments.length,o=new Array(t),n=0;n=23};var S='android'===f.default.OS?function(e,t){return t&&O.canUseNativeForeground()?{nativeForegroundAndroid:e}:{nativeBackgroundAndroid:e}}:function(e,t){return null};O.displayName='TouchableNativeFeedback',m.exports=O},614,[407,436,508,402,403,417,419,422,606,456,426,579,546,534,425,580]); +__d(function(g,r,i,a,m,_e,d){var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),s=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),n=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),p=r(d[0])(r(d[8])),u=r(d[0])(r(d[9])),f=r(d[0])(r(d[10])),y=r(d[0])(r(d[11])),h=r(d[0])(r(d[12])),b=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var s=P(e);if(s&&s.has(t))return s.get(t);var o={},n=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var l=n?Object.getOwnPropertyDescriptor(t,c):null;l&&(l.get||l.set)?Object.defineProperty(o,c,l):o[c]=t[c]}o.default=t,s&&s.set(t,o);return o})(r(d[13])),v=["onBlur","onFocus"];function P(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,s=new WeakMap;return(P=function(t){return t?s:e})(t)}function O(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var F=(function(P){(0,n.default)(w,P);var F,_,R=(F=w,_=O(),function(){var t,e=(0,l.default)(F);if(_){var s=(0,l.default)(this).constructor;t=Reflect.construct(e,arguments,s)}else t=e.apply(this,arguments);return(0,c.default)(this,t)});function w(){var t;(0,s.default)(this,w);for(var e=arguments.length,o=new Array(e),n=0;n1&&void 0!==arguments[1]?arguments[1]:{}).iterations;return t},event:r(d[5]).event,createAnimatedComponent:r(d[6]),attachNativeEvent:r(d[7]).attachNativeEvent,forkEvent:r(d[5]).forkEvent,unforkEvent:r(d[5]).unforkEvent,Event:r(d[7]).AnimatedEvent}},617,[436,618,627,622,624,628,644,643]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}function e(t){var e=new Set;!(function t(n){'function'==typeof 
n.update?e.add(n):n.__getChildren().forEach(t)})(t),e.forEach(function(t){return t.update()})}var n=(function(n){r(d[3])(_,n);var s,u,o=(s=_,u=t(),function(){var t,e=r(d[0])(s);if(u){var n=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t){var e;if(r(d[4])(this,_),e=o.call(this),'number'!=typeof t)throw new Error('AnimatedValue: Attempting to set value to undefined');return e._startingValue=e._value=t,e._offset=0,e._animation=null,e}return r(d[5])(_,[{key:"__detach",value:function(){var t=this;this.__isNative&&r(d[2]).API.getValue(this.__getNativeTag(),function(e){t._value=e}),this.stopAnimation(),r(d[6])(r(d[0])(_.prototype),"__detach",this).call(this)}},{key:"__getValue",value:function(){return this._value+this._offset}},{key:"setValue",value:function(t){var e,n,s=this;this._animation&&(this._animation.stop(),this._animation=null),this._updateValue(t,!this.__isNative),this.__isNative&&(e=this.__getNativeTag().toString(),n=function(){r(d[2]).API.setAnimatedNodeValue(s.__getNativeTag(),t)},r(d[2]).API.setWaitingForIdentifier(e),n(),r(d[2]).API.unsetWaitingForIdentifier(e))}},{key:"setOffset",value:function(t){this._offset=t,this.__isNative&&r(d[2]).API.setAnimatedNodeOffset(this.__getNativeTag(),t)}},{key:"flattenOffset",value:function(){this._value+=this._offset,this._offset=0,this.__isNative&&r(d[2]).API.flattenAnimatedNodeOffset(this.__getNativeTag())}},{key:"extractOffset",value:function(){this._offset+=this._value,this._value=0,this.__isNative&&r(d[2]).API.extractAnimatedNodeOffset(this.__getNativeTag())}},{key:"stopAnimation",value:function(t){this.stopTracking(),this._animation&&this._animation.stop(),this._animation=null,t&&t(this.__getValue())}},{key:"resetAnimation",value:function(t){this.stopAnimation(t),this._value=this._startingValue,this.__isNative&&r(d[2]).API.setAnimatedNodeValue(this.__getNativeTag(),this._startingValue)}},{key:"_onAnimatedValueUpdateReceived",value:function(t){this._updateValue(t,!1)}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"animate",value:function(t,e){var n=this,s=null;t.__isInteraction&&(s=r(d[8]).createInteractionHandle());var u=this._animation;this._animation&&this._animation.stop(),this._animation=t,t.start(this._value,function(t){n._updateValue(t,!0)},function(t){n._animation=null,null!==s&&r(d[8]).clearInteractionHandle(s),e&&e(t)},u,this)}},{key:"stopTracking",value:function(){this._tracking&&this._tracking.__detach(),this._tracking=null}},{key:"track",value:function(t){this.stopTracking(),this._tracking=t}},{key:"_updateValue",value:function(t,n){if(void 0===t)throw new Error('AnimatedValue: Attempting to set value to undefined');this._value=t,n&&e(this),r(d[6])(r(d[0])(_.prototype),"__callListeners",this).call(this,this.__getValue())}},{key:"__getNativeConfig",value:function(){return{type:'value',value:this._value,offset:this._offset}}}]),_})(r(d[9]));m.exports=n},618,[422,419,619,417,402,403,489,622,625,623]); +__d(function(g,r,i,a,m,e,d){var t,n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),s=r(d[0])(r(d[5])),f='ios'===l.default.OS&&g.RN$Bridgeless?o.default:n.default,c=1,v=1,p=new Set,N=!1,b=[],A={getValue:function(t,n){(0,s.default)(f,'Native animated module is not 
available'),A.queueOperation(function(){f.getValue(t,n)})},setWaitingForIdentifier:function(t){p.add(t),N=!0},unsetWaitingForIdentifier:function(t){p.delete(t),0===p.size&&(N=!1,A.disableQueue())},disableQueue:function(){(0,s.default)(f,'Native animated module is not available'),'android'===l.default.OS&&f.startOperationBatch();for(var t=0,n=b.length;tn){if('identity'===u)return c;'clamp'===u&&(c=n)}return a===r?a:e===n?t<=e?a:r:(e===-1/0?c=-c:n===1/0?c-=e:c=(c-e)/(n-e),c=i(c),a===-1/0?c=-c:r===1/0?c+=a:c=c*(r-a)+a,c)}function r(t){var e=_r(d[3])(t);return null===e||'number'!=typeof e?t:"rgba("+((4278190080&(e=e||0))>>>24)+", "+((16711680&e)>>>16)+", "+((65280&e)>>>8)+", "+(255&e)/255+")"}var i=/[+-]?(?:\d+\.?\d*|\.\d+)(?:[eE][+-]?\d+)?/g;function o(t){var e=t.outputRange;_r(d[2])(e.length>=2,'Bad output range'),u(e=e.map(r));var a=e[0].match(i).map(function(){return[]});e.forEach(function(t){t.match(i).forEach(function(t,e){a[e].push(+t)})});var o,c=e[0].match(i).map(function(e,r){return n(_r(d[4])({},t,{outputRange:a[r]}))}),l='string'==typeof(o=e[0])&&o.startsWith('rgb');return function(t){var n=0;return e[0].replace(i,function(){var e=+c[n++](t);return l&&(e=n<4?Math.round(e):Math.round(1e3*e)/1e3),String(e)})}}function u(t){for(var e=t[0].replace(i,''),n=1;n=t);++n);return n-1}function l(t){_r(d[2])(t.length>=2,'inputRange must have at least 2 elements');for(var e=1;e=t[e-1],'inputRange must be monotonically non-decreasing '+t)}function p(t,e){_r(d[2])(e.length>=2,t+' must have at least 2 elements'),_r(d[2])(2!==e.length||e[0]!==-1/0||e[1]!==1/0,t+'cannot be ]-infinity;+infinity[ '+e)}var f=(function(e){_r(d[5])(o,e);var a,r,i=(a=o,r=t(),function(){var t,e=_r(d[0])(a);if(r){var n=_r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return _r(d[1])(this,t)});function o(t,e){var a;return _r(d[6])(this,o),(a=i.call(this))._parent=t,a._config=e,a._interpolation=n(e),a}return _r(d[7])(o,[{key:"__makeNative",value:function(){this._parent.__makeNative(),_r(d[8])(_r(d[0])(o.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){var t=this._parent.__getValue();return _r(d[2])('number'==typeof t,'Cannot interpolate an input which is not a number.'),this._interpolation(t)}},{key:"interpolate",value:function(t){return new o(this,t)}},{key:"__attach",value:function(){this._parent.__addChild(this)}},{key:"__detach",value:function(){this._parent.__removeChild(this),_r(d[8])(_r(d[0])(o.prototype),"__detach",this).call(this)}},{key:"__transformDataType",value:function(t){return t.map(_r(d[9]).transformDataType)}},{key:"__getNativeConfig",value:function(){return{inputRange:this._config.inputRange,outputRange:this.__transformDataType(this._config.outputRange),extrapolateLeft:this._config.extrapolateLeft||this._config.extrapolate||'extend',extrapolateRight:this._config.extrapolateRight||this._config.extrapolate||'extend',type:'interpolation'}}}]),o})(_r(d[10]));f.__createInterpolation=n,m.exports=f},622,[422,419,425,547,436,417,402,403,489,619,623]); +__d(function(g,r,_i,a,m,_e,d){'use strict';function t(t,i){var n="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(n)return(n=n.call(t)).next.bind(n);if(Array.isArray(t)||(n=e(t))||i&&t&&"number"==typeof t.length){n&&(t=n);var o=0;return function(){return o>=t.length?{done:!0}:{done:!1,value:t[o++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function 
e(t,e){if(t){if("string"==typeof t)return i(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?i(t,e):void 0}}function i(t,e){(null==e||e>t.length)&&(e=t.length);for(var i=0,n=new Array(e);i0?setTimeout(h,0):setImmediate(h))}function h(){l=0;var f=o.size;c.forEach(function(n){return o.add(n)}),s.forEach(function(n){return o.delete(n)});var h=o.size;if(0!==f&&0===h?n.emit(t.Events.interactionComplete):0===f&&0!==h&&n.emit(t.Events.interactionStart),0===h)for(;u.hasTasksToProcess();)if(u.processNext(),p>0&&r(d[4]).getEventLoopRunningTime()>=p){v();break}c.clear(),s.clear()}m.exports=t},625,[407,414,425,626,437]); +__d(function(g,r,i,a,m,_e,d){'use strict';var e=(function(){function e(t){var u=t.onMoreTasks;r(d[0])(this,e),this._onMoreTasks=u,this._queueStack=[{tasks:[],popable:!1}]}return r(d[1])(e,[{key:"enqueue",value:function(e){this._getCurrentQueue().push(e)}},{key:"enqueueTasks",value:function(e){var t=this;e.forEach(function(e){return t.enqueue(e)})}},{key:"cancelTasks",value:function(e){this._queueStack=this._queueStack.map(function(t){return r(d[2])({},t,{tasks:t.tasks.filter(function(t){return-1===e.indexOf(t)})})}).filter(function(e,t){return e.tasks.length>0||0===t})}},{key:"hasTasksToProcess",value:function(){return this._getCurrentQueue().length>0}},{key:"processNext",value:function(){var e=this._getCurrentQueue();if(e.length){var t=e.shift();try{'object'==typeof t&&t.gen?this._genPromise(t):'object'==typeof t&&t.run?t.run():(r(d[3])('function'==typeof t,'Expected Function, SimpleTask, or PromiseTask, but got:\n'+JSON.stringify(t,null,2)),t())}catch(e){throw e.message='TaskQueue: Error with task '+(t.name||'')+': '+e.message,e}}}},{key:"_getCurrentQueue",value:function(){var e=this._queueStack.length-1,t=this._queueStack[e];return t.popable&&0===t.tasks.length&&this._queueStack.length>1?(this._queueStack.pop(),this._getCurrentQueue()):t.tasks}},{key:"_genPromise",value:function(e){var t=this;this._queueStack.push({tasks:[],popable:!1});var u=this._queueStack.length-1,s=this._queueStack[u];e.gen().then(function(){s.popable=!0,t.hasTasksToProcess()&&t._onMoreTasks()}).catch(function(t){throw t.message="TaskQueue: Error resolving Promise in task "+e.name+": "+t.message,t}).done()}}]),e})();m.exports=e},626,[402,403,436,425]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=1,n=(function(n){r(d[2])(o,n);var s,u,f=(s=o,u=t(),function(){var t,e=r(d[0])(s);if(u){var n=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function o(t){var e;r(d[3])(this,o),e=f.call(this);var n=t||{x:0,y:0};return'number'==typeof n.x&&'number'==typeof n.y?(e.x=new(r(d[4]))(n.x),e.y=new(r(d[4]))(n.y)):(r(d[5])(n.x instanceof r(d[4])&&n.y instanceof r(d[4]),"AnimatedValueXY must be initialized with an object of numbers or AnimatedValues."),e.x=n.x,e.y=n.y),e._listeners={},e}return 
r(d[6])(o,[{key:"setValue",value:function(t){this.x.setValue(t.x),this.y.setValue(t.y)}},{key:"setOffset",value:function(t){this.x.setOffset(t.x),this.y.setOffset(t.y)}},{key:"flattenOffset",value:function(){this.x.flattenOffset(),this.y.flattenOffset()}},{key:"extractOffset",value:function(){this.x.extractOffset(),this.y.extractOffset()}},{key:"__getValue",value:function(){return{x:this.x.__getValue(),y:this.y.__getValue()}}},{key:"resetAnimation",value:function(t){this.x.resetAnimation(),this.y.resetAnimation(),t&&t(this.__getValue())}},{key:"stopAnimation",value:function(t){this.x.stopAnimation(),this.y.stopAnimation(),t&&t(this.__getValue())}},{key:"addListener",value:function(t){var n=this,s=String(e++),u=function(e){e.value;t(n.__getValue())};return this._listeners[s]={x:this.x.addListener(u),y:this.y.addListener(u)},s}},{key:"removeListener",value:function(t){this.x.removeListener(this._listeners[t].x),this.y.removeListener(this._listeners[t].y),delete this._listeners[t]}},{key:"removeAllListeners",value:function(){this.x.removeAllListeners(),this.y.removeAllListeners(),this._listeners={}}},{key:"getLayout",value:function(){return{left:this.x,top:this.y}}},{key:"getTranslateTransform",value:function(){return[{translateX:this.x},{translateY:this.y}]}}]),o})(r(d[7]));m.exports=n},627,[422,419,417,402,618,425,403,623]); +__d(function(g,r,_i,_a,m,e,d){'use strict';var n=function(n,t){return n&&t.onComplete?function(){t.onComplete&&t.onComplete.apply(t,arguments),n&&n.apply(void 0,arguments)}:n||t.onComplete},t=function(n,t,i){if(n instanceof r(d[6])){var o=r(d[7])({},t),u=r(d[7])({},t);for(var s in t){var c=t[s],f=c.x,v=c.y;void 0!==f&&void 0!==v&&(o[s]=f,u[s]=v)}var p=i(n.x,o),l=i(n.y,u);return a([p,l],{stopTogether:!1})}return null},i=function i(o,a){var u=function(t,i,o){o=n(o,i);var a=t,u=i;a.stopTracking(),i.toValue instanceof r(d[8])?a.track(new(r(d[9]))(a,i.toValue,r(d[11]),u,o)):a.animate(new(r(d[11]))(u),o)};return t(o,a,i)||{start:function(n){u(o,a,n)},stop:function(){o.stopAnimation()},reset:function(){o.resetAnimation()},_startNativeLoop:function(n){var t=r(d[7])({},a,{iterations:n});u(o,t)},_isUsingNativeDriver:function(){return a.useNativeDriver||!1}}},o=function(n){var t=0;return{start:function(i){0===n.length?i&&i({finished:!0}):n[t].start(function o(a){a.finished&&++t!==n.length?n[t].start(o):i&&i(a)})},stop:function(){t1&&void 0!==arguments[1]?arguments[1]:{},i=t.iterations,o=void 0===i?-1:i,a=t.resetBeforeIteration,u=void 0===a||a,s=!1,c=0;return{start:function(t){n&&0!==o?n._isUsingNativeDriver()?n._startNativeLoop(o):(function i(){var a=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{finished:!0};s||c===o||!1===a.finished?t&&t(a):(c++,u&&n.reset(),n.start(i))})():t&&t({finished:!0})},stop:function(){s=!0,n.stop()},reset:function(){c=0,s=!1,n.reset()},_startNativeLoop:function(){throw new Error('Loops run using the native driver cannot contain Animated.loop animations')},_isUsingNativeDriver:function(){return n._isUsingNativeDriver()}}},event:function(n,t){var i=new(r(d[14]).AnimatedEvent)(n,t);return i.__isNative?i:i.__getHandler()},createAnimatedComponent:r(d[16]),attachNativeEvent:r(d[14]).attachNativeEvent,forkEvent:function(n,t){return n?n instanceof r(d[14]).AnimatedEvent?(n.__addListener(t),n):function(){'function'==typeof n&&n.apply(void 0,arguments),t.apply(void 0,arguments)}:t},unforkEvent:function(n,t){n&&n instanceof 
r(d[14]).AnimatedEvent&&n.__removeListener(t)},Event:r(d[14]).AnimatedEvent}},628,[629,630,631,632,633,634,627,436,624,635,636,639,642,618,643,622,644]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,a,_=(n=u,a=t(),function(){var t,e=r(d[0])(n);if(a){var _=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,_)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e){var n;return r(d[3])(this,u),(n=_.call(this))._a='number'==typeof t?new(r(d[4]))(t):t,n._b='number'==typeof e?new(r(d[4]))(e):e,n}return r(d[5])(u,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return this._a.__getValue()+this._b.__getValue()}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'addition',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),u})(r(d[8]));m.exports=e},629,[422,419,417,402,618,403,489,622,623]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,a,_=(n=u,a=t(),function(){var t,e=r(d[0])(n);if(a){var _=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,_)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e){var n;return r(d[3])(this,u),(n=_.call(this))._a='number'==typeof t?new(r(d[4]))(t):t,n._b='number'==typeof e?new(r(d[4]))(e):e,n}return r(d[5])(u,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return this._a.__getValue()-this._b.__getValue()}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'subtraction',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),u})(r(d[8]));m.exports=e},630,[422,419,417,402,618,403,489,622,623]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(_,e);var n,o,a=(n=_,o=t(),function(){var t,e=r(d[0])(n);if(o){var a=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,a)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t,e){var n;return 
r(d[3])(this,_),(n=a.call(this))._warnedAboutDivideByZero=!1,(0===e||e instanceof r(d[4])&&0===e.__getValue())&&console.error('Detected potential division by zero in AnimatedDivision'),n._a='number'==typeof t?new(r(d[5]))(t):t,n._b='number'==typeof e?new(r(d[5]))(e):e,n}return r(d[6])(_,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[7])(r(d[0])(_.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){var t=this._a.__getValue(),e=this._b.__getValue();return 0===e?(this._warnedAboutDivideByZero||(console.error('Detected division by zero in AnimatedDivision'),this._warnedAboutDivideByZero=!0),0):(this._warnedAboutDivideByZero=!1,t/e)}},{key:"interpolate",value:function(t){return new(r(d[8]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[7])(r(d[0])(_.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'division',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),_})(r(d[9]));m.exports=e},631,[422,419,417,402,624,618,403,489,622,623]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,a,_=(n=u,a=t(),function(){var t,e=r(d[0])(n);if(a){var _=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,_)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e){var n;return r(d[3])(this,u),(n=_.call(this))._a='number'==typeof t?new(r(d[4]))(t):t,n._b='number'==typeof e?new(r(d[4]))(e):e,n}return r(d[5])(u,[{key:"__makeNative",value:function(){this._a.__makeNative(),this._b.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return this._a.__getValue()*this._b.__getValue()}},{key:"interpolate",value:function(t){return new(r(d[7]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this),this._b.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),this._b.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'multiplication',input:[this._a.__getNativeTag(),this._b.__getNativeTag()]}}}]),u})(r(d[8]));m.exports=e},632,[422,419,417,402,618,403,489,622,623]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(o,e);var u,n,a=(u=o,n=t(),function(){var t,e=r(d[0])(u);if(n){var a=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,a)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function o(t,e){var u;return r(d[3])(this,o),(u=a.call(this))._a=t,u._modulus=e,u}return r(d[4])(o,[{key:"__makeNative",value:function(){this._a.__makeNative(),r(d[5])(r(d[0])(o.prototype),"__makeNative",this).call(this)}},{key:"__getValue",value:function(){return(this._a.__getValue()%this._modulus+this._modulus)%this._modulus}},{key:"interpolate",value:function(t){return 
new(r(d[6]))(this,t)}},{key:"__attach",value:function(){this._a.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),r(d[5])(r(d[0])(o.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'modulus',input:this._a.__getNativeTag(),modulus:this._modulus}}}]),o})(r(d[7]));m.exports=e},633,[422,419,417,402,403,489,622,623]); +__d(function(g,r,i,_a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(_,e);var a,n,u=(a=_,n=t(),function(){var t,e=r(d[0])(a);if(n){var u=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,u)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t,e,a){var n;return r(d[3])(this,_),(n=u.call(this))._a=t,n._min=e,n._max=a,n._value=n._lastValue=n._a.__getValue(),n}return r(d[4])(_,[{key:"__makeNative",value:function(){this._a.__makeNative(),r(d[5])(r(d[0])(_.prototype),"__makeNative",this).call(this)}},{key:"interpolate",value:function(t){return new(r(d[6]))(this,t)}},{key:"__getValue",value:function(){var t=this._a.__getValue(),e=t-this._lastValue;return this._lastValue=t,this._value=Math.min(Math.max(this._value+e,this._min),this._max),this._value}},{key:"__attach",value:function(){this._a.__addChild(this)}},{key:"__detach",value:function(){this._a.__removeChild(this),r(d[5])(r(d[0])(_.prototype),"__detach",this).call(this)}},{key:"__getNativeConfig",value:function(){return{type:'diffclamp',input:this._a.__getNativeTag(),min:this._min,max:this._max}}}]),_})(r(d[7]));m.exports=e},634,[422,419,417,402,403,489,622,623]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(u,e);var n,_,o=(n=u,_=t(),function(){var t,e=r(d[0])(n);if(_){var o=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,o)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function u(t,e,n,_,s){var l;return r(d[3])(this,u),(l=o.call(this))._value=t,l._parent=e,l._animationClass=n,l._animationConfig=_,l._useNativeDriver=r(d[4]).shouldUseNativeDriver(_),l._callback=s,l.__attach(),l}return r(d[5])(u,[{key:"__makeNative",value:function(){this.__isNative=!0,this._parent.__makeNative(),r(d[6])(r(d[0])(u.prototype),"__makeNative",this).call(this),this._value.__makeNative()}},{key:"__getValue",value:function(){return this._parent.__getValue()}},{key:"__attach",value:function(){this._parent.__addChild(this),this._useNativeDriver&&this.__makeNative()}},{key:"__detach",value:function(){this._parent.__removeChild(this),r(d[6])(r(d[0])(u.prototype),"__detach",this).call(this)}},{key:"update",value:function(){this._value.animate(new this._animationClass(r(d[7])({},this._animationConfig,{toValue:this._animationConfig.toValue.__getValue()})),this._callback)}},{key:"__getNativeConfig",value:function(){var t=new this._animationClass(r(d[7])({},this._animationConfig,{toValue:void 
0})).__getNativeAnimationConfig();return{type:'tracking',animationId:r(d[4]).generateNewAnimationId(),animationConfig:t,toValue:this._parent.__getNativeTag(),value:this._value.__getNativeTag()}}}]),u})(r(d[8]));m.exports=e},635,[422,419,417,402,619,403,489,436,624]); +__d(function(g,r,i,a,_m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var s=(function(s){r(d[2])(l,s);var e,n,o=(e=l,n=t(),function(){var t,s=r(d[0])(e);if(n){var o=r(d[0])(this).constructor;t=Reflect.construct(s,arguments,o)}else t=s.apply(this,arguments);return r(d[1])(this,t)});function l(t){var s,e,n,h,_,u,f,c,m,v,p,y;if(r(d[3])(this,l),(m=o.call(this))._overshootClamping=null!=(s=t.overshootClamping)&&s,m._restDisplacementThreshold=null!=(e=t.restDisplacementThreshold)?e:.001,m._restSpeedThreshold=null!=(n=t.restSpeedThreshold)?n:.001,m._initialVelocity=null!=(h=t.velocity)?h:0,m._lastVelocity=null!=(_=t.velocity)?_:0,m._toValue=t.toValue,m._delay=null!=(u=t.delay)?u:0,m._useNativeDriver=r(d[4]).shouldUseNativeDriver(t),m.__isInteraction=null!=(f=t.isInteraction)?f:!m._useNativeDriver,m.__iterations=null!=(c=t.iterations)?c:1,void 0!==t.stiffness||void 0!==t.damping||void 0!==t.mass)r(d[5])(void 0===t.bounciness&&void 0===t.speed&&void 0===t.tension&&void 0===t.friction,'You can define one of bounciness/speed, tension/friction, or stiffness/damping/mass, but not more than one'),m._stiffness=null!=(v=t.stiffness)?v:100,m._damping=null!=(p=t.damping)?p:10,m._mass=null!=(y=t.mass)?y:1;else if(void 0!==t.bounciness||void 0!==t.speed){var V,T;r(d[5])(void 0===t.tension&&void 0===t.friction&&void 0===t.stiffness&&void 0===t.damping&&void 0===t.mass,'You can define one of bounciness/speed, tension/friction, or stiffness/damping/mass, but not more than one');var b=r(d[6]).fromBouncinessAndSpeed(null!=(V=t.bounciness)?V:8,null!=(T=t.speed)?T:12);m._stiffness=b.stiffness,m._damping=b.damping,m._mass=1}else{var M,D,P=r(d[6]).fromOrigamiTensionAndFriction(null!=(M=t.tension)?M:40,null!=(D=t.friction)?D:7);m._stiffness=P.stiffness,m._damping=P.damping,m._mass=1}return r(d[5])(m._stiffness>0,'Stiffness value must be greater than 0'),r(d[5])(m._damping>0,'Damping value must be greater than 0'),r(d[5])(m._mass>0,'Mass value must be greater than 0'),m}return r(d[7])(l,[{key:"__getNativeAnimationConfig",value:function(){var t;return{type:'spring',overshootClamping:this._overshootClamping,restDisplacementThreshold:this._restDisplacementThreshold,restSpeedThreshold:this._restSpeedThreshold,stiffness:this._stiffness,damping:this._damping,mass:this._mass,initialVelocity:null!=(t=this._initialVelocity)?t:this._lastVelocity,toValue:this._toValue,iterations:this.__iterations}}},{key:"start",value:function(t,s,e,n,o){var h=this;if(this.__active=!0,this._startPosition=t,this._lastPosition=this._startPosition,this._onUpdate=s,this.__onEnd=e,this._lastTime=Date.now(),this._frameTime=0,n instanceof l){var _=n.getInternalState();this._lastPosition=_.lastPosition,this._lastVelocity=_.lastVelocity,this._initialVelocity=this._lastVelocity,this._lastTime=_.lastTime}var 
u=function(){h._useNativeDriver?h.__startNativeAnimation(o):h.onUpdate()};this._delay?this._timeout=setTimeout(u,this._delay):u()}},{key:"getInternalState",value:function(){return{lastPosition:this._lastPosition,lastVelocity:this._lastVelocity,lastTime:this._lastTime}}},{key:"onUpdate",value:function(){var t=Date.now();t>this._lastTime+64&&(t=this._lastTime+64);var s=(t-this._lastTime)/1e3;this._frameTime+=s;var e=this._damping,n=this._mass,o=this._stiffness,l=-this._initialVelocity,h=e/(2*Math.sqrt(o*n)),_=Math.sqrt(o/n),u=_*Math.sqrt(1-h*h),f=this._toValue-this._startPosition,c=0,m=0,v=this._frameTime;if(h<1){var p=Math.exp(-h*_*v);c=this._toValue-p*((l+h*_*f)/u*Math.sin(u*v)+f*Math.cos(u*v)),m=h*_*p*(Math.sin(u*v)*(l+h*_*f)/u+f*Math.cos(u*v))-p*(Math.cos(u*v)*(l+h*_*f)-u*f*Math.sin(u*v))}else{var y=Math.exp(-_*v);c=this._toValue-y*(f+(l+_*f)*v),m=y*(l*(v*_-1)+v*f*(_*_))}if(this._lastTime=t,this._lastPosition=c,this._lastVelocity=m,this._onUpdate(c),this.__active){var V=!1;this._overshootClamping&&0!==this._stiffness&&(V=this._startPositionthis._toValue:c18&&A<=44?p(A):h(A),s(2*M-M*M,v,.01));return{stiffness:n(x),damping:t(B)}}}},637,[]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=1,n=(function(){function n(){r(d[0])(this,n)}return r(d[1])(n,[{key:"start",value:function(t,n,e,o,_){}},{key:"stop",value:function(){this.__nativeId&&r(d[2]).API.stopAnimation(this.__nativeId)}},{key:"__getNativeAnimationConfig",value:function(){throw new Error('This animation type cannot be offloaded to native')}},{key:"__debouncedOnEnd",value:function(t){var n=this.__onEnd;this.__onEnd=null,n&&n(t)}},{key:"__startNativeAnimation",value:function(n){var e=t+":startAnimation";t+=1,r(d[2]).API.setWaitingForIdentifier(e);try{n.__makeNative(),this.__nativeId=r(d[2]).generateNewAnimationId(),r(d[2]).API.startAnimatingNode(this.__nativeId,n.__getNativeTag(),this.__getNativeAnimationConfig(),this.__debouncedOnEnd.bind(this))}catch(t){throw t}finally{r(d[2]).API.unsetWaitingForIdentifier(e)}}}]),n})();m.exports=n},638,[402,403,619]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e;function n(){if(!e){var t=r(d[2]);e=t.inOut(t.ease)}return e}var s=(function(e){r(d[3])(_,e);var s,o,u=(s=_,o=t(),function(){var t,e=r(d[0])(s);if(o){var n=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function _(t){var e,s,o,h,l,c;return r(d[4])(this,_),(c=u.call(this))._toValue=t.toValue,c._easing=null!=(e=t.easing)?e:n(),c._duration=null!=(s=t.duration)?s:500,c._delay=null!=(o=t.delay)?o:0,c.__iterations=null!=(h=t.iterations)?h:1,c._useNativeDriver=r(d[5]).shouldUseNativeDriver(t),c.__isInteraction=null!=(l=t.isInteraction)?l:!c._useNativeDriver,c}return r(d[6])(_,[{key:"__getNativeAnimationConfig",value:function(){for(var t=[],e=Math.round(this._duration/16.666666666666668),n=0;n=this._startTime+this._duration)return 0===this._duration?this._onUpdate(this._toValue):this._onUpdate(this._fromValue+this._easing(1)*(this._toValue-this._fromValue)),void 
this.__debouncedOnEnd({finished:!0});this._onUpdate(this._fromValue+this._easing((t-this._startTime)/this._duration)*(this._toValue-this._fromValue)),this.__active&&(this._animationFrame=requestAnimationFrame(this.onUpdate.bind(this)))}},{key:"stop",value:function(){r(d[7])(r(d[0])(_.prototype),"stop",this).call(this),this.__active=!1,clearTimeout(this._timeout),g.cancelAnimationFrame(this._animationFrame),this.__debouncedOnEnd({finished:!1})}}]),_})(r(d[8]));m.exports=s},639,[422,419,640,417,402,619,403,489,638]); +__d(function(g,r,i,a,m,e,d){'use strict';var n,u=(function(){function u(){r(d[0])(this,u)}return r(d[1])(u,null,[{key:"step0",value:function(n){return n>0?1:0}},{key:"step1",value:function(n){return n>=1?1:0}},{key:"linear",value:function(n){return n}},{key:"ease",value:function(t){return n||(n=u.bezier(.42,0,1,1)),n(t)}},{key:"quad",value:function(n){return n*n}},{key:"cubic",value:function(n){return n*n*n}},{key:"poly",value:function(n){return function(u){return Math.pow(u,n)}}},{key:"sin",value:function(n){return 1-Math.cos(n*Math.PI/2)}},{key:"circle",value:function(n){return 1-Math.sqrt(1-n*n)}},{key:"exp",value:function(n){return Math.pow(2,10*(n-1))}},{key:"elastic",value:function(){var n=(arguments.length>0&&void 0!==arguments[0]?arguments[0]:1)*Math.PI;return function(u){return 1-Math.pow(Math.cos(u*Math.PI/2),3)*Math.cos(u*n)}}},{key:"back",value:function(){var n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1.70158;return function(u){return u*u*((n+1)*u-n)}}},{key:"bounce",value:function(n){if(n<.36363636363636365)return 7.5625*n*n;if(n<.7272727272727273){var u=n-.5454545454545454;return 7.5625*u*u+.75}if(n<.9090909090909091){var t=n-.8181818181818182;return 7.5625*t*t+.9375}var o=n-.9545454545454546;return 7.5625*o*o+.984375}},{key:"bezier",value:function(n,u,t,o){return r(d[2])(n,u,t,o)}},{key:"in",value:function(n){return n}},{key:"out",value:function(n){return function(u){return 1-n(1-u)}}},{key:"inOut",value:function(n){return function(u){return u<.5?n(2*u)/2:1-n(2*(1-u))/2}}}]),u})();m.exports=u},640,[402,403,641]); +__d(function(g,r,_i,a,m,e,d){'use strict';var n=4,t=.001,u=1e-7,o=10,f=.1,i='function'==typeof Float32Array;function c(n,t){return 1-3*t+3*n}function v(n,t){return 3*t-6*n}function s(n){return 3*n}function w(n,t,u){return((c(t,u)*n+v(t,u))*n+s(t))*n}function l(n,t,u){return 3*c(t,u)*n*n+2*v(t,u)*n+s(t)}function y(n,t,f,i,c){var v,s,l=0,y=t,b=f;do{(v=w(s=y+(b-y)/2,i,c)-n)>0?b=s:y=s}while(Math.abs(v)>u&&++l=0&&n<=1&&o>=0&&o<=1))throw new Error('bezier x values must be in [0, 1] range');var v=i?new Float32Array(11):new Array(11);if(n!==u||o!==c)for(var s=0;s<11;++s)v[s]=w(s*f,n,o);function h(u){for(var i=0,c=1;10!==c&&v[c]<=u;++c)i+=f;var s=i+(u-v[--c])/(v[c+1]-v[c])*f,w=l(s,n,o);return w>=t?b(u,s,n,o):0===w?s:y(u,i,i+f,n,o)}return function(t){return n===u&&o===c?t:0===t?0:1===t?1:w(h(t),u,c)}}},641,[]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e=(function(e){r(d[2])(c,e);var n,s,o=(n=c,s=t(),function(){var t,e=r(d[0])(n);if(s){var o=r(d[0])(this).constructor;t=Reflect.construct(e,arguments,o)}else t=e.apply(this,arguments);return r(d[1])(this,t)});function c(t){var e,n,s,u;return 
r(d[3])(this,c),(u=o.call(this))._deceleration=null!=(e=t.deceleration)?e:.998,u._velocity=t.velocity,u._useNativeDriver=r(d[4]).shouldUseNativeDriver(t),u.__isInteraction=null!=(n=t.isInteraction)?n:!u._useNativeDriver,u.__iterations=null!=(s=t.iterations)?s:1,u}return r(d[5])(c,[{key:"__getNativeAnimationConfig",value:function(){return{type:'decay',deceleration:this._deceleration,velocity:this._velocity,iterations:this.__iterations}}},{key:"start",value:function(t,e,n,s,o){this.__active=!0,this._lastValue=t,this._fromValue=t,this._onUpdate=e,this.__onEnd=n,this._startTime=Date.now(),this._useNativeDriver?this.__startNativeAnimation(o):this._animationFrame=requestAnimationFrame(this.onUpdate.bind(this))}},{key:"onUpdate",value:function(){var t=Date.now(),e=this._fromValue+this._velocity/(1-this._deceleration)*(1-Math.exp(-(1-this._deceleration)*(t-this._startTime)));this._onUpdate(e),Math.abs(this._lastValue-e)<.1?this.__debouncedOnEnd({finished:!0}):(this._lastValue=e,this.__active&&(this._animationFrame=requestAnimationFrame(this.onUpdate.bind(this))))}},{key:"stop",value:function(){r(d[6])(r(d[0])(c.prototype),"stop",this).call(this),this.__active=!1,g.cancelAnimationFrame(this._animationFrame),this.__debouncedOnEnd({finished:!1})}}]),c})(r(d[7]));m.exports=e},642,[422,419,417,402,619,403,489,638]); +__d(function(g,r,i,a,m,e,d){'use strict';function t(t,n,s){var v=[];r(d[1])(s[0]&&s[0].nativeEvent,'Native driven events only support animated values contained inside `nativeEvent`.'),(function t(n,s){if(n instanceof r(d[0]))n.__makeNative(),v.push({nativeEventPath:s,animatedValueTag:n.__getNativeTag()});else if('object'==typeof n)for(var o in n)t(n[o],s.concat(o))})(s[0].nativeEvent,[]);var o=r(d[2]).findNodeHandle(t);return null!=o&&v.forEach(function(t){r(d[3]).API.addAnimatedEventToView(o,n,t)}),{detach:function(){null!=o&&v.forEach(function(t){r(d[3]).API.removeAnimatedEventFromView(o,n,t.animatedValueTag)})}}}var n=(function(){function n(t,s){r(d[4])(this,n),this._listeners=[],this._argMapping=t,null==s&&(console.warn('Animated.event now requires a second argument for options'),s={useNativeDriver:!1}),s.listener&&this.__addListener(s.listener),this._callListeners=this._callListeners.bind(this),this._attachedEvent=null,this.__isNative=r(d[3]).shouldUseNativeDriver(s)}return r(d[5])(n,[{key:"__addListener",value:function(t){this._listeners.push(t)}},{key:"__removeListener",value:function(t){this._listeners=this._listeners.filter(function(n){return n!==t})}},{key:"__attach",value:function(n,s){r(d[1])(this.__isNative,'Only native driven events need to be attached.'),this._attachedEvent=t(n,s,this._argMapping)}},{key:"__detach",value:function(t,n){r(d[1])(this.__isNative,'Only native driven events need to be detached.'),this._attachedEvent&&this._attachedEvent.detach()}},{key:"__getHandler",value:function(){var t=this;if(this.__isNative)return this._callListeners;return function(){for(var n=arguments.length,s=new Array(n),v=0;v1){for(var l=[],s=0;s1?Math.ceil(e.length/n):e.length}return 0},t._keyExtractor=function(e,n){var o,l=v(t.props.numColumns),s=null!=(o=t.props.keyExtractor)?o:r(d[10]).keyExtractor;return l>1?Array.isArray(e)?e.map(function(e,t){return s(e,n*l+t)}).join(':'):void r(d[11])(Array.isArray(e),"FlatList: Encountered internal consistency error, expected each item to consist of an array with 1-%s columns; instead, received a single item.",l):s(e,n)},t._renderer=function(){var 
e=t.props,o=e.ListItemComponent,l=e.renderItem,s=e.columnWrapperStyle,u=v(t.props.numColumns),c=o?'ListItemComponent':'renderItem',f=function(e){return o?h.createElement(o,e):l?l(e):null};return(0,n.default)({},c,function(e){if(u>1){var t=e.item,n=e.index;return r(d[11])(Array.isArray(t),'Expected array of items with numColumns > 1'),h.createElement(r(d[12]),{style:r(d[13]).compose(y.row,s)},t.map(function(t,o){var l=f({item:t,index:n*u+o,separators:e.separators});return null!=l?h.createElement(h.Fragment,{key:o},l):null}))}return f(e)})},t._checkProps(t.props),t.props.viewabilityConfigCallbackPairs?t._virtualizedListPairs=t.props.viewabilityConfigCallbackPairs.map(function(e){return{viewabilityConfig:e.viewabilityConfig,onViewableItemsChanged:t._createOnViewableItemsChanged(e.onViewableItemsChanged)}}):t.props.onViewableItemsChanged&&t._virtualizedListPairs.push({viewabilityConfig:t.props.viewabilityConfig,onViewableItemsChanged:t._createOnViewableItemsChanged(t.props.onViewableItemsChanged)}),t}return(0,l.default)(k,[{key:"scrollToEnd",value:function(e){this._listRef&&this._listRef.scrollToEnd(e)}},{key:"scrollToIndex",value:function(e){this._listRef&&this._listRef.scrollToIndex(e)}},{key:"scrollToItem",value:function(e){this._listRef&&this._listRef.scrollToItem(e)}},{key:"scrollToOffset",value:function(e){this._listRef&&this._listRef.scrollToOffset(e)}},{key:"recordInteraction",value:function(){this._listRef&&this._listRef.recordInteraction()}},{key:"flashScrollIndicators",value:function(){this._listRef&&this._listRef.flashScrollIndicators()}},{key:"getScrollResponder",value:function(){if(this._listRef)return this._listRef.getScrollResponder()}},{key:"getNativeScrollRef",value:function(){if(this._listRef)return this._listRef.getScrollRef()}},{key:"getScrollableNode",value:function(){if(this._listRef)return this._listRef.getScrollableNode()}},{key:"setNativeProps",value:function(e){this._listRef&&this._listRef.setNativeProps(e)}},{key:"componentDidUpdate",value:function(e){r(d[11])(e.numColumns===this.props.numColumns,"Changing numColumns on the fly is not supported. 
Change the key prop on FlatList when changing the number of columns to force a fresh render of the component."),r(d[11])(e.onViewableItemsChanged===this.props.onViewableItemsChanged,'Changing onViewableItemsChanged on the fly is not supported'),r(d[11])(!r(d[14])(e.viewabilityConfig,this.props.viewabilityConfig),'Changing viewabilityConfig on the fly is not supported'),r(d[11])(e.viewabilityConfigCallbackPairs===this.props.viewabilityConfigCallbackPairs,'Changing viewabilityConfigCallbackPairs on the fly is not supported'),this._checkProps(this.props)}},{key:"_checkProps",value:function(e){var t=e.getItem,n=e.getItemCount,o=e.horizontal,l=e.columnWrapperStyle,s=e.onViewableItemsChanged,u=e.viewabilityConfigCallbackPairs,c=v(this.props.numColumns);r(d[11])(!t&&!n,'FlatList does not support custom data formats.'),c>1?r(d[11])(!o,'numColumns does not support horizontal.'):r(d[11])(!l,'columnWrapperStyle not supported for single column lists'),r(d[11])(!(s&&u),"FlatList does not support setting both onViewableItemsChanged and viewabilityConfigCallbackPairs.")}},{key:"_pushMultiColumnViewable",value:function(e,n){var o,l=v(this.props.numColumns),s=null!=(o=this.props.keyExtractor)?o:r(d[10]).keyExtractor;n.item.forEach(function(o,u){r(d[11])(null!=n.index,'Missing index!');var c=n.index*l+u;e.push((0,t.default)({},n,{item:o,key:s(o,c),index:c}))})}},{key:"_createOnViewableItemsChanged",value:function(e){var t=this;return function(n){var o=v(t.props.numColumns);if(e)if(o>1){var l=[],s=[];n.viewableItems.forEach(function(e){return t._pushMultiColumnViewable(s,e)}),n.changed.forEach(function(e){return t._pushMultiColumnViewable(l,e)}),e({viewableItems:s,changed:l})}else e(n)}}},{key:"render",value:function(){var n,o=this.props,l=(o.numColumns,o.columnWrapperStyle,o.removeClippedSubviews),s=(0,e.default)(o,f);return h.createElement(r(d[15]),(0,t.default)({},s,{getItem:this._getItem,getItemCount:this._getItemCount,keyExtractor:this._keyExtractor,ref:this._captureRef,viewabilityConfigCallbackPairs:this._virtualizedListPairs,removeClippedSubviews:(n=l,null==n||n)},this._renderer()))}}]),k})(h.PureComponent),y=r(d[13]).create({row:{flexDirection:'row'}});m.exports=C},651,[407,508,436,652,402,403,417,419,422,534,653,425,579,578,571,654]); +__d(function(g,r,i,a,m,e,d){m.exports=function(t,n,o){return n in t?Object.defineProperty(t,n,{value:o,enumerable:!0,configurable:!0,writable:!0}):t[n]=o,t},m.exports.__esModule=!0,m.exports.default=m.exports},652,[]); +__d(function(g,r,i,a,m,e,d){'use strict';Object.defineProperty(e,"__esModule",{value:!0}),e.computeWindowedRenderLimits=function(n,s,o,u,c,h,v){var b=s(n);if(0===b)return c;var M=v.offset,y=v.velocity,x=v.visibleLength,p=Math.max(0,M),w=p+x,O=(u-1)*x,k=y>1?'after':y<-1?'before':'none',_=Math.max(0,p-.5*O),j=Math.max(0,w+.5*O);if(h(b-1).offset<_)return{first:Math.max(0,b-1-o),last:b-1};var L=f([_,p,w,j],b,h),S=(0,t.default)(L,4),C=S[0],E=S[1],J=S[2],N=S[3];C=null==C?0:C,E=null==E?Math.max(0,C):E,N=null==N?b-1:N,J=null==J?Math.min(N,E+o-1):J;var R={first:E,last:J},B=l(c,R);for(;!(E<=C&&J>=N);){var F=B>=o,P=E<=c.first||E>c.last,T=E>C&&(!F||!P),W=J>=c.last||J=E&&E>=0&&J=C&&J<=N&&E<=R.first&&J>=R.last))throw new Error('Bad window calculation '+JSON.stringify({first:E,last:J,itemCount:b,overscanFirst:C,overscanLast:N,visible:R}));return{first:E,last:J}},e.elementsThatOverlapOffsets=f,e.keyExtractor=function(t,n){if('object'==typeof t&&null!=(null==t?void 0:t.key))return t.key;if('object'==typeof t&&null!=(null==t?void 0:t.id))return t.id;return 
String(n)},e.newRangeCount=l;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2]));function f(t,f,l){for(var s=[],o=0,u=0;u=t[v]&&(s[v]=u,o++,v===t.length-1))return(0,n.default)(o===t.length,'bad offsets input, should be in increasing order: %s',JSON.stringify(t)),s;return s}function l(t,n){return n.last-n.first+1-Math.max(0,1+Math.min(n.last,t.last)-Math.max(n.first,t.first))}},653,[407,430,425]); +__d(function(g,r,_i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),n=r(d[0])(r(d[4])),s=r(d[0])(r(d[5])),i=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),c=r(d[0])(r(d[8])),h=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var o=u(t);if(o&&o.has(e))return o.get(e);var n={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var i in e)if("default"!==i&&Object.prototype.hasOwnProperty.call(e,i)){var l=s?Object.getOwnPropertyDescriptor(e,i):null;l&&(l.get||l.set)?Object.defineProperty(n,i,l):n[i]=e[i]}n.default=e,o&&o.set(e,n);return n})(r(d[9]));function u(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,o=new WeakMap;return(u=function(e){return e?o:t})(e)}function p(e,t){var o="undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(o)return(o=o.call(e)).next.bind(o);if(Array.isArray(e)||(o=f(e))||t&&e&&"number"==typeof e.length){o&&(e=o);var n=0;return function(){return n>=e.length?{done:!0}:{done:!1,value:e[n++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function f(e,t){if(e){if("string"==typeof e)return _(e,t);var o=Object.prototype.toString.call(e).slice(8,-1);return"Object"===o&&e.constructor&&(o=e.constructor.name),"Map"===o||"Set"===o?Array.from(e):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?_(e,t):void 0}}function _(e,t){(null==t||t>e.length)&&(t=e.length);for(var o=0,n=new Array(t);o0&&t>0&&null!=i.props.initialScrollIndex&&i.props.initialScrollIndex>0&&!i._hasDoneInitialScroll&&(null==i.props.contentOffset&&i.scrollToIndex({animated:!1,index:i.props.initialScrollIndex}),i._hasDoneInitialScroll=!0),i.props.onContentSizeChange&&i.props.onContentSizeChange(e,t),i._scrollMetrics.contentLength=i._selectLength({height:t,width:e}),i._scheduleCellsToRenderUpdate(),i._maybeCallOnEndReached()},i._convertParentScrollMetrics=function(e){var t=e.offset-i._offsetFromParentVirtualizedList,o=e.visibleLength,n=t-i._scrollMetrics.offset;return{visibleLength:o,contentLength:i._scrollMetrics.contentLength,offset:t,dOffset:n}},i._onScroll=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onScroll(e)}),i.props.onScroll&&i.props.onScroll(e);var t=e.timeStamp,o=i._selectLength(e.nativeEvent.layoutMeasurement),n=i._selectLength(e.nativeEvent.contentSize),s=i._selectOffset(e.nativeEvent.contentOffset),l=s-i._scrollMetrics.offset;if(i._isNestedWithSameOrientation()){if(0===i._scrollMetrics.contentLength)return;var c=i._convertParentScrollMetrics({visibleLength:o,offset:s});o=c.visibleLength,n=c.contentLength,s=c.offset,l=c.dOffset}var h=i._scrollMetrics.timestamp?Math.max(1,t-i._scrollMetrics.timestamp):1,u=l/h;h>500&&i._scrollMetrics.dt>500&&n>5*o&&!i._hasWarned.perf&&(r(d[14])("VirtualizedList: You have a large list that is slow to update - make sure your renderItem function renders components that follow React performance best practices like PureComponent, shouldComponentUpdate, 
etc.",{dt:h,prevDt:i._scrollMetrics.dt,contentLength:n}),i._hasWarned.perf=!0),i._scrollMetrics={contentLength:n,dt:h,dOffset:l,offset:s,timestamp:t,velocity:u,visibleLength:o},i._updateViewableItems(i.props.data),i.props&&(i._maybeCallOnEndReached(),0!==u&&i._fillRateHelper.activate(),i._computeBlankness(),i._scheduleCellsToRenderUpdate())},i._onScrollBeginDrag=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onScrollBeginDrag(e)}),i._viewabilityTuples.forEach(function(e){e.viewabilityHelper.recordInteraction()}),i._hasInteracted=!0,i.props.onScrollBeginDrag&&i.props.onScrollBeginDrag(e)},i._onScrollEndDrag=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onScrollEndDrag(e)});var t=e.nativeEvent.velocity;t&&(i._scrollMetrics.velocity=i._selectOffset(t)),i._computeBlankness(),i.props.onScrollEndDrag&&i.props.onScrollEndDrag(e)},i._onMomentumScrollBegin=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onMomentumScrollBegin(e)}),i.props.onMomentumScrollBegin&&i.props.onMomentumScrollBegin(e)},i._onMomentumScrollEnd=function(e){i._nestedChildLists.forEach(function(t){t.ref&&t.ref._onMomentumScrollEnd(e)}),i._scrollMetrics.velocity=0,i._computeBlankness(),i.props.onMomentumScrollEnd&&i.props.onMomentumScrollEnd(e)},i._updateCellsToRender=function(){var e=i.props,t=e.data,o=e.getItemCount,n=M(e.onEndReachedThreshold),s=i._isVirtualizationDisabled();i._updateViewableItems(t),t&&i.setState(function(e){var l,c=i._scrollMetrics,h=c.contentLength,u=c.offset,f=c.visibleLength;if(s){var _=h-f-u0&&h>0&&(i.props.initialScrollIndex&&!i._scrollMetrics.offset||(l=(0,r(d[15]).computeWindowedRenderLimits)(i.props.data,i.props.getItemCount,I(i.props.maxToRenderPerBatch),R(i.props.windowSize),e,i._getFrameMetricsApprox,i._scrollMetrics)));if(l&&i._nestedChildLists.size>0)for(var y=l.first,v=l.last,C=y;C<=v;C++){var L=i._indicesToKeys.get(C),b=L&&i._cellKeysToChildListKeys.get(L);if(b){for(var S,M=!1,x=p(b);!(S=x()).done;){var w=S.value,k=i._nestedChildLists.get(w);if(k&&k.ref&&k.ref.hasMore()){M=!0;break}}if(M){l.last=C;break}}}return null!=l&&l.first===e.first&&l.last===e.last&&(l=null),l})},i._createViewToken=function(e,t){var o=i.props,n=o.data,s=(0,o.getItem)(n,e);return{index:e,item:s,key:i._keyExtractor(s,e),isViewable:t}},i._getFrameMetricsApprox=function(e){var t=i._getFrameMetrics(e);if(t&&t.index===e)return t;var o=i.props.getItemLayout;return r(d[11])(!o,'Should not have to estimate frames when a measurement metrics function is provided'),{length:i._averageCellLength,offset:i._averageCellLength*e}},i._getFrameMetrics=function(e){var t=i.props,o=t.data,n=t.getItem,s=t.getItemCount,l=t.getItemLayout;r(d[11])(s(o)>e,'Tried to get frame for out of range index '+e);var c=n(o,e),h=c&&i._frames[i._keyExtractor(c,e)];return h&&h.index===e||l&&(h=l(o,e)),h},r(d[11])(!e.onScroll||!e.onScroll.__isNative,"Components based on VirtualizedList must be wrapped with Animated.createAnimatedComponent to support native onScroll events with useNativeDriver"),r(d[11])(R(e.windowSize)>0,'VirtualizedList: The windowSize prop must be present and set to a value greater than 
0.'),i._fillRateHelper=new(r(d[16]))(i._getFrameMetrics),i._updateCellsToRenderBatcher=new(r(d[17]))(i._updateCellsToRender,null!=(n=i.props.updateCellsBatchingPeriod)?n:50),i.props.viewabilityConfigCallbackPairs?i._viewabilityTuples=i.props.viewabilityConfigCallbackPairs.map(function(e){return{viewabilityHelper:new(r(d[18]))(e.viewabilityConfig),onViewableItemsChanged:e.onViewableItemsChanged}}):i.props.onViewableItemsChanged&&i._viewabilityTuples.push({viewabilityHelper:new(r(d[18]))(i.props.viewabilityConfig),onViewableItemsChanged:i.props.onViewableItemsChanged});var l={first:i.props.initialScrollIndex||0,last:Math.min(i.props.getItemCount(i.props.data),(i.props.initialScrollIndex||0)+S(i.props.initialNumToRender))-1};if(i._isNestedWithSameOrientation()){var f=i.context.getNestedChildState(i._getListKey());f&&(l=f,i.state=f,i._frames=f.frames)}return i.state=l,i}return(0,n.default)(u,[{key:"scrollToEnd",value:function(e){var t=!e||e.animated,o=this.props.getItemCount(this.props.data)-1,n=this._getFrameMetricsApprox(o),s=Math.max(0,n.offset+n.length+this._footerLength-this._scrollMetrics.visibleLength);null!=this._scrollRef&&(null!=this._scrollRef.scrollTo?this._scrollRef.scrollTo(b(this.props.horizontal)?{x:s,animated:t}:{y:s,animated:t}):console.warn("No scrollTo method provided. This may be because you have two nested VirtualizedLists with the same orientation, or because you are using a custom component that does not implement scrollTo."))}},{key:"scrollToIndex",value:function(e){var t=this.props,o=t.data,n=t.horizontal,s=t.getItemCount,i=t.getItemLayout,l=t.onScrollToIndexFailed,c=e.animated,h=e.index,u=e.viewOffset,p=e.viewPosition;if(r(d[11])(h>=0,"scrollToIndex out of range: requested index "+h+" but minimum is 0"),r(d[11])(s(o)>=1,"scrollToIndex out of range: item length "+s(o)+" but minimum is 1"),r(d[11])(hthis._highestMeasuredFrameIndex)return r(d[11])(!!l,"scrollToIndex should be used in conjunction with getItemLayout or onScrollToIndexFailed, otherwise there is no way to know the location of offscreen indices or handle failures."),void l({averageItemLength:this._averageCellLength,highestMeasuredFrameIndex:this._highestMeasuredFrameIndex,index:h});var f=this._getFrameMetricsApprox(h),_=Math.max(0,f.offset-(p||0)*(this._scrollMetrics.visibleLength-f.length))-(u||0);null!=this._scrollRef&&(null!=this._scrollRef.scrollTo?this._scrollRef.scrollTo(n?{x:_,animated:c}:{y:_,animated:c}):console.warn("No scrollTo method provided. 
This may be because you have two nested VirtualizedLists with the same orientation, or because you are using a custom component that does not implement scrollTo."))}},{key:"scrollToItem",value:function(e){for(var o=e.item,n=this.props,s=n.data,i=n.getItem,l=(0,n.getItemCount)(s),c=0;c0){C=!1,L='';var x=this._getSpacerKey(!p),w=this.props.initialScrollIndex?-1:S(this.props.initialNumToRender)-1,k=this.state,T=k.first,z=k.last;this._pushCells(y,I,v,0,w,_);var K=Math.max(w+1,T);if(!f&&T>w+1){var O=!1;if(v.size>0)for(var P=l?1:0,F=K-1;F>w;F--)if(v.has(F+P)){var V=this._getFrameMetricsApprox(w),D=this._getFrameMetricsApprox(F),N=D.offset-V.offset-(this.props.initialScrollIndex?0:V.length);y.push(h.createElement(r(d[10]),{key:"$sticky_lead",style:(0,e.default)({},x,N)})),this._pushCells(y,I,v,F,F,_);var A=this._getFrameMetricsApprox(T).offset-(D.offset+D.length);y.push(h.createElement(r(d[10]),{key:"$sticky_trail",style:(0,e.default)({},x,A)})),O=!0;break}if(!O){var B=this._getFrameMetricsApprox(w),H=this._getFrameMetricsApprox(T).offset-(B.offset+B.length);y.push(h.createElement(r(d[10]),{key:"$lead_spacer",style:(0,e.default)({},x,H)}))}}if(this._pushCells(y,I,v,K,z,_),!this._hasWarned.keys&&C&&(console.warn("VirtualizedList: missing keys for items, make sure to specify a key or id property on each item or provide a custom keyExtractor.",L),this._hasWarned.keys=!0),!f&&zu&&(this._sentEndForContentLength=0)}},{key:"_scheduleCellsToRenderUpdate",value:function(){var e=this.state,t=e.first,o=e.last,n=this._scrollMetrics,s=n.offset,i=n.visibleLength,l=n.velocity,c=this.props.getItemCount(this.props.data),h=!1,u=M(this.props.onEndReachedThreshold)*i/2;if(t>0){var p=s-this._getFrameMetricsApprox(t).offset;h=h||p<0||l<-2&&p2&&f0&&(this._scrollAnimatedValueAttachment=p.default.attachNativeEvent(this._scrollViewRef,'onScroll',[{nativeEvent:{contentOffset:{y:this._scrollAnimatedValue}}}]))}},{key:"_setStickyHeaderRef",value:function(e,o){o?this._stickyHeaderRefs.set(e,o):this._stickyHeaderRefs.delete(e)}},{key:"_onStickyHeaderLayout",value:function(e,o,t){var n=this.props.stickyHeaderIndices;if(n){var l=y.Children.toArray(this.props.children);if(t===this._getKeyForIndex(e,l)){var s=o.nativeEvent.layout.y;this._headerLayoutYs.set(t,s);var u=n[n.indexOf(e)-1];if(null!=u){var c=this._stickyHeaderRefs.get(this._getKeyForIndex(u,l));c&&c.setNextHeaderY&&c.setNextHeaderY(s)}}}}},{key:"render",value:function(){var t=this,n=!0===this.props.horizontal?P:F,l=(0,o.default)(n,2),s=l[0],u=l[1],c=[!0===this.props.horizontal&&Y.contentContainerHorizontal,this.props.contentContainerStyle],p=null==this.props.onContentSizeChange?null:{onLayout:this._handleContentOnLayout},f=this.props.stickyHeaderIndices,S=this.props.children;if(null!=f&&f.length>0){var b=y.Children.toArray(this.props.children);S=b.map(function(e,o){var n=e?f.indexOf(o):-1;if(n>-1){var l=e.key,s=f[n+1],u=t.props.StickyHeaderComponent||_.default;return y.createElement(u,{key:l,nativeID:'StickyHeader-'+l,ref:function(e){return t._setStickyHeaderRef(l,e)},nextHeaderLayoutY:t._headerLayoutYs.get(t._getKeyForIndex(s,b)),onLayout:function(e){return t._onStickyHeaderLayout(o,e,l)},scrollAnimatedValue:t._scrollAnimatedValue,inverted:t.props.invertStickyHeaders,hiddenOnScroll:t.props.stickyHeaderHiddenOnScroll,scrollViewHeight:t.state.layoutHeight},e)}return e})}S=y.createElement(D.default.Provider,{value:!0===this.props.horizontal?D.HORIZONTAL:D.VERTICAL},S);var 
R=Array.isArray(f)&&f.length>0,T=y.createElement(u,(0,e.default)({},p,{ref:this._setInnerViewRef,style:c,removeClippedSubviews:('android'!==h.default.OS||!R)&&this.props.removeClippedSubviews,collapsable:!1}),S),w=void 0!==this.props.alwaysBounceHorizontal?this.props.alwaysBounceHorizontal:this.props.horizontal,V=void 0!==this.props.alwaysBounceVertical?this.props.alwaysBounceVertical:!this.props.horizontal,k=!0===this.props.horizontal?Y.baseHorizontal:Y.baseVertical,E=(0,e.default)({},this.props,{alwaysBounceHorizontal:w,alwaysBounceVertical:V,style:v.default.compose(k,this.props.style),onContentSizeChange:null,onLayout:this._handleLayout,onMomentumScrollBegin:this._handleMomentumScrollBegin,onMomentumScrollEnd:this._handleMomentumScrollEnd,onResponderGrant:this._handleResponderGrant,onResponderReject:this._handleResponderReject,onResponderRelease:this._handleResponderRelease,onResponderTerminationRequest:this._handleResponderTerminationRequest,onScrollBeginDrag:this._handleScrollBeginDrag,onScrollEndDrag:this._handleScrollEndDrag,onScrollShouldSetResponder:this._handleScrollShouldSetResponder,onStartShouldSetResponder:this._handleStartShouldSetResponder,onStartShouldSetResponderCapture:this._handleStartShouldSetResponderCapture,onTouchEnd:this._handleTouchEnd,onTouchMove:this._handleTouchMove,onTouchStart:this._handleTouchStart,onTouchCancel:this._handleTouchCancel,onScroll:this._handleScroll,scrollEventThrottle:R?1:this.props.scrollEventThrottle,sendMomentumEvents:!(!this.props.onMomentumScrollBegin&&!this.props.onMomentumScrollEnd),snapToStart:!1!==this.props.snapToStart,snapToEnd:!1!==this.props.snapToEnd,pagingEnabled:h.default.select({ios:!0===this.props.pagingEnabled&&null==this.props.snapToInterval&&null==this.props.snapToOffsets,android:!0===this.props.pagingEnabled||null!=this.props.snapToInterval||null!=this.props.snapToOffsets})}),K=this.props.decelerationRate;null!=K&&(E.decelerationRate=(0,O.default)(K));var A=this.props.refreshControl;if(A){if('ios'===h.default.OS)return y.createElement(s,(0,e.default)({},E,{ref:this._setNativeRef}),A,T);if('android'===h.default.OS){var N=(0,I.default)((0,H.default)(E.style)),M=N.outer,W=N.inner;return y.cloneElement(A,{style:v.default.compose(k,M)},y.createElement(s,(0,e.default)({},E,{style:v.default.compose(k,W),ref:this._setNativeRef}),T))}}return y.createElement(s,(0,e.default)({},E,{ref:this._setNativeRef}),T)}}]),x})(y.Component);G.Context=D.default;var Y=v.default.create({baseVertical:{flexGrow:1,flexShrink:1,flexDirection:'column',overflow:'scroll'},baseHorizontal:{flexGrow:1,flexShrink:1,flexDirection:'row',overflow:'scroll'},contentContainerHorizontal:{flexDirection:'row'}});function U(o,t){return y.createElement(G,(0,e.default)({},o,{scrollViewRef:t}))}U.displayName='ScrollView';var Z=y.forwardRef(U);Z.Context=D.default,Z.displayName='ScrollView',m.exports=Z},655,[407,436,430,402,403,421,417,419,422,628,566,426,534,456,656,578,579,450,657,661,540,659,573,425,663,664,646,665,666,667,668,669,670]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),f=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=s(n);if(u&&u.has(t))return u.get(t);var l={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var 
c=o?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(l,f,c):l[f]=t[f]}l.default=t,u&&u.set(t,l);return l})(r(d[6]));function s(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(s=function(t){return t?u:n})(t)}var c=o.default.View,p=f.forwardRef(function(l,s){var p=l.inverted,h=l.scrollViewHeight,y=l.hiddenOnScroll,R=l.scrollAnimatedValue,O=l.nextHeaderLayoutY,L=f.useState(!1),b=(0,t.default)(L,2),w=b[0],S=b[1],j=f.useState(0),E=(0,t.default)(j,2),P=E[0],_=E[1],x=f.useState(0),M=(0,t.default)(x,2),k=M[0],C=M[1],D=f.useState(null),H=(0,t.default)(D,2),I=H[0],V=H[1],Y=f.useState(O),T=(0,t.default)(Y,2),W=T[0],A=T[1],N=f.useState(!1),z=(0,t.default)(N,2),F=z[0],q=z[1],B=f.useRef(),G=(0,n.default)({getForwardedRef:function(){return s},setLocalRef:function(t){var n,u;(B.current=t,t)&&(t.setNextHeaderY=function(t){A(t)},q(!(null==(n=t._internalInstanceHandle)||null==(u=n.stateNode)||!u.canonical)))}}),J=(0,f.useMemo)(function(){return!0===y?o.default.diffClamp(R.interpolate({extrapolateLeft:'clamp',inputRange:[P,P+1],outputRange:[0,1]}).interpolate({inputRange:[0,1],outputRange:[0,-1]}),-k,0):null},[R,k,P,y]),K=f.useState(function(){var t=R.interpolate({inputRange:[-1,0],outputRange:[0,0]});return null!=J?o.default.add(t,J):t}),Q=(0,t.default)(K,2),U=Q[0],X=Q[1],Z=(0,f.useRef)(!0),$=(0,f.useRef)(null);(0,f.useEffect)(function(){0!==I&&null!=I&&(Z.current=!1)},[I]);var ee=(0,f.useCallback)(function(t){var n=t.value,l='android'===u.default.OS?15:64;0!==n||Z.current?(null!=$.current&&clearTimeout($.current),$.current=setTimeout(function(){n!==I&&V(n)},l)):Z.current=!0},[I]);(0,f.useEffect)(function(){var t=[-1,0],n=[0,0];if(w)if(!0===p){if(null!=h){var u=P+k-h;if(u>0){t.push(u),n.push(0),t.push(u+1),n.push(1);var l=(W||0)-k-h;l>u&&(t.push(l,l+1),n.push(l-u,l-u))}}}else{t.push(P),n.push(0);var f=(W||0)-k;f>=P?(t.push(f,f+1),n.push(f-P,f-P)):(t.push(P+1),n.push(1))}var s,c=R.interpolate({inputRange:t,outputRange:n});return null!=J&&(c=o.default.add(c,J)),F&&(s=c.addListener(ee)),X(c),function(){s&&c.removeListener(s),null!=$.current&&clearTimeout($.current)}},[W,w,k,P,h,R,p,J,ee,F]);var te=f.Children.only(l.children),ne=F&&null!=I?{style:{transform:[{translateY:I}]}}:null;return f.createElement(c,{collapsable:!1,nativeID:l.nativeID,onLayout:function(t){_(t.nativeEvent.layout.y),C(t.nativeEvent.layout.height),S(!0),l.onLayout(t);var n=f.Children.only(l.children);n.props.onLayout&&n.props.onLayout(t)},ref:G,style:[te.props.style,v.header,{transform:[{translateY:U}]}],passthroughAnimatedPropExplicitValues:ne},f.cloneElement(te,{style:v.fill,onLayout:void 0}))}),v=l.default.create({header:{zIndex:10,position:'relative'},fill:{flex:1}}),h=p;e.default=h},656,[407,430,646,426,578,616,534]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),s=r(d[0])(r(d[6])),f=r(d[0])(r(d[7])),v=(function(){function v(){(0,t.default)(this,v),this._emitter=new u.default('ios'!==s.default.OS?null:f.default)}return(0,n.default)(v,[{key:"addListener",value:function(t,n,u){return this._emitter.addListener(t,n)}},{key:"removeListener",value:function(t,n){this._emitter.removeListener(t,n)}},{key:"removeAllListeners",value:function(t){this._emitter.removeAllListeners(t)}},{key:"dismiss",value:function(){(0,o.default)()}},{key:"scheduleLayoutAnimation",value:function(t){var 
n=t.duration,u=t.easing;null!=n&&0!==n&&l.default.configureNext({duration:n,update:{duration:n,type:null!=u&&l.default.Types[u]||'keyboard'}})}}]),v})();m.exports=new v},657,[407,402,403,500,658,659,426,660]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=r(d[0])(r(d[1]));function t(t,u,o){var l,s;if(!n.default.isTesting){var c,p,y=!1,f=function(){y||(y=!0,clearTimeout(b),null==u||u())},b=setTimeout(f,(null!=(l=t.duration)?l:0)+17),I=null==(s=g)?void 0:s.nativeFabricUIManager;if(null!=I&&I.configureNextLayoutAnimation)null==(c=g)||null==(p=c.nativeFabricUIManager)||p.configureNextLayoutAnimation(t,f,null!=o?o:function(){});else null!=r(d[2])&&r(d[2]).configureNextLayoutAnimation&&r(d[2]).configureNextLayoutAnimation(t,null!=f?f:function(){},null!=o?o:function(){})}}function u(n,t,u){return{duration:n,create:{type:t,property:u},update:{type:t},delete:{type:t,property:u}}}var o={easeInEaseOut:u(300,'easeInEaseOut','opacity'),linear:u(500,'linear','opacity'),spring:{duration:700,create:{type:'linear',property:'opacity'},update:{type:'spring',springDamping:.4},delete:{type:'linear',property:'opacity'}}},l={configureNext:t,create:u,Types:Object.freeze({spring:'spring',linear:'linear',easeInEaseOut:'easeInEaseOut',easeIn:'easeIn',easeOut:'easeOut',keyboard:'keyboard'}),Properties:Object.freeze({opacity:'opacity',scaleX:'scaleX',scaleY:'scaleY',scaleXY:'scaleXY'}),checkConfig:function(){console.error('LayoutAnimation.checkConfig(...) has been disabled.')},Presets:o,easeInEaseOut:t.bind(null,o.easeInEaseOut),linear:t.bind(null,o.linear),spring:t.bind(null,o.spring)};m.exports=l},658,[407,426,450]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=function(){r(d[0]).blurTextInput(r(d[0]).currentlyFocusedInput())}},659,[540]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('KeyboardObserver');e.default=n},660,[428]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),o={setGlobalOptions:function(o){if(void 0!==o.debug&&r(d[2])(t.default,'Trying to debug FrameRateLogger without the native module!'),t.default){var l={debug:!!o.debug,reportStackTraces:!!o.reportStackTraces};t.default.setGlobalOptions(l)}},setContext:function(o){t.default&&t.default.setContext(o)},beginScroll:function(){t.default&&t.default.beginScroll()},endScroll:function(){t.default&&t.default.endScroll()}};m.exports=o},661,[407,662,425]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var 
p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('FrameRateLogger');e.default=n},662,[428]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1]));m.exports=function(n){return'normal'===n?t.default.select({ios:.998,android:.985}):'fast'===n?t.default.select({ios:.99,android:.9}):n}},663,[407,426]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(s){var c=null,t=null;if(null!=s){c={},t={};for(var n=0,l=Object.keys(s);nMath.random(),this._resetData()}return r(d[0])(_,[{key:"activate",value:function(){this._enabled&&null==this._samplesStartTime&&(this._samplesStartTime=g.performance.now())}},{key:"deactivateAndFlush",value:function(){if(this._enabled){var t=this._samplesStartTime;if(null!=t)if(this._info.sample_count0&&(c=Math.min(h,Math.max(0,y.offset-_)));for(var p=0,b=n.last,v=this._getFrameMetrics(b);b>=n.first&&(!v||!v.inLayout);)v=this._getFrameMetrics(b),b--;if(v&&b0?(this._anyBlankStartTime=f,this._info.any_blank_speed_sum+=u,this._info.any_blank_count++,this._info.pixels_blank+=M,T>.5&&(this._mostlyBlankStartTime=f,this._info.mostly_blank_count++)):(u<.01||Math.abs(l)<1)&&this.deactivateAndFlush(),T}},{key:"enabled",value:function(){return this._enabled}},{key:"_resetData",value:function(){this._anyBlankStartTime=null,this._info=new t,this._mostlyBlankStartTime=null,this._samplesStartTime=null}}],[{key:"addListener",value:function(t){return null===l&&console.warn('Call `FillRateHelper.setSampleRate` before `addListener`.'),n.push(t),{remove:function(){n=n.filter(function(n){return t!==n})}}}},{key:"setSampleRate",value:function(t){l=t}},{key:"setMinSampleCount",value:function(t){s=t}}]),_})();m.exports=_},675,[403,402,436]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=(function(){function t(n,l){r(d[0])(this,t),this._delay=l,this._callback=n}return r(d[1])(t,[{key:"dispose",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{abort:!1};this._taskHandle&&(this._taskHandle.cancel(),t.abort||this._callback(),this._taskHandle=null)}},{key:"schedule",value:function(){var t=this;if(!this._taskHandle){var n=setTimeout(function(){t._taskHandle=r(d[2]).runAfterInteractions(function(){t._taskHandle=null,t._callback()})},this._delay);this._taskHandle={cancel:function(){return clearTimeout(n)}}}}}]),t})();m.exports=t},676,[402,403,625]); +__d(function(g,r,_i,a,m,e,d){'use strict';function t(t,i){var o="undefined"!=typeof Symbol&&t[Symbol.iterator]||t["@@iterator"];if(o)return(o=o.call(t)).next.bind(o);if(Array.isArray(t)||(o=n(t))||i&&t&&"number"==typeof t.length){o&&(t=o);var s=0;return function(){return s>=t.length?{done:!0}:{done:!1,value:t[s++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function n(t,n){if(t){if("string"==typeof t)return i(t,n);var o=Object.prototype.toString.call(t).slice(8,-1);return"Object"===o&&t.constructor&&(o=t.constructor.name),"Map"===o||"Set"===o?Array.from(t):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?i(t,n):void 0}}function i(t,n){(null==n||n>t.length)&&(n=t.length);for(var i=0,o=new Array(n);i0&&void 0!==arguments[0]?arguments[0]:{viewAreaCoveragePercentThreshold:0};r(d[0])(this,n),this._hasInteracted=!1,this._timers=new Set,this._viewableIndices=[],this._viewableItems=new Map,this._config=t}return 
r(d[1])(n,[{key:"dispose",value:function(){this._timers.forEach(clearTimeout)}},{key:"computeViewableItems",value:function(t,n,i,o,l){var u=this._config,c=u.itemVisiblePercentThreshold,h=u.viewAreaCoveragePercentThreshold,f=null!=h,v=f?h:c;r(d[2])(null!=v&&null!=c!=(null!=h),'Must set exactly one of itemVisiblePercentThreshold or viewAreaCoveragePercentThreshold');var b=[];if(0===t)return b;var y=-1,w=l||{first:0,last:t-1},_=w.first,p=w.last;if(p>=t)return console.warn('Invalid render range computing viewability '+JSON.stringify({renderRange:l,itemCount:t})),[];for(var I=_;I<=p;I++){var A=o(I);if(A){var S=A.offset-n,T=S+A.length;if(S0)y=I,s(f,v,S,T,i,A.length)&&b.push(I);else if(y>=0)break}}return b}},{key:"onUpdate",value:function(t,n,i,o,s,l,u){var c=this;if((!this._config.waitForInteraction||this._hasInteracted)&&0!==t&&o(0)){var h=[];if(t&&(h=this.computeViewableItems(t,n,i,o,u)),this._viewableIndices.length!==h.length||!this._viewableIndices.every(function(t,n){return t===h[n]}))if(this._viewableIndices=h,this._config.minimumViewTime){var f=setTimeout(function(){c._timers.delete(f),c._onUpdateSync(h,l,s)},this._config.minimumViewTime);this._timers.add(f)}else this._onUpdateSync(h,l,s)}}},{key:"resetViewableIndices",value:function(){this._viewableIndices=[]}},{key:"recordInteraction",value:function(){this._hasInteracted=!0}},{key:"_onUpdateSync",value:function(n,i,o){var s=this;n=n.filter(function(t){return s._viewableIndices.includes(t)});for(var l,u=this._viewableItems,c=new Map(n.map(function(t){var n=o(t,!0);return[n.key,n]})),h=[],f=t(c);!(l=f()).done;){var v=l.value,b=r(d[3])(v,2),y=b[0],w=b[1];u.has(y)||h.push(w)}for(var _,p=t(u);!(_=p()).done;){var I=_.value,A=r(d[3])(I,2),S=A[0],T=A[1];c.has(S)||h.push(r(d[4])({},T,{isViewable:!1}))}h.length>0&&(this._viewableItems=c,i({viewableItems:Array.from(c.values()),changed:h,viewabilityConfig:this._config}))}}]),n})();function s(t,n,i,o,s,c){if(u(i,o,s))return!0;var h=l(i,o,s);return 100*(t?h/s:h/c)>=n}function l(t,n,i){var o=Math.min(n,i)-Math.max(t,0);return Math.max(0,o)}function u(t,n,i){return t>=0&&n<=i&&n>t}m.exports=o},677,[402,403,425,430,436]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.VirtualizedListCellContextProvider=function(l){var u=l.cellKey,s=l.children,c=(0,n.useContext)(o);return n.createElement(o.Provider,{value:null==c?null:(0,t.default)({},c,{cellKey:u})},s)},e.VirtualizedListContext=void 0,e.VirtualizedListContextProvider=function(t){var l=t.children,u=t.value,s=(0,n.useMemo)(function(){return{cellKey:null,getScrollMetrics:u.getScrollMetrics,horizontal:u.horizontal,getOutermostParentListRef:u.getOutermostParentListRef,getNestedChildState:u.getNestedChildState,registerAsNestedChild:u.registerAsNestedChild,unregisterAsNestedChild:u.unregisterAsNestedChild,debugInfo:{cellKey:u.debugInfo.cellKey,horizontal:u.debugInfo.horizontal,listKey:u.debugInfo.listKey,parent:u.debugInfo.parent}}},[u.getScrollMetrics,u.horizontal,u.getOutermostParentListRef,u.getNestedChildState,u.registerAsNestedChild,u.unregisterAsNestedChild,u.debugInfo.cellKey,u.debugInfo.horizontal,u.debugInfo.listKey,u.debugInfo.parent]);return n.createElement(o.Provider,{value:s},l)},e.VirtualizedListContextResetter=function(t){var l=t.children;return n.createElement(o.Provider,{value:null},l)};var t=r(d[0])(r(d[1])),n=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=l(n);if(o&&o.has(t))return o.get(t);var 
u={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var f=s?Object.getOwnPropertyDescriptor(t,c):null;f&&(f.get||f.set)?Object.defineProperty(u,c,f):u[c]=t[c]}u.default=t,o&&o.set(t,u);return u})(r(d[2]));function l(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(l=function(t){return t?o:n})(t)}var o=n.createContext(null);e.VirtualizedListContext=o},678,[407,436,534]); +__d(function(g,r,i,a,m,e,d){!(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},p=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var l=p?Object.getOwnPropertyDescriptor(n,c):null;l&&(l.get||l.set)?Object.defineProperty(u,c,l):u[c]=n[c]}u.default=n,f&&f.set(n,u)})(r(d[0]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}m.exports=r(d[1])(r(d[2]),{collapsable:!1})},679,[534,644,680]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),u=r(d[0])(r(d[4])),l=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=b(n);if(o&&o.has(t))return o.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var f=l?Object.getOwnPropertyDescriptor(t,c):null;f&&(f.get||f.set)?Object.defineProperty(u,c,f):u[c]=t[c]}u.default=t,o&&o.set(t,u);return u})(r(d[5])),c=r(d[0])(r(d[6])),f=r(d[0])(r(d[7])),s=r(d[0])(r(d[8])),h=r(d[0])(r(d[9])),p=r(d[0])(r(d[10])),y=r(d[0])(r(d[11])),w=r(d[0])(r(d[12])),v=r(d[0])(r(d[13]));function b(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(b=function(t){return t?o:n})(t)}var S=1;function I(t,n){var o=S++;return n&&n(o),w.default.prefetchImage(t,o)}var E=function(n,o){var c,s,w,b,S=(0,y.default)(n.source),I=(0,y.default)(n.defaultSource),E=(0,y.default)(n.loadingIndicatorSource);S&&(''===S.uri&&console.warn('source.uri should not be an empty string'));if(n.src&&console.warn('The component requires a `source` property rather than `src`.'),n.children)throw new Error('The component cannot contain children. If you want to render content on top of the image, consider using the component or absolute positioning.');if(n.defaultSource&&n.loadingIndicatorSource)throw new Error('The component cannot have defaultSource and loadingIndicatorSource at the same time. 
Please use either defaultSource or loadingIndicatorSource.');if(!S||S.uri||Array.isArray(S)||(S=null),null!=(null==(c=S)?void 0:c.uri)){var P=S,_=P.width,j=P.height;w=(0,p.default)([{width:_,height:j},O.base,n.style]),b=[{uri:S.uri}]}else w=(0,p.default)([O.base,n.style]),b=S;var z=n.onLoadStart,C=n.onLoad,W=n.onLoadEnd,M=n.onError,T=(0,t.default)({},n,{style:w,shouldNotifyLoadEvents:!!(z||C||W||M),src:b,headers:null==(s=S)?void 0:s.headers,defaultSrc:I?I.uri:null,loadingIndicatorSrc:E?E.uri:null,ref:o});return l.createElement(h.default.Consumer,null,function(n){var o=null!==n?(0,t.default)({},T,{internal_analyticTag:n}):T;return l.createElement(f.default.Consumer,null,function(t){return t?l.createElement(v.default,o):l.createElement(u.default,o)})})};E=l.forwardRef(E),null!=s.default.unstable_createImageComponent&&(E=s.default.unstable_createImageComponent(E)),E.displayName='Image',E.getSize=function(t,n,o){return w.default.getSize(t).then(function(t){n(t.width,t.height)}).catch(o||function(){console.warn('Failed to get size for image: '+t)})},E.getSizeWithHeaders=function(t,n,o,u){return w.default.getSizeWithHeaders(t,n).then(function(t){o(t.width,t.height)}).catch(u||function(){console.warn('Failed to get size for image: '+t)})},E.prefetch=I,E.prefetchWithMetadata=function(t,n,o,u){I(t,u)},E.abortPrefetch=function(t){w.default.abortRequest(t)},E.queryCache=function(t){return n.default.async(function(o){for(;;)switch(o.prev=o.next){case 0:return o.next=2,n.default.awrap(w.default.queryCache(t));case 2:return o.abrupt("return",o.sent);case 3:case"end":return o.stop()}},null,null,null,Promise)},E.resolveAssetSource=y.default,E.propTypes=o.default;var O=c.default.create({base:{overflow:'hidden'}});m.exports=E},680,[407,436,404,681,685,534,578,581,686,688,573,560,689,687]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=r(d[0])({},r(d[1]),{style:r(d[2])(r(d[3])),source:r(d[4]).oneOfType([r(d[4]).shape({uri:r(d[4]).string,headers:r(d[4]).objectOf(r(d[4]).string)}),r(d[4]).number,r(d[4]).arrayOf(r(d[4]).shape({uri:r(d[4]).string,width:r(d[4]).number,height:r(d[4]).number,headers:r(d[4]).objectOf(r(d[4]).string)}))]),blurRadius:r(d[4]).number,defaultSource:r(d[4]).number,loadingIndicatorSource:r(d[4]).oneOfType([r(d[4]).shape({uri:r(d[4]).string}),r(d[4]).number]),progressiveRenderingEnabled:r(d[4]).bool,fadeDuration:r(d[4]).number,internal_analyticTag:r(d[4]).string,onLoadStart:r(d[4]).func,onError:r(d[4]).func,onLoad:r(d[4]).func,onLoadEnd:r(d[4]).func,testID:r(d[4]).string,resizeMethod:r(d[4]).oneOf(['auto','resize','scale']),resizeMode:r(d[4]).oneOf(['cover','contain','stretch','repeat','center'])});m.exports=n},681,[436,682,591,684,596]); +__d(function(g,r,i,a,m,e,d){'use strict';var 
o=r(d[0])(r(d[1]));m.exports={accessible:r(d[2]).bool,accessibilityLabel:r(d[2]).node,accessibilityHint:r(d[2]).string,accessibilityActions:r(d[2]).arrayOf(r(d[2]).string),accessibilityIgnoresInvertColors:r(d[2]).bool,accessibilityRole:r(d[2]).oneOf(r(d[3]).DeprecatedAccessibilityRoles),accessibilityState:r(d[2]).object,accessibilityValue:r(d[2]).object,accessibilityLiveRegion:r(d[2]).oneOf(['none','polite','assertive']),importantForAccessibility:r(d[2]).oneOf(['auto','yes','no','no-hide-descendants']),accessibilityViewIsModal:r(d[2]).bool,accessibilityElementsHidden:r(d[2]).bool,onAccessibilityAction:r(d[2]).func,onAccessibilityTap:r(d[2]).func,onMagicTap:r(d[2]).func,testID:r(d[2]).string,nativeID:r(d[2]).string,onResponderGrant:r(d[2]).func,onResponderMove:r(d[2]).func,onResponderReject:r(d[2]).func,onResponderRelease:r(d[2]).func,onResponderTerminate:r(d[2]).func,onResponderTerminationRequest:r(d[2]).func,onStartShouldSetResponder:r(d[2]).func,onStartShouldSetResponderCapture:r(d[2]).func,onMoveShouldSetResponder:r(d[2]).func,onMoveShouldSetResponderCapture:r(d[2]).func,hitSlop:r(d[4]),onLayout:r(d[2]).func,pointerEvents:r(d[2]).oneOf(['box-none','none','box-only','auto']),style:o,removeClippedSubviews:r(d[2]).bool,renderToHardwareTextureAndroid:r(d[2]).bool,shouldRasterizeIOS:r(d[2]).bool,collapsable:r(d[2]).bool,needsOffscreenAlphaCompositing:r(d[2]).bool}},682,[591,594,596,683,603]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports={DeprecatedAccessibilityRoles:['none','button','togglebutton','link','search','image','keyboardkey','text','adjustable','imagebutton','header','summary','alert','checkbox','combobox','menu','menubar','menuitem','progressbar','radio','radiogroup','scrollbar','spinbutton','switch','tab','tablist','timer','list','toolbar']}},683,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var o=r(d[0])({},r(d[1]),r(d[2]),r(d[3]),{resizeMode:r(d[4]).oneOf(['center','contain','cover','repeat','stretch']),backfaceVisibility:r(d[4]).oneOf(['visible','hidden']),backgroundColor:r(d[5]),borderColor:r(d[5]),borderWidth:r(d[4]).number,borderRadius:r(d[4]).number,overflow:r(d[4]).oneOf(['visible','hidden']),tintColor:r(d[5]),opacity:r(d[4]).number,overlayColor:r(d[4]).string,borderTopLeftRadius:r(d[4]).number,borderTopRightRadius:r(d[4]).number,borderBottomLeftRadius:r(d[4]).number,borderBottomRightRadius:r(d[4]).number});m.exports=o},684,[436,595,599,601,596,600]); +__d(function(g,r,i,a,m,e,d){function t(o){if("function"!=typeof WeakMap)return null;var n=new WeakMap,s=new WeakMap;return(t=function(t){return t?s:n})(o)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var o=(function(o,n){if(!n&&o&&o.__esModule)return o;if(null===o||"object"!=typeof o&&"function"!=typeof o)return{default:o};var s=t(n);if(s&&s.has(o))return s.get(o);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in o)if("default"!==f&&Object.prototype.hasOwnProperty.call(o,f)){var c=l?Object.getOwnPropertyDescriptor(o,f):null;c&&(c.get||c.set)?Object.defineProperty(u,f,c):u[f]=o[f]}u.default=o,s&&s.set(o,u);return 
u})(r(d[0])).get('RCTImageView',function(){return{uiViewClassName:'RCTImageView',bubblingEventTypes:{},directEventTypes:{topLoadStart:{registrationName:'onLoadStart'},topProgress:{registrationName:'onProgress'},topError:{registrationName:'onError'},topPartialLoad:{registrationName:'onPartialLoad'},topLoad:{registrationName:'onLoad'},topLoadEnd:{registrationName:'onLoadEnd'}},validAttributes:{blurRadius:!0,capInsets:{diff:r(d[1])},defaultSource:{process:r(d[2])},defaultSrc:!0,fadeDuration:!0,headers:!0,internal_analyticTag:!0,loadingIndicatorSrc:!0,onError:!0,onLoad:!0,onLoadEnd:!0,onLoadStart:!0,onPartialLoad:!0,onProgress:!0,overlayColor:{process:r(d[3])},progressiveRenderingEnabled:!0,resizeMethod:!0,resizeMode:!0,shouldNotifyLoadEvents:!0,source:!0,src:!0,tintColor:{process:r(d[3])}}}});e.default=o},685,[555,550,560,546]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f)})(r(d[0])),r(d[1])(r(d[2])),r(d[1])(r(d[3]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}e.default={unstable_createImageComponent:null}},686,[534,407,685,687]); +__d(function(g,r,i,a,m,e,d){'use strict';function t(o){if("function"!=typeof WeakMap)return null;var n=new WeakMap,s=new WeakMap;return(t=function(t){return t?s:n})(o)}var o=(function(o,n){if(!n&&o&&o.__esModule)return o;if(null===o||"object"!=typeof o&&"function"!=typeof o)return{default:o};var s=t(n);if(s&&s.has(o))return s.get(o);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in o)if("default"!==c&&Object.prototype.hasOwnProperty.call(o,c)){var p=l?Object.getOwnPropertyDescriptor(o,c):null;p&&(p.get||p.set)?Object.defineProperty(u,c,p):u[c]=o[c]}u.default=o,s&&s.set(o,u);return u})(r(d[0])).get('RCTTextInlineImage',function(){return{uiViewClassName:'RCTImageView',bubblingEventTypes:{},directEventTypes:{topLoadStart:{registrationName:'onLoadStart'},topProgress:{registrationName:'onProgress'},topError:{registrationName:'onError'},topPartialLoad:{registrationName:'onPartialLoad'},topLoad:{registrationName:'onLoad'},topLoadEnd:{registrationName:'onLoadEnd'}},validAttributes:{blurRadius:!0,capInsets:{diff:r(d[1])},defaultSource:{process:r(d[2])},defaultSrc:!0,fadeDuration:!0,headers:!0,internal_analyticTag:!0,loadingIndicatorSrc:!0,onError:!0,onLoad:!0,onLoadEnd:!0,onLoadStart:!0,onPartialLoad:!0,onProgress:!0,overlayColor:{process:r(d[3])},progressiveRenderingEnabled:!0,resizeMethod:!0,resizeMode:!0,shouldNotifyLoadEvents:!0,source:!0,src:!0,tintColor:{process:r(d[3])}}}});m.exports=o},687,[555,550,560,546]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in 
n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).createContext(null);e.default=n},688,[534]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).getEnforcing('ImageLoader');e.default=n},689,[428]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var f=o(n);if(f&&f.has(t))return f.get(t);var u={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var p=l?Object.getOwnPropertyDescriptor(t,c):null;p&&(p.get||p.set)?Object.defineProperty(u,c,p):u[c]=t[c]}u.default=t,f&&f.set(t,u);return u})(r(d[2]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,f=new WeakMap;return(o=function(t){return t?f:n})(t)}var f=n.forwardRef(function(o,f){return n.createElement(r(d[3]),(0,t.default)({scrollEventThrottle:1e-4},o,{ref:f}))});m.exports=r(d[4])(f,{collapsable:!1})},690,[407,436,534,655,644]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var f=o(n);if(f&&f.has(t))return f.get(t);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in t)if("default"!==l&&Object.prototype.hasOwnProperty.call(t,l)){var p=c?Object.getOwnPropertyDescriptor(t,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=t[l]}u.default=t,f&&f.set(t,u);return u})(r(d[2])),f=r(d[0])(r(d[3]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,f=new WeakMap;return(o=function(t){return t?f:n})(t)}var u=n.forwardRef(function(o,u){return n.createElement(f.default,(0,t.default)({scrollEventThrottle:1e-4},o,{ref:u}))});m.exports=r(d[4])(u)},691,[407,436,534,692,644]); +__d(function(g,r,i,a,m,_e,d){'use strict';Object.defineProperty(_e,"__esModule",{value:!0}),_e.default=void 0;var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),f=r(d[0])(r(d[5])),u=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=y(t);if(n&&n.has(e))return n.get(e);var o={},f=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in e)if("default"!==u&&Object.prototype.hasOwnProperty.call(e,u)){var c=f?Object.getOwnPropertyDescriptor(e,u):null;c&&(c.get||c.set)?Object.defineProperty(o,u,c):o[u]=e[u]}o.default=e,n&&n.set(e,o);return o})(r(d[9])),p=r(d[0])(r(d[10])),v=["stickySectionHeadersEnabled"];function y(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(y=function(e){return 
e?n:t})(e)}function h(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var R=(function(y){(0,f.default)(w,y);var R,_,L=(R=w,_=h(),function(){var e,t=(0,c.default)(R);if(_){var n=(0,c.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,u.default)(this,e)});function w(){var e;(0,n.default)(this,w);for(var t=arguments.length,o=new Array(t),f=0;f=e.length?{done:!0}:{done:!1,value:e[i++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function I(e,t){if(e){if("string"==typeof e)return _(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?_(e,t):void 0}}function _(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,i=new Array(t);n0&&this.props.stickySectionHeadersEnabled)o+=this._listRef._getFrameMetricsApprox(t-e.itemIndex).length;var l=(0,n.default)({},e,{viewOffset:o,index:t});this._listRef.scrollToIndex(l)}}},{key:"getListRef",value:function(){return this._listRef}},{key:"render",value:function(){for(var e,i=this,o=this.props,l=(o.ItemSeparatorComponent,o.SectionSeparatorComponent,o.renderItem,o.renderSectionFooter,o.renderSectionHeader,o.sections,o.stickySectionHeadersEnabled,(0,t.default)(o,v)),u=this.props.ListHeaderComponent?1:0,c=this.props.stickySectionHeadersEnabled?[]:void 0,s=0,p=S(this.props.sections);!(e=p()).done;){var y=e.value;null!=c&&c.push(s+u),s+=2,s+=this.props.getItemCount(y.data)}var I=this._renderItem(s);return h.createElement(f.VirtualizedList,(0,n.default)({},l,{keyExtractor:this._keyExtractor,stickyHeaderIndices:c,renderItem:I,data:this.props.sections,getItem:function(e,t){return i._getItem(i.props,e,t)},getItemCount:function(){return s},onViewableItemsChanged:this.props.onViewableItemsChanged?this._onViewableItemsChanged:void 0,ref:this._captureRef}))}},{key:"_getItem",value:function(e,t,n){if(!t)return null;for(var i=n-1,o=0;o=o(p)+1)t-=o(p)+1;else return-1===t?{section:s,key:f+':header',index:null,header:!0,trailingSection:u[c+1]}:t===o(p)?{section:s,key:f+':footer',index:null,header:!1,trailingSection:u[c+1]}:{section:s,key:f+':'+(s.keyExtractor||l||r(d[13]).keyExtractor)(i(p,t),t),index:t,leadingItem:i(p,t-1),leadingSection:u[c-1],trailingItem:i(p,t+1),trailingSection:u[c+1]}}}},{key:"_getSeparatorComponent",value:function(e,t,n){if(!(t=t||this._subExtractor(e)))return null;var i=t.section.ItemSeparatorComponent||this.props.ItemSeparatorComponent,o=this.props.SectionSeparatorComponent,l=e===n-1,u=t.index===this.props.getItemCount(t.section.data)-1;return o&&u?o:!i||u||l?null:i}}]),x})(h.PureComponent);function k(t){var 
i=t.LeadingSeparatorComponent,o=t.SeparatorComponent,l=t.cellKey,u=t.prevCellKey,c=t.setSelfHighlightCallback,s=t.updateHighlightFor,p=t.setSelfUpdatePropsCallback,v=t.updatePropsFor,y=t.item,S=t.index,I=t.section,_=t.inverted,b=h.useState(!1),x=(0,e.default)(b,2),k=x[0],C=x[1],E=h.useState(!1),w=(0,e.default)(E,2),H=w[0],P=w[1],F=h.useState({leadingItem:t.leadingItem,leadingSection:t.leadingSection,section:t.section,trailingItem:t.item,trailingSection:t.trailingSection}),R=(0,e.default)(F,2),M=R[0],O=R[1],V=h.useState({leadingItem:t.item,leadingSection:t.leadingSection,section:t.section,trailingItem:t.trailingItem,trailingSection:t.trailingSection}),j=(0,e.default)(V,2),A=j[0],U=j[1];h.useEffect(function(){return c(l,P),p(l,U),function(){p(l,null),c(l,null)}},[l,c,U,p]);var L={highlight:function(){C(!0),P(!0),null!=u&&s(u,!0)},unhighlight:function(){C(!1),P(!1),null!=u&&s(u,!1)},updateProps:function(e,t){'leading'===e?null!=i?O((0,n.default)({},M,t)):null!=u&&v(u,(0,n.default)({},M,t)):'trailing'===e&&null!=o&&U((0,n.default)({},A,t))}},B=t.renderItem({item:y,index:S,section:I,separators:L}),K=null!=i&&h.createElement(i,(0,n.default)({highlighted:k},M)),T=null!=o&&h.createElement(o,(0,n.default)({highlighted:H},A));return K||T?h.createElement(f.View,null,!1===_?K:T,B,!1===_?T:K):B}m.exports=x},693,[407,430,508,436,402,403,421,417,419,422,425,411,534,653]); +__d(function(g,r,i,a,m,e,d){!(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},p=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var l=p?Object.getOwnPropertyDescriptor(n,c):null;l&&(l.get||l.set)?Object.defineProperty(u,c,l):u[c]=n[c]}u.default=n,f&&f.set(n,u)})(r(d[0]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}m.exports=r(d[1])(r(d[2]),{collapsable:!1})},694,[534,644,589]); +__d(function(g,r,i,a,m,e,d){!(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},p=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var l=p?Object.getOwnPropertyDescriptor(n,c):null;l&&(l.get||l.set)?Object.defineProperty(u,c,l):u[c]=n[c]}u.default=n,f&&f.set(n,u)})(r(d[0]));function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}m.exports=r(d[1])(r(d[2]),{collapsable:!0})},695,[534,644,579]); +__d(function(g,r,i,a,m,e,d){'use strict';var n={};m.exports=function(o,t){n[o]||(console.warn(t),n[o]=!0)}},696,[]); +__d(function(g,r,i,a,m,_e,d){'use strict';var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),c=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=p(t);if(n&&n.has(e))return n.get(e);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in e)if("default"!==c&&Object.prototype.hasOwnProperty.call(e,c)){var f=u?Object.getOwnPropertyDescriptor(e,c):null;f&&(f.get||f.set)?Object.defineProperty(o,c,f):o[c]=e[c]}o.default=e,n&&n.set(e,o);return o})(r(d[6])),f=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=r(d[0])(r(d[9]));function p(e){if("function"!=typeof WeakMap)return null;var 
t=new WeakMap,n=new WeakMap;return(p=function(e){return e?n:t})(e)}function y(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var h=(function(f){(0,n.default)(b,f);var p,h,O=(p=b,h=y(),function(){var e,t=(0,u.default)(p);if(h){var n=(0,u.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,o.default)(this,e)});function b(){return(0,e.default)(this,b),O.apply(this,arguments)}return(0,t.default)(b,[{key:"render",value:function(){return c.createElement(s.default,{style:[v.dummyDatePickerIOS,this.props.style]},c.createElement(l.default,{style:v.datePickerText},"DatePickerIOS is not supported on this platform!"))}}]),b})(c.Component),v=f.default.create({dummyDatePickerIOS:{height:100,width:300,backgroundColor:'#ffbcbc',borderWidth:1,borderColor:'red',alignItems:'center',justifyContent:'center',margin:10},datePickerText:{color:'#333333',margin:20}});m.exports=h},697,[407,402,403,417,419,422,534,578,589,579]); +__d(function(g,r,i,a,m,_e,d){var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),s=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),f=r(d[0])(r(d[8])),c=k(r(d[9])),p=r(d[0])(r(d[10])),w=r(d[0])(r(d[11])),h=r(d[0])(r(d[12])),v=r(d[0])(r(d[13])),y=r(d[0])(r(d[14])),D=k(r(d[15])),b=["onDrawerStateChanged","renderNavigationView","onDrawerOpen","onDrawerClose"];function C(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(C=function(e){return e?n:t})(e)}function k(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=C(t);if(n&&n.has(e))return n.get(e);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var s in e)if("default"!==s&&Object.prototype.hasOwnProperty.call(e,s)){var l=u?Object.getOwnPropertyDescriptor(e,s):null;l&&(l.get||l.set)?Object.defineProperty(o,s,l):o[s]=e[s]}return o.default=e,n&&n.set(e,o),o}function _(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var S=['Idle','Dragging','Settling'],R=(function(w){(0,u.default)(O,w);var C,k,R=(C=O,k=_(),function(){var e,t=(0,l.default)(C);if(k){var n=(0,l.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,s.default)(this,e)});function O(){var e;(0,n.default)(this,O);for(var t=arguments.length,o=new Array(t),u=0;u=21&&null!=this.props.statusBarBackgroundColor,l=c.createElement(h.default,{style:[B.drawerSubview,{width:this.props.drawerWidth,backgroundColor:this.props.drawerBackgroundColor}],collapsable:!1},o(),s&&c.createElement(h.default,{style:B.drawerStatusBar})),w=c.createElement(h.default,{style:B.mainSubview,collapsable:!1},s&&c.createElement(p.default,{translucent:!0,backgroundColor:this.props.statusBarBackgroundColor}),s&&c.createElement(h.default,{style:[B.statusBar,{backgroundColor:this.props.statusBarBackgroundColor}]}),this.props.children);return 
c.createElement(D.default,(0,e.default)({},u,{ref:this._nativeRef,drawerWidth:this.props.drawerWidth,drawerPosition:this.props.drawerPosition,drawerLockMode:this.props.drawerLockMode,style:[B.base,this.props.style],onDrawerSlide:this._onDrawerSlide,onDrawerOpen:this._onDrawerOpen,onDrawerClose:this._onDrawerClose,onDrawerStateChanged:this._onDrawerStateChanged}),w,l)}},{key:"openDrawer",value:function(){D.Commands.openDrawer((0,y.default)(this._nativeRef.current))}},{key:"closeDrawer",value:function(){D.Commands.closeDrawer((0,y.default)(this._nativeRef.current))}},{key:"blur",value:function(){(0,y.default)(this._nativeRef.current).blur()}},{key:"focus",value:function(){(0,y.default)(this._nativeRef.current).focus()}},{key:"measure",value:function(e){(0,y.default)(this._nativeRef.current).measure(e)}},{key:"measureInWindow",value:function(e){(0,y.default)(this._nativeRef.current).measureInWindow(e)}},{key:"measureLayout",value:function(e,t,n){(0,y.default)(this._nativeRef.current).measureLayout(e,t,n)}},{key:"setNativeProps",value:function(e){(0,y.default)(this._nativeRef.current).setNativeProps(e)}}],[{key:"positions",get:function(){return console.warn('Setting DrawerLayoutAndroid drawerPosition using `DrawerLayoutAndroid.positions` is deprecated. Instead pass the string value "left" or "right"'),{Left:'left',Right:'right'}}}]),O})(c.Component);R.defaultProps={drawerBackgroundColor:'white'};var B=w.default.create({base:{flex:1,elevation:16},mainSubview:{position:'absolute',top:0,left:0,right:0,bottom:0},drawerSubview:{position:'absolute',top:0,bottom:0},statusBar:{height:p.default.currentHeight},drawerStatusBar:{position:'absolute',top:0,left:0,right:0,height:p.default.currentHeight,backgroundColor:'rgba(0, 0, 0, 0.251)'}});m.exports=R},698,[407,436,508,402,403,417,419,422,426,534,699,578,579,659,702,703]); +__d(function(g,r,i,a,m,_e,d){var t,e=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),l=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),s=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var n=S(e);if(n&&n.has(t))return n.get(t);var l={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in t)if("default"!==u&&Object.prototype.hasOwnProperty.call(t,u)){var c=o?Object.getOwnPropertyDescriptor(t,u):null;c&&(c.get||c.set)?Object.defineProperty(l,u,c):l[u]=t[u]}l.default=t,n&&n.set(t,l);return l})(r(d[7])),f=r(d[0])(r(d[8])),p=r(d[0])(r(d[9])),y=r(d[0])(r(d[10])),v=r(d[0])(r(d[11])),k=r(d[0])(r(d[12]));function S(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,n=new WeakMap;return(S=function(t){return t?n:e})(t)}function b(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}function _(t){var e,n,l=null!=(e=t.animated)&&e,o=null!=(n=t.showHideTransition)?n:'fade';return{backgroundColor:null!=t.backgroundColor?{value:t.backgroundColor,animated:l}:null,barStyle:null!=t.barStyle?{value:t.barStyle,animated:l}:null,translucent:t.translucent,hidden:null!=t.hidden?{value:t.hidden,animated:l,transition:o}:null,networkActivityIndicatorVisible:t.networkActivityIndicatorVisible}}var h=(function(t){(0,l.default)(h,t);var c,s,S=(c=h,s=b(),function(){var t,e=(0,u.default)(c);if(s){var n=(0,u.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else 
t=e.apply(this,arguments);return(0,o.default)(this,t)});function h(){var t;(0,e.default)(this,h);for(var n=arguments.length,l=new Array(n),o=0;o is only supported on iOS.'),0===c.Children.count(this.props.children)?null:c.createElement(s.default,{style:[this.props.style,v.container],nativeID:this.props.nativeID,backgroundColor:this.props.backgroundColor},this.props.children)}}]),b})(c.Component),v=l.default.create({container:{position:'absolute'}});m.exports=h},705,[407,402,403,417,419,422,534,426,578,706]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=(0,r(d[0])(r(d[1])).default)('InputAccessory',{interfaceOnly:!0,paperComponentName:'RCTInputAccessoryView',excludedPlatforms:['android']});e.default=t},706,[407,584]); +__d(function(g,r,i,a,m,_e,d){Object.defineProperty(_e,"__esModule",{value:!0}),_e.default=void 0;var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),f=r(d[0])(r(d[6])),l=r(d[0])(r(d[7])),s=r(d[0])(r(d[8])),c=r(d[0])(r(d[9])),y=r(d[0])(r(d[10])),h=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=_(t);if(n&&n.has(e))return n.get(e);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in e)if("default"!==f&&Object.prototype.hasOwnProperty.call(e,f)){var l=u?Object.getOwnPropertyDescriptor(e,f):null;l&&(l.get||l.set)?Object.defineProperty(o,f,l):o[f]=e[f]}o.default=e,n&&n.set(e,o);return o})(r(d[11])),p=r(d[0])(r(d[12])),v=r(d[0])(r(d[13])),b=["behavior","children","contentContainerStyle","enabled","keyboardVerticalOffset","style"];function _(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(_=function(e){return e?n:t})(e)}function k(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var O=(function(_){(0,u.default)(E,_);var O,w,L=(O=E,w=k(),function(){var e,t=(0,l.default)(O);if(w){var n=(0,l.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,f.default)(this,e)});function E(e){var t;return(0,n.default)(this,E),(t=L.call(this,e))._frame=null,t._keyboardEvent=null,t._subscriptions=[],t._initialFrameHeight=0,t._onKeyboardChange=function(e){t._keyboardEvent=e,t._updateBottomIfNecesarry()},t._onLayout=function(e){var n=null==t._frame;t._frame=e.nativeEvent.layout,t._initialFrameHeight||(t._initialFrameHeight=t._frame.height),n&&t._updateBottomIfNecesarry()},t._updateBottomIfNecesarry=function(){if(null!=t._keyboardEvent){var e=t._keyboardEvent,n=e.duration,o=e.easing,u=e.endCoordinates,f=t._relativeKeyboardHeight(u);t.state.bottom!==f&&(n&&o&&c.default.configureNext({duration:n>10?n:10,update:{duration:n>10?n:10,type:c.default.Types[o]||'keyboard'}}),t.setState({bottom:f}))}else t.setState({bottom:0})},t.state={bottom:0},t.viewRef=h.createRef(),t}return(0,o.default)(E,[{key:"_relativeKeyboardHeight",value:function(e){var t,n=this._frame;if(!n||!e)return 0;var o=e.screenY-(null!=(t=this.props.keyboardVerticalOffset)?t:0);return 
Math.max(n.y+n.height-o,0)}},{key:"componentDidMount",value:function(){'ios'===y.default.OS?this._subscriptions=[s.default.addListener('keyboardWillChangeFrame',this._onKeyboardChange)]:this._subscriptions=[s.default.addListener('keyboardDidHide',this._onKeyboardChange),s.default.addListener('keyboardDidShow',this._onKeyboardChange)]}},{key:"componentWillUnmount",value:function(){this._subscriptions.forEach(function(e){e.remove()})}},{key:"render",value:function(){var n=this.props,o=n.behavior,u=n.children,f=n.contentContainerStyle,l=n.enabled,s=void 0===l||l,c=(n.keyboardVerticalOffset,n.style),y=(0,t.default)(n,b),_=!0===s?this.state.bottom:0;switch(o){case'height':var k;return null!=this._frame&&this.state.bottom>0&&(k={height:this._initialFrameHeight-_,flex:0}),h.createElement(v.default,(0,e.default)({ref:this.viewRef,style:p.default.compose(c,k),onLayout:this._onLayout},y),u);case'position':return h.createElement(v.default,(0,e.default)({ref:this.viewRef,style:c,onLayout:this._onLayout},y),h.createElement(v.default,{style:p.default.compose(f,{bottom:_})},u));case'padding':return h.createElement(v.default,(0,e.default)({ref:this.viewRef,style:p.default.compose(c,{paddingBottom:_}),onLayout:this._onLayout},y),u);default:return h.createElement(v.default,(0,e.default)({ref:this.viewRef,onLayout:this._onLayout,style:c},y),u)}}}]),E})(h.Component);_e.default=O},707,[407,436,508,402,403,417,419,422,657,658,426,534,578,579]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=r(d[0])},708,[709]); +__d(function(g,r,i,a,m,_e,d){'use strict';var e=r(d[0])(r(d[1])),t=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),u=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),f=(function(e,t){if(!t&&e&&e.__esModule)return e;if(null===e||"object"!=typeof e&&"function"!=typeof e)return{default:e};var n=l(t);if(n&&n.has(e))return n.get(e);var u={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in e)if("default"!==f&&Object.prototype.hasOwnProperty.call(e,f)){var c=o?Object.getOwnPropertyDescriptor(e,f):null;c&&(c.get||c.set)?Object.defineProperty(u,f,c):u[f]=e[f]}u.default=e,n&&n.set(e,u);return u})(r(d[6])),c=r(d[0])(r(d[7]));function l(e){if("function"!=typeof WeakMap)return null;var t=new WeakMap,n=new WeakMap;return(l=function(e){return e?n:t})(e)}function p(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(e){return!1}}var s=(function(c){(0,n.default)(h,c);var l,s,v=(l=h,s=p(),function(){var e,t=(0,o.default)(l);if(s){var n=(0,o.default)(this).constructor;e=Reflect.construct(t,arguments,n)}else e=t.apply(this,arguments);return(0,u.default)(this,e)});function h(){return(0,e.default)(this,h),v.apply(this,arguments)}return(0,t.default)(h,[{key:"render",value:function(){var e=r(d[8]);return f.createElement(e,{style:[y.unimplementedView,this.props.style]},this.props.children)}}]),h})(f.Component),y=c.default.create({unimplementedView:{}});m.exports=s},709,[407,402,403,417,419,422,534,578,579]); +__d(function(g,r,i,a,m,_e,d){var t,e,n=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),s=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),p=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),f=(r(d[0])(r(d[8])),r(d[0])(r(d[9])),r(d[0])(r(d[10])));function h(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return 
Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var v=r(d[11]),y=0,R=(function(t){(0,l.default)(R,t);var e,n,c=(e=R,n=h(),function(){var t,o=(0,p.default)(e);if(n){var s=(0,p.default)(this).constructor;t=Reflect.construct(o,arguments,s)}else t=o.apply(this,arguments);return(0,u.default)(this,t)});function R(t){var e;return(0,o.default)(this,R),(e=c.call(this,t))._identifier=y++,e}return(0,s.default)(R,[{key:"componentDidMount",value:function(){}},{key:"componentWillUnmount",value:function(){this._eventSubscription&&this._eventSubscription.remove()}},{key:"componentDidUpdate",value:function(){}},{key:"render",value:function(){var t=this;if(!0!==this.props.visible)return null;var e={backgroundColor:!0===this.props.transparent?'transparent':'white'},n=this.props.animationType||'none',o=this.props.presentationStyle;o||(o='fullScreen',!0===this.props.transparent&&(o='overFullScreen'));var s=this.props.children;return v.createElement(f.default,{animationType:n,presentationStyle:o,transparent:this.props.transparent,hardwareAccelerated:this.props.hardwareAccelerated,onRequestClose:this.props.onRequestClose,onShow:this.props.onShow,onDismiss:function(){t.props.onDismiss&&t.props.onDismiss()},visible:this.props.visible,statusBarTranslucent:this.props.statusBarTranslucent,identifier:this._identifier,style:b.modal,onStartShouldSetResponder:this._shouldSetResponder,supportedOrientations:this.props.supportedOrientations,onOrientationChange:this.props.onOrientationChange},v.createElement(r(d[12]).VirtualizedListContextResetter,null,v.createElement(r(d[13]).Context.Provider,{value:null},v.createElement(r(d[14]),{style:[b.container,e],collapsable:!1},s))))}},{key:"_shouldSetResponder",value:function(){return!0}}]),R})(v.Component);R.defaultProps={visible:!0,hardwareAccelerated:!1},R.contextType=r(d[15]).RootTagContext;var S=r(d[16]).getConstants().isRTL?'right':'left',b=r(d[17]).create({modal:{position:'absolute'},container:(t={},(0,n.default)(t,S,0),(0,n.default)(t,"top",0),(0,n.default)(t,"flex",1),t)}),C=null!=(e=c.default.unstable_Modal)?e:R;m.exports=C},710,[407,652,402,403,417,419,422,711,500,712,713,534,678,655,579,714,715,578]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;e.default={unstable_Modal:null}},711,[]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('ModalManager');e.default=n},712,[428]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var o=(0,r(d[0])(r(d[1])).default)('ModalHostView',{interfaceOnly:!0,paperComponentName:'RCTModalHostView'});e.default=o},713,[407,584]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.RootTagContext=void 
0,e.createRootTag=function(t){return t};var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var p in n)if("default"!==p&&Object.prototype.hasOwnProperty.call(n,p)){var l=c?Object.getOwnPropertyDescriptor(n,p):null;l&&(l.get||l.set)?Object.defineProperty(f,p,l):f[p]=n[p]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).createContext(0);e.RootTagContext=n},714,[534]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=(function(){if(t.default){var n=t.default.getConstants(),f=n.isRTL,L=n.doLeftAndRightSwapInRTL,R=n.localeIdentifier;return{isRTL:f,doLeftAndRightSwapInRTL:L,localeIdentifier:R}}return{isRTL:!1,doLeftAndRightSwapInRTL:!0}})();m.exports={getConstants:function(){return n},allowRTL:function(n){t.default&&t.default.allowRTL(n)},forceRTL:function(n){t.default&&t.default.forceRTL(n)},swapLeftAndRightInRTL:function(n){t.default&&t.default.swapLeftAndRightInRTL(n)},isRTL:n.isRTL,doLeftAndRightSwapInRTL:n.doLeftAndRightSwapInRTL}},715,[407,716]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('I18nManager');e.default=n},716,[428]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=r(d[0])(r(d[1])),s=r(d[0])(r(d[2])),t=r(d[0])(r(d[3])),l=(function(n,s){if(!s&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var t=p(s);if(t&&t.has(n))return t.get(n);var l={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var u in n)if("default"!==u&&Object.prototype.hasOwnProperty.call(n,u)){var f=o?Object.getOwnPropertyDescriptor(n,u):null;f&&(f.get||f.set)?Object.defineProperty(l,u,f):l[u]=n[u]}l.default=n,t&&t.set(n,l);return l})(r(d[4])),o=r(d[0])(r(d[5])),u=r(d[0])(r(d[6])),f=r(d[0])(r(d[7])),c=["accessible","android_disableSound","android_ripple","cancelable","children","delayLongPress","disabled","focusable","onLongPress","onPress","onPressIn","onPressOut","pressRetentionOffset","style","testOnly_pressed","unstable_pressDelay"];function p(n){if("function"!=typeof WeakMap)return null;var s=new WeakMap,t=new WeakMap;return(p=function(n){return n?t:s})(n)}function b(n){var t=(0,l.useState)(!1),o=(0,s.default)(t,2),u=o[0],f=o[1];return[u||n,f]}var P=l.memo(l.forwardRef(function(p,P){var y=p.accessible,v=p.android_disableSound,O=p.android_ripple,_=p.cancelable,S=p.children,M=p.delayLongPress,h=p.disabled,j=p.focusable,w=p.onLongPress,I=p.onPress,L=p.onPressIn,R=p.onPressOut,D=p.pressRetentionOffset,k=p.style,W=p.testOnly_pressed,z=p.unstable_pressDelay,E=(0,t.default)(p,c),H=(0,l.useRef)(null);(0,l.useImperativeHandle)(P,function(){return H.current});var 
N=(0,o.default)(O,H),q=b(!0===W),x=(0,s.default)(q,2),A=x[0],B=x[1],C=(0,r(d[8]).normalizeRect)(p.hitSlop),F=null!=h?(0,n.default)({},p.accessibilityState,{disabled:h}):p.accessibilityState,G=(0,n.default)({},E,null==N?void 0:N.viewProps,{accessible:!1!==y,accessibilityState:F,focusable:!1!==j,hitSlop:C}),J=(0,l.useMemo)(function(){return{cancelable:_,disabled:h,hitSlop:C,pressRectOffset:D,android_disableSound:v,delayLongPress:M,delayPressIn:z,onLongPress:w,onPress:I,onPressIn:function(n){null!=N&&N.onPressIn(n),B(!0),null!=L&&L(n)},onPressMove:null==N?void 0:N.onPressMove,onPressOut:function(n){null!=N&&N.onPressOut(n),B(!1),null!=R&&R(n)}}},[v,N,_,M,h,C,w,I,L,R,D,B,z]),K=(0,u.default)(J);return l.createElement(f.default,(0,n.default)({},G,K,{ref:H,style:'function'==typeof k?k({pressed:A}):k,collapsable:!1}),'function'==typeof S?S({pressed:A}):S,null)}));P.displayName='Pressable';var y=P;e.default=y},717,[407,436,430,508,534,718,605,579,611]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(l,u){var s=null!=l?l:{},c=s.color,f=s.borderless,p=s.radius,v=s.foreground;return(0,o.useMemo)(function(){if('android'===t.Platform.OS&&t.Platform.Version>=21&&(null!=c||null!=f||null!=p)){var o=(0,t.processColor)(c);(0,n.default)(null==o||'number'==typeof o,'Unexpected color given for Ripple color');var l={type:'RippleAndroid',color:o,borderless:!0===f,rippleRadius:p};return{viewProps:!0===v?{nativeForegroundAndroid:l}:{nativeBackgroundAndroid:l},onPressIn:function(n){var t,o,l=u.current;null!=l&&(r(d[4]).Commands.hotspotUpdate(l,null!=(t=n.nativeEvent.locationX)?t:0,null!=(o=n.nativeEvent.locationY)?o:0),r(d[4]).Commands.setPressed(l,!0))},onPressMove:function(n){var t,o,l=u.current;null!=l&&r(d[4]).Commands.hotspotUpdate(l,null!=(t=n.nativeEvent.locationX)?t:0,null!=(o=n.nativeEvent.locationY)?o:0)},onPressOut:function(n){var t=u.current;null!=t&&r(d[4]).Commands.setPressed(t,!1)}}}return null},[f,c,v,p,u])};var n=r(d[0])(r(d[1])),t=r(d[2]),o=(function(n,t){if(!t&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var o=l(t);if(o&&o.has(n))return o.get(n);var u={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var f=s?Object.getOwnPropertyDescriptor(n,c):null;f&&(f.get||f.set)?Object.defineProperty(u,c,f):u[c]=n[c]}u.default=n,o&&o.set(n,u);return u})(r(d[3]));function l(n){if("function"!=typeof WeakMap)return null;var t=new WeakMap,o=new WeakMap;return(l=function(n){return n?o:t})(n)}},718,[407,425,411,534,580]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),f=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var n=p(e);if(n&&n.has(t))return n.get(t);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var c=u?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(o,f,c):o[f]=t[f]}o.default=t,n&&n.set(t,o);return o})(r(d[6])),c=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=r(d[0])(r(d[9]));function p(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,n=new WeakMap;return(p=function(t){return t?n:e})(t)}function y(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof 
Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var h=(function(c){(0,n.default)(O,c);var p,h,b=(p=O,h=y(),function(){var t,e=(0,u.default)(p);if(h){var n=(0,u.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return(0,o.default)(this,t)});function O(){return(0,t.default)(this,O),b.apply(this,arguments)}return(0,e.default)(O,[{key:"render",value:function(){return f.createElement(s.default,{style:[v.dummy,this.props.style]},f.createElement(l.default,{style:v.text},"ProgressViewIOS is not supported on this platform!"))}}]),O})(f.Component),v=c.default.create({dummy:{width:120,height:20,backgroundColor:'#ffbcbc',borderWidth:1,borderColor:'red',alignItems:'center',justifyContent:'center'},text:{color:'#333333',margin:5,fontSize:10}});m.exports=h},719,[407,402,403,417,419,422,534,578,589,579]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t,n=r(d[0])(r(d[1])),u=r(d[0])(r(d[2])),f=r(d[0])(r(d[3])),o=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=c(n);if(u&&u.has(t))return u.get(t);var f={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in t)if("default"!==l&&Object.prototype.hasOwnProperty.call(t,l)){var p=o?Object.getOwnPropertyDescriptor(t,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=t[l]}f.default=t,u&&u.set(t,f);return f})(r(d[4])),l=r(d[0])(r(d[5])),p=["emulateUnlessSupported"];function c(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(c=function(t){return t?u:n})(t)}if('android'===f.default.OS)t=o.forwardRef(function(t,f){t.emulateUnlessSupported;var c=(0,u.default)(t,p);return o.createElement(l.default,(0,n.default)({},c,{ref:f}))});else{var s=r(d[6]).default;t=o.forwardRef(function(t,u){return o.createElement(s,(0,n.default)({emulateUnlessSupported:!0},t,{ref:u}))})}var v=t;e.default=v},720,[407,436,508,426,534,579,721]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var f=(0,r(d[0])(r(d[1])).default)('SafeAreaView',{paperComponentName:'RCTSafeAreaView',interfaceOnly:!0});e.default=f},721,[407,584]); +__d(function(g,r,i,a,m,_e,d){'use strict';var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),f=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var n=p(e);if(n&&n.has(t))return n.get(t);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var c=u?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(o,f,c):o[f]=t[f]}o.default=t,n&&n.set(t,o);return o})(r(d[6])),c=r(d[0])(r(d[7])),l=r(d[0])(r(d[8])),s=r(d[0])(r(d[9]));function p(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,n=new WeakMap;return(p=function(t){return t?n:e})(t)}function y(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var h=(function(c){(0,n.default)(O,c);var p,h,b=(p=O,h=y(),function(){var t,e=(0,u.default)(p);if(h){var n=(0,u.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else 
t=e.apply(this,arguments);return(0,o.default)(this,t)});function O(){return(0,t.default)(this,O),b.apply(this,arguments)}return(0,e.default)(O,[{key:"render",value:function(){return f.createElement(s.default,{style:[v.dummy,this.props.style]},f.createElement(l.default,{style:v.text},"SegmentedControlIOS is not supported on this platform!"))}}]),O})(f.Component),v=c.default.create({dummy:{width:120,height:50,backgroundColor:'#ffbcbc',borderWidth:1,borderColor:'red',alignItems:'center',justifyContent:'center'},text:{color:'#333333',margin:5,fontSize:10}});m.exports=h},722,[407,402,403,417,419,422,534,578,589,579]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),l=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var l=c(n);if(l&&l.has(t))return l.get(t);var u={},o=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var s=o?Object.getOwnPropertyDescriptor(t,f):null;s&&(s.get||s.set)?Object.defineProperty(u,f,s):u[f]=t[f]}u.default=t,l&&l.set(t,u);return u})(r(d[3])),u=r(d[0])(r(d[4])),o=r(d[0])(r(d[5])),f=r(d[0])(r(d[6])),s=["value","minimumValue","maximumValue","step","onValueChange","onSlidingComplete"];function c(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,l=new WeakMap;return(c=function(t){return t?l:n})(t)}var v,p=l.forwardRef(function(c,p){var b,y=f.default.compose(v.slider,c.style),S=c.value,O=void 0===S?.5:S,h=c.minimumValue,V=void 0===h?0:h,C=c.maximumValue,j=void 0===C?1:C,w=c.step,E=void 0===w?0:w,P=c.onValueChange,x=c.onSlidingComplete,M=(0,n.default)(c,s),R=P?function(t){var n=!0;'android'===u.default.OS&&(n=null!=t.nativeEvent.fromUser&&t.nativeEvent.fromUser),n&&P(t.nativeEvent.value)}:null,_=R,k=x?function(t){x(t.nativeEvent.value)}:null,W=!0===c.disabled||!0===(null==(b=c.accessibilityState)?void 0:b.disabled),D=W?(0,t.default)({},c.accessibilityState,{disabled:!0}):c.accessibilityState;return l.createElement(o.default,(0,t.default)({},M,{accessibilityState:D,enabled:!W,disabled:W,maximumValue:j,minimumValue:V,onChange:_,onResponderTerminationRequest:function(){return!1},onSlidingComplete:k,onStartShouldSetResponder:function(){return!0},onValueChange:R,ref:p,step:E,style:y,value:O}))});v='ios'===u.default.OS?f.default.create({slider:{height:40}}):f.default.create({slider:{}}),m.exports=p},723,[407,436,508,534,426,724,578]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var l=(0,r(d[0])(r(d[1])).default)('Slider',{interfaceOnly:!0,paperComponentName:'RCTSlider'});e.default=l},724,[407,584]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),u=C(r(d[5])),f=r(d[0])(r(d[6])),c=r(d[0])(r(d[7])),s=C(r(d[8])),v=C(r(d[9])),b=["disabled","ios_backgroundColor","onChange","onValueChange","style","thumbColor","trackColor","value"];function p(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,o=new WeakMap;return(p=function(t){return t?o:n})(t)}function C(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=p(n);if(o&&o.has(t))return o.get(t);var l={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var f in t)if("default"!==f&&Object.prototype.hasOwnProperty.call(t,f)){var 
c=u?Object.getOwnPropertyDescriptor(t,f):null;c&&(c.get||c.set)?Object.defineProperty(l,f,c):l[f]=t[f]}return l.default=t,o&&o.set(t,l),l}var h=function(){return!1},y=function(){return!0},R=u.forwardRef(function(p,C){var R=p.disabled,k=p.ios_backgroundColor,O=p.onChange,w=p.onValueChange,S=p.style,_=p.thumbColor,j=p.trackColor,P=p.value,T=(0,o.default)(p,b),E=null==j?void 0:j.false,M=null==j?void 0:j.true,V=u.useRef(null),F=(0,c.default)(V,C),W=u.useState({value:null}),q=(0,n.default)(W,2),D=q[0],N=q[1],L=function(t){null==O||O(t),null==w||w(t.nativeEvent.value),N({value:t.nativeEvent.value})};if(u.useLayoutEffect(function(){var t,n=!0===P;D.value!==n&&null!=(null==(t=V.current)?void 0:t.setNativeProps)&&('android'===l.default.OS?s.Commands.setNativeValue(V.current,n):v.Commands.setValue(V.current,n))},[P,D]),'android'===l.default.OS){var x,z={enabled:!0!==R,on:!0===P,style:S,thumbTintColor:_,trackColorForFalse:E,trackColorForTrue:M,trackTintColor:!0===P?M:E};return u.createElement(s.default,(0,t.default)({},T,z,{accessibilityRole:null!=(x=p.accessibilityRole)?x:'switch',onChange:L,onResponderTerminationRequest:h,onStartShouldSetResponder:y,ref:F}))}var A,B={disabled:R,onTintColor:M,style:f.default.compose({height:31,width:51},f.default.compose(S,null==k?null:{backgroundColor:k,borderRadius:16})),thumbTintColor:_,tintColor:E,value:!0===P};return u.createElement(v.default,(0,t.default)({},T,B,{accessibilityRole:null!=(A=p.accessibilityRole)?A:'switch',onChange:L,onResponderTerminationRequest:h,onStartShouldSetResponder:y,ref:F}))});e.default=R},725,[407,436,430,508,426,534,578,726,727,728]); +__d(function(g,r,_i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(){for(var o=arguments.length,u=new Array(o),i=0;i=t.length?{done:!0}:{done:!1,value:t[i++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function o(t,n){if(t){if("string"==typeof t)return u(t,n);var o=Object.prototype.toString.call(t).slice(8,-1);return"Object"===o&&t.constructor&&(o=t.constructor.name),"Map"===o||"Set"===o?Array.from(t):"Arguments"===o||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(o)?u(t,n):void 0}}function u(t,n){(null==n||n>t.length)&&(n=t.length);for(var o=0,u=new Array(n);o1&&(be=u.createElement(f.default,null,be)),oe=u.createElement(I,(0,t.default)({ref:ee},o,pe,{accessible:ce,autoCapitalize:ye,blurOnSubmit:ae,caretHidden:se,children:be,disableFullscreenUI:o.disableFullscreenUI,focusable:ie,mostRecentEventCount:H,onBlur:ue,onChange:te,onFocus:le,onScroll:re,onSelectionChange:ne,placeholder:Se,selection:M,style:Ce,text:X,textBreakStrategy:o.textBreakStrategy}))}return u.createElement(p.default.Provider,{value:!0},oe)}var M=u.forwardRef(function(l,o){var c=l.allowFontScaling,s=void 0===c||c,f=l.rejectResponderTermination,p=void 0===f||f,v=l.underlineColorAndroid,C=void 0===v?'transparent':v,y=(0,n.default)(l,F);return u.createElement(A,(0,t.default)({allowFontScaling:s,rejectResponderTermination:p,underlineColorAndroid:C},y,{forwardedRef:o}))});M.propTypes=o.default,M.State={currentlyFocusedInput:v.default.currentlyFocusedInput,currentlyFocusedField:v.default.currentlyFocusedField,focusTextInput:v.default.focusTextInput,blurTextInput:v.default.blurTextInput};var z=s.default.create({multilineInput:{paddingTop:5}});m.exports=M},729,[407,436,508,430,534,730,426,578,589,581,540,425,702,646,605,541,731,733]); +__d(function(g,r,i,a,m,e,d){'use strict';var 
n=['phoneNumber','link','address','calendarEvent','none','all'];m.exports=r(d[0])({},r(d[1]),{autoCapitalize:r(d[2]).oneOf(['none','sentences','words','characters']),autoComplete:r(d[2]).oneOf(['cc-csc','cc-exp','cc-exp-month','cc-exp-year','cc-number','email','name','password','postal-code','street-address','tel','username','off']),autoCorrect:r(d[2]).bool,spellCheck:r(d[2]).bool,autoFocus:r(d[2]).bool,allowFontScaling:r(d[2]).bool,maxFontSizeMultiplier:r(d[2]).number,editable:r(d[2]).bool,keyboardType:r(d[2]).oneOf(['default','email-address','numeric','phone-pad','number-pad','url','ascii-capable','numbers-and-punctuation','name-phone-pad','decimal-pad','twitter','web-search','ascii-capable-number-pad','visible-password']),keyboardAppearance:r(d[2]).oneOf(['default','light','dark']),returnKeyType:r(d[2]).oneOf(['done','go','next','search','send','none','previous','default','emergency-call','google','join','route','yahoo']),returnKeyLabel:r(d[2]).string,maxLength:r(d[2]).number,numberOfLines:r(d[2]).number,disableFullscreenUI:r(d[2]).bool,enablesReturnKeyAutomatically:r(d[2]).bool,multiline:r(d[2]).bool,textBreakStrategy:r(d[2]).oneOf(['simple','highQuality','balanced']),onBlur:r(d[2]).func,onFocus:r(d[2]).func,onChange:r(d[2]).func,onChangeText:r(d[2]).func,onContentSizeChange:r(d[2]).func,onTextInput:r(d[2]).func,onEndEditing:r(d[2]).func,onSelectionChange:r(d[2]).func,onSubmitEditing:r(d[2]).func,onKeyPress:r(d[2]).func,onLayout:r(d[2]).func,onScroll:r(d[2]).func,placeholder:r(d[2]).string,placeholderTextColor:r(d[3]),scrollEnabled:r(d[2]).bool,secureTextEntry:r(d[2]).bool,selectionColor:r(d[3]),selection:r(d[2]).shape({start:r(d[2]).number.isRequired,end:r(d[2]).number}),value:r(d[2]).string,defaultValue:r(d[2]).string,clearButtonMode:r(d[2]).oneOf(['never','while-editing','unless-editing','always']),clearTextOnFocus:r(d[2]).bool,selectTextOnFocus:r(d[2]).bool,blurOnSubmit:r(d[2]).bool,style:r(d[4]).style,underlineColorAndroid:r(d[3]),inlineImageLeft:r(d[2]).string,inlineImagePadding:r(d[2]).number,rejectResponderTermination:r(d[2]).bool,dataDetectorTypes:r(d[2]).oneOfType([r(d[2]).oneOf(n),r(d[2]).arrayOf(r(d[2]).oneOf(n))]),caretHidden:r(d[2]).bool,contextMenuHidden:r(d[2]).bool,inputAccessoryViewID:r(d[2]).string,textContentType:r(d[2]).oneOf(['none','URL','addressCity','addressCityAndState','addressState','countryName','creditCardNumber','emailAddress','familyName','fullStreetAddress','givenName','jobTitle','location','middleName','name','namePrefix','nameSuffix','nickname','organizationName','postalCode','streetAddressLine1','streetAddressLine2','sublocality','telephoneNumber','username','password','newPassword','oneTimeCode']),showSoftInputOnFocus:r(d[2]).bool})},730,[436,682,596,600,590]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=e.Commands=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=o(n);if(u&&u.has(t))return u.get(t);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var p=l?Object.getOwnPropertyDescriptor(t,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=t[c]}f.default=t,u&&u.set(t,f);return f})(r(d[3]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(o=function(t){return t?u:n})(t)}var 
f=(0,t.default)({supportedCommands:['focus','blur','setTextAndSelection']});e.Commands=f;var l=u.get('RCTSinglelineTextInputView',function(){return n.default});e.default=l},731,[407,542,732,555]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),n={uiViewClassName:'RCTSinglelineTextInputView',bubblingEventTypes:{topBlur:{phasedRegistrationNames:{bubbled:'onBlur',captured:'onBlurCapture'}},topChange:{phasedRegistrationNames:{bubbled:'onChange',captured:'onChangeCapture'}},topEndEditing:{phasedRegistrationNames:{bubbled:'onEndEditing',captured:'onEndEditingCapture'}},topFocus:{phasedRegistrationNames:{bubbled:'onFocus',captured:'onFocusCapture'}},topKeyPress:{phasedRegistrationNames:{bubbled:'onKeyPress',captured:'onKeyPressCapture'}},topSubmitEditing:{phasedRegistrationNames:{bubbled:'onSubmitEditing',captured:'onSubmitEditingCapture'}},topTouchCancel:{phasedRegistrationNames:{bubbled:'onTouchCancel',captured:'onTouchCancelCapture'}},topTouchEnd:{phasedRegistrationNames:{bubbled:'onTouchEnd',captured:'onTouchEndCapture'}},topTouchMove:{phasedRegistrationNames:{bubbled:'onTouchMove',captured:'onTouchMoveCapture'}}},directEventTypes:{},validAttributes:(0,t.default)({},o.default.validAttributes,{fontSize:!0,fontWeight:!0,fontVariant:!0,textShadowOffset:{diff:r(d[3])},allowFontScaling:!0,fontStyle:!0,textTransform:!0,textAlign:!0,fontFamily:!0,lineHeight:!0,isHighlighted:!0,writingDirection:!0,textDecorationLine:!0,textShadowRadius:!0,letterSpacing:!0,textDecorationStyle:!0,textDecorationColor:{process:r(d[4])},color:{process:r(d[4])},maxFontSizeMultiplier:!0,textShadowColor:{process:r(d[4])},editable:!0,inputAccessoryViewID:!0,caretHidden:!0,enablesReturnKeyAutomatically:!0,placeholderTextColor:{process:r(d[4])},onSelectionChange:!0,clearButtonMode:!0,onContentSizeChange:!0,keyboardType:!0,selection:!0,returnKeyType:!0,blurOnSubmit:!0,mostRecentEventCount:!0,onChange:!0,scrollEnabled:!0,selectionColor:{process:r(d[4])},contextMenuHidden:!0,secureTextEntry:!0,onTextInput:!0,placeholder:!0,autoCorrect:!0,onScroll:!0,multiline:!0,textContentType:!0,maxLength:!0,autoCapitalize:!0,keyboardAppearance:!0,passwordRules:!0,spellCheck:!0,selectTextOnFocus:!0,text:!0,clearTextOnFocus:!0})};m.exports=n},732,[407,436,544,551,546]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=e.Commands=void 0;var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=(function(t,n){if(!n&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var u=o(n);if(u&&u.has(t))return u.get(t);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var p=l?Object.getOwnPropertyDescriptor(t,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=t[c]}f.default=t,u&&u.set(t,f);return f})(r(d[3]));function o(t){if("function"!=typeof WeakMap)return null;var n=new WeakMap,u=new WeakMap;return(o=function(t){return t?u:n})(t)}var f=(0,t.default)({supportedCommands:['focus','blur','setTextAndSelection']});e.Commands=f;var l=u.get('RCTMultilineTextInputView',function(){return n.default});e.default=l},733,[407,542,732,555]); +__d(function(g,r,i,a,m,_e,d){var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),o=((function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var o=u(e);if(o&&o.has(t))return o.get(t);var E={},s=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var n in 
t)if("default"!==n&&Object.prototype.hasOwnProperty.call(t,n)){var l=s?Object.getOwnPropertyDescriptor(t,n):null;l&&(l.get||l.set)?Object.defineProperty(E,n,l):E[n]=t[n]}E.default=t,o&&o.set(t,E)})(r(d[3])),r(d[0])(r(d[4]))),E=r(d[0])(r(d[5])),s=r(d[0])(r(d[6])),n=r(d[0])(r(d[7])),l=r(d[0])(r(d[8]));function u(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,o=new WeakMap;return(u=function(t){return t?o:e})(t)}var h=function(t){var e=t.touches,o=t.changedTouches,E=e&&e.length>0,s=o&&o.length>0;return!E&&s?o[0]:E?e[0]:t},R='NOT_RESPONDER',_='RESPONDER_INACTIVE_PRESS_IN',c='RESPONDER_INACTIVE_PRESS_OUT',S='RESPONDER_ACTIVE_PRESS_IN',T='RESPONDER_ACTIVE_PRESS_OUT',P='RESPONDER_ACTIVE_LONG_PRESS_IN',D='RESPONDER_ACTIVE_LONG_PRESS_OUT',N='ERROR',O={NOT_RESPONDER:!1,RESPONDER_INACTIVE_PRESS_IN:!1,RESPONDER_INACTIVE_PRESS_OUT:!1,RESPONDER_ACTIVE_PRESS_IN:!1,RESPONDER_ACTIVE_PRESS_OUT:!1,RESPONDER_ACTIVE_LONG_PRESS_IN:!1,RESPONDER_ACTIVE_LONG_PRESS_OUT:!1,ERROR:!1},p=(0,e.default)({},O,{RESPONDER_ACTIVE_PRESS_OUT:!0,RESPONDER_ACTIVE_PRESS_IN:!0}),f=(0,e.default)({},O,{RESPONDER_INACTIVE_PRESS_IN:!0,RESPONDER_ACTIVE_PRESS_IN:!0,RESPONDER_ACTIVE_LONG_PRESS_IN:!0}),A=(0,e.default)({},O,{RESPONDER_ACTIVE_LONG_PRESS_IN:!0}),b='DELAY',I='RESPONDER_GRANT',L='RESPONDER_RELEASE',v='RESPONDER_TERMINATED',y='ENTER_PRESS_RECT',C='LEAVE_PRESS_RECT',G='LONG_PRESS_DETECTED',V={NOT_RESPONDER:{DELAY:N,RESPONDER_GRANT:_,RESPONDER_RELEASE:N,RESPONDER_TERMINATED:N,ENTER_PRESS_RECT:N,LEAVE_PRESS_RECT:N,LONG_PRESS_DETECTED:N},RESPONDER_INACTIVE_PRESS_IN:{DELAY:S,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:_,LEAVE_PRESS_RECT:c,LONG_PRESS_DETECTED:N},RESPONDER_INACTIVE_PRESS_OUT:{DELAY:T,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:_,LEAVE_PRESS_RECT:c,LONG_PRESS_DETECTED:N},RESPONDER_ACTIVE_PRESS_IN:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:S,LEAVE_PRESS_RECT:T,LONG_PRESS_DETECTED:P},RESPONDER_ACTIVE_PRESS_OUT:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:S,LEAVE_PRESS_RECT:T,LONG_PRESS_DETECTED:N},RESPONDER_ACTIVE_LONG_PRESS_IN:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:P,LEAVE_PRESS_RECT:D,LONG_PRESS_DETECTED:P},RESPONDER_ACTIVE_LONG_PRESS_OUT:{DELAY:N,RESPONDER_GRANT:N,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:P,LEAVE_PRESS_RECT:D,LONG_PRESS_DETECTED:N},error:{DELAY:R,RESPONDER_GRANT:_,RESPONDER_RELEASE:R,RESPONDER_TERMINATED:R,ENTER_PRESS_RECT:R,LEAVE_PRESS_RECT:R,LONG_PRESS_DETECTED:R}},H={componentDidMount:function(){E.default.isTV},componentWillUnmount:function(){this.touchableDelayTimeout&&clearTimeout(this.touchableDelayTimeout),this.longPressDelayTimeout&&clearTimeout(this.longPressDelayTimeout),this.pressOutDelayTimeout&&clearTimeout(this.pressOutDelayTimeout)},touchableGetInitialState:function(){return{touchable:{touchState:void 0,responderID:null}}},touchableHandleResponderTerminationRequest:function(){return!this.props.rejectResponderTermination},touchableHandleStartShouldSetResponder:function(){return!this.props.disabled},touchableLongPressCancelsPress:function(){return!0},touchableHandleResponderGrant:function(t){var e=t.currentTarget;t.persist(),this.pressOutDelayTimeout&&clearTimeout(this.pressOutDelayTimeout),this.pressOutDelayTimeout=null,this.state.touchable.touchState=R,this.state.touchable.responderID=e,this._receiveSignal(I,t);var o=void 
0!==this.touchableGetHighlightDelayMS?Math.max(this.touchableGetHighlightDelayMS(),0):130;0!==(o=isNaN(o)?130:o)?this.touchableDelayTimeout=setTimeout(this._handleDelay.bind(this,t),o):this._handleDelay(t);var E=void 0!==this.touchableGetLongPressDelayMS?Math.max(this.touchableGetLongPressDelayMS(),10):370;E=isNaN(E)?370:E,this.longPressDelayTimeout=setTimeout(this._handleLongDelay.bind(this,t),E+o)},touchableHandleResponderRelease:function(t){this.pressInLocation=null,this._receiveSignal(L,t)},touchableHandleResponderTerminate:function(t){this.pressInLocation=null,this._receiveSignal(v,t)},touchableHandleResponderMove:function(t){if(this.state.touchable.positionOnActivate){var e=this.state.touchable.positionOnActivate,o=this.state.touchable.dimensionsOnActivate,E=this.touchableGetPressRectOffset?this.touchableGetPressRectOffset():{left:20,right:20,top:20,bottom:20},s=E.left,n=E.top,l=E.right,u=E.bottom,R=this.touchableGetHitSlop?this.touchableGetHitSlop():null;R&&(s+=R.left||0,n+=R.top||0,l+=R.right||0,u+=R.bottom||0);var c=h(t.nativeEvent),S=c&&c.pageX,T=c&&c.pageY;if(this.pressInLocation)this._getDistanceBetweenPoints(S,T,this.pressInLocation.pageX,this.pressInLocation.pageY)>10&&this._cancelLongPressDelayTimeout();if(S>e.left-s&&T>e.top-n&&S>`");E!==s&&(this._performSideEffectsForTransition(E,s,t,e),this.state.touchable.touchState=s)}},_cancelLongPressDelayTimeout:function(){this.longPressDelayTimeout&&clearTimeout(this.longPressDelayTimeout),this.longPressDelayTimeout=null},_isHighlight:function(t){return t===S||t===P},_savePressInLocation:function(t){var e=h(t.nativeEvent),o=e&&e.pageX,E=e&&e.pageY,s=e&&e.locationX,n=e&&e.locationY;this.pressInLocation={pageX:o,pageY:E,locationX:s,locationY:n}},_getDistanceBetweenPoints:function(t,e,o,E){var s=t-o,n=e-E;return Math.sqrt(s*s+n*n)},_performSideEffectsForTransition:function(t,e,o,s){var n=this._isHighlight(t),u=this._isHighlight(e);(o===v||o===L)&&this._cancelLongPressDelayTimeout();var h=t===R&&e===_,c=!p[t]&&p[e];if((h||c)&&this._remeasureMetricsOnActivation(),f[t]&&o===G&&this.touchableHandleLongPress&&this.touchableHandleLongPress(s),u&&!n?this._startHighlight(s):!u&&n&&this._endHighlight(s),f[t]&&o===L){var S=!!this.props.onLongPress,T=A[t]&&(!S||!this.touchableLongPressCancelsPress());(!A[t]||T)&&this.touchableHandlePress&&(u||n||(this._startHighlight(s),this._endHighlight(s)),'android'!==E.default.OS||this.props.touchSoundDisabled||l.default.playTouchSound(),this.touchableHandlePress(s))}this.touchableDelayTimeout&&clearTimeout(this.touchableDelayTimeout),this.touchableDelayTimeout=null},_startHighlight:function(t){this._savePressInLocation(t),this.touchableHandleActivePressIn&&this.touchableHandleActivePressIn(t)},_endHighlight:function(t){var e=this;this.touchableHandleActivePressOut&&(this.touchableGetPressOutDelayMS&&this.touchableGetPressOutDelayMS()?this.pressOutDelayTimeout=setTimeout(function(){e.touchableHandleActivePressOut(t)},this.touchableGetPressOutDelayMS()):this.touchableHandleActivePressOut(t))},withoutDefaultFocusAndBlur:{}},M=(H.touchableHandleFocus,H.touchableHandleBlur,(0,t.default)(H,["touchableHandleFocus","touchableHandleBlur"]));H.withoutDefaultFocusAndBlur=M;var w={Mixin:H,renderDebugView:function(t){t.color,t.hitSlop;return null}};m.exports=w},734,[407,508,436,534,735,426,737,450,607]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0])(r(d[1])),o=t.default.twoArgumentPooler;function 
n(t,o){this.width=t,this.height=o}n.prototype.destructor=function(){this.width=null,this.height=null},n.getPooledFromElement=function(t){return n.getPooled(t.offsetWidth,t.offsetHeight)},t.default.addPoolingTo(n,o),m.exports=n},735,[407,736]); +__d(function(g,r,i,a,m,e,d){'use strict';var t=r(d[0])(r(d[1])),n=function(t){if(this.instancePool.length){var n=this.instancePool.pop();return this.call(n,t),n}return new this(t)},o=function(n){(0,t.default)(n instanceof this,'Trying to release an instance into a pool of a different type.'),n.destructor(),this.instancePool.length=e.length?{done:!0}:{done:!1,value:e[o++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function b(e,t){if(e){if("string"==typeof e)return h(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?h(e,t):void 0}}function h(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,o=new Array(t);n=0;n--)if(t[n]())return;i.exitApp()});var i={exitApp:function(){n.default&&n.default.invokeDefaultBackPressHandler()},addEventListener:function(n,f){return-1===t.indexOf(f)&&t.push(f),{remove:function(){return i.removeEventListener(n,f)}}},removeEventListener:function(n,i){-1!==t.indexOf(i)&&t.splice(t.indexOf(i),1)}};m.exports=i},750,[407,751,413]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('DeviceEventManager');e.default=n},751,[428]); +__d(function(g,r,i,a,m,_e,d){var t=r(d[0])(r(d[1])),e=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),o=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),l=(r(d[0])(r(d[7])),r(d[0])(r(d[8]))),s=(function(t,e){if(!e&&t&&t.__esModule)return t;if(null===t||"object"!=typeof t&&"function"!=typeof t)return{default:t};var n=p(e);if(n&&n.has(t))return n.get(t);var o={},u=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in t)if("default"!==c&&Object.prototype.hasOwnProperty.call(t,c)){var l=u?Object.getOwnPropertyDescriptor(t,c):null;l&&(l.get||l.set)?Object.defineProperty(o,c,l):o[c]=t[c]}o.default=t,n&&n.set(t,o);return o})(r(d[9]));function p(t){if("function"!=typeof WeakMap)return null;var e=new WeakMap,n=new WeakMap;return(p=function(t){return t?n:e})(t)}function f(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var h=(function(l){(0,n.default)(b,l);var p,h,y=(p=b,h=f(),function(){var t,e=(0,u.default)(p);if(h){var n=(0,u.default)(this).constructor;t=Reflect.construct(e,arguments,n)}else t=e.apply(this,arguments);return(0,o.default)(this,t)});function b(){var e;(0,t.default)(this,b);for(var 
n=arguments.length,o=new Array(n),u=0;uthis.eventPool.length&&this.eventPool.push(e)}function T(e){e.getPooled=_,e.eventPool=[],e.release=R}n(i[2])(P.prototype,{preventDefault:function(){this.defaultPrevented=!0;var e=this.nativeEvent;e&&(e.preventDefault?e.preventDefault():"unknown"!=typeof e.returnValue&&(e.returnValue=!1),this.isDefaultPrevented=w)},stopPropagation:function(){var e=this.nativeEvent;e&&(e.stopPropagation?e.stopPropagation():"unknown"!=typeof e.cancelBubble&&(e.cancelBubble=!0),this.isPropagationStopped=w)},persist:function(){this.isPersistent=w},isPersistent:x,destructor:function(){var e,n=this.constructor.Interface;for(e in n)this[e]=null;this.nativeEvent=this._targetInst=this.dispatchConfig=null,this.isPropagationStopped=this.isDefaultPrevented=x,this._dispatchInstances=this._dispatchListeners=null}}),P.Interface={type:null,target:null,currentTarget:function(){return null},eventPhase:null,bubbles:null,cancelable:null,timeStamp:function(e){return e.timeStamp||Date.now()},defaultPrevented:null,isTrusted:null},P.extend=function(e){function t(){}function r(){return l.apply(this,arguments)}var l=this;t.prototype=l.prototype;var a=new t;return n(i[2])(a,r.prototype),r.prototype=a,r.prototype.constructor=r,r.Interface=n(i[2])({},l.Interface,e),r.extend=l.extend,T(r),r},T(P);var E=P.extend({touchHistory:function(){return null}});function N(e){return"topTouchStart"===e}function C(e){return"topTouchMove"===e}var z=["topTouchStart"],I=["topTouchMove"],L=["topTouchCancel","topTouchEnd"],U=[],M={touchBank:U,numberActiveTouches:0,indexOfSingleActiveTouch:-1,mostRecentTimeStamp:0};function F(e){return e.timeStamp||e.timestamp}function D(e){if(null==(e=e.identifier))throw Error("Touch object is missing identifier.");return e}function A(e){var n=D(e),t=U[n];t?(t.touchActive=!0,t.startPageX=e.pageX,t.startPageY=e.pageY,t.startTimeStamp=F(e),t.currentPageX=e.pageX,t.currentPageY=e.pageY,t.currentTimeStamp=F(e),t.previousPageX=e.pageX,t.previousPageY=e.pageY,t.previousTimeStamp=F(e)):(t={touchActive:!0,startPageX:e.pageX,startPageY:e.pageY,startTimeStamp:F(e),currentPageX:e.pageX,currentPageY:e.pageY,currentTimeStamp:F(e),previousPageX:e.pageX,previousPageY:e.pageY,previousTimeStamp:F(e)},U[n]=t),M.mostRecentTimeStamp=F(e)}function H(e){var n=U[D(e)];n&&(n.touchActive=!0,n.previousPageX=n.currentPageX,n.previousPageY=n.currentPageY,n.previousTimeStamp=n.currentTimeStamp,n.currentPageX=e.pageX,n.currentPageY=e.pageY,n.currentTimeStamp=F(e),M.mostRecentTimeStamp=F(e))}function Q(e){var n=U[D(e)];n&&(n.touchActive=!1,n.previousPageX=n.currentPageX,n.previousPageY=n.currentPageY,n.previousTimeStamp=n.currentTimeStamp,n.currentPageX=e.pageX,n.currentPageY=e.pageY,n.currentTimeStamp=F(e),M.mostRecentTimeStamp=F(e))}var j,B={instrument:function(e){j=e},recordTouchTrack:function(e,n){if(null!=j&&j(e,n),C(e))n.changedTouches.forEach(H);else if(N(e))n.changedTouches.forEach(A),M.numberActiveTouches=n.touches.length,1===M.numberActiveTouches&&(M.indexOfSingleActiveTouch=n.touches[0].identifier);else if(("topTouchEnd"===e||"topTouchCancel"===e)&&(n.changedTouches.forEach(Q),M.numberActiveTouches=n.touches.length,1===M.numberActiveTouches))for(e=0;e=(a=n&-n)||16===l&&0!=(4194240&a)))return n;if(0!=(4&r)&&(r|=16&t),0!==(n=e.entangledLanes))for(e=e.entanglements,n&=r;0t;t++)n.push(e);return n}function wn(e,n,t){e.pendingLanes|=n,536870912!==n&&(e.suspendedLanes=0,e.pingedLanes=0),(e=e.eventTimes)[n=31-_n(n)]=t}function xn(e,n){var 
t=e.pendingLanes&~n;e.pendingLanes=n,e.suspendedLanes=0,e.pingedLanes=0,e.expiredLanes&=n,e.mutableReadLanes&=n,e.entangledLanes&=n,n=e.entanglements;var r=e.eventTimes;for(e=e.expirationTimes;0 component.");return t=$n,$n+=2,{node:Ln(t,"RCTRawText",n,{text:e},r)}}var Kn=setTimeout,Zn=clearTimeout;function et(e){var n=e.node,t=an(null,Ge,{style:{display:"none"}},e.canonical.viewConfig.validAttributes);return{node:Dn(n,t),canonical:e.canonical}}function nt(e,n,t){return n="",t&&(n=" (created by "+t+")"),"\n in "+(e||"Unknown")+n}function tt(e,n){return e?nt(e.displayName||e.name||null,n,null):""}var rt=Object.prototype.hasOwnProperty,lt=[],at=-1;function it(e){return{current:e}}function ut(e){0>at||(e.current=lt[at],lt[at]=null,at--)}function ot(e,n){lt[++at]=e.current,e.current=n}var st={},ct=it(st),dt=it(!1),ft=st;function pt(e,n){var t=e.type.contextTypes;if(!t)return st;var r=e.stateNode;if(r&&r.__reactInternalMemoizedUnmaskedChildContext===n)return r.__reactInternalMemoizedMaskedChildContext;var l,a={};for(l in t)a[l]=n[l];return r&&((e=e.stateNode).__reactInternalMemoizedUnmaskedChildContext=n,e.__reactInternalMemoizedMaskedChildContext=a),a}function ht(e){return null!==(e=e.childContextTypes)&&void 0!==e}function gt(){ut(dt),ut(ct)}function mt(e,n,t){if(ct.current!==st)throw Error("Unexpected context found on stack. This error is likely caused by a bug in React. Please file an issue.");ot(ct,n),ot(dt,t)}function vt(e,t,r){var l=e.stateNode;if(t=t.childContextTypes,"function"!=typeof l.getChildContext)return r;for(var a in l=l.getChildContext())if(!(a in t))throw Error((We(e)||"Unknown")+'.getChildContext(): key "'+a+'" is not defined in childContextTypes.');return n(i[2])({},r,l)}function bt(e){return e=(e=e.stateNode)&&e.__reactInternalMemoizedMergedChildContext||st,ft=ct.current,ot(ct,e),ot(dt,dt.current),!0}function yt(e,n,t){var r=e.stateNode;if(!r)throw Error("Expected to have an instance by this point. This error is likely caused by a bug in React. 
Please file an issue.");t?(e=vt(e,n,ft),r.__reactInternalMemoizedMergedChildContext=e,ut(dt),ut(ct),ot(ct,e)):ut(dt),ot(dt,t)}var St=null,kt=!1,wt=!1;function xt(){if(!wt&&null!==St){wt=!0;var e=0,t=En;try{var r=St;for(En=1;eg?(m=h,h=null):m=h.sibling;var v=f(l,h,u[g],o);if(null===v){null===h&&(h=m);break}e&&h&&null===v.alternate&&n(l,h),i=a(v,i,g),null===c?s=v:c.sibling=v,c=v,h=m}if(g===u.length)return t(l,h),s;if(null===h){for(;gg?(m=h,h=null):m=h.sibling;var b=f(l,h,v.value,o);if(null===b){null===h&&(h=m);break}e&&h&&null===b.alternate&&n(l,h),i=a(b,i,g),null===c?s=b:c.sibling=b,c=b,h=m}if(v.done)return t(l,h),s;if(null===h){for(;!v.done;g++,v=u.next())null!==(v=d(l,v.value,o))&&(i=a(v,i,g),null===c?s=v:c.sibling=v,c=v);return s}for(h=r(l,h);!v.done;g++,v=u.next())null!==(v=p(h,l,g,v.value,o))&&(e&&null!==v.alternate&&h.delete(null===v.key?g:v.key),i=a(v,i,g),null===c?s=v:c.sibling=v,c=v);return e&&h.forEach(function(e){return n(l,e)}),s}return function(e,r,a,u){var o="object"==typeof a&&null!==a&&a.type===_e&&null===a.key;if(o&&(a=a.props.children),"object"==typeof a&&null!==a){switch(a.$$typeof){case xe:e:{var s=a.key;for(o=r;null!==o;){if(o.key===s){if((s=a.type)===_e){if(7===o.tag){t(e,o.sibling),(r=l(o,a.props.children)).return=e,e=r;break e}}else if(o.elementType===s){t(e,o.sibling),(r=l(o,a.props)).ref=nr(e,o,a),r.return=e,e=r;break e}t(e,o);break}n(e,o),o=o.sibling}a.type===_e?((r=ti(a.props.children,e.mode,u,a.key)).return=e,e=r):((u=ni(a.type,a.key,a.props,null,e.mode,u)).ref=nr(e,r,a),u.return=e,e=u)}return i(e);case Pe:e:{for(o=a.key;null!==r;){if(r.key===o){if(4===r.tag&&r.stateNode.containerInfo===a.containerInfo&&r.stateNode.implementation===a.implementation){t(e,r.sibling),(r=l(r,a.children||[])).return=e,e=r;break e}t(e,r);break}n(e,r),r=r.sibling}(r=ai(a,e.mode,u)).return=e,e=r}return i(e)}if(m(a))return h(e,r,a,u);if(je(a))return g(e,r,a,u);tr(e,a)}if("string"==typeof a||"number"==typeof a)return a=""+a,null!==r&&6===r.tag?(t(e,r.sibling),(r=l(r,a)).return=e,e=r):(t(e,r),(r=li(a,e.mode,u)).return=e,e=r),i(e);if(void 0===a&&!o)switch(e.tag){case 1:case 0:case 11:case 15:throw Error((We(e)||"Component")+"(...): Nothing was returned from render. This usually means a return statement is missing. Or, to render nothing, return null.")}return t(e,r)}}var lr=rr(!0),ar=rr(!1),ir={},ur=it(ir),or=it(ir),sr=it(ir);function cr(e){if(e===ir)throw Error("Expected host context to exist. This error is likely caused by a bug in React. Please file an issue.");return e}function dr(e,n){ot(sr,n),ot(or,e),ot(ur,ir),ut(ur),ot(ur,{isInAParentText:!1})}function fr(){ut(ur),ut(or),ut(sr)}function pr(e){cr(sr.current);var n=cr(ur.current),t=e.type;t="AndroidTextInput"===t||"RCTMultilineTextInputView"===t||"RCTSinglelineTextInputView"===t||"RCTText"===t||"RCTVirtualText"===t,n!==(t=n.isInAParentText!==t?{isInAParentText:t}:n)&&(ot(or,e),ot(ur,t))}function hr(e){or.current===e&&(ut(ur),ut(or))}var gr=it(0);function mr(e){for(var n=e;null!==n;){if(13===n.tag){var t=n.memoizedState;if(null!==t&&(null===t.dehydrated||zn()||zn()))return n}else if(19===n.tag&&void 0!==n.memoizedProps.revealOrder){if(0!=(128&n.flags))return n}else if(null!==n.child){n.child.return=n,n=n.child;continue}if(n===e)break;for(;null===n.sibling;){if(null===n.return||n.return===e)return null;n=n.return}n.sibling.return=n.return,n=n.sibling}return null}var vr=[];function br(){for(var e=0;ea))throw Error("Too many re-renders. 
React limits the number of renders to prevent an infinite loop.");a+=1,Pr=xr=null,n.updateQueue=null,yr.current=tl,e=t(r,l)}while(Rr)}if(yr.current=Zr,n=null!==xr&&null!==xr.next,kr=0,Pr=xr=wr=null,_r=!1,n)throw Error("Rendered fewer hooks than expected. This may be caused by an accidental early return statement.");return e}function Cr(){var e={memoizedState:null,baseState:null,baseQueue:null,queue:null,next:null};return null===Pr?wr.memoizedState=Pr=e:Pr=Pr.next=e,Pr}function zr(){if(null===xr){var e=wr.alternate;e=null!==e?e.memoizedState:null}else e=xr.next;var n=null===Pr?wr.memoizedState:Pr.next;if(null!==n)Pr=n,xr=e;else{if(null===e)throw Error("Rendered more hooks than during the previous render.");e={memoizedState:(xr=e).memoizedState,baseState:xr.baseState,baseQueue:xr.baseQueue,queue:xr.queue,next:null},null===Pr?wr.memoizedState=Pr=e:Pr=Pr.next=e}return Pr}function Ir(e,n){return"function"==typeof n?n(e):n}function Lr(e){var n=zr(),t=n.queue;if(null===t)throw Error("Should have a queue. This is likely a bug in React. Please file an issue.");t.lastRenderedReducer=e;var r=xr,l=r.baseQueue,a=t.pending;if(null!==a){if(null!==l){var i=l.next;l.next=a.next,a.next=i}r.baseQueue=l=a,t.pending=null}if(null!==l){a=l.next,r=r.baseState;var u=i=null,o=null,s=a;do{var c=s.lane;if((kr&c)===c)null!==o&&(o=o.next={lane:0,action:s.action,eagerReducer:s.eagerReducer,eagerState:s.eagerState,next:null}),r=s.eagerReducer===e?s.eagerState:e(r,s.action);else{var d={lane:c,action:s.action,eagerReducer:s.eagerReducer,eagerState:s.eagerState,next:null};null===o?(u=o=d,i=r):o=o.next=d,wr.lanes|=c,da|=c}s=s.next}while(null!==s&&s!==a);null===o?i=r:o.next=u,_t(r,n.memoizedState)||(sl=!0),n.memoizedState=r,n.baseState=i,n.baseQueue=o,t.lastRenderedState=r}if(null!==(e=t.interleaved)){l=e;do{a=l.lane,wr.lanes|=a,da|=a,l=l.next}while(l!==e)}else null===l&&(t.lanes=0);return[n.memoizedState,t.dispatch]}function Ur(e){var n=zr(),t=n.queue;if(null===t)throw Error("Should have a queue. This is likely a bug in React. Please file an issue.");t.lastRenderedReducer=e;var r=t.dispatch,l=t.pending,a=n.memoizedState;if(null!==l){t.pending=null;var i=l=l.next;do{a=e(a,i.action),i=i.next}while(i!==l);_t(a,n.memoizedState)||(sl=!0),n.memoizedState=a,null===n.baseQueue&&(n.baseState=a),t.lastRenderedState=a}return[a,r]}function Mr(e,n,t){var r=n._getVersion;r=r(n._source);var l=n._workInProgressVersionSecondary;if(null!==l?e=l===r:(e=e.mutableReadLanes,(e=(kr&e)===e)&&(n._workInProgressVersionSecondary=r,vr.push(n))),e)return t(n._source);throw vr.push(n),Error("Cannot read from mutable source during the current render without tearing. This may be a bug in React. Please file an issue.")}function Fr(e,n,t,r){var l=la;if(null===l)throw Error("Expected a work-in-progress root. This is a bug in React. 
Please file an issue.");var a=n._getVersion,i=a(n._source),u=yr.current,o=u.useState(function(){return Mr(l,n,t)}),s=o[1],c=o[0];o=Pr;var d=e.memoizedState,f=d.refs,p=f.getSnapshot,h=d.source;d=d.subscribe;var g=wr;return e.memoizedState={refs:f,source:n,subscribe:r},u.useEffect(function(){f.getSnapshot=t,f.setSnapshot=s;var e=a(n._source);_t(i,e)||(e=t(n._source),_t(c,e)||(s(e),e=Ta(g),l.mutableReadLanes|=e&l.pendingLanes),Pn(l,l.mutableReadLanes))},[t,n,r]),u.useEffect(function(){return r(n._source,function(){var e=f.getSnapshot,t=f.setSnapshot;try{t(e(n._source));var r=Ta(g);l.mutableReadLanes|=r&l.pendingLanes}catch(e){t(function(){throw e})}})},[n,r]),_t(p,t)&&_t(h,n)&&_t(d,r)||((e={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:Ir,lastRenderedState:c}).dispatch=s=Kr.bind(null,wr,e),o.queue=e,o.baseQueue=null,c=Mr(l,n,t),o.memoizedState=o.baseState=c),c}function Dr(e,n,t){return Fr(zr(),e,n,t)}function Ar(e){var n=Cr();return"function"==typeof e&&(e=e()),n.memoizedState=n.baseState=e,e=(e=n.queue={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:Ir,lastRenderedState:e}).dispatch=Kr.bind(null,wr,e),[n.memoizedState,e]}function Hr(e,n,t,r){return e={tag:e,create:n,destroy:t,deps:r,next:null},null===(n=wr.updateQueue)?(n={lastEffect:null},wr.updateQueue=n,n.lastEffect=e.next=e):null===(t=n.lastEffect)?n.lastEffect=e.next=e:(r=t.next,t.next=e,e.next=r,n.lastEffect=e),e}function Qr(){return zr().memoizedState}function jr(e,n,t,r){var l=Cr();wr.flags|=e,l.memoizedState=Hr(1|n,t,void 0,void 0===r?null:r)}function Br(e,n,t,r){var l=zr();r=void 0===r?null:r;var a=void 0;if(null!==xr){var i=xr.memoizedState;if(a=i.destroy,null!==r&&Er(r,i.deps))return void(l.memoizedState=Hr(n,t,a,r))}wr.flags|=e,l.memoizedState=Hr(1|n,t,a,r)}function Wr(e,n){return jr(1049600,4,e,n)}function Or(e,n){return Br(1024,4,e,n)}function Vr(e,n){return Br(4,2,e,n)}function Yr(e,n){return"function"==typeof n?(e=e(),n(e),function(){n(null)}):null!==n&&void 0!==n?(e=e(),n.current=e,function(){n.current=null}):void 0}function qr(e,n,t){return t=null!==t&&void 0!==t?t.concat([e]):null,Br(4,2,Yr.bind(null,n,e),t)}function Xr(){}function $r(e,n){var t=zr();n=void 0===n?null:n;var r=t.memoizedState;return null!==r&&null!==n&&Er(n,r[1])?r[0]:(t.memoizedState=[e,n],e)}function Gr(e,n){var t=zr();n=void 0===n?null:n;var r=t.memoizedState;return null!==r&&null!==n&&Er(n,r[1])?r[0]:(e=e(),t.memoizedState=[e,n],e)}function Jr(e,n){var t=En;En=0!==t&&4>t?t:4,e(!0);var r=Sr.transition;Sr.transition=1;try{e(!1),n()}finally{En=t,Sr.transition=r}}function Kr(e,n,t){var r=Ra(),l=Ta(e),a={lane:l,action:t,eagerReducer:null,eagerState:null,next:null},i=e.alternate;if(e===wr||null!==i&&i===wr)Rr=_r=!0,null===(l=n.pending)?a.next=a:(a.next=l.next,l.next=a),n.pending=a;else{if(null!==la&&0!=(1&e.mode)&&0==(8&ra)){var u=n.interleaved;null===u?(a.next=a,null===At?At=[n]:At.push(n)):(a.next=u.next,u.next=a),n.interleaved=a}else null===(u=n.pending)?a.next=a:(a.next=u.next,u.next=a),n.pending=a;if(0===e.lanes&&(null===i||0===i.lanes)&&null!==(i=n.lastRenderedReducer))try{var o=n.lastRenderedState,s=i(o,t);if(a.eagerReducer=i,a.eagerState=s,_t(s,o))return}catch(e){}a=Ea(e,l,r),0!=(4194240&l)&&null!==a&&(e=n.lanes,l|=e&=a.pendingLanes,n.lanes=l,Pn(a,l))}}var 
Zr={readContext:Dt,useCallback:Tr,useContext:Tr,useEffect:Tr,useImperativeHandle:Tr,useLayoutEffect:Tr,useMemo:Tr,useReducer:Tr,useRef:Tr,useState:Tr,useDebugValue:Tr,useDeferredValue:Tr,useTransition:Tr,useMutableSource:Tr,useOpaqueIdentifier:Tr,unstable_isNewReconciler:!1},el={readContext:Dt,useCallback:function(e,n){return Cr().memoizedState=[e,void 0===n?null:n],e},useContext:Dt,useEffect:Wr,useImperativeHandle:function(e,n,t){return t=null!==t&&void 0!==t?t.concat([e]):null,jr(4,2,Yr.bind(null,n,e),t)},useLayoutEffect:function(e,n){return jr(4,2,e,n)},useMemo:function(e,n){var t=Cr();return n=void 0===n?null:n,e=e(),t.memoizedState=[e,n],e},useReducer:function(e,n,t){var r=Cr();return n=void 0!==t?t(n):n,r.memoizedState=r.baseState=n,e=(e=r.queue={pending:null,interleaved:null,lanes:0,dispatch:null,lastRenderedReducer:e,lastRenderedState:n}).dispatch=Kr.bind(null,wr,e),[r.memoizedState,e]},useRef:function(e){return e={current:e},Cr().memoizedState=e},useState:Ar,useDebugValue:Xr,useDeferredValue:function(e){var n=Ar(e),t=n[0],r=n[1];return Wr(function(){var n=Sr.transition;Sr.transition=1;try{r(e)}finally{Sr.transition=n}},[e]),t},useTransition:function(){var e=Ar(!1),n=e[0];return e=Jr.bind(null,e[1]),Cr().memoizedState=e,[n,e]},useMutableSource:function(e,n,t){var r=Cr();return r.memoizedState={refs:{getSnapshot:n,setSnapshot:null},source:e,subscribe:t},Fr(r,e,n,t)},useOpaqueIdentifier:function(){throw Error("Not yet implemented")},unstable_isNewReconciler:!1},nl={readContext:Dt,useCallback:$r,useContext:Dt,useEffect:Or,useImperativeHandle:qr,useLayoutEffect:Vr,useMemo:Gr,useReducer:Lr,useRef:Qr,useState:function(){return Lr(Ir)},useDebugValue:Xr,useDeferredValue:function(e){var n=Lr(Ir),t=n[0],r=n[1];return Or(function(){var n=Sr.transition;Sr.transition=1;try{r(e)}finally{Sr.transition=n}},[e]),t},useTransition:function(){return[Lr(Ir)[0],zr().memoizedState]},useMutableSource:Dr,useOpaqueIdentifier:function(){return Lr(Ir)[0]},unstable_isNewReconciler:!1},tl={readContext:Dt,useCallback:$r,useContext:Dt,useEffect:Or,useImperativeHandle:qr,useLayoutEffect:Vr,useMemo:Gr,useReducer:Ur,useRef:Qr,useState:function(){return Ur(Ir)},useDebugValue:Xr,useDeferredValue:function(e){var n=Ur(Ir),t=n[0],r=n[1];return Or(function(){var n=Sr.transition;Sr.transition=1;try{r(e)}finally{Sr.transition=n}},[e]),t},useTransition:function(){return[Ur(Ir)[0],zr().memoizedState]},useMutableSource:Dr,useOpaqueIdentifier:function(){return Ur(Ir)[0]},unstable_isNewReconciler:!1};function rl(e,n){try{var t="",r=n;do{t+=Tt(r),r=r.return}while(r);var l=t}catch(e){l="\nError generating stack: "+e.message+"\n"+e.stack}return{value:e,source:n,stack:l}}if("function"!=typeof n(i[3]).ReactFiberErrorDialog.showErrorDialog)throw Error("Expected ReactFiberErrorDialog.showErrorDialog to be a function.");function ll(e,t){try{!1!==n(i[3]).ReactFiberErrorDialog.showErrorDialog({componentStack:null!==t.stack?t.stack:"",error:t.value,errorBoundary:null!==e&&1===e.tag?e.stateNode:null})&&console.error(t.value)}catch(e){setTimeout(function(){throw e})}}var al="function"==typeof WeakMap?WeakMap:Map;function il(e,n,t){(t=Bt(-1,t)).tag=3,t.payload={element:null};var r=n.value;return t.callback=function(){ma||(ma=!0,va=r),ll(e,n)},t}function ul(e,n,t){(t=Bt(-1,t)).tag=3;var r=e.type.getDerivedStateFromError;if("function"==typeof r){var l=n.value;t.payload=function(){return ll(e,n),r(l)}}var a=e.stateNode;return null!==a&&"function"==typeof a.componentDidCatch&&(t.callback=function(){"function"!=typeof r&&(null===ba?ba=new 
Set([this]):ba.add(this),ll(e,n));var t=n.stack;this.componentDidCatch(n.value,{componentStack:null!==t?t:""})}),t}var ol=we.ReactCurrentOwner,sl=!1;function cl(e,n,t,r){n.child=null===e?ar(n,null,t,r):lr(n,e.child,t,r)}function dl(e,n,t,r,l){t=t.render;var a=n.ref;return Ft(n,l),r=Nr(e,n,t,r,a,l),null===e||sl?(n.flags|=1,cl(e,n,r,l),n.child):(n.updateQueue=e.updateQueue,n.flags&=-1029,e.lanes&=~l,Ll(e,n,l))}function fl(e,n,t,r,l,a){if(null===e){var i=t.type;return"function"!=typeof i||Ka(i)||void 0!==i.defaultProps||null!==t.compare||void 0!==t.defaultProps?((e=ni(t.type,null,r,n,n.mode,a)).ref=n.ref,e.return=n,n.child=e):(n.tag=15,n.type=i,pl(e,n,i,r,l,a))}return i=e.child,0==(l&a)&&(l=i.memoizedProps,(t=null!==(t=t.compare)?t:Rt)(l,r)&&e.ref===n.ref)?Ll(e,n,a):(n.flags|=1,(e=ei(i,r)).ref=n.ref,e.return=n,n.child=e)}function pl(e,n,t,r,l,a){if(null!==e&&Rt(e.memoizedProps,r)&&e.ref===n.ref){if(sl=!1,0==(a&l))return n.lanes=e.lanes,Ll(e,n,a);0!=(32768&e.flags)&&(sl=!0)}return ml(e,n,t,r,a)}function hl(e,n,t){var r=n.pendingProps,l=r.children,a=null!==e?e.memoizedState:null;if("hidden"===r.mode||"unstable-defer-without-hiding"===r.mode)if(0==(1&n.mode))n.memoizedState={baseLanes:0,cachePool:null},ot(oa,ua),ua|=t;else{if(0==(1073741824&t))return e=null!==a?a.baseLanes|t:t,n.lanes=n.childLanes=1073741824,n.memoizedState={baseLanes:e,cachePool:null},n.updateQueue=null,ot(oa,ua),ua|=e,null;n.memoizedState={baseLanes:0,cachePool:null},r=null!==a?a.baseLanes:t,ot(oa,ua),ua|=r}else null!==a?(r=a.baseLanes|t,n.memoizedState=null):r=t,ot(oa,ua),ua|=r;return cl(e,n,l,t),n.child}function gl(e,n){var t=n.ref;(null===e&&null!==t||null!==e&&e.ref!==t)&&(n.flags|=256)}function ml(e,n,t,r,l){var a=ht(t)?ft:ct.current;return a=pt(n,a),Ft(n,l),t=Nr(e,n,t,r,a,l),null===e||sl?(n.flags|=1,cl(e,n,t,l),n.child):(n.updateQueue=e.updateQueue,n.flags&=-1029,e.lanes&=~l,Ll(e,n,l))}function vl(e,n,t,r,l){if(ht(t)){var a=!0;bt(n)}else a=!1;if(Ft(n,l),null===n.stateNode)null!==e&&(e.alternate=null,n.alternate=null,n.flags|=2),Kt(n,t,r),er(n,t,r,l),r=!0;else if(null===e){var i=n.stateNode,u=n.memoizedProps;i.props=u;var o=i.context,s=t.contextType;"object"==typeof s&&null!==s?s=Dt(s):s=pt(n,s=ht(t)?ft:ct.current);var c=t.getDerivedStateFromProps,d="function"==typeof c||"function"==typeof i.getSnapshotBeforeUpdate;d||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof i.componentWillReceiveProps||(u!==r||o!==s)&&Zt(n,i,r,s),Ht=!1;var f=n.memoizedState;i.state=f,Yt(n,r,i,l),o=n.memoizedState,u!==r||f!==o||dt.current||Ht?("function"==typeof c&&($t(n,t,c,r),o=n.memoizedState),(u=Ht||Jt(n,t,u,r,f,o,s))?(d||"function"!=typeof i.UNSAFE_componentWillMount&&"function"!=typeof i.componentWillMount||("function"==typeof i.componentWillMount&&i.componentWillMount(),"function"==typeof i.UNSAFE_componentWillMount&&i.UNSAFE_componentWillMount()),"function"==typeof i.componentDidMount&&(n.flags|=4)):("function"==typeof i.componentDidMount&&(n.flags|=4),n.memoizedProps=r,n.memoizedState=o),i.props=r,i.state=o,i.context=s,r=u):("function"==typeof i.componentDidMount&&(n.flags|=4),r=!1)}else{i=n.stateNode,jt(e,n),u=n.memoizedProps,s=n.type===n.elementType?u:Et(n.type,u),i.props=s,d=n.pendingProps,f=i.context,"object"==typeof(o=t.contextType)&&null!==o?o=Dt(o):o=pt(n,o=ht(t)?ft:ct.current);var p=t.getDerivedStateFromProps;(c="function"==typeof p||"function"==typeof i.getSnapshotBeforeUpdate)||"function"!=typeof i.UNSAFE_componentWillReceiveProps&&"function"!=typeof 
i.componentWillReceiveProps||(u!==d||f!==o)&&Zt(n,i,r,o),Ht=!1,f=n.memoizedState,i.state=f,Yt(n,r,i,l);var h=n.memoizedState;u!==d||f!==h||dt.current||Ht?("function"==typeof p&&($t(n,t,p,r),h=n.memoizedState),(s=Ht||Jt(n,t,s,r,f,h,o)||!1)?(c||"function"!=typeof i.UNSAFE_componentWillUpdate&&"function"!=typeof i.componentWillUpdate||("function"==typeof i.componentWillUpdate&&i.componentWillUpdate(r,h,o),"function"==typeof i.UNSAFE_componentWillUpdate&&i.UNSAFE_componentWillUpdate(r,h,o)),"function"==typeof i.componentDidUpdate&&(n.flags|=4),"function"==typeof i.getSnapshotBeforeUpdate&&(n.flags|=512)):("function"!=typeof i.componentDidUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=512),n.memoizedProps=r,n.memoizedState=h),i.props=r,i.state=h,i.context=o,r=s):("function"!=typeof i.componentDidUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=4),"function"!=typeof i.getSnapshotBeforeUpdate||u===e.memoizedProps&&f===e.memoizedState||(n.flags|=512),r=!1)}return bl(e,n,t,r,a,l)}function bl(e,n,t,r,l,a){gl(e,n);var i=0!=(128&n.flags);if(!r&&!i)return l&&yt(n,t,!1),Ll(e,n,a);r=n.stateNode,ol.current=n;var u=i&&"function"!=typeof t.getDerivedStateFromError?null:r.render();return n.flags|=1,null!==e&&i?(n.child=lr(n,e.child,null,a),n.child=lr(n,null,u,a)):cl(e,n,u,a),n.memoizedState=r.state,l&&yt(n,t,!0),n.child}function yl(e){var n=e.stateNode;n.pendingContext?mt(0,n.pendingContext,n.pendingContext!==n.context):n.context&&mt(0,n.context,!1),dr(e,n.containerInfo)}var Sl,kl,wl,xl,Pl={dehydrated:null,retryLane:0};function _l(e){return{baseLanes:e,cachePool:null}}function Rl(e,n,t){var r,l=n.pendingProps,a=gr.current,i=!1;return(r=0!=(128&n.flags))||(r=(null===e||null!==e.memoizedState)&&0!=(2&a)),r?(i=!0,n.flags&=-129):null!==e&&null===e.memoizedState||void 0===l.fallback||!0===l.unstable_avoidThisFallback||(a|=1),ot(gr,1&a),null===e?(e=l.children,a=l.fallback,i?(e=Tl(n,e,a,t),n.child.memoizedState=_l(t),n.memoizedState=Pl,e):"number"==typeof l.unstable_expectedLoadTime?(e=Tl(n,e,a,t),n.child.memoizedState=_l(t),n.memoizedState=Pl,n.lanes=4194304,e):((t=ri({mode:"visible",children:e},n.mode,t,null)).return=n,n.child=t)):(e.memoizedState,i?(l=Nl(e,n,l.children,l.fallback,t),i=n.child,a=e.child.memoizedState,i.memoizedState=null===a?_l(t):{baseLanes:a.baseLanes|t,cachePool:null},i.childLanes=e.childLanes&~t,n.memoizedState=Pl,l):(t=El(e,n,l.children,t),n.memoizedState=null,t))}function Tl(e,n,t,r){var l=e.mode,a=e.child;return n={mode:"hidden",children:n},0==(1&l)&&null!==a?(a.childLanes=0,a.pendingProps=n):a=ri(n,l,0,null),t=ti(t,l,r,null),a.return=e,t.return=e,a.sibling=t,e.child=a,t}function El(e,n,t,r){var l=e.child;return e=l.sibling,t=ei(l,{mode:"visible",children:t}),0==(1&n.mode)&&(t.lanes=r),t.return=n,t.sibling=null,null!==e&&(null===(r=n.deletions)?(n.deletions=[e],n.flags|=16):r.push(e)),n.child=t}function Nl(e,n,t,r,l){var a=n.mode,i=(e=e.child).sibling,u={mode:"hidden",children:t};return 0==(1&a)&&n.child!==e?((t=n.child).childLanes=0,t.pendingProps=u,n.deletions=null):(t=ei(e,u)).subtreeFlags=1835008&e.subtreeFlags,null!==i?r=ei(i,r):(r=ti(r,a,l,null)).flags|=2,r.return=n,t.return=n,t.sibling=r,n.child=t,r}function Cl(e,n){e.lanes|=n;var t=e.alternate;null!==t&&(t.lanes|=n),Mt(e.return,n)}function zl(e,n,t,r,l){var 
a=e.memoizedState;null===a?e.memoizedState={isBackwards:n,rendering:null,renderingStartTime:0,last:r,tail:t,tailMode:l}:(a.isBackwards=n,a.rendering=null,a.renderingStartTime=0,a.last=r,a.tail=t,a.tailMode=l)}function Il(e,n,t){var r=n.pendingProps,l=r.revealOrder,a=r.tail;if(cl(e,n,r.children,t),0!=(2&(r=gr.current)))r=1&r|2,n.flags|=128;else{if(null!==e&&0!=(128&e.flags))e:for(e=n.child;null!==e;){if(13===e.tag)null!==e.memoizedState&&Cl(e,t);else if(19===e.tag)Cl(e,t);else if(null!==e.child){e.child.return=e,e=e.child;continue}if(e===n)break e;for(;null===e.sibling;){if(null===e.return||e.return===n)break e;e=e.return}e.sibling.return=e.return,e=e.sibling}r&=1}if(ot(gr,r),0==(1&n.mode))n.memoizedState=null;else switch(l){case"forwards":for(t=n.child,l=null;null!==t;)null!==(e=t.alternate)&&null===mr(e)&&(l=t),t=t.sibling;null===(t=l)?(l=n.child,n.child=null):(l=t.sibling,t.sibling=null),zl(n,!1,l,t,a);break;case"backwards":for(t=null,l=n.child,n.child=null;null!==l;){if(null!==(e=l.alternate)&&null===mr(e)){n.child=l;break}e=l.sibling,l.sibling=t,t=l,l=e}zl(n,!0,t,null,a);break;case"together":zl(n,!1,null,null,void 0);break;default:n.memoizedState=null}return n.child}function Ll(e,n,t){if(null!==e&&(n.dependencies=e.dependencies),da|=n.lanes,0==(t&n.childLanes))return null;if(null!==e&&n.child!==e.child)throw Error("Resuming work not yet implemented.");if(null!==n.child){for(t=ei(e=n.child,e.pendingProps),n.child=t,t.return=n;null!==e.sibling;)e=e.sibling,(t=t.sibling=ei(e,e.pendingProps)).return=n;t.sibling=null}return n.child}function Ul(e,n){if(null!==e&&e.child===n.child)return!0;if(0!=(16&n.flags))return!1;for(e=n.child;null!==e;){if(0!=(6454&e.flags)||0!=(6454&e.subtreeFlags))return!1;e=e.sibling}return!0}function Ml(e,n,t,r){for(var l=n.child;null!==l;){if(5===l.tag){var a=l.stateNode;t&&r&&(a=et(a)),Qn(e,a.node)}else if(6===l.tag){if(a=l.stateNode,t&&r)throw Error("Not yet implemented.");Qn(e,a.node)}else if(4!==l.tag){if(13===l.tag&&0!=(4&l.flags)&&(a=null!==l.memoizedState)){var i=l.child;if(null!==i&&(null!==i.child&&(i.child.return=i,Ml(e,i,!0,a)),null!==(a=i.sibling))){a.return=l,l=a;continue}}if(null!==l.child){l.child.return=l,l=l.child;continue}}if(l===n)break;for(;null===l.sibling;){if(null===l.return||l.return===n)return;l=l.return}l.sibling.return=l.return,l=l.sibling}}function Fl(e,n){switch(e.tailMode){case"hidden":n=e.tail;for(var t=null;null!==n;)null!==n.alternate&&(t=n),n=n.sibling;null===t?e.tail=null:t.sibling=null;break;case"collapsed":t=e.tail;for(var r=null;null!==t;)null!==t.alternate&&(r=t),t=t.sibling;null===r?n||null===e.tail?e.tail=null:e.tail.sibling=null:r.sibling=null}}function Dl(e){var n=null!==e.alternate&&e.alternate.child===e.child,t=0,r=0;if(n)for(var l=e.child;null!==l;)t|=l.lanes|l.childLanes,r|=1835008&l.subtreeFlags,r|=1835008&l.flags,l.return=e,l=l.sibling;else for(l=e.child;null!==l;)t|=l.lanes|l.childLanes,r|=l.subtreeFlags,r|=l.flags,l.return=e,l=l.sibling;return e.subtreeFlags|=r,e.childLanes=t,n}function Al(e,t,r){var l=t.pendingProps;switch(t.tag){case 2:case 16:case 15:case 0:case 11:case 7:case 8:case 12:case 9:case 14:return Dl(t),null;case 1:return ht(t.type)&>(),Dl(t),null;case 3:return l=t.stateNode,fr(),ut(dt),ut(ct),br(),l.pendingContext&&(l.context=l.pendingContext,l.pendingContext=null),null!==e&&null!==e.child||l.hydrate||(t.flags|=512),kl(e,t),Dl(t),null;case 5:hr(t),r=cr(sr.current);var a=t.type;if(null!==e&&null!=t.stateNode)wl(e,t,a,l,r),e.ref!==t.ref&&(t.flags|=256);else{if(!l){if(null===t.stateNode)throw Error("We 
must have new props for new mounts. This error is likely caused by a bug in React. Please file an issue.");return Dl(t),null}cr(ur.current),e=$n,$n+=2,a=Xn(a);var u=an(null,Ge,l,a.validAttributes);r=Ln(e,a.uiViewClassName,r,u,t),e=new Gn(e,a,l,t),Sl(e={node:r,canonical:e},t,!1,!1),t.stateNode=e,null!==t.ref&&(t.flags|=256)}return Dl(t),null;case 6:if(e&&null!=t.stateNode)xl(e,t,e.memoizedProps,l);else{if("string"!=typeof l&&null===t.stateNode)throw Error("We must have new props for new mounts. This error is likely caused by a bug in React. Please file an issue.");e=cr(sr.current),r=cr(ur.current),t.stateNode=Jn(l,e,r,t)}return Dl(t),null;case 13:return ut(gr),l=t.memoizedState,0!=(128&t.flags)?(t.lanes=r,t):(l=null!==l,r=!1,null!==e&&(r=null!==e.memoizedState),l&&!r&&0!=(1&t.mode)&&(null===e&&!0!==t.memoizedProps.unstable_avoidThisFallback||0!=(1&gr.current)?0===sa&&(sa=3):(0!==sa&&3!==sa||(sa=4),null===la||0==(268435455&da)&&0==(268435455&fa)||Ia(la,ia))),l&&(t.flags|=4),Dl(t),null);case 4:return fr(),kl(e,t),Dl(t),null;case 10:return Ut(t.type._context),Dl(t),null;case 17:return ht(t.type)&>(),Dl(t),null;case 19:if(ut(gr),null===(a=t.memoizedState))return Dl(t),null;if(l=0!=(128&t.flags),null===(u=a.rendering))if(l)Fl(a,!1);else{if(0!==sa||null!==e&&0!=(128&e.flags))for(e=t.child;null!==e;){if(null!==(u=mr(e))){for(t.flags|=128,Fl(a,!1),null!==(e=u.updateQueue)&&(t.updateQueue=e,t.flags|=4),t.subtreeFlags=0,e=r,l=t.child;null!==l;)a=e,(r=l).flags&=1835010,null===(u=r.alternate)?(r.childLanes=0,r.lanes=a,r.child=null,r.subtreeFlags=0,r.memoizedProps=null,r.memoizedState=null,r.updateQueue=null,r.dependencies=null,r.stateNode=null):(r.childLanes=u.childLanes,r.lanes=u.lanes,r.child=u.child,r.subtreeFlags=0,r.deletions=null,r.memoizedProps=u.memoizedProps,r.memoizedState=u.memoizedState,r.updateQueue=u.updateQueue,r.type=u.type,a=u.dependencies,r.dependencies=null===a?null:{lanes:a.lanes,firstContext:a.firstContext}),l=l.sibling;return ot(gr,1&gr.current|2),t.child}e=e.sibling}null!==a.tail&&n(i[4]).unstable_now()>ga&&(t.flags|=128,l=!0,Fl(a,!1),t.lanes=4194304)}else{if(!l)if(null!==(e=mr(u))){if(t.flags|=128,l=!0,null!==(e=e.updateQueue)&&(t.updateQueue=e,t.flags|=4),Fl(a,!0),null===a.tail&&"hidden"===a.tailMode&&!u.alternate)return Dl(t),null}else 2*n(i[4]).unstable_now()-a.renderingStartTime>ga&&1073741824!==r&&(t.flags|=128,l=!0,Fl(a,!1),t.lanes=4194304);a.isBackwards?(u.sibling=t.child,t.child=u):(null!==(e=a.last)?e.sibling=u:t.child=u,a.last=u)}return null!==a.tail?(t=a.tail,a.rendering=t,a.tail=t.sibling,a.renderingStartTime=n(i[4]).unstable_now(),t.sibling=null,e=gr.current,ot(gr,l?1&e|2:1&e),t):(Dl(t),null);case 22:case 23:return Ua(),r=null!==t.memoizedState,null!==e&&null!==e.memoizedState!==r&&"unstable-defer-without-hiding"!==l.mode&&(t.flags|=4),r&&0==(1073741824&ua)&&0!=(1&t.mode)||Dl(t),null}throw Error("Unknown unit of work tag ("+t.tag+"). This error is likely caused by a bug in React. Please file an issue.")}function Hl(e){switch(e.tag){case 1:ht(e.type)&>();var n=e.flags;return 16384&n?(e.flags=-16385&n|128,e):null;case 3:if(fr(),ut(dt),ut(ct),br(),0!=(128&(n=e.flags)))throw Error("The root failed to unmount after an error. This is likely a bug in React. 
Please file an issue.");return e.flags=-16385&n|128,e;case 5:return hr(e),null;case 13:return ut(gr),16384&(n=e.flags)?(e.flags=-16385&n|128,e):null;case 19:return ut(gr),null;case 4:return fr(),null;case 10:return Ut(e.type._context),null;case 22:case 23:return Ua(),null;case 24:default:return null}}Sl=function(e,n,t,r){for(var l=n.child;null!==l;){if(5===l.tag){var a=l.stateNode;t&&r&&(a=et(a)),Hn(e.node,a.node)}else if(6===l.tag){if(a=l.stateNode,t&&r)throw Error("Not yet implemented.");Hn(e.node,a.node)}else if(4!==l.tag){if(13===l.tag&&0!=(4&l.flags)&&(a=null!==l.memoizedState)){var i=l.child;if(null!==i&&(null!==i.child&&(i.child.return=i,Sl(e,i,!0,a)),null!==(a=i.sibling))){a.return=l,l=a;continue}}if(null!==l.child){l.child.return=l,l=l.child;continue}}if(l===n)break;for(;null===l.sibling;){if(null===l.return||l.return===n)return;l=l.return}l.sibling.return=l.return,l=l.sibling}},kl=function(e,n){var t=n.stateNode;if(!Ul(e,n)){e=t.containerInfo;var r=An(e);Ml(r,n,!1,!1),t.pendingChildren=r,n.flags|=4,jn(e,r)}},wl=function(e,n,t,r){t=e.stateNode;var l=e.memoizedProps;if((e=Ul(e,n))&&l===r)n.stateNode=t;else{var a=n.stateNode;cr(ur.current);var i=null;l!==r&&(l=an(null,l,r,a.canonical.viewConfig.validAttributes),a.canonical.currentProps=r,i=l),e&&null===i?n.stateNode=t:(r=i,l=t.node,t={node:e?null!==r?Dn(l,r):Un(l):null!==r?Fn(l,r):Mn(l),canonical:t.canonical},n.stateNode=t,e?n.flags|=4:Sl(t,n,!1,!1))}},xl=function(e,n,t,r){t!==r?(e=cr(sr.current),t=cr(ur.current),n.stateNode=Jn(r,e,t,n),n.flags|=4):n.stateNode=e.stateNode};var Ql="function"==typeof WeakSet?WeakSet:Set,jl=null;function Bl(e,n){var t=e.ref;if(null!==t)if("function"==typeof t)try{t(null)}catch(t){qa(e,n,t)}else t.current=null}var Wl=!1;function Ol(e,n){for(jl=n;null!==jl;)if(n=(e=jl).child,0!=(516&e.subtreeFlags)&&null!==n)n.return=e,jl=n;else for(;null!==jl;){e=jl;try{var t=e.alternate;if(0!=(512&e.flags))switch(e.tag){case 0:case 11:case 15:break;case 1:if(null!==t){var r=t.memoizedProps,l=t.memoizedState,a=e.stateNode,i=a.getSnapshotBeforeUpdate(e.elementType===e.type?r:Et(e.type,r),l);a.__reactInternalSnapshotBeforeUpdate=i}break;case 3:break;case 5:case 6:case 4:case 17:break;default:throw Error("This unit of work tag should not have side-effects. This error is likely caused by a bug in React. Please file an issue.")}}catch(n){qa(e,e.return,n)}if(null!==(n=e.sibling)){n.return=e.return,jl=n;break}jl=e.return}return t=Wl,Wl=!1,t}function Vl(e,n,t){var r=n.updateQueue;if(null!==(r=null!==r?r.lastEffect:null)){var l=r=r.next;do{if((l.tag&e)===e){var a=l.destroy;if(l.destroy=void 0,void 0!==a){var i=n,u=t;try{a()}catch(e){qa(i,u,e)}}}l=l.next}while(l!==r)}}function Yl(e,n){if(null!==(n=null!==(n=n.updateQueue)?n.lastEffect:null)){var t=n=n.next;do{if((t.tag&e)===e){var r=t.create;t.destroy=r()}t=t.next}while(t!==n)}}function ql(e){var n=e.alternate;null!==n&&(e.alternate=null,ql(n)),e.child=null,e.deletions=null,e.sibling=null,e.stateNode=null,e.return=null,e.dependencies=null,e.memoizedProps=null,e.memoizedState=null,e.pendingProps=null,e.stateNode=null,e.updateQueue=null}function Xl(e,t){switch(t.tag){case 0:case 11:case 14:case 15:return void Vl(3,t,t.return);case 12:return;case 13:return null!==t.memoizedState&&(ha=n(i[4]).unstable_now()),void $l(t);case 19:return void $l(t);case 22:case 23:return}e:{switch(t.tag){case 1:case 5:case 6:break e;case 3:case 4:break e}throw Error("This unit of work tag should not have side-effects. This error is likely caused by a bug in React. 
Please file an issue.")}}function $l(e){var n=e.updateQueue;if(null!==n){e.updateQueue=null;var t=e.stateNode;null===t&&(t=e.stateNode=new Ql),n.forEach(function(n){var r=$a.bind(null,e,n);t.has(n)||(t.add(n),n.then(r,r))})}}function Gl(e,n){for(jl=n;null!==jl;){if(null!==(n=(e=jl).deletions))for(var t=0;ta&&(a=o),l&=~u}if(l=a,10<(l=(120>(l=n(i[4]).unstable_now()-l)?120:480>l?480:1080>l?1080:1920>l?1920:3e3>l?3e3:4320>l?4320:1960*Zl(l/1960))-l)){e.timeoutHandle=Kn(Wa.bind(null,e),l);break}Wa(e);break;case 5:Wa(e);break;default:throw Error("Unknown root exit status.")}}return Ca(e,n(i[4]).unstable_now()),e.callbackNode===r?za.bind(null,e):null}function Ia(e,n){for(n&=~pa,n&=~fa,e.suspendedLanes|=n,e.pingedLanes&=~n,e=e.expirationTimes;0 component higher in the tree to provide a loading indicator or placeholder to display.")}5!==sa&&(sa=2),o=rl(o,u),p=i;do{switch(p.tag){case 3:a=o,p.flags|=16384,n&=-n,p.lanes|=n,Vt(p,il(p,a,n));break e;case 1:a=o;var w=p.type,x=p.stateNode;if(0==(128&p.flags)&&("function"==typeof w.getDerivedStateFromError||null!==x&&"function"==typeof x.componentDidCatch&&(null===ba||!ba.has(x)))){p.flags|=16384,n&=-n,p.lanes|=n,Vt(p,ul(p,a,n));break e}}p=p.return}while(null!==p)}Ba(t)}catch(e){n=e,aa===t&&null!==t&&(aa=t=t.return);continue}break}}function Da(){var e=ea.current;return ea.current=Zr,null===e?Zr:e}function Aa(e,n){var t=ra;ra|=8;var r=Da();for(la===e&&ia===n||Ma(e,n);;)try{Ha();break}catch(n){Fa(e,n)}if(Lt(),ra=t,ea.current=r,null!==aa)throw Error("Cannot commit an incomplete root. This error is likely caused by a bug in React. Please file an issue.");return la=null,ia=0,sa}function Ha(){for(;null!==aa;)ja(aa)}function Qa(){for(;null!==aa&&!n(i[4]).unstable_shouldYield();)ja(aa)}function ja(e){var n=Kl(e.alternate,e,ua);e.memoizedProps=e.pendingProps,null===n?Ba(e):aa=n,na.current=null}function Ba(e){var n=e;do{var t=n.alternate;if(e=n.return,0==(8192&n.flags)){if(null!==(t=Al(t,n,ua)))return void(aa=t)}else{if(null!==(t=Hl(n)))return t.flags&=8191,void(aa=t);null!==e&&(e.flags|=8192,e.subtreeFlags=0,e.deletions=null)}if(null!==(n=n.sibling))return void(aa=n);aa=n=e}while(null!==n);0===sa&&(sa=5)}function Wa(e){var n=En,t=ta.transition;try{ta.transition=0,En=1,Oa(e,n)}finally{ta.transition=t,En=n}return null}function Oa(e,t){do{Va()}while(null!==Sa);if(0!=(24&ra))throw Error("Should not already be working.");var r=e.finishedWork,l=e.finishedLanes;if(null===r)return null;if(e.finishedWork=null,e.finishedLanes=0,r===e.current)throw Error("Cannot commit the same tree as before. This error is likely caused by a bug in React. 
Please file an issue.");e.callbackNode=null,e.callbackPriority=0;var a=r.lanes|r.childLanes;if(xn(e,a),e===la&&(aa=la=null,ia=0),0==(1040&r.subtreeFlags)&&0==(1040&r.flags)||ya||(ya=!0,n(i[4]).unstable_scheduleCallback(n(i[4]).unstable_NormalPriority,function(){return Va(),null})),a=0!=(8054&r.flags),0!=(8054&r.subtreeFlags)||a){a=ta.transition,ta.transition=0;var u=En;En=1;var o=ra;ra|=16,na.current=null,Ol(e,r),Gl(e,r),e.current=r,Jl(r),n(i[4]).unstable_requestPaint(),ra=o,En=u,ta.transition=a}else e.current=r;if(ya&&(ya=!1,Sa=e,ka=l),0===(a=e.pendingLanes)&&(ba=null),0!=(1&a)?e===xa?wa++:(wa=0,xa=e):wa=0,hn(r.stateNode),Ca(e,n(i[4]).unstable_now()),ma)throw ma=!1,e=va,va=null,e;return 0!=(4&ra)?null:(0!=(1&ka)&&0!==e.tag&&Va(),xt(),null)}function Va(){if(null!==Sa){var e=Nn(ka),n=ta.transition,t=En;try{if(ta.transition=0,En=16>e?16:e,null===Sa)var r=!1;else{if(e=Sa,Sa=null,ka=0,0!=(24&ra))throw Error("Cannot flush passive effects while already rendering.");var l=ra;for(ra|=16,jl=e.current;null!==jl;){var a=jl,i=a.child;if(0!=(16&jl.flags)){var u=a.deletions;if(null!==u){for(var o=0;on(i[4]).unstable_now()-ha?Ma(e,0):pa|=r),Ca(e,t)}function $a(e,n){var t=e.stateNode;null!==t&&t.delete(n),0===(n=0)&&(0==(1&e.mode)?n=1:(n=mn,0==(130023424&(mn<<=1))&&(mn=4194304))),t=Ra(),null!==(e=Na(e,n))&&(wn(e,n,t),Ca(e,t))}function Ga(e,n,t,r){this.tag=e,this.key=t,this.sibling=this.child=this.return=this.stateNode=this.type=this.elementType=null,this.index=0,this.ref=null,this.pendingProps=n,this.dependencies=this.memoizedState=this.updateQueue=this.memoizedProps=null,this.mode=r,this.subtreeFlags=this.flags=0,this.deletions=null,this.childLanes=this.lanes=0,this.alternate=null}function Ja(e,n,t,r){return new Ga(e,n,t,r)}function Ka(e){return!(!(e=e.prototype)||!e.isReactComponent)}function Za(e){if("function"==typeof e)return Ka(e)?1:0;if(void 0!==e&&null!==e){if((e=e.$$typeof)===Ce)return 11;if(e===Le)return 14}return 2}function ei(e,n){var t=e.alternate;return null===t?((t=Ja(e.tag,n,e.key,e.mode)).elementType=e.elementType,t.type=e.type,t.stateNode=e.stateNode,t.alternate=e,e.alternate=t):(t.pendingProps=n,t.type=e.type,t.flags=0,t.subtreeFlags=0,t.deletions=null),t.flags=1835008&e.flags,t.childLanes=e.childLanes,t.lanes=e.lanes,t.child=e.child,t.memoizedProps=e.memoizedProps,t.memoizedState=e.memoizedState,t.updateQueue=e.updateQueue,n=e.dependencies,t.dependencies=null===n?null:{lanes:n.lanes,firstContext:n.firstContext},t.sibling=e.sibling,t.index=e.index,t.ref=e.ref,t}function ni(e,n,t,r,l,a){var i=2;if(r=e,"function"==typeof e)Ka(e)&&(i=1);else if("string"==typeof e)i=5;else e:switch(e){case _e:return ti(t.children,l,a,n);case Me:i=8,l|=4;break;case Re:i=8,l|=8;break;case Te:return(e=Ja(12,t,n,2|l)).elementType=Te,e.lanes=a,e;case ze:return(e=Ja(13,t,n,l)).elementType=ze,e.lanes=a,e;case Ie:return(e=Ja(19,t,n,l)).elementType=Ie,e.lanes=a,e;case Fe:return ri(t,l,a,n);case De:return(e=Ja(23,t,n,l)).elementType=De,e.lanes=a,e;default:if("object"==typeof e&&null!==e)switch(e.$$typeof){case Ee:i=10;break e;case Ne:i=9;break e;case Ce:i=11;break e;case Le:i=14;break e;case Ue:i=16,r=null;break e}throw Error("Element type is invalid: expected a string (for built-in components) or a class/function (for composite components) but got: "+(null==e?e:typeof e)+".")}return(n=Ja(i,t,n,l)).elementType=e,n.type=r,n.lanes=a,n}function ti(e,n,t,r){return(e=Ja(7,e,r,n)).lanes=t,e}function ri(e,n,t,r){return(e=Ja(22,e,r,n)).elementType=Fe,e.lanes=t,e}function li(e,n,t){return(e=Ja(6,e,null,n)).lanes=t,e}function 
ai(e,n,t){return(n=Ja(4,null!==e.children?e.children:[],e.key,n)).lanes=t,n.stateNode={containerInfo:e.containerInfo,pendingChildren:null,implementation:e.implementation},n}function ii(e,n,t){this.tag=n,this.containerInfo=e,this.finishedWork=this.pingCache=this.current=this.pendingChildren=null,this.timeoutHandle=-1,this.pendingContext=this.context=null,this.hydrate=t,this.callbackNode=null,this.callbackPriority=0,this.eventTimes=kn(0),this.expirationTimes=kn(-1),this.entangledLanes=this.finishedLanes=this.mutableReadLanes=this.expiredLanes=this.pingedLanes=this.suspendedLanes=this.pendingLanes=0,this.entanglements=kn(0)}function ui(e,n,t){var r=3=t.length?{done:!0}:{done:!1,value:t[o++]}}}throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function f(t,n){if(t){if("string"==typeof t)return s(t,n);var u=Object.prototype.toString.call(t).slice(8,-1);return"Object"===u&&t.constructor&&(u=t.constructor.name),"Map"===u||"Set"===u?Array.from(t):"Arguments"===u||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(u)?s(t,n):void 0}}function s(t,n){(null==n||n>t.length)&&(n=t.length);for(var u=0,o=new Array(n);ui&&(f+=u&&o?h.currentPageX:u&&!o?h.currentPageY:!u&&o?h.previousPageX:h.previousPageY,s=1);else for(var v=0;v=i){f+=u&&o?C.currentPageX:u&&!o?C.currentPageY:!u&&o?C.previousPageX:C.previousPageY,s++}}return s>0?f/s:n.noCentroid},currentCentroidXOfTouchesChangedAfter:function(t,i){return n.centroidDimension(t,i,!0,!0)},currentCentroidYOfTouchesChangedAfter:function(t,i){return n.centroidDimension(t,i,!1,!0)},previousCentroidXOfTouchesChangedAfter:function(t,i){return n.centroidDimension(t,i,!0,!1)},previousCentroidYOfTouchesChangedAfter:function(t,i){return n.centroidDimension(t,i,!1,!1)},currentCentroidX:function(t){return n.centroidDimension(t,0,!0,!0)},currentCentroidY:function(t){return n.centroidDimension(t,0,!1,!0)},noCentroid:-1};m.exports=n},781,[]); +__d(function(g,r,i,a,m,e,d){var 
n=r(d[0])(r(d[1])),s=r(d[0])(r(d[2])),o=r(d[0])(r(d[3])),t=r(d[0])(r(d[4])),E=r(d[0])(r(d[5])),A=r(d[0])(r(d[6])),_=r(d[0])(r(d[7])),u=Object.freeze({GRANTED:'granted',DENIED:'denied',NEVER_ASK_AGAIN:'never_ask_again'}),S=Object.freeze({READ_CALENDAR:'android.permission.READ_CALENDAR',WRITE_CALENDAR:'android.permission.WRITE_CALENDAR',CAMERA:'android.permission.CAMERA',READ_CONTACTS:'android.permission.READ_CONTACTS',WRITE_CONTACTS:'android.permission.WRITE_CONTACTS',GET_ACCOUNTS:'android.permission.GET_ACCOUNTS',ACCESS_FINE_LOCATION:'android.permission.ACCESS_FINE_LOCATION',ACCESS_COARSE_LOCATION:'android.permission.ACCESS_COARSE_LOCATION',ACCESS_BACKGROUND_LOCATION:'android.permission.ACCESS_BACKGROUND_LOCATION',RECORD_AUDIO:'android.permission.RECORD_AUDIO',READ_PHONE_STATE:'android.permission.READ_PHONE_STATE',CALL_PHONE:'android.permission.CALL_PHONE',READ_CALL_LOG:'android.permission.READ_CALL_LOG',WRITE_CALL_LOG:'android.permission.WRITE_CALL_LOG',ADD_VOICEMAIL:'com.android.voicemail.permission.ADD_VOICEMAIL',USE_SIP:'android.permission.USE_SIP',PROCESS_OUTGOING_CALLS:'android.permission.PROCESS_OUTGOING_CALLS',BODY_SENSORS:'android.permission.BODY_SENSORS',SEND_SMS:'android.permission.SEND_SMS',RECEIVE_SMS:'android.permission.RECEIVE_SMS',READ_SMS:'android.permission.READ_SMS',RECEIVE_WAP_PUSH:'android.permission.RECEIVE_WAP_PUSH',RECEIVE_MMS:'android.permission.RECEIVE_MMS',READ_EXTERNAL_STORAGE:'android.permission.READ_EXTERNAL_STORAGE',WRITE_EXTERNAL_STORAGE:'android.permission.WRITE_EXTERNAL_STORAGE',BLUETOOTH_CONNECT:'android.permission.BLUETOOTH_CONNECT',BLUETOOTH_SCAN:'android.permission.BLUETOOTH_SCAN',BLUETOOTH_ADVERTISE:'android.permission.BLUETOOTH_ADVERTISE'}),O=new((function(){function O(){(0,o.default)(this,O),this.PERMISSIONS=S,this.RESULTS=u}return(0,t.default)(O,[{key:"checkPermission",value:function(n){return console.warn('"PermissionsAndroid.checkPermission" is deprecated. Use "PermissionsAndroid.check" instead'),(0,_.default)(A.default,'PermissionsAndroid is not installed correctly.'),A.default.checkPermission(n)}},{key:"check",value:function(n){return(0,_.default)(A.default,'PermissionsAndroid is not installed correctly.'),A.default.checkPermission(n)}},{key:"requestPermission",value:function(s,o){var t;return n.default.async(function(E){for(;;)switch(E.prev=E.next){case 0:console.warn('"PermissionsAndroid.requestPermission" is deprecated. 
Use "PermissionsAndroid.request" instead'),E.next=4;break;case 4:return E.next=6,n.default.awrap(this.request(s,o));case 6:return t=E.sent,E.abrupt("return",t===this.RESULTS.GRANTED);case 8:case"end":return E.stop()}},null,this,null,Promise)}},{key:"request",value:function(o,t){return n.default.async(function(u){for(;;)switch(u.prev=u.next){case 0:u.next=3;break;case 3:if((0,_.default)(A.default,'PermissionsAndroid is not installed correctly.'),!t){u.next=10;break}return u.next=7,n.default.awrap(A.default.shouldShowRequestPermissionRationale(o));case 7:if(!u.sent||!E.default){u.next=10;break}return u.abrupt("return",new Promise(function(n,_){var u=(0,s.default)({},t);E.default.showAlert(u,function(){return _(new Error('Error showing rationale'))},function(){return n(A.default.requestPermission(o))})}));case 10:return u.abrupt("return",A.default.requestPermission(o));case 11:case"end":return u.stop()}},null,this,null,Promise)}},{key:"requestMultiple",value:function(n){return(0,_.default)(A.default,'PermissionsAndroid is not installed correctly.'),A.default.requestMultiplePermissions(n)}}]),O})());m.exports=O},782,[407,404,436,402,403,520,783,425]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('PermissionsAndroid');e.default=n},783,[428]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),o=r(d[0])(r(d[2])),n=r(d[0])(r(d[3])),l=r(d[0])(r(d[4])),u=r(d[0])(r(d[5])),c=r(d[0])(r(d[6])),f=new n.default('ios'!==c.default.OS?null:l.default),s=new Map,v=(function(){function n(o){var l=this;(0,t.default)(this,n),this._data={},this._remoteNotificationCompleteCallbackCalled=!1,this._isRemote=o.remote,this._isRemote&&(this._notificationId=o.notificationId),o.remote?Object.keys(o).forEach(function(t){var n=o[t];'aps'===t?(l._alert=n.alert,l._sound=n.sound,l._badgeCount=n.badge,l._category=n.category,l._contentAvailable=n['content-available'],l._threadID=n['thread-id']):l._data[t]=n}):(this._badgeCount=o.applicationIconBadgeNumber,this._sound=o.soundName,this._alert=o.alertBody,this._data=o.userInfo,this._category=o.category)}return(0,o.default)(n,[{key:"finish",value:function(t){this._isRemote&&this._notificationId&&!this._remoteNotificationCompleteCallbackCalled&&(this._remoteNotificationCompleteCallbackCalled=!0,(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.onFinishRemoteNotification(this._notificationId,t))}},{key:"getMessage",value:function(){return this._alert}},{key:"getSound",value:function(){return this._sound}},{key:"getCategory",value:function(){return this._category}},{key:"getAlert",value:function(){return this._alert}},{key:"getContentAvailable",value:function(){return this._contentAvailable}},{key:"getBadgeCount",value:function(){return this._badgeCount}},{key:"getData",value:function(){return this._data}},{key:"getThreadID",value:function(){return 
this._threadID}}],[{key:"presentLocalNotification",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.presentLocalNotification(t)}},{key:"scheduleLocalNotification",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.scheduleLocalNotification(t)}},{key:"cancelAllLocalNotifications",value:function(){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.cancelAllLocalNotifications()}},{key:"removeAllDeliveredNotifications",value:function(){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.removeAllDeliveredNotifications()}},{key:"getDeliveredNotifications",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.getDeliveredNotifications(t)}},{key:"removeDeliveredNotifications",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.removeDeliveredNotifications(t)}},{key:"setApplicationIconBadgeNumber",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.setApplicationIconBadgeNumber(t)}},{key:"getApplicationIconBadgeNumber",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.getApplicationIconBadgeNumber(t)}},{key:"cancelLocalNotifications",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.cancelLocalNotifications(t)}},{key:"getScheduledLocalNotifications",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.getScheduledLocalNotifications(t)}},{key:"addEventListener",value:function(t,o){var l;(0,u.default)('notification'===t||'register'===t||'registrationError'===t||'localNotification'===t,'PushNotificationIOS only supports `notification`, `register`, `registrationError`, and `localNotification` events'),'notification'===t?l=f.addListener("remoteNotificationReceived",function(t){o(new n(t))}):'localNotification'===t?l=f.addListener("localNotificationReceived",function(t){o(new n(t))}):'register'===t?l=f.addListener("remoteNotificationsRegistered",function(t){o(t.deviceToken)}):'registrationError'===t&&(l=f.addListener("remoteNotificationRegistrationError",function(t){o(t)})),s.set(t,l)}},{key:"removeEventListener",value:function(t,o){(0,u.default)('notification'===t||'register'===t||'registrationError'===t||'localNotification'===t,'PushNotificationIOS only supports `notification`, `register`, `registrationError`, and `localNotification` events');var n=s.get(t);n&&(n.remove(),s.delete(t))}},{key:"requestPermissions",value:function(t){var o={alert:!0,badge:!0,sound:!0};return t&&(o={alert:!!t.alert,badge:!!t.badge,sound:!!t.sound}),(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.requestPermissions(o)}},{key:"abandonPermissions",value:function(){(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.abandonPermissions()}},{key:"checkPermissions",value:function(t){(0,u.default)('function'==typeof t,'Must provide a valid callback'),(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.checkPermissions(t)}},{key:"getInitialNotification",value:function(){return(0,u.default)(l.default,'PushNotificationManager is not available.'),l.default.getInitialNotification().then(function(t){return t&&new n(t)})}},{key:"getAuthorizationStatus",value:function(t){(0,u.default)(l.default,'PushNotificationManager is not 
available.'),l.default.getAuthorizationStatus(t)}}]),n})();v.FetchResult={NewData:'UIBackgroundFetchResultNewData',NoData:'UIBackgroundFetchResultNoData',ResultFailed:'UIBackgroundFetchResultFailed'},m.exports=v},784,[407,402,403,500,785,425,426]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(f,l,p):f[l]=n[l]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('PushNotificationManager');e.default=n},785,[428]); +__d(function(g,r,i,a,m,e,d){'use strict';var n={get:function(n){return console.warn('Settings is not yet supported on Android'),null},set:function(n){console.warn('Settings is not yet supported on Android')},watchKeys:function(n,t){return console.warn('Settings is not yet supported on Android'),-1},clearWatch:function(n){console.warn('Settings is not yet supported on Android')}};m.exports=n},786,[]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),s=r(d[0])(r(d[3])),l=(r(d[0])(r(d[4])),r(d[0])(r(d[5]))),o=(function(){function o(){(0,n.default)(this,o)}return(0,s.default)(o,null,[{key:"share",value:function(n){var s=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};r(d[6])('object'==typeof n&&null!==n,'Content to share must be a valid object'),r(d[6])('string'==typeof n.url||'string'==typeof n.message,'At least one of URL and message is required'),r(d[6])('object'==typeof s&&null!==s,'Options must be a valid object'),r(d[6])(l.default,'ShareModule should be registered on Android.'),r(d[6])(null==n.title||'string'==typeof n.title,'Invalid title: title should be a string.');var o={title:n.title,message:'string'==typeof n.message?n.message:void 0};return l.default.share(o,s.dialogTitle).then(function(n){return(0,t.default)({activityType:null},n)})}}]),o})();o.sharedAction='sharedAction',o.dismissedAction='dismissedAction',m.exports=o},787,[407,436,402,403,741,788,425]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,u=new WeakMap;return(t=function(t){return t?u:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var u=t(o);if(u&&u.has(n))return u.get(n);var f={},l=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var c in n)if("default"!==c&&Object.prototype.hasOwnProperty.call(n,c)){var p=l?Object.getOwnPropertyDescriptor(n,c):null;p&&(p.get||p.set)?Object.defineProperty(f,c,p):f[c]=n[c]}f.default=n,u&&u.set(n,f);return f})(r(d[0])).get('ShareModule');e.default=n},788,[428]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),f=t.default.getConstants(),n={SHORT:f.SHORT,LONG:f.LONG,TOP:f.TOP,BOTTOM:f.BOTTOM,CENTER:f.CENTER,show:function(f,n){t.default.show(f,n)},showWithGravity:function(f,n,o){t.default.showWithGravity(f,n,o)},showWithGravityAndOffset:function(f,n,o,O,s){t.default.showWithGravityAndOffset(f,n,o,O,s)}};m.exports=n},789,[407,790]); +__d(function(g,r,i,a,m,e,d){function 
t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).getEnforcing('ToastAndroid');e.default=n},790,[428]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(){var t=(0,n.useMemo)(function(){return{getCurrentValue:function(){return u.default.getColorScheme()},subscribe:function(n){var t=u.default.addChangeListener(n);return function(){t.remove()}}}},[]);return(0,r(d[3]).useSubscription)(t)};var n=r(d[0]),u=r(d[1])(r(d[2]))},791,[534,407,742,792]); +__d(function(g,r,i,a,m,e,d){'use strict';m.exports=r(d[0])},792,[793]); +__d(function(_g,r,i,_a,m,e,_d){'use strict';var u=r(_d[0]);e.useSubscription=function(t){var n=t.getCurrentValue,a=t.subscribe,s=u.useState(function(){return{getCurrentValue:n,subscribe:a,value:n()}});t=s[0];var c=s[1];return s=t.value,t.getCurrentValue===n&&t.subscribe===a||(s=n(),c({getCurrentValue:n,subscribe:a,value:s})),u.useDebugValue(s),u.useEffect(function(){function u(){if(!t){var u=n();c(function(t){return t.getCurrentValue!==n||t.subscribe!==a||t.value===u?t:r(_d[1])({},t,{value:u})})}}var t=!1,s=a(u);return u(),function(){t=!0,s()}},[n,a]),s}},793,[534,536]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.default=function(){var f=(0,u.useState)(function(){return n.default.get('window')}),o=(0,t.default)(f,2),c=o[0],l=o[1];return(0,u.useEffect)(function(){function t(t){var n=t.window;c.width===n.width&&c.height===n.height&&c.scale===n.scale&&c.fontScale===n.fontScale||l(n)}var u=n.default.addEventListener('change',t);return t({window:n.default.get('window')}),function(){u.remove()}},[c]),c};var t=r(d[0])(r(d[1])),n=r(d[0])(r(d[2])),u=r(d[3])},794,[407,430,566,534]); +__d(function(g,r,i,a,m,e,d){'use strict';var A=r(d[0])({BOM:"\ufeff",BULLET:"\u2022",BULLET_SP:"\xa0\u2022\xa0",MIDDOT:"\xb7",MIDDOT_SP:"\xa0\xb7\xa0",MIDDOT_KATAKANA:"\u30fb",MDASH:"\u2014",MDASH_SP:"\xa0\u2014\xa0",NDASH:"\u2013",NDASH_SP:"\xa0\u2013\xa0",NBSP:"\xa0",PIZZA:"\ud83c\udf55",TRIANGLE_LEFT:"\u25c0",TRIANGLE_RIGHT:"\u25b6"});m.exports=A},795,[572]); +__d(function(g,r,i,a,m,e,d){var t=r(d[0])(r(d[1])),n=400;var o={vibrate:function(){var o=arguments.length>0&&void 0!==arguments[0]?arguments[0]:n,f=arguments.length>1&&void 0!==arguments[1]&&arguments[1];if('number'==typeof o)t.default.vibrate(o);else{if(!Array.isArray(o))throw new Error('Vibration pattern should be a number or array');t.default.vibrateByPattern(o,f?0:-1)}},cancel:function(){t.default.cancel()}};m.exports=o},796,[407,797]); +__d(function(g,r,i,a,m,e,d){function t(n){if("function"!=typeof WeakMap)return null;var o=new WeakMap,f=new WeakMap;return(t=function(t){return t?f:o})(n)}Object.defineProperty(e,"__esModule",{value:!0}),e.default=void 0;var n=(function(n,o){if(!o&&n&&n.__esModule)return n;if(null===n||"object"!=typeof n&&"function"!=typeof n)return{default:n};var f=t(o);if(f&&f.has(n))return f.get(n);var 
u={},c=Object.defineProperty&&Object.getOwnPropertyDescriptor;for(var l in n)if("default"!==l&&Object.prototype.hasOwnProperty.call(n,l)){var p=c?Object.getOwnPropertyDescriptor(n,l):null;p&&(p.get||p.set)?Object.defineProperty(u,l,p):u[l]=n[l]}u.default=n,f&&f.set(n,u);return u})(r(d[0])).getEnforcing('Vibration');e.default=n},797,[428]); +__d(function(g,r,i,a,m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var n;n=(function(n){r(d[3])(o,n);var e,u,c=(e=o,u=t(),function(){var t,n=r(d[0])(e);if(u){var c=r(d[0])(this).constructor;t=Reflect.construct(n,arguments,c)}else t=n.apply(this,arguments);return r(d[1])(this,t)});function o(){return r(d[4])(this,o),c.apply(this,arguments)}return r(d[5])(o,[{key:"render",value:function(){return null}}],[{key:"ignoreWarnings",value:function(t){}},{key:"install",value:function(){}},{key:"uninstall",value:function(){}}]),o})(r(d[2]).Component),m.exports=n},798,[422,419,534,417,402,403]); +__d(function(g,r,i,a,m,e,d){Object.defineProperty(e,"__esModule",{value:!0}),e.DynamicColorIOS=void 0;e.DynamicColorIOS=function(o){throw new Error('DynamicColorIOS is not available on this platform.')}},799,[]); +__d(function(g,r,i,a,m,e,d){'use strict';var n=r(d[0]).shape({x:r(d[0]).number,y:r(d[0]).number});m.exports=n},800,[596]); +__d(function(g,r,_i,_a,_m,_e,d){'use strict';function t(){if("undefined"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if("function"==typeof Proxy)return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch(t){return!1}}var e='function'==typeof Symbol&&'function'==typeof Symbol.for?Symbol.for('nodejs.util.inspect.custom'):null;_e.Buffer=o,_e.SlowBuffer=function(t){+t!=t&&(t=0);return o.alloc(+t)},_e.INSPECT_MAX_BYTES=50;var n=2147483647;function i(t){if(t>n)throw new RangeError('The value "'+t+'" is invalid for option "size"');var e=new Uint8Array(t);return Object.setPrototypeOf(e,o.prototype),e}function o(t,e,n){if('number'==typeof t){if('string'==typeof e)throw new TypeError('The "string" argument must be of type string. Received type number');return h(t)}return f(t,e,n)}function f(t,e,n){if('string'==typeof t)return a(t,e);if(ArrayBuffer.isView(t))return c(t);if(null==t)throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. Received type "+typeof t);if(nt(t,ArrayBuffer)||t&&nt(t.buffer,ArrayBuffer))return l(t,e,n);if('undefined'!=typeof SharedArrayBuffer&&(nt(t,SharedArrayBuffer)||t&&nt(t.buffer,SharedArrayBuffer)))return l(t,e,n);if('number'==typeof t)throw new TypeError('The "value" argument must not be of type number. Received type number');var i=t.valueOf&&t.valueOf();if(null!=i&&i!==t)return o.from(i,e,n);var f=y(t);if(f)return f;if('undefined'!=typeof Symbol&&null!=Symbol.toPrimitive&&'function'==typeof t[Symbol.toPrimitive])return o.from(t[Symbol.toPrimitive]('string'),e,n);throw new TypeError("The first argument must be one of type string, Buffer, ArrayBuffer, Array, or Array-like Object. 
Received type "+typeof t)}function u(t){if('number'!=typeof t)throw new TypeError('"size" argument must be of type number');if(t<0)throw new RangeError('The value "'+t+'" is invalid for option "size"')}function s(t,e,n){return u(t),t<=0?i(t):void 0!==e?'string'==typeof n?i(t).fill(e,n):i(t).fill(e):i(t)}function h(t){return u(t),i(t<0?0:0|w(t))}function a(t,e){if('string'==typeof e&&''!==e||(e='utf8'),!o.isEncoding(e))throw new TypeError('Unknown encoding: '+e);var n=0|v(t,e),f=i(n),u=f.write(t,e);return u!==n&&(f=f.slice(0,u)),f}function p(t){for(var e=t.length<0?0:0|w(t.length),n=i(e),o=0;o=n)throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+n.toString(16)+' bytes');return 0|t}function v(t,e){if(o.isBuffer(t))return t.length;if(ArrayBuffer.isView(t)||nt(t,ArrayBuffer))return t.byteLength;if('string'!=typeof t)throw new TypeError("The \"string\" argument must be one of type string, Buffer, or ArrayBuffer. Received type "+typeof t);var n=t.length,i=arguments.length>2&&!0===arguments[2];if(!i&&0===n)return 0;for(var f=!1;;)switch(e){case'ascii':case'latin1':case'binary':return n;case'utf8':case'utf-8':return K(t).length;case'ucs2':case'ucs-2':case'utf16le':case'utf-16le':return 2*n;case'hex':return n>>>1;case'base64':return rt(t).length;default:if(f)return i?-1:K(t).length;e=(''+e).toLowerCase(),f=!0}}function b(t,e,n){var i=!1;if((void 0===e||e<0)&&(e=0),e>this.length)return'';if((void 0===n||n>this.length)&&(n=this.length),n<=0)return'';if((n>>>=0)<=(e>>>=0))return'';for(t||(t='utf8');;)switch(t){case'hex':return P(this,e,n);case'utf8':case'utf-8':return L(this,e,n);case'ascii':return S(this,e,n);case'latin1':case'binary':return x(this,e,n);case'base64':return O(this,e,n);case'ucs2':case'ucs-2':case'utf16le':case'utf-16le':return k(this,e,n);default:if(i)throw new TypeError('Unknown encoding: '+t);t=(t+'').toLowerCase(),i=!0}}function B(t,e,n){var i=t[e];t[e]=t[n],t[n]=i}function E(t,e,n,i,f){if(0===t.length)return-1;if('string'==typeof n?(i=n,n=0):n>2147483647?n=2147483647:n<-2147483648&&(n=-2147483648),it(n=+n)&&(n=f?0:t.length-1),n<0&&(n=t.length+n),n>=t.length){if(f)return-1;n=t.length-1}else if(n<0){if(!f)return-1;n=0}if('string'==typeof e&&(e=o.from(e,i)),o.isBuffer(e))return 0===e.length?-1:m(t,e,n,i,f);if('number'==typeof e)return e&=255,'function'==typeof Uint8Array.prototype.indexOf?f?Uint8Array.prototype.indexOf.call(t,e,n):Uint8Array.prototype.lastIndexOf.call(t,e,n):m(t,[e],n,i,f);throw new TypeError('val must be string, number or Buffer')}function m(t,e,n,i,o){var f,u=1,s=t.length,h=e.length;if(void 0!==i&&('ucs2'===(i=String(i).toLowerCase())||'ucs-2'===i||'utf16le'===i||'utf-16le'===i)){if(t.length<2||e.length<2)return-1;u=2,s/=2,h/=2,n/=2}function a(t,e){return 1===u?t[e]:t.readUInt16BE(e*u)}if(o){var p=-1;for(f=n;fs&&(n=s-h),f=n;f>=0;f--){for(var c=!0,l=0;lo&&(i=o):i=o;var f,u=e.length;for(i>u/2&&(i=u/2),f=0;f239?4:f>223?3:f>191?2:1;if(o+s<=n){var h=void 0,a=void 0,p=void 0,c=void 0;switch(s){case 1:f<128&&(u=f);break;case 2:128==(192&(h=t[o+1]))&&(c=(31&f)<<6|63&h)>127&&(u=c);break;case 3:h=t[o+1],a=t[o+2],128==(192&h)&&128==(192&a)&&(c=(15&f)<<12|(63&h)<<6|63&a)>2047&&(c<55296||c>57343)&&(u=c);break;case 4:h=t[o+1],a=t[o+2],p=t[o+3],128==(192&h)&&128==(192&a)&&128==(192&p)&&(c=(15&f)<<18|(63&h)<<12|(63&a)<<6|63&p)>65535&&c<1114112&&(u=c)}}null===u?(u=65533,s=1):u>65535&&(u-=65536,i.push(u>>>10&1023|55296),u=56320|1023&u),i.push(u),o+=s}return _(i)}_e.kMaxLength=n,o.TYPED_ARRAY_SUPPORT=(function(){try{var t=new 
Uint8Array(1),e={foo:function(){return 42}};return Object.setPrototypeOf(e,Uint8Array.prototype),Object.setPrototypeOf(t,e),42===t.foo()}catch(t){return!1}})(),o.TYPED_ARRAY_SUPPORT||'undefined'==typeof console||'function'!=typeof console.error||console.error("This browser lacks typed array (Uint8Array) support which is required by `buffer` v5.x. Use `buffer` v4.x if you require old browser support."),Object.defineProperty(o.prototype,'parent',{enumerable:!0,get:function(){if(o.isBuffer(this))return this.buffer}}),Object.defineProperty(o.prototype,'offset',{enumerable:!0,get:function(){if(o.isBuffer(this))return this.byteOffset}}),o.poolSize=8192,o.from=function(t,e,n){return f(t,e,n)},Object.setPrototypeOf(o.prototype,Uint8Array.prototype),Object.setPrototypeOf(o,Uint8Array),o.alloc=function(t,e,n){return s(t,e,n)},o.allocUnsafe=function(t){return h(t)},o.allocUnsafeSlow=function(t){return h(t)},o.isBuffer=function(t){return null!=t&&!0===t._isBuffer&&t!==o.prototype},o.compare=function(t,e){if(nt(t,Uint8Array)&&(t=o.from(t,t.offset,t.byteLength)),nt(e,Uint8Array)&&(e=o.from(e,e.offset,e.byteLength)),!o.isBuffer(t)||!o.isBuffer(e))throw new TypeError('The "buf1", "buf2" arguments must be one of type Buffer or Uint8Array');if(t===e)return 0;for(var n=t.length,i=e.length,f=0,u=Math.min(n,i);fi.length?(o.isBuffer(u)||(u=o.from(u)),u.copy(i,f)):Uint8Array.prototype.set.call(i,u,f);else{if(!o.isBuffer(u))throw new TypeError('"list" argument must be an Array of Buffers');u.copy(i,f)}f+=u.length}return i},o.byteLength=v,o.prototype._isBuffer=!0,o.prototype.swap16=function(){var t=this.length;if(t%2!=0)throw new RangeError('Buffer size must be a multiple of 16-bits');for(var e=0;ee&&(t+=' ... '),''},e&&(o.prototype[e]=o.prototype.inspect),o.prototype.compare=function(t,e,n,i,f){if(nt(t,Uint8Array)&&(t=o.from(t,t.offset,t.byteLength)),!o.isBuffer(t))throw new TypeError("The \"target\" argument must be one of type Buffer or Uint8Array. 
Received type "+typeof t);if(void 0===e&&(e=0),void 0===n&&(n=t?t.length:0),void 0===i&&(i=0),void 0===f&&(f=this.length),e<0||n>t.length||i<0||f>this.length)throw new RangeError('out of range index');if(i>=f&&e>=n)return 0;if(i>=f)return-1;if(e>=n)return 1;if(e>>>=0,n>>>=0,i>>>=0,f>>>=0,this===t)return 0;for(var u=f-i,s=n-e,h=Math.min(u,s),a=this.slice(i,f),p=t.slice(e,n),c=0;c>>=0,isFinite(n)?(n>>>=0,void 0===i&&(i='utf8')):(i=n,n=void 0)}var o=this.length-e;if((void 0===n||n>o)&&(n=o),t.length>0&&(n<0||e<0)||e>this.length)throw new RangeError('Attempt to write outside buffer bounds');i||(i='utf8');for(var f=!1;;)switch(i){case'hex':return I(this,t,e,n);case'utf8':case'utf-8':return U(this,t,e,n);case'ascii':case'latin1':case'binary':return A(this,t,e,n);case'base64':return R(this,t,e,n);case'ucs2':case'ucs-2':case'utf16le':case'utf-16le':return T(this,t,e,n);default:if(f)throw new TypeError('Unknown encoding: '+i);i=(''+i).toLowerCase(),f=!0}},o.prototype.toJSON=function(){return{type:'Buffer',data:Array.prototype.slice.call(this._arr||this,0)}};var M=4096;function _(t){var e=t.length;if(e<=M)return String.fromCharCode.apply(String,t);for(var n='',i=0;ii)&&(n=i);for(var o='',f=e;fn)throw new RangeError('Trying to access beyond buffer length')}function N(t,e,n,i,f,u){if(!o.isBuffer(t))throw new TypeError('"buffer" argument must be a Buffer instance');if(e>f||et.length)throw new RangeError('Index out of range')}function F(t,e,n,i,o){X(e,i,o,t,n,7);var f=Number(e&BigInt(4294967295));t[n++]=f,f>>=8,t[n++]=f,f>>=8,t[n++]=f,f>>=8,t[n++]=f;var u=Number(e>>BigInt(32)&BigInt(4294967295));return t[n++]=u,u>>=8,t[n++]=u,u>>=8,t[n++]=u,u>>=8,t[n++]=u,n}function j(t,e,n,i,o){X(e,i,o,t,n,7);var f=Number(e&BigInt(4294967295));t[n+7]=f,f>>=8,t[n+6]=f,f>>=8,t[n+5]=f,f>>=8,t[n+4]=f;var u=Number(e>>BigInt(32)&BigInt(4294967295));return t[n+3]=u,u>>=8,t[n+2]=u,u>>=8,t[n+1]=u,u>>=8,t[n]=u,n+8}function D(t,e,n,i,o,f){if(n+i>t.length)throw new RangeError('Index out of range');if(n<0)throw new RangeError('Index out of range')}function z(t,e,n,i,o){return e=+e,n>>>=0,o||D(t,0,n,4),r(d[3]).write(t,e,n,i,23,4),n+4}function Y(t,e,n,i,o){return e=+e,n>>>=0,o||D(t,0,n,8),r(d[3]).write(t,e,n,i,52,8),n+8}o.prototype.slice=function(t,e){var n=this.length;t=~~t,e=void 0===e?n:~~e,t<0?(t+=n)<0&&(t=0):t>n&&(t=n),e<0?(e+=n)<0&&(e=0):e>n&&(e=n),e>>=0,e>>>=0,n||C(t,e,this.length);for(var i=this[t],o=1,f=0;++f>>=0,e>>>=0,n||C(t,e,this.length);for(var i=this[t+--e],o=1;e>0&&(o*=256);)i+=this[t+--e]*o;return i},o.prototype.readUint8=o.prototype.readUInt8=function(t,e){return t>>>=0,e||C(t,1,this.length),this[t]},o.prototype.readUint16LE=o.prototype.readUInt16LE=function(t,e){return t>>>=0,e||C(t,2,this.length),this[t]|this[t+1]<<8},o.prototype.readUint16BE=o.prototype.readUInt16BE=function(t,e){return t>>>=0,e||C(t,2,this.length),this[t]<<8|this[t+1]},o.prototype.readUint32LE=o.prototype.readUInt32LE=function(t,e){return t>>>=0,e||C(t,4,this.length),(this[t]|this[t+1]<<8|this[t+2]<<16)+16777216*this[t+3]},o.prototype.readUint32BE=o.prototype.readUInt32BE=function(t,e){return t>>>=0,e||C(t,4,this.length),16777216*this[t]+(this[t+1]<<16|this[t+2]<<8|this[t+3])},o.prototype.readBigUInt64LE=ft(function(t){J(t>>>=0,'offset');var e=this[t],n=this[t+7];void 0!==e&&void 0!==n||Z(t,this.length-8);var i=e+this[++t]*Math.pow(2,8)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,24),o=this[++t]+this[++t]*Math.pow(2,8)+this[++t]*Math.pow(2,16)+n*Math.pow(2,24);return BigInt(i)+(BigInt(o)<>>=0,'offset');var e=this[t],n=this[t+7];void 
0!==e&&void 0!==n||Z(t,this.length-8);var i=e*Math.pow(2,24)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,8)+this[++t],o=this[++t]*Math.pow(2,24)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,8)+n;return(BigInt(i)<>>=0,e>>>=0,n||C(t,e,this.length);for(var i=this[t],o=1,f=0;++f=(o*=128)&&(i-=Math.pow(2,8*e)),i},o.prototype.readIntBE=function(t,e,n){t>>>=0,e>>>=0,n||C(t,e,this.length);for(var i=e,o=1,f=this[t+--i];i>0&&(o*=256);)f+=this[t+--i]*o;return f>=(o*=128)&&(f-=Math.pow(2,8*e)),f},o.prototype.readInt8=function(t,e){return t>>>=0,e||C(t,1,this.length),128&this[t]?-1*(255-this[t]+1):this[t]},o.prototype.readInt16LE=function(t,e){t>>>=0,e||C(t,2,this.length);var n=this[t]|this[t+1]<<8;return 32768&n?4294901760|n:n},o.prototype.readInt16BE=function(t,e){t>>>=0,e||C(t,2,this.length);var n=this[t+1]|this[t]<<8;return 32768&n?4294901760|n:n},o.prototype.readInt32LE=function(t,e){return t>>>=0,e||C(t,4,this.length),this[t]|this[t+1]<<8|this[t+2]<<16|this[t+3]<<24},o.prototype.readInt32BE=function(t,e){return t>>>=0,e||C(t,4,this.length),this[t]<<24|this[t+1]<<16|this[t+2]<<8|this[t+3]},o.prototype.readBigInt64LE=ft(function(t){J(t>>>=0,'offset');var e=this[t],n=this[t+7];void 0!==e&&void 0!==n||Z(t,this.length-8);var i=this[t+4]+this[t+5]*Math.pow(2,8)+this[t+6]*Math.pow(2,16)+(n<<24);return(BigInt(i)<>>=0,'offset');var e=this[t],n=this[t+7];void 0!==e&&void 0!==n||Z(t,this.length-8);var i=(e<<24)+this[++t]*Math.pow(2,16)+this[++t]*Math.pow(2,8)+this[++t];return(BigInt(i)<>>=0,e||C(t,4,this.length),r(d[3]).read(this,t,!0,23,4)},o.prototype.readFloatBE=function(t,e){return t>>>=0,e||C(t,4,this.length),r(d[3]).read(this,t,!1,23,4)},o.prototype.readDoubleLE=function(t,e){return t>>>=0,e||C(t,8,this.length),r(d[3]).read(this,t,!0,52,8)},o.prototype.readDoubleBE=function(t,e){return t>>>=0,e||C(t,8,this.length),r(d[3]).read(this,t,!1,52,8)},o.prototype.writeUintLE=o.prototype.writeUIntLE=function(t,e,n,i){(t=+t,e>>>=0,n>>>=0,i)||N(this,t,e,n,Math.pow(2,8*n)-1,0);var o=1,f=0;for(this[e]=255&t;++f>>=0,n>>>=0,i)||N(this,t,e,n,Math.pow(2,8*n)-1,0);var o=n-1,f=1;for(this[e+o]=255&t;--o>=0&&(f*=256);)this[e+o]=t/f&255;return e+n},o.prototype.writeUint8=o.prototype.writeUInt8=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,1,255,0),this[e]=255&t,e+1},o.prototype.writeUint16LE=o.prototype.writeUInt16LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,65535,0),this[e]=255&t,this[e+1]=t>>>8,e+2},o.prototype.writeUint16BE=o.prototype.writeUInt16BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,65535,0),this[e]=t>>>8,this[e+1]=255&t,e+2},o.prototype.writeUint32LE=o.prototype.writeUInt32LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,4294967295,0),this[e+3]=t>>>24,this[e+2]=t>>>16,this[e+1]=t>>>8,this[e]=255&t,e+4},o.prototype.writeUint32BE=o.prototype.writeUInt32BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,4294967295,0),this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t,e+4},o.prototype.writeBigUInt64LE=ft(function(t){return F(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,BigInt(0),BigInt('0xffffffffffffffff'))}),o.prototype.writeBigUInt64BE=ft(function(t){return j(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,BigInt(0),BigInt('0xffffffffffffffff'))}),o.prototype.writeIntLE=function(t,e,n,i){if(t=+t,e>>>=0,!i){var o=Math.pow(2,8*n-1);N(this,t,e,n,o-1,-o)}var f=0,u=1,s=0;for(this[e]=255&t;++f>0)-s&255;return e+n},o.prototype.writeIntBE=function(t,e,n,i){if(t=+t,e>>>=0,!i){var o=Math.pow(2,8*n-1);N(this,t,e,n,o-1,-o)}var 
f=n-1,u=1,s=0;for(this[e+f]=255&t;--f>=0&&(u*=256);)t<0&&0===s&&0!==this[e+f+1]&&(s=1),this[e+f]=(t/u>>0)-s&255;return e+n},o.prototype.writeInt8=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,1,127,-128),t<0&&(t=255+t+1),this[e]=255&t,e+1},o.prototype.writeInt16LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,32767,-32768),this[e]=255&t,this[e+1]=t>>>8,e+2},o.prototype.writeInt16BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,2,32767,-32768),this[e]=t>>>8,this[e+1]=255&t,e+2},o.prototype.writeInt32LE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,2147483647,-2147483648),this[e]=255&t,this[e+1]=t>>>8,this[e+2]=t>>>16,this[e+3]=t>>>24,e+4},o.prototype.writeInt32BE=function(t,e,n){return t=+t,e>>>=0,n||N(this,t,e,4,2147483647,-2147483648),t<0&&(t=4294967295+t+1),this[e]=t>>>24,this[e+1]=t>>>16,this[e+2]=t>>>8,this[e+3]=255&t,e+4},o.prototype.writeBigInt64LE=ft(function(t){return F(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,-BigInt('0x8000000000000000'),BigInt('0x7fffffffffffffff'))}),o.prototype.writeBigInt64BE=ft(function(t){return j(this,t,arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,-BigInt('0x8000000000000000'),BigInt('0x7fffffffffffffff'))}),o.prototype.writeFloatLE=function(t,e,n){return z(this,t,e,!0,n)},o.prototype.writeFloatBE=function(t,e,n){return z(this,t,e,!1,n)},o.prototype.writeDoubleLE=function(t,e,n){return Y(this,t,e,!0,n)},o.prototype.writeDoubleBE=function(t,e,n){return Y(this,t,e,!1,n)},o.prototype.copy=function(t,e,n,i){if(!o.isBuffer(t))throw new TypeError('argument should be a Buffer');if(n||(n=0),i||0===i||(i=this.length),e>=t.length&&(e=t.length),e||(e=0),i>0&&i=this.length)throw new RangeError('Index out of range');if(i<0)throw new RangeError('sourceEnd out of bounds');i>this.length&&(i=this.length),t.length-e>>=0,n=void 0===n?this.length:n>>>0,t||(t=0),'number'==typeof t)for(u=e;u=i+4;n-=3)e="_"+t.slice(n-3,n)+e;return""+t.slice(0,n)+e}function W(t,e,n){J(e,'offset'),void 0!==t[e]&&void 0!==t[e+n]||Z(e,t.length-(n+1))}function X(t,e,n,i,o,f){if(t>n||t3?0===e||e===BigInt(0)?">= 0"+s+" and < 2"+s+" ** "+8*(f+1)+s:">= -(2"+s+" ** "+(8*(f+1)-1)+s+") and < 2 ** "+(8*(f+1)-1)+s:">= "+e+s+" and <= "+n+s,new G.ERR_OUT_OF_RANGE('value',u,t)}W(i,o,f)}function J(t,e){if('number'!=typeof t)throw new G.ERR_INVALID_ARG_TYPE(e,'number',t)}function Z(t,e,n){if(Math.floor(t)!==t)throw J(t,n),new G.ERR_OUT_OF_RANGE(n||'offset','an integer',t);if(e<0)throw new G.ERR_BUFFER_OUT_OF_BOUNDS;throw new G.ERR_OUT_OF_RANGE(n||'offset',">= "+(n?1:0)+" and <= "+e,t)}V('ERR_BUFFER_OUT_OF_BOUNDS',function(t){return t?t+" is outside of buffer bounds":'Attempt to access memory outside buffer bounds'},RangeError),V('ERR_INVALID_ARG_TYPE',function(t,e){return"The \""+t+"\" argument must be of type number. Received type "+typeof e},TypeError),V('ERR_OUT_OF_RANGE',function(t,e,n){var i="The value of \""+t+"\" is out of range.",o=n;return Number.isInteger(n)&&Math.abs(n)>Math.pow(2,32)?o=q(String(n)):'bigint'==typeof n&&(o=String(n),(n>Math.pow(BigInt(2),BigInt(32))||n<-Math.pow(BigInt(2),BigInt(32)))&&(o=q(o)),o+='n'),i+=" It must be "+e+". 
Received "+o},RangeError);var $=/[^+/0-9A-Za-z-_]/g;function H(t){if((t=(t=t.split('=')[0]).trim().replace($,'')).length<2)return'';for(;t.length%4!=0;)t+='=';return t}function K(t,e){var n;e=e||1/0;for(var i=t.length,o=null,f=[],u=0;u55295&&n<57344){if(!o){if(n>56319){(e-=3)>-1&&f.push(239,191,189);continue}if(u+1===i){(e-=3)>-1&&f.push(239,191,189);continue}o=n;continue}if(n<56320){(e-=3)>-1&&f.push(239,191,189),o=n;continue}n=65536+(o-55296<<10|n-56320)}else o&&(e-=3)>-1&&f.push(239,191,189);if(o=null,n<128){if((e-=1)<0)break;f.push(n)}else if(n<2048){if((e-=2)<0)break;f.push(n>>6|192,63&n|128)}else if(n<65536){if((e-=3)<0)break;f.push(n>>12|224,n>>6&63|128,63&n|128)}else{if(!(n<1114112))throw new Error('Invalid code point');if((e-=4)<0)break;f.push(n>>18|240,n>>12&63|128,n>>6&63|128,63&n|128)}}return f}function Q(t){for(var e=[],n=0;n>8,o=n%256,f.push(o),f.push(i);return f}function rt(t){return r(d[2]).toByteArray(H(t))}function et(t,e,n,i){var o;for(o=0;o=e.length||o>=t.length);++o)e[o+n]=t[o];return o}function nt(t,e){return t instanceof e||null!=t&&null!=t.constructor&&null!=t.constructor.name&&t.constructor.name===e.name}function it(t){return t!=t}var ot=(function(){for(var t=new Array(256),e=0;e<16;++e)for(var n=16*e,i=0;i<16;++i)t[n+i]="0123456789abcdef"[e]+"0123456789abcdef"[i];return t})();function ft(t){return'undefined'==typeof BigInt?ut:t}function ut(){throw new Error('BigInt not supported')}},801,[422,419,498,802,417,402,421,403]); +__d(function(g,r,_i,a,_m,_e,_d){_e.read=function(o,t,h,M,f){var w,p,i=8*f-M-1,n=(1<>1,e=-7,u=h?f-1:0,s=h?-1:1,c=o[t+u];for(u+=s,w=c&(1<<-e)-1,c>>=-e,e+=i;e>0;w=256*w+o[t+u],u+=s,e-=8);for(p=w&(1<<-e)-1,w>>=-e,e+=M;e>0;p=256*p+o[t+u],u+=s,e-=8);if(0===w)w=1-N;else{if(w===n)return p?NaN:1/0*(c?-1:1);p+=Math.pow(2,M),w-=N}return(c?-1:1)*p*Math.pow(2,w-M)},_e.write=function(o,t,h,M,f,w){var p,i,n,N=8*w-f-1,e=(1<>1,s=23===f?Math.pow(2,-24)-Math.pow(2,-77):0,c=M?0:w-1,l=M?1:-1,d=t<0||0===t&&1/t<0?1:0;for(t=Math.abs(t),isNaN(t)||t===1/0?(i=isNaN(t)?1:0,p=e):(p=Math.floor(Math.log(t)/Math.LN2),t*(n=Math.pow(2,-p))<1&&(p--,n*=2),(t+=p+u>=1?s/n:s*Math.pow(2,1-u))*n>=2&&(p++,n/=2),p+u>=e?(i=0,p=e):p+u>=1?(i=(t*n-1)*Math.pow(2,f),p+=u):(i=t*Math.pow(2,u-1)*Math.pow(2,f),p=0));f>=8;o[h+c]=255&i,c+=l,i/=256,f-=8);for(p=p<0;o[h+c]=255&p,c+=l,p/=256,N-=8);o[h+c-l]|=128*d}},802,[]); +__d(function(e,n,m,a,l,u,x){l.exports={name:"OnnxruntimeModuleExample",displayName:"OnnxruntimeModule Example"}},803,[]); +__r(52); +__r(0); \ No newline at end of file diff --git a/js/react_native/example/android/app/src/main/assets/mnist.ort b/js/react_native/e2e/android/app/src/main/assets/mnist.ort similarity index 100% rename from js/react_native/example/android/app/src/main/assets/mnist.ort rename to js/react_native/e2e/android/app/src/main/assets/mnist.ort diff --git a/js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/DataHandlerPackage.java b/js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/DataHandlerPackage.java similarity index 100% rename from js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/DataHandlerPackage.java rename to js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/DataHandlerPackage.java diff --git a/js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MNISTDataHandler.java 
b/js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MNISTDataHandler.java similarity index 99% rename from js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MNISTDataHandler.java rename to js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MNISTDataHandler.java index a6e80ca9b0bd6..8c9d71b76b34c 100644 --- a/js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MNISTDataHandler.java +++ b/js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MNISTDataHandler.java @@ -186,7 +186,7 @@ private WritableMap postprocess(ReadableMap result) throws Exception { if (maxValue == 0) { detectionResult = "No match"; } else { - detectionResult = "I guess, it's " + argmax; + detectionResult = String.valueOf(argmax); } WritableMap cookedMap = Arguments.createMap(); diff --git a/js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainActivity.java b/js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainActivity.java similarity index 100% rename from js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainActivity.java rename to js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainActivity.java diff --git a/js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainApplication.java b/js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainApplication.java similarity index 98% rename from js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainApplication.java rename to js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainApplication.java index 7c88c46d78c4c..2c59e5dd3d3b1 100644 --- a/js/react_native/example/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainApplication.java +++ b/js/react_native/e2e/android/app/src/main/java/com/example/reactnativeonnxruntimemodule/MainApplication.java @@ -27,7 +27,6 @@ public boolean getUseDeveloperSupport() { @Override protected List getPackages() { @SuppressWarnings("UnnecessaryLocalVariable") List packages = new PackageList(this).getPackages(); - packages.add(new OnnxruntimePackage()); packages.add(new DataHandlerPackage()); return packages; diff --git a/js/react_native/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/js/react_native/e2e/android/app/src/main/res/mipmap-hdpi/ic_launcher.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-hdpi/ic_launcher.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-hdpi/ic_launcher.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/js/react_native/e2e/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/js/react_native/e2e/android/app/src/main/res/mipmap-mdpi/ic_launcher.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-mdpi/ic_launcher.png rename to 
js/react_native/e2e/android/app/src/main/res/mipmap-mdpi/ic_launcher.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/js/react_native/e2e/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/js/react_native/e2e/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/js/react_native/e2e/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/js/react_native/e2e/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/js/react_native/e2e/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/js/react_native/e2e/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png diff --git a/js/react_native/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/js/react_native/e2e/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png similarity index 100% rename from js/react_native/example/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png rename to js/react_native/e2e/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png diff --git a/js/react_native/example/app.json b/js/react_native/e2e/android/app/src/main/res/raw/app.json similarity index 100% rename from js/react_native/example/app.json rename to js/react_native/e2e/android/app/src/main/res/raw/app.json diff --git a/js/react_native/example/android/app/src/main/res/values/strings.xml b/js/react_native/e2e/android/app/src/main/res/values/strings.xml similarity index 100% rename from js/react_native/example/android/app/src/main/res/values/strings.xml rename to js/react_native/e2e/android/app/src/main/res/values/strings.xml diff --git a/js/react_native/example/android/app/src/main/res/values/styles.xml b/js/react_native/e2e/android/app/src/main/res/values/styles.xml similarity index 100% rename from js/react_native/example/android/app/src/main/res/values/styles.xml rename to 
js/react_native/e2e/android/app/src/main/res/values/styles.xml diff --git a/js/react_native/example/android/build.gradle b/js/react_native/e2e/android/build.gradle similarity index 91% rename from js/react_native/example/android/build.gradle rename to js/react_native/e2e/android/build.gradle index ad8671975386a..209d9a804881b 100644 --- a/js/react_native/example/android/build.gradle +++ b/js/react_native/e2e/android/build.gradle @@ -34,9 +34,5 @@ allprojects { google() jcenter() maven { url 'https://www.jitpack.io' } - - flatDir { - dir project(':onnxruntimereactnative').file('libs') - } } } diff --git a/js/react_native/example/android/gradle.properties b/js/react_native/e2e/android/gradle.properties similarity index 100% rename from js/react_native/example/android/gradle.properties rename to js/react_native/e2e/android/gradle.properties diff --git a/js/react_native/e2e/android/gradle/wrapper/gradle-wrapper.jar b/js/react_native/e2e/android/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000000000..62d4c053550b9 Binary files /dev/null and b/js/react_native/e2e/android/gradle/wrapper/gradle-wrapper.jar differ diff --git a/js/react_native/example/android/gradle/wrapper/gradle-wrapper.properties b/js/react_native/e2e/android/gradle/wrapper/gradle-wrapper.properties similarity index 80% rename from js/react_native/example/android/gradle/wrapper/gradle-wrapper.properties rename to js/react_native/e2e/android/gradle/wrapper/gradle-wrapper.properties index 57c966f75cae0..442d9132ea328 100644 --- a/js/react_native/example/android/gradle/wrapper/gradle-wrapper.properties +++ b/js/react_native/e2e/android/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,5 @@ -#Tue Jan 26 22:27:34 PST 2021 distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.8.3-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip diff --git a/js/react_native/e2e/android/gradlew b/js/react_native/e2e/android/gradlew new file mode 100755 index 0000000000000..fbd7c515832da --- /dev/null +++ b/js/react_native/e2e/android/gradlew @@ -0,0 +1,185 @@ +#!/usr/bin/env sh + +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. 
You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=`expr $i + 1` + done + case $i in + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" 
"$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=`save "$@"` + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +exec "$JAVACMD" "$@" diff --git a/js/react_native/e2e/android/gradlew.bat b/js/react_native/e2e/android/gradlew.bat new file mode 100644 index 0000000000000..5093609d512a9 --- /dev/null +++ b/js/react_native/e2e/android/gradlew.bat @@ -0,0 +1,104 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. 
+set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/js/react_native/example/android/settings.gradle b/js/react_native/e2e/android/settings.gradle similarity index 60% rename from js/react_native/example/android/settings.gradle rename to js/react_native/e2e/android/settings.gradle index e0ca0eca3ce83..fd02678d9bb4b 100644 --- a/js/react_native/example/android/settings.gradle +++ b/js/react_native/e2e/android/settings.gradle @@ -1,6 +1,3 @@ rootProject.name = 'OnnxruntimeModuleExample' apply from: file("../node_modules/@react-native-community/cli-platform-android/native_modules.gradle"); applyNativeModulesSettingsGradle(settings) include ':app' - -include ':onnxruntimereactnative' -project(':onnxruntimereactnative').projectDir = new File(rootProject.projectDir, '../../android') diff --git a/js/react_native/e2e/app.json b/js/react_native/e2e/app.json new file mode 100644 index 0000000000000..3763f86f058e7 --- /dev/null +++ b/js/react_native/e2e/app.json @@ -0,0 +1,4 @@ +{ + "name": "OnnxruntimeModuleExample", + "displayName": "OnnxruntimeModule Example" +} diff --git a/js/react_native/example/babel.config.js b/js/react_native/e2e/babel.config.js similarity index 100% rename from js/react_native/example/babel.config.js rename to js/react_native/e2e/babel.config.js diff --git a/js/react_native/example/index.tsx b/js/react_native/e2e/index.tsx similarity index 100% rename from js/react_native/example/index.tsx rename to js/react_native/e2e/index.tsx diff --git a/js/react_native/example/ios/File.swift b/js/react_native/e2e/ios/File.swift similarity index 100% rename from js/react_native/example/ios/File.swift rename to js/react_native/e2e/ios/File.swift diff --git a/js/react_native/example/ios/MNISTDataHandler.h b/js/react_native/e2e/ios/MNISTDataHandler.h similarity index 100% rename from js/react_native/example/ios/MNISTDataHandler.h rename to js/react_native/e2e/ios/MNISTDataHandler.h diff --git a/js/react_native/example/ios/MNISTDataHandler.mm b/js/react_native/e2e/ios/MNISTDataHandler.mm similarity index 98% rename from js/react_native/example/ios/MNISTDataHandler.mm rename to js/react_native/e2e/ios/MNISTDataHandler.mm index 74477376c1da7..d639ec930daa4 100644 --- a/js/react_native/example/ios/MNISTDataHandler.mm +++ b/js/react_native/e2e/ios/MNISTDataHandler.mm @@ -156,7 +156,7 @@ - (NSDictionary *)postprocess:(NSDictionary *)result { if (maxValue == 0.0f) { detectionResult = [NSMutableString stringWithString:@"No match"]; } else { - detectionResult = [NSMutableString stringWithFormat:@"I guess, it's %d", argmax]; + detectionResult = [NSMutableString stringWithFormat:@"%d", argmax]; } NSDictionary *cookedMap = @{@"result" : detectionResult}; diff --git a/js/react_native/example/ios/OnnxruntimeModuleExample-Bridging-Header.h 
b/js/react_native/e2e/ios/OnnxruntimeModuleExample-Bridging-Header.h similarity index 100% rename from js/react_native/example/ios/OnnxruntimeModuleExample-Bridging-Header.h rename to js/react_native/e2e/ios/OnnxruntimeModuleExample-Bridging-Header.h diff --git a/js/react_native/example/ios/OnnxruntimeModuleExample.xcodeproj/project.pbxproj b/js/react_native/e2e/ios/OnnxruntimeModuleExample.xcodeproj/project.pbxproj similarity index 79% rename from js/react_native/example/ios/OnnxruntimeModuleExample.xcodeproj/project.pbxproj rename to js/react_native/e2e/ios/OnnxruntimeModuleExample.xcodeproj/project.pbxproj index 43a2dafa431ba..515550762a424 100644 --- a/js/react_native/example/ios/OnnxruntimeModuleExample.xcodeproj/project.pbxproj +++ b/js/react_native/e2e/ios/OnnxruntimeModuleExample.xcodeproj/project.pbxproj @@ -12,11 +12,22 @@ 13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; }; 81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; }; A769E116237385B138BCB816 /* Pods_OnnxruntimeModuleExample.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0C2AC7C168BAB1163669C38C /* Pods_OnnxruntimeModuleExample.framework */; }; + DB61BA27278684FB0096C971 /* OnnxruntimeModuleExampleUITests.m in Sources */ = {isa = PBXBuildFile; fileRef = DB61BA26278684FB0096C971 /* OnnxruntimeModuleExampleUITests.m */; }; DBA8BA87267293C4008CC55A /* mnist.ort in Resources */ = {isa = PBXBuildFile; fileRef = DBA8BA86267293C4008CC55A /* mnist.ort */; }; DBBF7412263B8C7100487C77 /* MNISTDataHandler.mm in Sources */ = {isa = PBXBuildFile; fileRef = DBBF7411263B8C7100487C77 /* MNISTDataHandler.mm */; }; DBBF7414263B8CCB00487C77 /* 3.jpg in Resources */ = {isa = PBXBuildFile; fileRef = DBBF7413263B8CCB00487C77 /* 3.jpg */; }; /* End PBXBuildFile section */ +/* Begin PBXContainerItemProxy section */ + DB61BA2A278684FB0096C971 /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 83CBB9F71A601CBA00E9B192 /* Project object */; + proxyType = 1; + remoteGlobalIDString = 13B07F861A680F5B00A75B9A; + remoteInfo = OnnxruntimeModuleExample; + }; +/* End PBXContainerItemProxy section */ + /* Begin PBXCopyFilesBuildPhase section */ DB8FCD9C25C3404B00C72F26 /* Embed Libraries */ = { isa = PBXCopyFilesBuildPhase; @@ -41,6 +52,8 @@ 13B07FB71A68108700A75B9A /* main.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = main.m; path = OnnxruntimeModuleExample/main.m; sourceTree = ""; }; 47F7ED3B7971BE374F7B8635 /* Pods-OnnxruntimeModuleExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-OnnxruntimeModuleExample.debug.xcconfig"; path = "Target Support Files/Pods-OnnxruntimeModuleExample/Pods-OnnxruntimeModuleExample.debug.xcconfig"; sourceTree = ""; }; 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = OnnxruntimeModuleExample/LaunchScreen.storyboard; sourceTree = ""; }; + DB61BA24278684FB0096C971 /* OnnxruntimeModuleExampleUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = OnnxruntimeModuleExampleUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + DB61BA26278684FB0096C971 /* OnnxruntimeModuleExampleUITests.m */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.c.objc; path = OnnxruntimeModuleExampleUITests.m; sourceTree = ""; }; DBA8BA86267293C4008CC55A /* mnist.ort */ = {isa = PBXFileReference; lastKnownFileType = file; name = mnist.ort; path = ../src/mnist.ort; sourceTree = ""; }; DBBF7410263B8C5F00487C77 /* MNISTDataHandler.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MNISTDataHandler.h; sourceTree = ""; }; DBBF7411263B8C7100487C77 /* MNISTDataHandler.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = MNISTDataHandler.mm; sourceTree = ""; }; @@ -58,6 +71,13 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + DB61BA21278684FB0096C971 /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ @@ -109,6 +129,7 @@ DBBF7413263B8CCB00487C77 /* 3.jpg */, 13B07FAE1A68108700A75B9A /* OnnxruntimeModuleExample */, 832341AE1AAA6A7D00B99B32 /* Libraries */, + DB61BA25278684FB0096C971 /* OnnxruntimeModuleExampleUITests */, 83CBBA001A601CBA00E9B192 /* Products */, 2D16E6871FA4F8E400B85C8A /* Frameworks */, 6B9684456A2045ADE5A6E47E /* Pods */, @@ -122,10 +143,19 @@ isa = PBXGroup; children = ( 13B07F961A680F5B00A75B9A /* OnnxruntimeModuleExample.app */, + DB61BA24278684FB0096C971 /* OnnxruntimeModuleExampleUITests.xctest */, ); name = Products; sourceTree = ""; }; + DB61BA25278684FB0096C971 /* OnnxruntimeModuleExampleUITests */ = { + isa = PBXGroup; + children = ( + DB61BA26278684FB0096C971 /* OnnxruntimeModuleExampleUITests.m */, + ); + path = OnnxruntimeModuleExampleUITests; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ @@ -151,6 +181,24 @@ productReference = 13B07F961A680F5B00A75B9A /* OnnxruntimeModuleExample.app */; productType = "com.apple.product-type.application"; }; + DB61BA23278684FB0096C971 /* OnnxruntimeModuleExampleUITests */ = { + isa = PBXNativeTarget; + buildConfigurationList = DB61BA2E278684FB0096C971 /* Build configuration list for PBXNativeTarget "OnnxruntimeModuleExampleUITests" */; + buildPhases = ( + DB61BA20278684FB0096C971 /* Sources */, + DB61BA21278684FB0096C971 /* Frameworks */, + DB61BA22278684FB0096C971 /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + DB61BA2B278684FB0096C971 /* PBXTargetDependency */, + ); + name = OnnxruntimeModuleExampleUITests; + productName = OnnxruntimeModuleExampleUITests; + productReference = DB61BA24278684FB0096C971 /* OnnxruntimeModuleExampleUITests.xctest */; + productType = "com.apple.product-type.bundle.ui-testing"; + }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ @@ -160,9 +208,13 @@ LastUpgradeCheck = 1130; TargetAttributes = { 13B07F861A680F5B00A75B9A = { - DevelopmentTeam = X7YWPPMK53; LastSwiftMigration = 1120; }; + DB61BA23278684FB0096C971 = { + CreatedOnToolsVersion = 13.2.1; + ProvisioningStyle = Automatic; + TestTargetID = 13B07F861A680F5B00A75B9A; + }; }; }; buildConfigurationList = 83CBB9FA1A601CBA00E9B192 /* Build configuration list for PBXProject "OnnxruntimeModuleExample" */; @@ -179,6 +231,7 @@ projectRoot = ""; targets = ( 13B07F861A680F5B00A75B9A /* OnnxruntimeModuleExample */, + DB61BA23278684FB0096C971 /* OnnxruntimeModuleExampleUITests */, ); }; /* End PBXProject section */ @@ -195,6 +248,13 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + DB61BA22278684FB0096C971 /* Resources */ = { + isa = PBXResourcesBuildPhase; + 
buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ @@ -221,7 +281,7 @@ "${PODS_ROOT}/Target Support Files/Pods-OnnxruntimeModuleExample/Pods-OnnxruntimeModuleExample-frameworks.sh", "${BUILT_PRODUCTS_DIR}/DoubleConversion/DoubleConversion.framework", "${BUILT_PRODUCTS_DIR}/FBReactNativeSpec/FBReactNativeSpec.framework", - "${BUILT_PRODUCTS_DIR}/Folly/folly.framework", + "${BUILT_PRODUCTS_DIR}/RCT-Folly/folly.framework", "${BUILT_PRODUCTS_DIR}/RCTTypeSafety/RCTTypeSafety.framework", "${BUILT_PRODUCTS_DIR}/React-Core/React.framework", "${BUILT_PRODUCTS_DIR}/React-CoreModules/CoreModules.framework", @@ -237,8 +297,11 @@ "${BUILT_PRODUCTS_DIR}/React-jsi/jsi.framework", "${BUILT_PRODUCTS_DIR}/React-jsiexecutor/jsireact.framework", "${BUILT_PRODUCTS_DIR}/React-jsinspector/jsinspector.framework", + "${BUILT_PRODUCTS_DIR}/React-logger/logger.framework", + "${BUILT_PRODUCTS_DIR}/React-perflogger/reactperflogger.framework", "${BUILT_PRODUCTS_DIR}/ReactCommon/ReactCommon.framework", "${BUILT_PRODUCTS_DIR}/Yoga/yoga.framework", + "${BUILT_PRODUCTS_DIR}/fmt/fmt.framework", "${BUILT_PRODUCTS_DIR}/glog/glog.framework", ); name = "[CP] Embed Pods Frameworks"; @@ -261,8 +324,11 @@ "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/jsi.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/jsireact.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/jsinspector.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/logger.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/reactperflogger.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/ReactCommon.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/yoga.framework", + "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/fmt.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/glog.framework", ); runOnlyForDeploymentPostprocessing = 0; @@ -324,8 +390,24 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + DB61BA20278684FB0096C971 /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + DB61BA27278684FB0096C971 /* OnnxruntimeModuleExampleUITests.m in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; /* End PBXSourcesBuildPhase section */ +/* Begin PBXTargetDependency section */ + DB61BA2B278684FB0096C971 /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 13B07F861A680F5B00A75B9A /* OnnxruntimeModuleExample */; + targetProxy = DB61BA2A278684FB0096C971 /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + /* Begin XCBuildConfiguration section */ 13B07F941A680F5B00A75B9A /* Debug */ = { isa = XCBuildConfiguration; @@ -334,7 +416,7 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = X7YWPPMK53; + DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; INFOPLIST_FILE = OnnxruntimeModuleExample/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; @@ -358,7 +440,7 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = X7YWPPMK53; + DEVELOPMENT_TEAM = ""; INFOPLIST_FILE = OnnxruntimeModuleExample/Info.plist; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; OTHER_LDFLAGS = ( @@ -487,6 +569,60 @@ }; name = Release; }; + DB61BA2C278684FB0096C971 /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CLANG_ANALYZER_NONNULL = 
YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++17"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = dwarf; + GCC_C_LANGUAGE_STANDARD = gnu11; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.2; + MARKETING_VERSION = 1.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = ai.onnxruntime.reactnative.OnnxruntimeModuleExampleUITests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = NO; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = OnnxruntimeModuleExample; + }; + name = Debug; + }; + DB61BA2D278684FB0096C971 /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++17"; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_STYLE = Automatic; + COPY_PHASE_STRIP = NO; + CURRENT_PROJECT_VERSION = 1; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + GCC_C_LANGUAGE_STANDARD = gnu11; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 15.2; + MARKETING_VERSION = 1.0; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = ai.onnxruntime.reactnative.OnnxruntimeModuleExampleUITests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_EMIT_LOC_STRINGS = NO; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = OnnxruntimeModuleExample; + }; + name = Release; + }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ @@ -508,6 +644,15 @@ defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; + DB61BA2E278684FB0096C971 /* Build configuration list for PBXNativeTarget "OnnxruntimeModuleExampleUITests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + DB61BA2C278684FB0096C971 /* Debug */, + DB61BA2D278684FB0096C971 /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; /* End XCConfigurationList section */ }; rootObject = 83CBB9F71A601CBA00E9B192 /* Project object */; diff --git a/js/react_native/example/ios/OnnxruntimeModuleExample.xcodeproj/xcshareddata/xcschemes/OnnxruntimeModuleExample.xcscheme b/js/react_native/e2e/ios/OnnxruntimeModuleExample.xcodeproj/xcshareddata/xcschemes/OnnxruntimeModuleExample.xcscheme similarity index 87% rename from js/react_native/example/ios/OnnxruntimeModuleExample.xcodeproj/xcshareddata/xcschemes/OnnxruntimeModuleExample.xcscheme rename to js/react_native/e2e/ios/OnnxruntimeModuleExample.xcodeproj/xcshareddata/xcschemes/OnnxruntimeModuleExample.xcscheme index 37863e7f94e6d..97ba7ee3e934c 100644 --- a/js/react_native/example/ios/OnnxruntimeModuleExample.xcodeproj/xcshareddata/xcschemes/OnnxruntimeModuleExample.xcscheme +++ b/js/react_native/e2e/ios/OnnxruntimeModuleExample.xcodeproj/xcshareddata/xcschemes/OnnxruntimeModuleExample.xcscheme @@ -50,9 +50,21 @@ ReferencedContainer = "container:OnnxruntimeModuleExample.xcodeproj"> + + + + + + + +@interface OnnxruntimeModuleExampleUITests : XCTestCase + +@end + +@implementation OnnxruntimeModuleExampleUITests + +- (void)setUp { + self.continueAfterFailure = NO; +} + +- (void)testExample { + 
XCUIApplication *app = [[XCUIApplication alloc] init]; + [app launch]; + + XCTAssert([app.textFields[@"output"] waitForExistenceWithTimeout:180]); + NSString* value = app.textFields[@"output"].value; + XCTAssertEqualObjects(value, @"Result: 3"); +} + +@end diff --git a/js/react_native/example/ios/Podfile b/js/react_native/e2e/ios/Podfile similarity index 86% rename from js/react_native/example/ios/Podfile rename to js/react_native/e2e/ios/Podfile index d497c6e229de2..a9eeeb7728418 100644 --- a/js/react_native/example/ios/Podfile +++ b/js/react_native/e2e/ios/Podfile @@ -13,6 +13,7 @@ target 'OnnxruntimeModuleExample' do if File.exist?('../../local_pods/onnxruntime-mobile-c.zip') pod 'onnxruntime-mobile-c', :podspec => '../../onnxruntime-mobile-c.podspec' end + #pod 'onnxruntime-react-native', :path => '../node_modules/onnxruntime-react-native' pod 'onnxruntime-react-native', :path => '../..' inherit! :search_paths diff --git a/js/react_native/example/ios/main.jsbundle b/js/react_native/e2e/ios/main.jsbundle similarity index 100% rename from js/react_native/example/ios/main.jsbundle rename to js/react_native/e2e/ios/main.jsbundle diff --git a/js/react_native/example/metro.config.js b/js/react_native/e2e/metro.config.js similarity index 80% rename from js/react_native/example/metro.config.js rename to js/react_native/e2e/metro.config.js index befddbf0934f2..56941aa01458c 100644 --- a/js/react_native/example/metro.config.js +++ b/js/react_native/e2e/metro.config.js @@ -1,7 +1,7 @@ 'use strict'; const path = require('path'); -const blacklist = require('metro-config/src/defaults/blacklist'); +const exclusionlist = require('metro-config/src/defaults/exclusionList'); const escape = require('escape-string-regexp'); const pak = require('../package.json'); @@ -16,9 +16,9 @@ module.exports = { watchFolders: [root], // We need to make sure that only one version is loaded for peerDependencies - // So we blacklist them at the root, and alias them to the versions in example's node_modules + // So we exclusionlist them at the root, and alias them to the versions in example's node_modules resolver: { - blacklistRE: blacklist( + exclusionlistRE: exclusionlist( modules.map( (m) => new RegExp(`^${escape(path.join(root, 'node_modules', m))}\\/.*$`) diff --git a/js/react_native/example/package.json b/js/react_native/e2e/package.json similarity index 69% rename from js/react_native/example/package.json rename to js/react_native/e2e/package.json index 7b18b665ac45f..5350dc87715cf 100644 --- a/js/react_native/example/package.json +++ b/js/react_native/e2e/package.json @@ -9,13 +9,13 @@ "start": "react-native start" }, "dependencies": { - "react": "16.13.1", - "react-native": "0.63.4" + "react": "^17.0.2", + "react-native": "^0.67.2" }, "devDependencies": { - "@babel/core": "^7.12.10", - "@babel/runtime": "^7.12.5", + "@babel/core": "^7.17.0", + "@babel/runtime": "^7.17.0", "babel-plugin-module-resolver": "^4.0.0", - "metro-react-native-babel-preset": "^0.64.0" + "metro-react-native-babel-preset": "^0.67.0" } } diff --git a/js/react_native/example/src/3.jpg b/js/react_native/e2e/src/3.jpg similarity index 100% rename from js/react_native/example/src/3.jpg rename to js/react_native/e2e/src/3.jpg diff --git a/js/react_native/example/src/App.tsx b/js/react_native/e2e/src/App.tsx similarity index 71% rename from js/react_native/example/src/App.tsx rename to js/react_native/e2e/src/App.tsx index 220afc1157f49..07d58f26af9b7 100644 --- a/js/react_native/example/src/App.tsx +++ b/js/react_native/e2e/src/App.tsx @@ -1,28 
+1,19 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. -import *as React from 'react'; -import{Button, Image, Text, View} from 'react-native'; +import * as React from 'react'; +import{Image, Text, TextInput, View} from 'react-native'; +// onnxruntime-react-native package is installed when bootstraping +// eslint-disable-next-line import/no-extraneous-dependencies import{InferenceSession, Tensor} from 'onnxruntime-react-native'; import MNIST, {MNISTInput, MNISTOutput, MNISTResult, } from './mnist-data-handler'; import{Buffer} from 'buffer'; -interface Duration { -preprocess: - number; -inference: - number; -postprocess: - number; -} - interface State { session: InferenceSession | null; output: string | null; -duration: - Duration | null; imagePath: string | null; } @@ -36,7 +27,6 @@ export default class App extends React.PureComponent<{}, State> { this.state = { session : null, output : null, - duration : null, imagePath : null, }; } @@ -51,6 +41,8 @@ export default class App extends React.PureComponent<{}, State> { const modelPath = await MNIST.getLocalModelPath(); const session : InferenceSession = await InferenceSession.create(modelPath); this.setState({session}); + + void this.infer(); } catch (err) { console.log(err.message); } @@ -60,13 +52,8 @@ export default class App extends React.PureComponent<{}, State> { // Run a model with a given image infer = async() : Promise => { try { - let preprocessTime = 0; - let inferenceTime = 0; - let postprocessTime = 0; - const options : InferenceSession.RunOptions = {}; - let startTime = Date.now(); const mnistInput : MNISTInput = await MNIST.preprocess(this.state.imagePath !); const input : {[name:string] : Tensor} = {}; for (const key in mnistInput) { @@ -77,14 +64,10 @@ export default class App extends React.PureComponent<{}, State> { input[key] = new Tensor(mnistInput[key].type as keyof Tensor.DataTypeMap, tensorData, mnistInput[key].dims); } } - preprocessTime = Date.now() - startTime; - startTime = Date.now(); const output : InferenceSession.ReturnType = await this.state.session !.run(input, this.state.session !.outputNames, options); - inferenceTime = Date.now() - startTime; - startTime = Date.now(); const mnistOutput : MNISTOutput = {}; for (const key in output) { if (Object.hasOwnProperty.call(output, key)) { @@ -96,15 +79,9 @@ export default class App extends React.PureComponent<{}, State> { } } const result : MNISTResult = await MNIST.postprocess(mnistOutput); - postprocessTime = Date.now() - startTime; this.setState({ - output : result.result, - duration : { - preprocess : preprocessTime, - inference : inferenceTime, - postprocess : postprocessTime, - }, + output : result.result }); } catch (err) { console.log(err.message); @@ -112,16 +89,11 @@ export default class App extends React.PureComponent<{}, State> { }; render() : JSX.Element { - const {output, duration, imagePath} = this.state; + const {output, imagePath} = this.state; return ( {'\n'} -
/// ORT Graph to wrap with API /// Allocator used for reshaping/transposing tensors -/// Logger /// New nodes are assigned to this EP, or left unassigned if nullptr /// api::GraphRef for use with transpose optimizer std::unique_ptr MakeApiGraph(onnxruntime::Graph& graph, - AllocatorPtr cpu_allocator, - const logging::Logger& logger, - const char* new_node_ep); + AllocatorPtr cpu_allocator, + const char* new_node_ep); /// /// Reveals underlying ORT graph from an api::GraphRef diff --git a/onnxruntime/core/optimizer/transpose_optimizer/ort_transpose_optimizer.cc b/onnxruntime/core/optimizer/transpose_optimizer/ort_transpose_optimizer.cc index 13d1b873250ee..80a1472cdea81 100644 --- a/onnxruntime/core/optimizer/transpose_optimizer/ort_transpose_optimizer.cc +++ b/onnxruntime/core/optimizer/transpose_optimizer/ort_transpose_optimizer.cc @@ -17,7 +17,7 @@ using namespace onnx_layout_transformation; namespace onnxruntime { Status TransposeOptimizer::ApplyImpl(Graph& graph, bool& modified, int graph_level, const logging::Logger& logger) const { - auto api_graph = MakeApiGraph(graph, cpu_allocator_, logger, /*new_node_ep*/ nullptr); + auto api_graph = MakeApiGraph(graph, cpu_allocator_, /*new_node_ep*/ nullptr); if (onnx_layout_transformation::Optimize(*api_graph, /*allow_extended_ops*/ false)) { modified = true; } diff --git a/onnxruntime/core/optimizer/utils.cc b/onnxruntime/core/optimizer/utils.cc index 3faf803af4572..e9260e553b703 100644 --- a/onnxruntime/core/optimizer/utils.cc +++ b/onnxruntime/core/optimizer/utils.cc @@ -281,9 +281,67 @@ bool IsOperationDeterministic(const std::string& domain, const std::string& op) if (domain.compare(kOnnxDomain) == 0) { auto iter = std::find(kOnnxDomainNonDeterministicOps.begin(), kOnnxDomainNonDeterministicOps.end(), op); return iter == kOnnxDomainNonDeterministicOps.end(); - } + } // Unknown domain. Assume the op is not deterministic. - return false; + return false; +} + +bool GetClipConstantMinMax(const Graph& graph, const Node& node, float& min, float& max) { + min = std::numeric_limits::lowest(); + max = std::numeric_limits::max(); + + // Clip opset 1 and 6 has min and max as attributes. they're inputs from opset 11 on. + bool min_max_are_attributes = graph_utils::IsSupportedOptypeVersionAndDomain(node, "Clip", {1, 6}); + bool min_max_are_constant_values = true; + + if (min_max_are_attributes) { + min = graph_utils::GetNodeAttribute(node, "min")->f(); + max = graph_utils::GetNodeAttribute(node, "max")->f(); + } else { + // update min/max if provided via a constant initializer + // return true if value is default or coming from a constant initializer and update 'value' + // return false if value is mutable + auto update_if_constant_value = [&graph](const Node& node, size_t input_idx, float& value) { + const auto& input_defs = node.InputDefs(); + const NodeArg* input = (input_defs.size() > input_idx) ? 
input_defs[input_idx] : nullptr; + + if (input == nullptr || !input->Exists()) { + // optional input not specified so using default value + return true; + } + + bool is_constant = true; + const ONNX_NAMESPACE::TensorProto* initializer = graph_utils::GetConstantInitializer(graph, input->Name()); + if (initializer) { + Initializer i(*initializer, graph.ModelPath()); + switch (initializer->data_type()) { + case ONNX_NAMESPACE::TensorProto_DataType_FLOAT: + value = *i.data(); + break; + // double isn't currently supported + //case ONNX_NAMESPACE::TensorProto_DataType_DOUBLE: + // value = static_cast(*i.data()); + // break; + case ONNX_NAMESPACE::TensorProto_DataType_FLOAT16: + value = math::halfToFloat(i.data()->val); + break; + default: + ORT_THROW("Unexpected data type for Clip input of ", initializer->data_type()); + } + } else { + is_constant = false; + } + + return is_constant; + }; + + // 'min' is input 1, 'max' is input 2. both are optional. + // if the input is constant, 'min' or 'max' is updated by the call to get_if_constant_value + min_max_are_constant_values = update_if_constant_value(node, 1, min) && + update_if_constant_value(node, 2, max); + } + + return min_max_are_constant_values; } #endif // #if !defined(ORT_MINIMAL_BUILD) diff --git a/onnxruntime/core/optimizer/utils.h b/onnxruntime/core/optimizer/utils.h index 9b87ee680d9c6..d7c1fa71ead59 100644 --- a/onnxruntime/core/optimizer/utils.h +++ b/onnxruntime/core/optimizer/utils.h @@ -102,6 +102,11 @@ bool CheckOutputEdges(const Graph& graph, const Node& node, size_t expected_outp bool IsOperationDeterministic(const std::string& domain, const std::string& op); +/** Get min/max values from Clip if they are constant. +@returns false if mutable and cannot be used. +*/ +bool GetClipConstantMinMax(const Graph& graph, const Node& node, float& min, float& max); + #endif // !#if !defined(ORT_MINIMAL_BUILD) #if !defined(ORT_MINIMAL_BUILD) || defined(ORT_EXTENDED_MINIMAL_BUILD) diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/ArrayFeatureExtractor.proto b/onnxruntime/core/providers/coreml/mlmodel_format/ArrayFeatureExtractor.proto new file mode 100644 index 0000000000000..2b83ccbe3574f --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/ArrayFeatureExtractor.proto @@ -0,0 +1,19 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** + * An array feature extractor. + * + * Given an index, extracts the value at that index from its array input. + * Indexes are zero-based. + */ +message ArrayFeatureExtractor { + repeated uint64 extractIndex = 1; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/BayesianProbitRegressor.proto b/onnxruntime/core/providers/coreml/mlmodel_format/BayesianProbitRegressor.proto new file mode 100644 index 0000000000000..9688d87ce48ba --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/BayesianProbitRegressor.proto @@ -0,0 +1,139 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** +* A Bayesian probit regressor. 
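Stepping back to the optimizer change earlier in this hunk: the new GetClipConstantMinMax helper declared in core/optimizer/utils.h resolves a Clip node's bounds from its attributes (opset 1/6) or from constant-initializer inputs (opset 11+). A minimal usage sketch follows; the wrapper function, the include path, and the optimizer_utils namespace qualification are assumptions for illustration, not part of this change.

#include <limits>

#include "core/optimizer/utils.h"  // declares GetClipConstantMinMax (added in this PR)

namespace onnxruntime {

// Sketch: report whether a Clip node's min/max are known at graph-optimization time,
// e.g. before treating the Clip as a bounded activation in a fusion pass.
bool ClipBoundsAreStatic(const Graph& graph, const Node& clip_node, float& min, float& max) {
  min = std::numeric_limits<float>::lowest();
  max = std::numeric_limits<float>::max();
  // Returns false when 'min' or 'max' is fed by a mutable (non-constant) input.
  // On success, min/max hold the defaults, the opset 1/6 attribute values,
  // or the opset 11+ constant-initializer values.
  return optimizer_utils::GetClipConstantMinMax(graph, clip_node, min, max);
}

}  // namespace onnxruntime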
+* +* The probit regression model is superficially similar to the more commonly known +* logistic regression, with sampling distribution of the model given by +* +* P(y=+1|x,w) = Φ(<w,x>/β) +* +* where w are the set of weights, +* x are the set of features for the given event, +* β is a model hyper-parameter, and +* Φ is the link function, defined to be the CDF of the normal distribution. +* The weights w[i,j] are Gaussian distributed, with mean μ[i,j] and precision 1/(σ[i,j])^2 +* (where i indexes over features and j indexes over the values for the feature). +* The parameter β scales the steepness of the inverse link function. +* +* (see https://en.wikipedia.org/wiki/Probit_model and https://en.wikipedia.org/wiki/Logistic_regression +* for more details on probit model and logistic regression, respectively) +* +* Input: X +* x represents a set of features, each taking on a discrete value (note that continuous values +* would first need to be discretized). x can be represented as a vector where the index i is +* the feature id and x[i] is the feature value. Alternatively, x can be represented as a matrix +* with 2 columns where the first column indicates the feature id and the second column contains +* the feature values, i.e. x[i,0] is the feature id and x[i,1] is the feature value. +* +* additional input features: +* - "optimism": apply a mean shift to the probability, i.e. shift regression mean by o*stdev, +* where o is the "optimism" parameter (see additional output features) +* - "samplingScale": for sampling from posterior, multiply standard deviation by this factor +* - "samplingTruncation": for sampling from posterior, truncate sampling distribution at given multiple of std from mean +* +* Output: Y +* probability P(y|x,w) +* +* additional output features: +* - mean (regression output before applying link function) +* - variance (regression output variance before applying link function) +* - pessimistic probability: P(y|x,w) with a mean shift parameterized by "optimism" feature +* - sampled probability: p ~ P(y|x,w) with standard deviation scaling parametrized by "samplingScale" feature +* and distribution truncated at multiple of standard deviation, +* where multiple parameterized by "samplingTruncation" feature.
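Written out in standard notation (a restatement of the comment above, not part of the diff), the sampling distribution and weight prior are

P(y = +1 \mid x, w) = \Phi\!\left( \frac{\langle w, x \rangle}{\beta} \right), \qquad w_{ij} \sim \mathcal{N}\!\left( \mu_{ij}, \sigma_{ij}^{2} \right),

where the proto's Gaussian message stores the mean \mu_{ij} and the precision 1/\sigma_{ij}^{2} rather than the variance.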
+* +*/ + +message BayesianProbitRegressor { + + /* + * Parameterization of a Gaussian distribution + */ + message Gaussian { + double mean = 1; + double precision = 2; // inverse of the variance + } + + /* + * Weight for a specific feature value + * The weight is represented as a Gaussian distribution + * with a mean and precision (1/variance) to capture + * uncertainty in the weight + */ + message FeatureValueWeight { + uint32 featureValue = 1; + Gaussian featureWeight = 2; + } + + /* + * Feature with associated weights (for different values) + * Each feature has a set of weights for the (discrete) values + * it can take + */ + message FeatureWeight { + uint32 featureId = 1; + repeated FeatureValueWeight weights = 2; + } + + uint32 numberOfFeatures = 1; + + Gaussian bias = 2; // bias term + + /* + * Set of features with associated weights + */ + repeated FeatureWeight features = 3; // feature weights + + /* + * Set this name to be the same as input feature of type multi-array (1D) + * in the model description you want to use as the regression input + */ + string regressionInputFeatureName = 10; + + /* + * Set this name to be the same as optional input feature of type double + * in the model description you want to use as the optimism input + */ + string optimismInputFeatureName = 11; + + /* + * Set this name to be the same as optional input feature of type double + * in the model description you want to use as the samplingScale input + */ + string samplingScaleInputFeatureName = 12; + + /* + * Set this name to be the same as optional input feature of type double + * in the model description you want to use as the samplingBounds input + */ + string samplingTruncationInputFeatureName = 13; + + /* + * name of 'mean' output feature + */ + string meanOutputFeatureName = 20; + + /* + * name of 'variance' output feature + */ + string varianceOutputFeatureName = 21; + + /* + * name of 'pessimistic' output feature + */ + string pessimisticProbabilityOutputFeatureName = 22; + + /* + * name of 'sampled' output feature: samples from the scaled posterior probability distribuiton + */ + string sampledProbabilityOutputFeatureName = 23; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/CategoricalMapping.proto b/onnxruntime/core/providers/coreml/mlmodel_format/CategoricalMapping.proto new file mode 100644 index 0000000000000..23112d074213a --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/CategoricalMapping.proto @@ -0,0 +1,38 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/** + * A categorical mapping. + * + * This allows conversion from integers to strings, or from strings to integers. + */ +message CategoricalMapping { + oneof MappingType { + // Conversion from strings to integers + StringToInt64Map stringToInt64Map = 1; + + // Conversion from integer to string + Int64ToStringMap int64ToStringMap = 2; + } + + /** + * The value returned if an input is not contained in the map above. + * If one of these is not set, then an error is raised on an unknown input. + */ + oneof ValueOnUnknown { + // Default output when converting from an integer to a string. + string strValue = 101; + + // Default output when converting from a string to an integer. 
+ int64 int64Value = 102; + } +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/CustomModel.proto b/onnxruntime/core/providers/coreml/mlmodel_format/CustomModel.proto new file mode 100644 index 0000000000000..9a6d36e009ada --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/CustomModel.proto @@ -0,0 +1,30 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** +* A parameterized model whose function is defined in code +*/ +message CustomModel { + + message CustomModelParamValue { + oneof value { + double doubleValue = 10; + string stringValue = 20; + int32 intValue = 30; + int64 longValue = 40; + bool boolValue = 50; + bytes bytesValue = 60; + } + } + + string className = 10; // The name of the class (conforming to MLCustomModel) corresponding to this model + map parameters = 30; + string description = 40; // An (optional) description provided by the model creator. This information is displayed when viewing the model, but does not affect the model's execution on device. +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/DataStructures.proto b/onnxruntime/core/providers/coreml/mlmodel_format/DataStructures.proto new file mode 100644 index 0000000000000..8b120c2d7d102 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/DataStructures.proto @@ -0,0 +1,95 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "FeatureTypes.proto"; + +package CoreML.Specification; + +/** + * A mapping from a string + * to a 64-bit integer. + */ +message StringToInt64Map { + map map = 1; +} + +/** + * A mapping from a 64-bit integer + * to a string. + */ +message Int64ToStringMap { + map map = 1; +} + +/** + * A mapping from a string + * to a double-precision floating point number. + */ +message StringToDoubleMap { + map map = 1; +} + +/** + * A mapping from a 64-bit integer + * to a double-precision floating point number. + */ +message Int64ToDoubleMap { + map map = 1; +} + +/** + * A vector of strings. + */ +message StringVector { + repeated string vector = 1; +} + +/** + * A vector of 64-bit integers. + */ +message Int64Vector { + repeated int64 vector = 1; +} + +/** + * A vector of floating point numbers. + */ +message FloatVector { + repeated float vector = 1; +} + +/** + * A vector of double-precision floating point numbers. + */ +message DoubleVector { + repeated double vector = 1; +} + +/** + * A range of int64 values + */ +message Int64Range { + int64 minValue = 1; + int64 maxValue = 2; +} + +/** + * A set of int64 values + */ +message Int64Set { + repeated int64 values = 1; +} + +/** + * A range of double values + */ +message DoubleRange { + double minValue = 1; + double maxValue = 2; +} + diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/DictVectorizer.proto b/onnxruntime/core/providers/coreml/mlmodel_format/DictVectorizer.proto new file mode 100644 index 0000000000000..3f94eeec1745c --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/DictVectorizer.proto @@ -0,0 +1,36 @@ +// Copyright (c) 2017, Apple Inc. 
All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/** + * Uses an index mapping to convert a dictionary to an array. + * + * The output array will be equal in length to the index mapping vector parameter. + * All keys in the input dictionary must be present in the index mapping vector. + * + * For each item in the input dictionary, insert its value in the output array. + * The position of the insertion is determined by the position of the item's key + * in the index mapping. Any keys not present in the input dictionary, will be + * zero in the output array. + * + * For example: if the ``stringToIndex`` parameter is set to ``["a", "c", "b", "z"]``, + * then an input of ``{"a": 4, "c": 8}`` will produce an output of ``[4, 8, 0, 0]``. + * + */ +message DictVectorizer { + oneof Map { + /// String keys to indexes + StringVector stringToIndex = 1; + + /// Int keys to indexes + Int64Vector int64ToIndex = 2; + } +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/FeatureTypes.proto b/onnxruntime/core/providers/coreml/mlmodel_format/FeatureTypes.proto new file mode 100644 index 0000000000000..8711ac7de3026 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/FeatureTypes.proto @@ -0,0 +1,224 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** + * The 64-bit integer feature type. + */ +message Int64FeatureType {} + +/** + * The double-precision floating point number feature type. + */ +message DoubleFeatureType {} + +/** + * The string feature type. + */ +message StringFeatureType {} + + +message SizeRange { + uint64 lowerBound = 1; + int64 upperBound = 2; // negative value means unbound otherwise upperbound is included in range +} + +/** + * The image feature type. + */ +message ImageFeatureType { + // Assumes raw (decompressed) format + enum ColorSpace { + INVALID_COLOR_SPACE = 0; + GRAYSCALE = 10; // 8 bits per pixel + RGB = 20; // 32 bits per pixel: RGBA with A channel ignored + BGR = 30; // 32 bits per pixel: BGRA with A channel ignored + } + + message ImageSize { + uint64 width = 1; + uint64 height = 2; + } + + message EnumeratedImageSizes { + repeated ImageSize sizes = 1; + } + + message ImageSizeRange { + SizeRange widthRange = 1; + SizeRange heightRange = 2; + } + + // The required or default image size is width x height + // + // If specificationVersion <= 2 or SizeFlexibility is empty, + // width x height is the required fixed image size + // + // If SizeFlexibility is present, width x height indicate a "default" + // image size which must be consistent with the flexibilty specified + + int64 width = 1; + int64 height = 2; + + // For specification version >= 3 you can specify image size flexibility. + + oneof SizeFlexibility { + + // Use enumeratedSizes for a set of distinct fixed sizes + // e.g. portrait or landscape: [80 x 100, 100 x 8] + // + // If the width x height fields above are specified then they must be + // one of the sizes listed. 
+ // + // If width and height are not specified above then the default width + // and height will be enumeratedSizes[0] + // + // Must be non-empty + + EnumeratedImageSizes enumeratedSizes = 21; + + // Use imageSizeRange to allow for ranges of values + // e.g. any image greater than 10 x 20: [10..= 3 you can specify image size flexibility. + + oneof ShapeFlexibility { + + // Use enumeratedShapes for a set of distinct fixed shapes + // + // If the shape field is specified then it must be + // one of the enumerated shapes. + /// + // If shape is not specifed, the "default" shape will be considered + // enumeratedShapes[0] + // + // Must be non-empty + + EnumeratedShapes enumeratedShapes = 21; + + // Use shapeRange to allow the size of each dimension vary within + // indpendently specified ranges + // + // If you specify shape above it must fall in the range + // specified in shapeRanges. It will be treated as the default shape. + // + // If you don't specify shape above then the default shape will + // have shape[d] = shapeRange.sizeRanges[d].lowerBound + + ShapeRange shapeRange = 31; + + } + + oneof defaultOptionalValue { + int32 intDefaultValue = 41; + float floatDefaultValue = 51; + double doubleDefaultValue = 61; + } + +} + +/** + * The dictionary feature type. + */ +message DictionaryFeatureType { + /** + * Key/value type tags, with the following restrictions: + * - ``keyType`` must be a hashable type + * - ``valueType`` is assumed to be a ``double`` + */ + oneof KeyType { + Int64FeatureType int64KeyType = 1; + StringFeatureType stringKeyType = 2; + } +} + +/** + * The Sequence feature type. + */ +message SequenceFeatureType { + + /** + * Currently only categorical int64 and String sequences are supported + */ + oneof Type { + Int64FeatureType int64Type = 1; + StringFeatureType stringType = 3; + } + + // Range of allowed size/length/count of sequence + SizeRange sizeRange = 101; +} + +/** + * A feature, which may be optional. + */ +message FeatureType { + oneof Type { + Int64FeatureType int64Type = 1; + DoubleFeatureType doubleType = 2; + StringFeatureType stringType = 3; + ImageFeatureType imageType = 4; + ArrayFeatureType multiArrayType = 5; + DictionaryFeatureType dictionaryType = 6; + SequenceFeatureType sequenceType = 7; + } + + bool isOptional = 1000; +} + diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/FeatureVectorizer.proto b/onnxruntime/core/providers/coreml/mlmodel_format/FeatureVectorizer.proto new file mode 100644 index 0000000000000..75eaf14b53669 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/FeatureVectorizer.proto @@ -0,0 +1,26 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** + * A FeatureVectorizer puts one or more features into a single array. + * + * The ordering of features in the output array is determined by + * ``inputList``. + * + * ``inputDimensions`` is a zero based index. 
+ */ +message FeatureVectorizer { + message InputColumn { + string inputColumn = 1; + uint64 inputDimensions = 2; + } + + repeated InputColumn inputList = 1; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/GLMClassifier.proto b/onnxruntime/core/providers/coreml/mlmodel_format/GLMClassifier.proto new file mode 100644 index 0000000000000..47f6f4a3c7b8c --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/GLMClassifier.proto @@ -0,0 +1,43 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/** + * A generalized linear model classifier. + */ +message GLMClassifier { + message DoubleArray { + repeated double value = 1; + } + + enum PostEvaluationTransform { + Logit = 0; + Probit = 1; /// Only binary classification is supported for probit + } + + enum ClassEncoding { + ReferenceClass = 0; /// First class is the reference class + OneVsRest = 1; /// Also called One vs All + } + + repeated DoubleArray weights = 1; + repeated double offset = 2; + PostEvaluationTransform postEvaluationTransform = 3; + ClassEncoding classEncoding = 4; + + /** + * Required class label mapping. + */ + oneof ClassLabels { + StringVector stringClassLabels = 100; + Int64Vector int64ClassLabels = 101; + } +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/GLMRegressor.proto b/onnxruntime/core/providers/coreml/mlmodel_format/GLMRegressor.proto new file mode 100644 index 0000000000000..64093c4f156a8 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/GLMRegressor.proto @@ -0,0 +1,28 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** + * A generalized linear model regressor. + */ +message GLMRegressor { + message DoubleArray { + repeated double value = 1; + } + + enum PostEvaluationTransform { + NoTransform = 0; + Logit = 1; + Probit = 2; + } + + repeated DoubleArray weights = 1; + repeated double offset = 2; + PostEvaluationTransform postEvaluationTransform = 3; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/Gazetteer.proto b/onnxruntime/core/providers/coreml/mlmodel_format/Gazetteer.proto new file mode 100644 index 0000000000000..6abbffaf623b9 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/Gazetteer.proto @@ -0,0 +1,43 @@ +// Copyright (c) 2019, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification.CoreMLModels; + +/** +* A model which uses an efficient probabilistic representation +* for assigning labels to a set of strings. +*/ +message Gazetteer { + + /* + * Stores the revision number for the model, revision 2 is available on + * iOS, tvOS 13.0+, macOS 10.15+ + */ + uint32 revision = 1; + + /* + * Stores the language of the model, as specified in BCP-47 format, + * e.g. "en-US". 
See https://tools.ietf.org/html/bcp47 + */ + string language = 10; + + /* + * Natural Lanaguge framework's efficient representation of a gazetter. + */ + bytes modelParameterData = 100; + + /* + * Stores the set of output class labels + */ + oneof ClassLabels { + StringVector stringClassLabels = 200; + } + +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/Identity.proto b/onnxruntime/core/providers/coreml/mlmodel_format/Identity.proto new file mode 100644 index 0000000000000..123a15e59156d --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/Identity.proto @@ -0,0 +1,18 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** + * An identity model. + * + * This model returns given inputs as outputs, unchanged. + * Intended to be used for testing purposes. + */ +message Identity { +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/Imputer.proto b/onnxruntime/core/providers/coreml/mlmodel_format/Imputer.proto new file mode 100644 index 0000000000000..3de280b2f162d --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/Imputer.proto @@ -0,0 +1,43 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/** + * A transformer that replaces missing values with a default value, + * such as a statistically-derived value. + * + * If ``ReplaceValue`` is set, then missing values of that type are + * replaced with the corresponding value. + * + * For example: if ``replaceDoubleValue`` is set to ``NaN`` + * and a single ``NaN`` double value is provided as input, + * then it is replaced by ``imputedDoubleValue``. However + * if the input is an array of doubles, then any instances + * of ``NaN`` in the array is replaced with the corresponding + * value in ``imputedDoubleArray``. + */ +message Imputer { + oneof ImputedValue { + double imputedDoubleValue = 1; + int64 imputedInt64Value = 2; + string imputedStringValue = 3; + DoubleVector imputedDoubleArray = 4; + Int64Vector imputedInt64Array = 5; + StringToDoubleMap imputedStringDictionary = 6; + Int64ToDoubleMap imputedInt64Dictionary = 7; + } + + oneof ReplaceValue { + double replaceDoubleValue = 11; + int64 replaceInt64Value = 12; + string replaceStringValue = 13; + } +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/ItemSimilarityRecommender.proto b/onnxruntime/core/providers/coreml/mlmodel_format/ItemSimilarityRecommender.proto new file mode 100644 index 0000000000000..a5a8c11092d36 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/ItemSimilarityRecommender.proto @@ -0,0 +1,93 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +/** + * Each tree is a collection of nodes, + * each of which is identified by a unique identifier. + * + * Each node is either a branch or a leaf node. 
+ * A branch node evaluates a value according to a behavior; + * if true, the node identified by ``true_child_node_id`` is evaluated next, + * if false, the node identified by ``false_child_node_id`` is evaluated next. + * A leaf node adds the evaluation value to the base prediction value + * to get the final prediction. + * + * A tree must have exactly one root node, + * which has no parent node. + * A tree must not terminate on a branch node. + * All leaf nodes must be accessible + * by evaluating one or more branch nodes in sequence, + * starting from the root node. + */ + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + + +/** + * Item Similarity Recommender + * + * The Item Similarity recommender takes as input a list of items and scores, + * then uses that information and a table of item similarities to predict similarity + * scores for all items. By default, the items predicted are most similar to the given + * items but not part of that item set. + * + * The predicted score for a given item k is + * sum_(i in observed items) sim_(k,i) * (score_i - shift_k) + * + * Because only the most similar scores for each item i are stored, + * sim_(k,i) is often zero. + * + * For many models, the score adjustment parameter shift_j is zero -- it's occasionally used + * to counteract global biases for popular items. + * + * + * References: + */ +message ItemSimilarityRecommender { + + /** The items similar to a given base item. + */ + message ConnectedItem { + uint64 itemId = 1; + double similarityScore = 2; + } + + /** The formula for the score of a given model as given above, with shift_k + * parameter given by itemScoreAdjustment, and the similar item list filling in + * all the known sim(k,i) scores for i given by itemID and k given by the itemID parameter in + * the similarItemList. + */ + message SimilarItems { + uint64 itemId = 1; + repeated ConnectedItem similarItemList = 2; + double itemScoreAdjustment = 3; + } + + repeated SimilarItems itemItemSimilarities = 1; + + /** One or none of these are given. If none are given, then the items must number 0, 1, ..., num_items - 1. + * If either is given, the length must be exactly num_items. + */ + StringVector itemStringIds = 2; + Int64Vector itemInt64Ids = 3; + + /** Input parameter names specifying different possible inputs to the recommender. + */ + string itemInputFeatureName = 10; /* Required */ + string numRecommendationsInputFeatureName = 11; /* Optional; defaults to all items if not given.*/ + string itemRestrictionInputFeatureName = 12; /* Optional. */ + string itemExclusionInputFeatureName = 13; /* Optional; defaults to input item list if not given. */ + + /** The predicted outputs. At least one of these must be specified. + */ + string recommendedItemListOutputFeatureName = 20; + string recommendedItemScoreOutputFeatureName = 21; + +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/LinkedModel.proto b/onnxruntime/core/providers/coreml/mlmodel_format/LinkedModel.proto new file mode 100644 index 0000000000000..b113000e80a8d --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/LinkedModel.proto @@ -0,0 +1,42 @@ +// Copyright (c) 2019, Apple Inc. All rights reserved. 
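Before moving on to LinkedModel.proto: in standard notation, the Item Similarity Recommender score described above is

\text{score}(k) \;=\; \sum_{i \in O} \text{sim}(k, i)\,\bigl(s_i - \text{shift}_k\bigr),

where O is the set of observed items with scores s_i, \text{sim}(k, i) is the stored similarityScore from the SimilarItems table (zero when no entry is stored), and \text{shift}_k is that item's itemScoreAdjustment.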
+// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; +import public "Parameters.proto"; + +package CoreML.Specification; + +/** + * A model which wraps another (compiled) model external to this one + */ +message LinkedModel { + + oneof LinkType { + // A model located via a file system path + LinkedModelFile linkedModelFile = 1; + } +} + +// Model is referenced by a model file name and search path +message LinkedModelFile { + + // Model file name: e.g. "MyFeatureExtractor.mlmodelc" + StringParameter linkedModelFileName = 1; + + // Search path to find the linked model file + // Multiple paths can be searched using the unix-style path separator ":" + // Each path can be relative (to this model) or absolute + // + // An empty string is the same as the relative search path "." + // which searches in the same location as this model file + // + // There are some special paths which start with $ + // - $BUNDLE_MAIN - Indicates to look in the main bundle + // - $BUNDLE_IDENTIFIER(identifier) - Looks in the bundle with the given identifier + StringParameter linkedModelSearchPath = 2; +} + + diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/Model.proto b/onnxruntime/core/providers/coreml/mlmodel_format/Model.proto new file mode 100644 index 0000000000000..737233f2e3fe7 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/Model.proto @@ -0,0 +1,322 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +/** + * A Core ML model consists of a specification version + * and a model description, + * and can be any one of the following types: + * + * Neural Networks + * - `NeuralNetwork` + * + * Regressors + * - ``GLMRegressor`` + * - ``SupportVectorRegressor`` + * - ``TreeEnsembleRegressor`` + * - ``NeuralNetworkRegressor`` + * - ``BayesianProbitRegressor`` + * + * Classifiers + * - `NeuralNetworkClassifier` + * - `TreeEnsembleClassifier` + * - `GLMClassifier` + * - `SupportVectorClassifier` + * - `KNearestNeighborsClassifier` + * + * Other models + * - `CustomModel` + * - `TextClassifier` + * - `WordTagger` + * - `Gazetteer` + * - `WordEmbedding` + * - `VisionFeaturePrint` + * - `LinkedModel` + * - `SoundAnalysisPreprocessing` + * - `ItemSimilarityRecommender` + * + * Feature Engineering + * - `Imputer` + * - `Scaler` + * - `Normalizer` + * - `OneHotEncoder` + * - `CategoricalMapping` + * - `FeatureVectorizer` + * - `DictVectorizer` + * - `ArrayFeatureExtractor` + * - `NonMaximumSuppression` + * + * Pipelines + * - `PipelineClassifier` + * - `PipelineRegressor` + * - `Pipeline` + * + * Simple Mathematical Functions + * - `Identity` + */ + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "VisionFeaturePrint.proto"; +import public "TextClassifier.proto"; +import public "WordTagger.proto"; +import public "Gazetteer.proto"; +import public "WordEmbedding.proto"; +import public "ArrayFeatureExtractor.proto"; +import public "BayesianProbitRegressor.proto"; +import public "CategoricalMapping.proto"; +import public "CustomModel.proto"; +import public "DictVectorizer.proto"; +import public "FeatureTypes.proto"; +import public "FeatureVectorizer.proto"; +import public "GLMRegressor.proto"; +import public "GLMClassifier.proto"; +import public
"NearestNeighbors.proto"; +import public "Identity.proto"; +import public "Imputer.proto"; +import public "NeuralNetwork.proto"; +import public "Normalizer.proto"; +import public "OneHotEncoder.proto"; +import public "Scaler.proto"; +import public "NonMaximumSuppression.proto"; +import public "SVM.proto"; +import public "TreeEnsemble.proto"; +import public "Parameters.proto"; +import public "ItemSimilarityRecommender.proto"; +import public "SoundAnalysisPreprocessing.proto"; +import public "LinkedModel.proto"; + +package CoreML.Specification; + +/** + * A pipeline consisting of one or more models. + */ +message Pipeline { + repeated Model models = 1; + + // Optional names given for each model + // If not supplied it defaults to ["model0",..., "model"(models.size()-1)] + // These names can be used to disambiguate the scope / domain of a parameter + repeated string names = 2; +} + +/** + * A classifier pipeline. + */ +message PipelineClassifier { + Pipeline pipeline = 1; +} + +/** + * A regressor pipeline. + */ +message PipelineRegressor { + Pipeline pipeline = 1; +} + +/** + * A feature description, + * consisting of a name, short description, and type. + */ +message FeatureDescription { + string name = 1; + string shortDescription = 2; + FeatureType type = 3; +} + +/** + * Model metadata, + * consisting of a short description, a version string, + * an author, a license, and any other user defined + * key/value meta data. + */ +message Metadata { + string shortDescription = 1; + string versionString = 2; + string author = 3; + string license = 4; + map<string, string> userDefined = 100; +} + +/** + * A description of a model, + * consisting of descriptions of its input and output features. + * Both regressor and classifier models require the name of the + * primary predicted output feature (``predictedFeatureName``). + * Classifier models can specify the output feature containing + * probabilities for the predicted classes + * (``predictedProbabilitiesName``). + */ +message ModelDescription { + repeated FeatureDescription input = 1; + repeated FeatureDescription output = 10; + + // [Required for regressor and classifier models]: the name + // to give to an output feature containing the prediction. + string predictedFeatureName = 11; + + // [Optional for classifier models]: the name to give to an + // output feature containing a dictionary mapping class + // labels to their predicted probabilities. If not specified, + // the dictionary will not be returned by the model. + string predictedProbabilitiesName = 12; + + repeated FeatureDescription trainingInput = 50; + + Metadata metadata = 100; +} + +message SerializedModel { + // Identifier whose content describes the model type of the serialized protocol buffer message. + string identifier = 1; + + // Must be a valid serialized protocol buffer of the above specified type. + bytes model = 2; +} + +/** + * A Core ML model, + * consisting of a specification version, + * a model description, and a model type. + * + * Core ML model compatibility is indicated by + * a monotonically increasing specification version number, + * which is incremented anytime a backward-incompatible change is made + * (this is functionally equivalent to the MAJOR version number + * described by `Semantic Versioning 2.0.0 <https://semver.org/>`_).
+ * + * Specification Versions : OS Availability (Core ML Version) + * + * 1 : iOS 11, macOS 10.13, tvOS 11, watchOS 4 (Core ML 1) + * - Feedforward & Recurrent Neural Networks + * - General Linear Models + * - Tree Ensembles + * - Support Vector Machines + * - Pipelines + * - Feature Engineering + * + * 2 : iOS 11.2, macOS 10.13.2, tvOS 11.2, watchOS 4.2 (Core ML 1.2) + * - Custom Layers for Neural Networks + * - Float 16 support for Neural Network layers + * + * 3 : iOS 12, macOS 10.14, tvOS 12, watchOS 5 (Core ML 2) + * - Flexible shapes and image sizes + * - Categorical sequences + * - Core ML Vision Feature Print, Text Classifier, Word Tagger + * - Non Max Suppression + * - Crop and Resize Bilinear NN layers + * - Custom Models + * + * 4 : iOS 13, macOS 10.15, tvOS 13, watchOS 6 (Core ML 3) + * - Updatable models + * - Exact shape / general rank mapping for neural networks + * - Large expansion of supported neural network layers + * - Generalized operations + * - Control flow + * - Dynamic layers + * - See NeuralNetwork.proto + * - Nearest Neighbor Classifier + * - Sound Analysis Prepreocessing + * - Recommender + * - Linked Model + * - NLP Gazeteer + * - NLP WordEmbedding + * + * 5 : iOS 14, macOS 11, tvOS 14, watchOS 7 (Core ML 4) + * - Model Deployment + * - Model Encryption + * - Unified converter API with PyTorch and Tensorflow 2 Support in coremltools 4 + * - MIL builder for neural networks and composite ops in coremltools 4 + * - New layers in neural network: + * - CumSum + * - OneHot + * - ClampedReLu + * - ArgSort + * - SliceBySize + * - Convolution3D + * - Pool3D + * - Bilinear Upsample with align corners and fractional factors + * - PixelShuffle + * - MatMul with int8 weights and int8 activations + * - Concat interleave + * - See NeuralNetwork.proto + * - Enhanced Xcode model view with interactive previews + * - Enhanced Xcode Playground support for Core ML models + * + */ +message Model { + int32 specificationVersion = 1; + ModelDescription description = 2; + + /* + * Following model types support on-device update: + * + * - NeuralNetworkClassifier + * - NeuralNetworkRegressor + * - NeuralNetwork + * - KNearestNeighborsClassifier + */ + bool isUpdatable = 10; + + // start at 200 here + // model specific parameters: + oneof Type { + // pipeline starts at 200 + PipelineClassifier pipelineClassifier = 200; + PipelineRegressor pipelineRegressor = 201; + Pipeline pipeline = 202; + + // regressors start at 300 + GLMRegressor glmRegressor = 300; + SupportVectorRegressor supportVectorRegressor = 301; + TreeEnsembleRegressor treeEnsembleRegressor = 302; + NeuralNetworkRegressor neuralNetworkRegressor = 303; + BayesianProbitRegressor bayesianProbitRegressor = 304; + + // classifiers start at 400 + GLMClassifier glmClassifier = 400; + SupportVectorClassifier supportVectorClassifier = 401; + TreeEnsembleClassifier treeEnsembleClassifier = 402; + NeuralNetworkClassifier neuralNetworkClassifier = 403; + KNearestNeighborsClassifier kNearestNeighborsClassifier = 404; + + // generic models start at 500 + NeuralNetwork neuralNetwork = 500; + ItemSimilarityRecommender itemSimilarityRecommender = 501; + + // Custom and linked models + CustomModel customModel = 555; + LinkedModel linkedModel = 556; + + // feature engineering starts at 600 + OneHotEncoder oneHotEncoder = 600; + Imputer imputer = 601; + FeatureVectorizer featureVectorizer = 602; + DictVectorizer dictVectorizer = 603; + Scaler scaler = 604; + CategoricalMapping categoricalMapping = 606; + Normalizer normalizer = 607; + 
ArrayFeatureExtractor arrayFeatureExtractor = 609; + NonMaximumSuppression nonMaximumSuppression = 610; + + + // simple mathematical functions used for testing start at 900 + Identity identity = 900; + + // reserved until 1000 + + // CoreML provided models + CoreMLModels.TextClassifier textClassifier = 2000; + CoreMLModels.WordTagger wordTagger = 2001; + CoreMLModels.VisionFeaturePrint visionFeaturePrint = 2002; + CoreMLModels.SoundAnalysisPreprocessing soundAnalysisPreprocessing = 2003; + CoreMLModels.Gazetteer gazetteer = 2004; + CoreMLModels.WordEmbedding wordEmbedding = 2005; + + // Reserved private messages start at 3000 + // These messages are subject to change with no notice or support. + SerializedModel serializedModel = 3000; + } +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/NearestNeighbors.proto b/onnxruntime/core/providers/coreml/mlmodel_format/NearestNeighbors.proto new file mode 100644 index 0000000000000..82acd8490374d --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/NearestNeighbors.proto @@ -0,0 +1,132 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +import public "DataStructures.proto"; +import public "Parameters.proto"; + +/** + * A k-Nearest-Neighbor classifier + */ +message KNearestNeighborsClassifier { + + /** + * The "core" nearest neighbor model attributes. + */ + NearestNeighborsIndex nearestNeighborsIndex = 1; + + /** + * Number of neighbors to use for classification. + */ + Int64Parameter numberOfNeighbors = 3; + + /** + * Type of labels supported by the model. Currently supports String or Int64 + * labels. + */ + oneof ClassLabels { + StringVector stringClassLabels = 100; + Int64Vector int64ClassLabels = 101; + } + + /** + * Default value of class label (useful when prediction is called on an empty kNN classifier) + */ + oneof DefaultClassLabel { + string defaultStringLabel = 110; + int64 defaultInt64Label = 111; + } + + /** + * Weighting scheme to be used when computing the majority label of a + * new data point. + */ + oneof WeightingScheme { + UniformWeighting uniformWeighting = 200; + InverseDistanceWeighting inverseDistanceWeighting = 210; + } +} + +/** + * The "core" attributes of a Nearest Neighbors model. + */ +message NearestNeighborsIndex { + + /** + * Number of dimensions of the input data. + */ + int32 numberOfDimensions = 1; + + /** + * Vector of floating point data that makes up the model. Each data point must have 'numberOfDimensions' + * dimensions. + */ + repeated FloatVector floatSamples = 2; + + /** + * Backing data structure for the Nearest Neighbors Index. Currently supports + * a linear index or a kd-tree index. + */ + oneof IndexType { + LinearIndex linearIndex = 100; + SingleKdTreeIndex singleKdTreeIndex = 110; + } + + /** + * Distance function to be used to find neighbors. Currently only Squared Euclidean + * Distance is supported. + */ + oneof DistanceFunction { + SquaredEuclideanDistance squaredEuclideanDistance = 200; + } + +} + +/** + * Specifies a uniform weighting scheme (i.e. each neighbor receives equal + * voting power). + */ +message UniformWeighting { +} + + +/** + * Specifies a inverse-distance weighting scheme (i.e. closest neighbors receives higher + * voting power). 
A nearest neighbor with highest sum of (1 / distance) is picked. + */ +message InverseDistanceWeighting { +} + + +/** + * Specifies a flat index of data points to be searched by brute force. + */ +message LinearIndex { +} + + +/** + * Specifies a kd-tree backend for the nearest neighbors model. + */ +message SingleKdTreeIndex { + + /** + * Number of data points contained within a leaf node of the kd-tree. + */ + int32 leafSize = 1; + +} + + +/** + * Specifies the Squared Euclidean Distance function. + */ +message SquaredEuclideanDistance { +} + diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/NeuralNetwork.proto b/onnxruntime/core/providers/coreml/mlmodel_format/NeuralNetwork.proto new file mode 100644 index 0000000000000..44a77c6e7f5f1 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/NeuralNetwork.proto @@ -0,0 +1,6531 @@ +// Copyright (c) 2017-2019, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +/** + * A neural network is defined through a collection of layers + * and represents a directed acyclic graph (DAG). + * Each layer has a name, a layer type, + * a list of input names, a list of output names, + * and a collection of parameters specific to the layer type. + * + * The graph structure and connectivity of the neural network + * is inferred from the input and output names. + * A neural network starts with the layer + * whose input name is equal to the value specified in + * ``Model.description.input.name``, + * and ends with the layer + * whose output name is equal to the value specified in + * ``Model.description.output.name``. + * Layers must have unique input and output names, + * and a layer may not have input or output names that + * refer to layers that are not yet defined. + * + * For Core ML specification version <=3, + * all inputs are mapped to static rank 5 tensors, with axis notations + * [Sequence, Batch, Channel, Height, Width]. + * + * From specification version 4 onwards (iOS >= 13, macOS >= 10.15), more options are available + * (see enums ``NeuralNetworkMultiArrayShapeMapping``, ``NeuralNetworkImageShapeMapping``) + * to map inputs to generic N-Dimensional (or N rank) tensors, where N >= 1. + * + * Each layer type may have specific constraints on the ranks of its inputs and outputs. + * + * Some of the layers (such as softmax, reduce, etc) have parameters that have been described in + * terms of notational axis "Channel", "Height", "Width" or "Sequence". They can be re-interpreted easily in + * the general ND setting by using the following rule: + * "width" is same as axis = -1 (i.e. the last axis from the end) + * "height" is same as axis = -2 (i.e. the second last axis from the end) + * "channel" is same as axis = -3 (i.e. the third last axis from the end) + * "sequence" is same as axis = -5 (i.e. the fifth last axis from the end) + * + * Several layers are available in 3 different variations, with the names ending + * in identifiers: ``like``, ``static`` and ``dynamic``. For instance, ``FillLike``, + * ``FillStatic`` and ``FillDynamic``. The ``static`` variation generally will have + * a property corresponding to the shape of the output. For instance, if the + * output of the ``FillStatic`` layer is desired to be of shape (10, 4), the + * property ``targetShape`` will have to be set to [10, 4]. In the ``dynamic`` case, + * the shape is an input, hence it can be changed at runtime. 
For instance, for + * a ``FillDynamic`` layer, the input would have to be an array containing the + * values 10 and 4, if the desired output is of shape (10, 4). Whereas in the + * ``like`` case, the additional input's shape is used as the output shape, ignoring + * its values. For instance, for a ``FillLike`` layer, for an input with shape + * (10, 4), the output generated will also be of shape (10, 4), values of the + * input will be ignored. + */ + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; +import public "Parameters.proto"; + +package CoreML.Specification; + + +enum NeuralNetworkMultiArrayShapeMapping { + + /* + * Describes how the MultiArray shape for the inputs, + * provided in Features Types proto via model description, + * is mapped to construct tensors that are fed into the Neural Network layers. + */ + + /* + * Default legacy value. Only supported for Core ML Specification version <= 3. + * + * The default legacy shape mapping resolves all input shapes to a rank 5 equivalent + * with axis notation of [Seq, Batch, Channel, Height, Width]. + * + * When this enum value is selected, + * the repeated shape field in the message "ArrayFeatureType" in feature types proto, + * must be either length 1 or length 3. + * + * The following rule is used to map the values in the shape field to the actual tensor shape: + * rank 1 shape is mapped to shape [1,1,C,1,1] + * rank 3 shape is mapped to shape [1,1,C,H,W] + * At runtime, the first two dimensions (Seq or Batch) can be presented as well, with non-1 values. + * + * It is invalid to use this enum value if any of the layers added + * Specification version 4 (iOS >= 13, macOS >= 10.15) onwards are used in the network. + * Validator will raise an error in that case. + */ + RANK5_ARRAY_MAPPING = 0; + + /* + * The exact shape and rank (i.e. number of dimensions in the shape) of the input, + * as specified in the message "ArrayFeatureType", is passed through to the layers. + * Supported only for Specification version >= 4 (iOS >= 13, macOS >= 10.15). + */ + EXACT_ARRAY_MAPPING = 1; + +} + +enum NeuralNetworkImageShapeMapping { + + /* + * Describes how the shape of the input tensors is constructed from image inputs. + */ + + /* + * In this case, image input is mapped to a rank 5 tensor. + * For Color images, input tensor is shaped as [1,1,3,H,W]. + * For Gray images, input tensor is shaped as [1,1,1,H,W]. + */ + RANK5_IMAGE_MAPPING = 0; + + /* + * For Color images, input tensor is shaped as [1,3,H,W]. + * For Gray images, input tensor is shaped as [1,1,H,W]. + * Supported only for Specification version >= 4 (iOS >= 13, macOS >= 10.15). + */ + RANK4_IMAGE_MAPPING = 1; + +} + +/** + A neural network. + */ +message NeuralNetwork { + + repeated NeuralNetworkLayer layers = 1; + repeated NeuralNetworkPreprocessing preprocessing = 2; + + // use this enum value to determine the input tensor shapes to the neural network, for multiarray inputs + NeuralNetworkMultiArrayShapeMapping arrayInputShapeMapping = 5; + + // use this enum value to determine the input tensor shapes to the neural network, for image inputs + NeuralNetworkImageShapeMapping imageInputShapeMapping = 6; + + + NetworkUpdateParameters updateParams = 10; + +} + +/// Preprocessing +/// ------------- + +/** + * A neural network preprocessor that + * performs a scalar multiplication of an image + * followed by addition of scalar biases to the channels. 
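To make the container messages above concrete (``Model`` and ``ModelDescription`` from Model.proto, plus the ``NeuralNetwork`` message and its shape-mapping enums), here is a minimal Python sketch. It assumes these .proto files have been compiled with protoc's Python plugin into a package named ``coreml_proto``; that package name, and every feature/blob name used below, are illustrative placeholders rather than anything defined by this change.

    from coreml_proto import Model_pb2, NeuralNetwork_pb2  # assumed generated-module names

    model = Model_pb2.Model()
    model.specificationVersion = 4                 # EXACT/RANK4 mappings require spec version >= 4

    desc = model.description                       # ModelDescription
    x = desc.input.add()                           # repeated FeatureDescription
    x.name = "image"
    y = desc.output.add()
    y.name = "scores"
    desc.predictedFeatureName = "scores"
    desc.metadata.shortDescription = "toy network"
    desc.metadata.userDefined["origin"] = "sketch" # map<string, string>

    nn = model.neuralNetwork                       # writing here selects NeuralNetwork in the Type oneof
    nn.arrayInputShapeMapping = NeuralNetwork_pb2.EXACT_ARRAY_MAPPING
    nn.imageInputShapeMapping = NeuralNetwork_pb2.RANK4_IMAGE_MAPPING

    layer = nn.layers.add()                        # repeated NeuralNetworkLayer (defined further below)
    layer.name = "relu_1"
    layer.input.append("image")
    layer.output.append("scores")
    layer.activation.ReLU.SetInParent()            # pick ActivationReLU in the activation oneof

    payload = model.SerializeToString()            # the bytes an .mlmodel file stores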
+ * + * Input: X + * An image in BGR or RGB format with shape ``[3, H, W]`` + * or in grayscale format with shape ``[1, H, W]``. + * Output: Y + * An image with format and shape corresponding to the input. + * + * If the input image is in BGR format: + * + * .. code:: + * + * Y[0, :, :] = channelScale * X[0, :, :] + blueBias + * Y[1, :, :] = channelScale * X[1, :, :] + greenBias + * Y[2, :, :] = channelScale * X[2, :, :] + redBias + * + * If the input image is in RGB format: + * + * .. code:: + * + * Y[0, :, :] = channelScale * X[0, :, :] + redBias + * Y[1, :, :] = channelScale * X[1, :, :] + greenBias + * Y[2, :, :] = channelScale * X[2, :, :] + blueBias + * + * If the input image is in grayscale format: + * + * .. code:: + * + * Y[0, :, :] = channelScale * X[0, :, :] + grayBias + */ +message NeuralNetworkImageScaler { + + float channelScale = 10; ///Scalar to be multiplied. + float blueBias = 20; ///Scalar blue bias to be added. + float greenBias = 21; ///Scalar green bias to be added. + float redBias = 22; ///Scalar red bias to be added. + float grayBias = 30; ///Scalar bias to be added for grayscale images. + +} + +/** + * A neural network preprocessor that + * subtracts the provided mean image from the input image. + * The mean image is subtracted from the input named + * ``NeuralNetworkPreprocessing.featureName``. + */ +message NeuralNetworkMeanImage { + + /** + * Mean image stored as a flattened array of floats, + * representing shape [Channel,Height,Width]. + */ + repeated float meanImage = 1; + +} + +/// Preprocessing parameters for image inputs. +message NeuralNetworkPreprocessing { + + string featureName = 1; /// must be equal to the input name to which the preprocessing is applied + oneof preprocessor { + NeuralNetworkImageScaler scaler = 10; + NeuralNetworkMeanImage meanImage = 11; + } + +} + +/// Activation Functions +/// -------------------- + +/** + * A rectified linear unit (ReLU) activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \text{max}(0, x) + */ +message ActivationReLU { + +} + +/** + * A leaky rectified linear unit (ReLU) activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \begin{cases} + * x & \text{if } x \geq 0 \\ + * \alpha x & \text{if } x < 0 + * \end{cases} + */ +message ActivationLeakyReLU { + + float alpha = 1; //negative slope value for leakyReLU + +} + +/** + * A hyperbolic tangent activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \dfrac{1 - e^{-2x}}{1 + e^{-2x}} + */ +message ActivationTanh { + +} + +/** + * A scaled hyperbolic tangent activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \alpha \tanh(\beta x) + */ +message ActivationScaledTanh { + + float alpha = 1; + float beta = 2; + +} + +/** + * A sigmoid activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \dfrac{1}{1 + e^{-x}} + */ +message ActivationSigmoid { + +} + +/** + * A linear activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \alpha x + \beta + */ +message ActivationLinear { + + float alpha = 1; + float beta = 2; + +} + +/** + * A hard sigmoid activation function. + * + * This function has the following formula: + * + * .. 
math:: + * f(x) = \text{min}(\text{max}(\alpha x + \beta, 0), 1) + */ +message ActivationSigmoidHard { + + float alpha = 1; + float beta = 2; + +} + +/** + * A parameterized rectified linear unit (PReLU) activation function. + * Input must be at least rank 3. Axis = -3 is denoted by "C", or channels. + * "alpha" parameter can be a vector of length C. + * + * This function has the following formula: + * + * .. math:: + * f(x_i) = \begin{cases} + * x_i & \text{if } x_i \geq 0 \\ + * \alpha_i x_i & \text{if } x_i < 0 + * \end{cases} \;,\;i=1,...,C + */ +message ActivationPReLU { + + // parameter of length C or 1. + // If length is 1, same value is used for all channels + WeightParams alpha = 1; + +} + +/** + * An exponential linear unit (ELU) activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \begin{cases} + * x & \text{if } x \geq 0 \\ + * \alpha (e^x - 1) & \text{if } x < 0 + * \end{cases} + */ +message ActivationELU { + + float alpha = 1; + +} + +/** + * A thresholded rectified linear unit (ReLU) activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \begin{cases} + * x & \text{if } x \geq \alpha \\ + * 0 & \text{if } x < \alpha + * \end{cases} + */ +message ActivationThresholdedReLU { + + float alpha = 1; + +} + +/** + * A softsign activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \dfrac{x}{1 + |x|} + */ +message ActivationSoftsign { + +} + +/** + * A softplus activation function. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \text{log}(1 + e^x) + */ +message ActivationSoftplus { + +} + +/** + * A parametric softplus activation function. + * Input must be at least rank 3. axis = -3 is denoted by "C", or channels. + * "alpha"/"beta" parameter can be a vector of length C. + * + * This function has the following formula: + * + * .. math:: + * f(x_i) = \alpha_i \text{log}(1 + e^{\beta_i x_i}) \;,\;i=1,...,C + */ +message ActivationParametricSoftplus { + + // If length is 1, same value is used for all channels + WeightParams alpha = 1; //parameter of length C or 1 + WeightParams beta = 2; //parameter of length C or 1 + +} + +message ActivationParams { + + oneof NonlinearityType { + ActivationLinear linear = 5; + + ActivationReLU ReLU = 10; + ActivationLeakyReLU leakyReLU = 15; + ActivationThresholdedReLU thresholdedReLU = 20; + ActivationPReLU PReLU = 25; + + ActivationTanh tanh = 30; + ActivationScaledTanh scaledTanh = 31; + + ActivationSigmoid sigmoid = 40; + ActivationSigmoidHard sigmoidHard = 41; + + ActivationELU ELU = 50; + + ActivationSoftsign softsign = 60; + ActivationSoftplus softplus = 70; + ActivationParametricSoftplus parametricSoftplus = 71; + } + +} + +/** + * Representation of the intermediate tensors + */ +message Tensor { + + // Number of dimensions in the tensor shape + uint32 rank = 1; + // actual value of the tensor shape. + // must be of length "rank". Can contain -1s for unknown dimensions. + repeated int64 dimValue = 2; + +} + +/** + * A single neural network layer. + */ +message NeuralNetworkLayer { + + string name = 1; //descriptive name of the layer + repeated string input = 2; + repeated string output = 3; + + repeated Tensor inputTensor = 4; // must be the same length as the "input" field + repeated Tensor outputTensor = 5; // must be the same length as the "output" field + + // Must be set to true to mark the layer as updatable. 
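Continuing in the same vein, a parameterized member of the ``ActivationParams`` oneof above is selected simply by writing to it; the sketch below (same illustrative ``coreml_proto`` package as earlier, with a made-up channel count) shows a leaky ReLU and, commented out, a per-channel PReLU.

    from coreml_proto import NeuralNetwork_pb2     # assumed generated-module name

    layer = NeuralNetwork_pb2.NeuralNetworkLayer()
    layer.name = "act_1"
    layer.input.append("conv_1_out")
    layer.output.append("act_1_out")

    # Leaky ReLU: f(x) = x for x >= 0, alpha * x otherwise
    layer.activation.leakyReLU.alpha = 0.2

    # A PReLU instead carries one alpha per channel C inside WeightParams
    # (C = 16 here is only an example value):
    # layer.activation.PReLU.alpha.floatValue.extend([0.25] * 16)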
+ // If true, the weightParams in the layer's properties must also be set to updatable + // If false, the value of the isUpdatable parameter within the layer's weights are ignored + bool isUpdatable = 10; + + oneof layer { + + // Start at 100 here + ConvolutionLayerParams convolution = 100; + + PoolingLayerParams pooling = 120; + + ActivationParams activation = 130; + + InnerProductLayerParams innerProduct = 140; + EmbeddingLayerParams embedding = 150; + + // Normalization-related Layers + BatchnormLayerParams batchnorm = 160; + MeanVarianceNormalizeLayerParams mvn = 165; + L2NormalizeLayerParams l2normalize = 170; + SoftmaxLayerParams softmax = 175; + LRNLayerParams lrn = 180; + + CropLayerParams crop = 190; + PaddingLayerParams padding = 200; + UpsampleLayerParams upsample = 210; + + ResizeBilinearLayerParams resizeBilinear = 211; + CropResizeLayerParams cropResize = 212; + + UnaryFunctionLayerParams unary = 220; + + // Element-wise Operations + AddLayerParams add = 230; + MultiplyLayerParams multiply = 231; + + AverageLayerParams average = 240; + ScaleLayerParams scale = 245; + + BiasLayerParams bias = 250; + MaxLayerParams max = 260; + MinLayerParams min = 261; + + DotProductLayerParams dot = 270; + ReduceLayerParams reduce = 280; + LoadConstantLayerParams loadConstant = 290; + + // Data Reorganization + ReshapeLayerParams reshape = 300; + FlattenLayerParams flatten = 301; + PermuteLayerParams permute = 310; + ConcatLayerParams concat = 320; + SplitLayerParams split = 330; + SequenceRepeatLayerParams sequenceRepeat = 340; + + ReorganizeDataLayerParams reorganizeData = 345; + SliceLayerParams slice = 350; + + // Recurrent Layers + SimpleRecurrentLayerParams simpleRecurrent = 400; + GRULayerParams gru = 410; + UniDirectionalLSTMLayerParams uniDirectionalLSTM = 420; + BiDirectionalLSTMLayerParams biDirectionalLSTM = 430; + + // Custom (user-implemented) Layer + CustomLayerParams custom = 500; + + // Following layers are available only after Core ML Specification + // version >= 4 (iOS >= 13, macOS >= 10.15) + + // Control Flow related Layers + CopyLayerParams copy = 600; + BranchLayerParams branch = 605; + + LoopLayerParams loop = 615; + LoopBreakLayerParams loopBreak = 620; + LoopContinueLayerParams loopContinue = 625; + + RangeStaticLayerParams rangeStatic = 635; + RangeDynamicLayerParams rangeDynamic = 640; + + // Element-wise Unary Layers + ClipLayerParams clip = 660; + CeilLayerParams ceil = 665; + FloorLayerParams floor = 670; + + SignLayerParams sign = 680; + RoundLayerParams round = 685; + + Exp2LayerParams exp2 = 700; + + SinLayerParams sin = 710; + CosLayerParams cos = 715; + TanLayerParams tan = 720; + + AsinLayerParams asin = 730; + AcosLayerParams acos = 735; + AtanLayerParams atan = 740; + + SinhLayerParams sinh = 750; + CoshLayerParams cosh = 755; + TanhLayerParams tanh = 760; + + AsinhLayerParams asinh = 770; + AcoshLayerParams acosh = 775; + AtanhLayerParams atanh = 780; + + ErfLayerParams erf = 790; + GeluLayerParams gelu = 795; + + // Element-wise Binary with Broadcasting Support + EqualLayerParams equal = 815; + NotEqualLayerParams notEqual = 820; + LessThanLayerParams lessThan = 825; + LessEqualLayerParams lessEqual = 827; + GreaterThanLayerParams greaterThan = 830; + GreaterEqualLayerParams greaterEqual = 832; + + LogicalOrLayerParams logicalOr = 840; + LogicalXorLayerParams logicalXor = 845; + LogicalNotLayerParams logicalNot = 850; + LogicalAndLayerParams logicalAnd = 855; + + ModBroadcastableLayerParams modBroadcastable = 865; + MinBroadcastableLayerParams 
minBroadcastable = 870; + MaxBroadcastableLayerParams maxBroadcastable = 875; + AddBroadcastableLayerParams addBroadcastable = 880; + PowBroadcastableLayerParams powBroadcastable = 885; + DivideBroadcastableLayerParams divideBroadcastable = 890; + FloorDivBroadcastableLayerParams floorDivBroadcastable = 895; + MultiplyBroadcastableLayerParams multiplyBroadcastable = 900; + SubtractBroadcastableLayerParams subtractBroadcastable = 905; + + // Tensor Manipulations + TileLayerParams tile = 920; + StackLayerParams stack = 925; + GatherLayerParams gather = 930; + ScatterLayerParams scatter = 935; + GatherNDLayerParams gatherND = 940; + ScatterNDLayerParams scatterND = 945; + SoftmaxNDLayerParams softmaxND = 950; + GatherAlongAxisLayerParams gatherAlongAxis = 952; + ScatterAlongAxisLayerParams scatterAlongAxis = 954; + + ReverseLayerParams reverse = 960; + ReverseSeqLayerParams reverseSeq = 965; + + SplitNDLayerParams splitND = 975; + ConcatNDLayerParams concatND = 980; + TransposeLayerParams transpose = 985; + + SliceStaticLayerParams sliceStatic = 995; + SliceDynamicLayerParams sliceDynamic = 1000; + SlidingWindowsLayerParams slidingWindows = 1005; + + TopKLayerParams topK = 1015; + ArgMinLayerParams argMin = 1020; + ArgMaxLayerParams argMax = 1025; + + EmbeddingNDLayerParams embeddingND = 1040; + BatchedMatMulLayerParams batchedMatmul = 1045; + + // Tensor Allocation / Reshape-related Operations + GetShapeLayerParams getShape = 1065; + LoadConstantNDLayerParams loadConstantND = 1070; + + FillLikeLayerParams fillLike = 1080; + FillStaticLayerParams fillStatic = 1085; + FillDynamicLayerParams fillDynamic = 1090; + + BroadcastToLikeLayerParams broadcastToLike = 1100; + BroadcastToStaticLayerParams broadcastToStatic = 1105; + BroadcastToDynamicLayerParams broadcastToDynamic = 1110; + + SqueezeLayerParams squeeze = 1120; + ExpandDimsLayerParams expandDims = 1125; + FlattenTo2DLayerParams flattenTo2D = 1130; + ReshapeLikeLayerParams reshapeLike = 1135; + ReshapeStaticLayerParams reshapeStatic = 1140; + ReshapeDynamicLayerParams reshapeDynamic = 1145; + RankPreservingReshapeLayerParams rankPreservingReshape = 1150; + + ConstantPaddingLayerParams constantPad = 1155; + + // Random Distributions + RandomNormalLikeLayerParams randomNormalLike = 1170; + RandomNormalStaticLayerParams randomNormalStatic = 1175; + RandomNormalDynamicLayerParams randomNormalDynamic = 1180; + + RandomUniformLikeLayerParams randomUniformLike = 1190; + RandomUniformStaticLayerParams randomUniformStatic = 1195; + RandomUniformDynamicLayerParams randomUniformDynamic = 1200; + + RandomBernoulliLikeLayerParams randomBernoulliLike = 1210; + RandomBernoulliStaticLayerParams randomBernoulliStatic = 1215; + RandomBernoulliDynamicLayerParams randomBernoulliDynamic = 1220; + + CategoricalDistributionLayerParams categoricalDistribution = 1230; + + // Reduction-related Layers: + ReduceL1LayerParams reduceL1 = 1250; + ReduceL2LayerParams reduceL2 = 1255; + ReduceMaxLayerParams reduceMax = 1260; + ReduceMinLayerParams reduceMin = 1265; + ReduceSumLayerParams reduceSum = 1270; + ReduceProdLayerParams reduceProd = 1275; + ReduceMeanLayerParams reduceMean = 1280; + ReduceLogSumLayerParams reduceLogSum = 1285; + ReduceSumSquareLayerParams reduceSumSquare = 1290; + ReduceLogSumExpLayerParams reduceLogSumExp = 1295; + + // Masking / Selection Layers + WhereNonZeroLayerParams whereNonZero = 1313; + MatrixBandPartLayerParams matrixBandPart = 1315; + LowerTriangularLayerParams lowerTriangular = 1320; + UpperTriangularLayerParams upperTriangular = 
1325; + WhereBroadcastableLayerParams whereBroadcastable = 1330; + + // Normalization Layers + LayerNormalizationLayerParams layerNormalization = 1350; + + NonMaximumSuppressionLayerParams NonMaximumSuppression = 1400; + + // Following layers are available only after Core ML Specification + // version >= 5 (iOS >= 14, macOS >= 11.0) + OneHotLayerParams oneHot = 1450; + CumSumLayerParams cumSum = 1455; + ClampedReLULayerParams clampedReLU = 1460; + ArgSortLayerParams argSort = 1461; + Pooling3DLayerParams pooling3d = 1465; + GlobalPooling3DLayerParams globalPooling3d = 1466; + SliceBySizeLayerParams sliceBySize = 1470; + Convolution3DLayerParams convolution3d = 1471; + + } + +} + +/** + * Branching Layer + * + * A layer that provides the functionality of branching or an If-Else block. + * + * Must have 1 input. There are no outputs as the execution is transferred to either the + * if or the else branch based on the value of the input. + * + * Input is the condition predicate. Must be a scalar (length 1 tensor). + * + */ +message BranchLayerParams { + + /** + * execute this graph if the absolute value of the input Tensor is greater than 1e-6 + * This must be present. + */ + NeuralNetwork ifBranch = 1; + /** + * execute this graph if the absolute value of the input Tensor is less than 1e-6 + * This is optional. + */ + NeuralNetwork elseBranch = 2; + +} + +/** + * Loop Layer + * + * A layer that provides the functionality of a "for" loop or a "while" loop. + * + * There are either no inputs or 1 input. When an input is present, it corresponds to the maximum loop count, + * in that case the value of the "maxLoopIterations" field is ignored. Input must be a scalar. + * (For description below, maxLoopIterations is assumed to be the value of the input, when its present) + * + * No outputs are produced. Blobs produced by the condition or the body network are visible in the scope of the overall network. + * + * "conditionNetwork" must produce a tensor with the name specified in the "conditionVar" field. + * + * There are 3 possible cases for determining the termination condition: + * + * Case 1: + * + * If there is no "conditionNetwork", in this case the layer corresponds to a pure for loop, which is run "maxLoopIterations" number of times. + * Equivalent pseudo-code: + * + * for loopIterator = 0 : maxLoopIterations + * bodyNetwork() + * + * + * Case 2: + * + * "conditionNetwork" is present, and "maxLoopIterations" is 0 and there is no input, + * in this case the layer corresponds to a while loop. Equivalent pseudo-code: + * + * conditionVar = conditionNetwork() + * while conditionVar: + * bodyNetwork() + * conditionVar = conditionNetwork() + * + * + * Case 3: + * + * "conditionNetwork" is provided, and "maxLoopIterations" is positive or there is an input, + * in this case the layer corresponds to a while loop with a joint condition. Equivalent pseudo-code: + * + * loopIterator = 0 + * conditionVar = conditionNetwork() + * while (conditionVar and loopIterator < maxLoopIterations): + * bodyNetwork() + * loopIterator = loopIterator + 1 + * conditionVar = conditionNetwork() + * + */ +message LoopLayerParams { + + /** + * maximum number of iterations. Ignored if input is present. + */ + uint64 maxLoopIterations = 1; + /** + * This field provides the name of the tensor which is produced by the conditionNetwork + * and whose value is checked to start/continue/terminate the loop. Value close to 0.0f is treated as False. + * This field is optional. 
+ * Must be a non empty string if and only if "conditionNetwork" is present. + */ + string conditionVar = 2; + /** + * Must generate a tensor with the name provided in the "conditionVar" field. + * This field is optional. + * Must be present if and only if "conditionVar" field is a non empty string. + */ + NeuralNetwork conditionNetwork = 3; + /** + * Body of the loop. + * This field must be present. + */ + NeuralNetwork bodyNetwork = 4; + +} + +/** + * Loop break Layer + * + * Terminate the loop that has this layer. + * If present, it should always reside in the "bodyNetwork" of the loop layer + * + * No inputs/outputs + * + */ +message LoopBreakLayerParams { + +} + +/** + * Loop Continue Layer + * + * Stop the current loop iteration and continue on the next iteration. + * If present, it should always reside in the "bodyNetwork" of the loop layer + * + * No inputs/outputs + * + */ +message LoopContinueLayerParams { + +} + +/** + * Copy Layer + * + * A layer that copies its input tensor to the output tensor. + * Must have 1 input and 1 output, with distinct names. + * This is the only layer that is allowed to re-generate an output that is already present in the neural network prior to this layer, + * in which case it will overwrite the output tensor. + * + */ +message CopyLayerParams { + +} + +/** + * GreaterThan Layer + * + * Either 1 or 2 inputs. + * Produces 1 output. + * Perform elementwise greater than operation. + * + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = x1 > x2 + * or + * y = x1 > alpha, if only one input is provided + * + * Broadcasting is supported. + * + */ +message GreaterThanLayerParams { + + /** + * Compare to the scalar value provided here if there is 1 input + */ + float alpha = 2; + +} + +/** + * GreaterEqual Layer + * + * Either 1 or 2 inputs. + * Produces 1 output. + * Perform elementwise greater equal operation. + * + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = x1 >= x2 + * or + * y = x1 >= alpha, if only one input is provided + * + * Broadcasting is supported. + * + */ +message GreaterEqualLayerParams { + + /** + * Compare to the scalar value provided here if there is 1 input + */ + float alpha = 2; + +} + +/** + * LessThan Layer + * + * Either 1 or 2 inputs. + * Produces 1 output. + * Perform elementwise less than operation. + * + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = x1 < x2 + * or + * y = x1 < alpha, if only one input is provided + * + * Broadcasting is supported. + * + */ +message LessThanLayerParams { + + /** + * Compare to the scalar value provided here if there is 1 input + */ + float alpha = 2; + +} + +/** + * LessEqual Layer + * + * Either 1 or 2 inputs. + * Produces 1 output. + * Perform elementwise less equal operation. + * + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = x1 <= x2 + * or + * y = x1 <= alpha, if only one input is provided + * + * Broadcasting is supported. + * + */ +message LessEqualLayerParams { + + /** + * Compare to the scalar value provided here if there is 1 input + */ + float alpha = 2; + +} + +/** + * Equal Layer + * + * Either 1 or 2 inputs. + * Produces 1 output. + * Perform elementwise equal operation. + * + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = x1 == x2 + * or + * y = x1 == alpha, if only one input is provided + * + * Broadcasting is supported. 
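Putting the control-flow messages above together (``LoopLayerParams`` with its ``conditionNetwork`` and ``bodyNetwork`` subgraphs), a loop with a joint termination condition, Case 3 in the description above, might be assembled roughly as follows; module and blob names are again illustrative placeholders of the sketch.

    from coreml_proto import NeuralNetwork_pb2     # assumed generated-module name

    nn = NeuralNetwork_pb2.NeuralNetwork()

    loop_layer = nn.layers.add()
    loop_layer.name = "loop_1"
    loop_layer.input.append("max_trip_count")      # optional scalar input; overrides maxLoopIterations

    loop = loop_layer.loop                         # LoopLayerParams
    loop.maxLoopIterations = 10                    # ignored because an input is present
    loop.conditionVar = "keep_going"               # tensor the conditionNetwork must produce

    cond = loop.conditionNetwork.layers.add()      # subgraph that recomputes "keep_going"
    cond.name = "check_condition"
    cond.output.append("keep_going")

    body = loop.bodyNetwork.layers.add()           # subgraph executed on every iteration
    body.name = "body_step"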
+ * + */ +message EqualLayerParams { + + /** + * Compare to the scalar value provided here if there is 1 input + */ + float alpha = 1; + +} + +/** + * NotEqual Layer + * + * Either 1 or 2 inputs. + * Produces 1 output. + * Perform elementwise not equal operation. + * + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = x1 != x2 + * or + * y = x1 != alpha, if only one input is provided + * + * Broadcasting is supported. + * + */ +message NotEqualLayerParams { + + /** + * Compare to the scalar value provided here if there is 1 input + */ + float alpha = 1; + +} + +/** + * LogicalAnd Layer + * + * Must have 2 inputs, produces 1 output. + * Perform elementwise logical AND operation. + * + * Input is considered False if equal to 0.0f otherwise True. + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = AND(x1, x2) + * + * Broadcasting is supported. + * + */ +message LogicalAndLayerParams { + +} + +/** + * LogicalOr Layer + * + * Must have 2 inputs, produces 1 output. + * Perform elementwise logical OR operation. + * + * Input is considered False if equal to 0.0f otherwise True. + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = OR(x1, x2) + * + * Broadcasting is supported. + * + */ +message LogicalOrLayerParams { + +} + +/** + * LogicalXor Layer + * + * Must have 2 inputs, produces 1 output. + * Perform elementwise logical XOR operation. + * + * Input is considered False if equal to 0.0f otherwise True. + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = XOR(x1, x2) + * + * Broadcasting is supported. + * + */ +message LogicalXorLayerParams { + +} + +/** + * LogicalNot Layer + * + * Must have 1 input, produces 1 output. + * Perform elementwise logical NOT operation. + * + * Input is considered False if equal to 0.0f otherwise True. + * Output is 1.0f if the condition is true otherwise 0.0f. + * + * .. code:: + * + * y = NOT(x) + * + * + */ +message LogicalNotLayerParams { + +} + +/// Border Amounts +/// -------------- + +/** + * Specifies the amount of spatial border to be either padded or cropped. + * + * For padding: + * + * .. code:: + * + * H_out = borderAmounts[0].startEdgeSize + H_in + borderAmounts[0].endEdgeSize + * W_out = borderAmounts[1].startEdgeSize + W_in + borderAmounts[1].endEdgeSize + * + * topPaddingAmount == Height startEdgeSize + * bottomPaddingAmount == Height endEdgeSize + * leftPaddingAmount == Width startEdgeSize + * rightPaddingAmount == Width endEdgeSize + * + * For cropping: + * + * .. code:: + * + * H_out = (-borderAmounts[0].startEdgeSize) + H_in + (-borderAmounts[0].endEdgeSize) + * W_out = (-borderAmounts[1].startEdgeSize) + W_in + (-borderAmounts[1].endEdgeSize) + * + * topCropAmount == Height startEdgeSize + * bottomCropAmount == Height endEdgeSize + * leftCropAmount == Width startEdgeSize + * rightCropAmount == Width endEdgeSize + */ +message BorderAmounts { + + message EdgeSizes { + /** + * The amount to be padded or cropped from the beginning. + */ + uint64 startEdgeSize = 1; + + /** + * The amount to be padded or cropped from the end. + */ + uint64 endEdgeSize = 2; + } + + /** + * The border amounts. + * This must be length 2 in the order ``[H, W]``. + */ + repeated EdgeSizes borderAmounts = 10; + +} + +/** + * Specifies the type of padding to be used with Convolution/Deconvolution and Pooling layers. 
+ * After padding, input spatial shape: ``[H_in, W_in]``, gets modified to the + * output spatial shape ``[H_out, W_out]``. + * + * .. code:: + * + * topPaddingAmount == Height startEdgeSize == borderAmounts[0].startEdgeSize + * bottomPaddingAmount == Height endEdgeSize == borderAmounts[0].endEdgeSize + * leftPaddingAmount == Width startEdgeSize == borderAmounts[1].startEdgeSize + * rightPaddingAmount == Width endEdgeSize == borderAmounts[1].endEdgeSize + * + * With Convolution or Pooling: + * + * .. code:: + * + * H_out = int_division_round_down((H_in + topPaddingAmount + bottomPaddingAmount - KernelSize[0]),stride[0]) + 1 + * + * which is same as: + * + * .. code:: + * + * H_out = int_division_round_up((H_in + topPaddingAmount + bottomPaddingAmount - KernelSize[0] + 1),stride[0]) + * + * With Deconvolution: + * + * .. code:: + * + * H_out = (H_in-1) * stride[0] + kernelSize[0] - (topPaddingAmount + bottomPaddingAmount) + * + * + * The equivalent expressions hold true for ``W_out`` as well. + * + * + * By default, the values of ``paddingAmounts`` are set to ``0``, + * which results in a "true" valid padding. + * If non-zero values are provided for ``paddingAmounts``, + * "valid" convolution/pooling is performed within the spatially expanded input. + * + */ +message ValidPadding { + + BorderAmounts paddingAmounts = 1; + +} + +/** + * Specifies the type of padding to be used with Convolution/Deconvolution and pooling layers. + * After padding, input spatial shape: ``[H_in, W_in]``, gets modified to the + * output spatial shape ``[H_out, W_out]``. + * With Convolution or pooling: + * + * .. code:: + * + * H_out = int_division_round_up(H_in,stride[0]) + * W_out = int_division_round_up(W_in,stride[1]) + * + * This is achieved by using the following padding amounts: + * + * .. code:: + * + * totalPaddingHeight = max(0,(H_out-1) * stride[0] + KernelSize[0] - Hin) + * totalPaddingWidth = max(0,(W_out-1) * stride[1] + KernelSize[1] - Win) + * + * There are two modes of asymmetry: + * ``BOTTOM_RIGHT_HEAVY``, and ``TOP_LEFT_HEAVY``. + * + * If the mode is ``BOTTOM_RIGHT_HEAVY``: + * + * .. code:: + * + * topPaddingAmount = floor(totalPaddingHeight / 2) + * bottomPaddingAmount = totalPaddingHeight - topPaddingAmount + * leftPaddingAmount = floor(totalPaddingWidth / 2) + * rightPaddingAmount = totalPaddingWidth - leftPaddingAmount + * + * If the mode is ``TOP_LEFT_HEAVY``: + * + * .. code:: + * + * bottomPaddingAmount = floor(totalPaddingHeight / 2) + * topPaddingAmount = totalPaddingHeight - bottomPaddingAmount + * rightPaddingAmount = floor(totalPaddingWidth / 2) + * leftPaddingAmount = totalPaddingWidth - rightPaddingAmount + * + * + * With Deconvolution: + * + * .. code:: + * + * H_out = H_in * stride[0] + * W_out = W_in * stride[1] + */ +message SamePadding { + + enum SamePaddingMode { + + BOTTOM_RIGHT_HEAVY = 0; + TOP_LEFT_HEAVY = 1; + + } + SamePaddingMode asymmetryMode = 1; + +} + +/** + * Specifies how grid points are sampled from an interval. + * Without the loss of generality, assume the interval to be [0, X-1] from which N points are to be sampled. + * Here X may correspond to an input image's height or width. + * All the methods can be expressed in terms of numpy's linspace function, along with the constraint that grid points have to lie in the interval [0, X-1]. + * Note: numpy.linspace(start = start, end = end, num = N, endpoint = True) corresponds to sampling + * N points uniformly from the interval [start, end], endpoints included. 
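The ``ValidPadding``/``SamePadding`` output-size rules above boil down to a little integer arithmetic per spatial dimension; the following plain-Python sketch mirrors those formulas (it is an illustration of the math, not part of the specification).

    def valid_output_size(h_in, kernel, stride, pad_start=0, pad_end=0):
        # ValidPadding: H_out = floor((H_in + top + bottom - K) / stride) + 1
        return (h_in + pad_start + pad_end - kernel) // stride + 1

    def same_output_size(h_in, stride):
        # SamePadding: H_out = ceil(H_in / stride)
        return -(-h_in // stride)

    def same_padding_amounts(h_in, kernel, stride, mode="BOTTOM_RIGHT_HEAVY"):
        total = max(0, (same_output_size(h_in, stride) - 1) * stride + kernel - h_in)
        small, large = total // 2, total - total // 2
        # BOTTOM_RIGHT_HEAVY puts the larger share at the end, TOP_LEFT_HEAVY at the start
        return (small, large) if mode == "BOTTOM_RIGHT_HEAVY" else (large, small)

    # e.g. H_in=8, K=3, stride=2 -> same_output_size == 4 with (top, bottom) == (0, 1)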
+ * The methods vary in how the ``start`` and ``end`` values are computed. + */ +message SamplingMode { + + enum Method { + + /** + * start = 0, end = X-1 + * grid points = numpy.linspace(start, end) + */ + STRICT_ALIGN_ENDPOINTS_MODE = 0; + + /** + * if N == 1: start = end = (X-1)/2 + * otherwise, start = 0, end = X-1 + * grid points = numpy.linspace(start, end) + */ + ALIGN_ENDPOINTS_MODE = 1; + + /** + * start = 0, end = X - X/N + * grid points = min(X-1, numpy.linspace(start, end)) + * This is same as the mode used in the upsample layer in this specification, when used with bilinear interpolation. In that case N/X = upsample ratio. + */ + UPSAMPLE_MODE = 2; + + /** + * spacing = max(1, X-1)/N + * start = 0.5 * spacing + * end = start + (N-1) * spacing + * grid points = min(X-1, numpy.linspace(start, end)) + */ + ROI_ALIGN_MODE = 3; + + } + + Method samplingMethod = 1; + +} + +/** + * Specifies the convention used to specify four bounding box coordinates for an image of size (Height, Width). + * The (0,0) coordinate corresponds to the top-left corner of the image. + */ +message BoxCoordinatesMode { + + enum Coordinates { + + /** + * [h_start, w_start, h_end, w_end] + */ + CORNERS_HEIGHT_FIRST = 0; + + /** + * [w_start, h_start, w_end, h_end] + */ + CORNERS_WIDTH_FIRST = 1; + + /** + * [h_center, w_center, box_height, box_width] + */ + CENTER_SIZE_HEIGHT_FIRST = 2; + + /** + * [w_center, h_center, box_width, box_height] + */ + CENTER_SIZE_WIDTH_FIRST = 3; + + } + + Coordinates boxMode = 1; + +} + +/** + * Weights for layer parameters. + * Weights are stored as repeated floating point numbers + * using row-major ordering + * and can represent 1-, 2-, 3-, or 4-dimensional data. + */ +message WeightParams { + + /** + * Values specified in single / float / FP32 precision. + */ + repeated float floatValue = 1; + + /** + * Values in 16-bit half precision floating point. + */ + bytes float16Value = 2; + + /** + * Raw value specification for quantized lower precisions. + * + * This field is interpreted as uintN, where N is the number of bits in quantization. + * E.g. if n=8, the field is interpreted as an array of UINT8. + * Use this field for quantized parameters unless specifically noted to use + * int8RawValue. + */ + bytes rawValue = 30; + + /** + * Field to be used if int8DynamicQuantize is set in the parent layer. + * Cannot be set if rawValue is also set. + * The values in this field are interpreted as INT8. + * + * If this field is set, following conditions must hold true: + * * QuantizationType == LinearQuantizationParams, such that + * * size of the "scale" field is 1 and "bias" field is empty in "LinearQuantizationParams" + */ + bytes int8RawValue = 31; + + /** + * Quantization related parameters. + */ + QuantizationParams quantization = 40; + + bool isUpdatable = 50; + +} + +/** + * Quantization parameters. + */ +message QuantizationParams { + + uint64 numberOfBits = 1; + oneof QuantizationType { + LinearQuantizationParams linearQuantization = 101; + LookUpTableQuantizationParams lookupTableQuantization = 102; + } + +} + +message LinearQuantizationParams { + + /** + * Stores scale and bias values corresponding to the quantized weights. + * Must be an array of 1 element, or an array of C elements, where C + * is number of output channels. For recurrent layers it is equal to + * the output vector size. 
+ * + * Relationship between quantized weights, unquantized weights, scale and bias: + * + * W_unquantized = W_quantized * scale + bias + * + */ + repeated float scale = 1; + repeated float bias = 2; + +} + +message LookUpTableQuantizationParams { + + /* Stores look-up table quantization values. Must be an array of + (2^numberOfBits) Elements. + */ + repeated float floatValue = 1; + +} + +/// Layers +/// ------ + +/** + * A layer that performs spatial convolution or deconvolution. + * + * .. code:: + * + * y = ConvolutionLayer(x) + * + * Requires 1 or 2 inputs and produces 1 output. + * + * Input + * First Input: + * A blob with rank greater than or equal to 4. + * Rank 4 blob represents [Batch, channels, height, width]. + * For ranks greater than 4, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * + * From Core ML specification version 4 onwards (iOS >= 13, macOS >= 10.15). + * convolution layer can have 2 inputs, in which case the second input is + * the blob representing the weights. This is allowed when "isDeconvolution" = False. + * The weight blob should have shape + * ``[outputChannels, kernelChannels, kernelHeight, kernelWidth]``, + * where kernelChannels == inputChannels / nGroups. + * + * Output + * Rank is same as the input. e.g.: for rank 4 input, output shape is [B, C_out, H_out, W_out] + * + * + * If ``dilationFactor`` is not 1, effective kernel size is + * modified as follows: + * + * .. code:: + * + * KernelSize[0] <-- (kernelSize[0]-1) * dilationFactor[0] + 1 + * KernelSize[1] <-- (kernelSize[1]-1) * dilationFactor[1] + 1 + * + * Type of padding can be ``valid`` or ``same``. Output spatial dimensions depend on the + * the type of padding. For details, refer to the descriptions of the messages "ValidPadding" + * and "SamePadding". Padded values are all zeros. + * + * For Deconvolution, ``ConvolutionPaddingType`` (``valid`` or ``same``) is ignored when ``outputShape`` is set. + * + * + */ +message ConvolutionLayerParams { + + /** + * The number of kernels. + * Same as ``C_out`` used in the layer description. + */ + uint64 outputChannels = 1; + + /** + * Channel dimension of the kernels. + * Must be equal to ``inputChannels / nGroups``, if isDeconvolution == False + * Must be equal to ``inputChannels``, if isDeconvolution == True + */ + uint64 kernelChannels = 2; + + /** + * Group convolution, i.e. weight reuse along channel axis. + * Input and kernels are divided into g groups + * and convolution / deconvolution is applied within the groups independently. + * If not set or 0, it is set to the default value 1. + */ + uint64 nGroups = 10; + + /** + * Must be length 2 in the order ``[H, W]``. + * If not set, default value ``[3, 3]`` is used. + */ + repeated uint64 kernelSize = 20; + + /** + * Must be length 2 in the order ``[H, W]``. + * If not set, default value ``[1, 1]`` is used. + */ + repeated uint64 stride = 30; + + /** + * Must be length 2 in order ``[H, W]``. + * If not set, default value ``[1, 1]`` is used. + * It is ignored if ``isDeconvolution == true``. + */ + repeated uint64 dilationFactor = 40; + + /** + * The type of padding. + */ + oneof ConvolutionPaddingType { + ValidPadding valid = 50; + SamePadding same = 51; + } + + /** + * Flag to specify whether it is a deconvolution layer. + */ + bool isDeconvolution = 60; + + /** + * Flag to specify whether a bias is to be added or not. + */ + bool hasBias = 70; + + /** + * Weights associated with this layer. 
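As a worked example of the ``W_unquantized = W_quantized * scale + bias`` relationship above, this NumPy sketch dequantizes an 8-bit ``WeightParams.rawValue`` blob; the per-channel branch assumes the row-major ``[C_out, ...]`` weight layout described in this file, and only the 8-bit packing is handled.

    import numpy as np

    def dequantize_linear_uint8(raw_value: bytes, scale, bias=()):
        """Recover float weights from WeightParams.rawValue quantized with
        LinearQuantizationParams and numberOfBits == 8."""
        q = np.frombuffer(raw_value, dtype=np.uint8).astype(np.float32)
        scale = np.asarray(scale, dtype=np.float32)       # length 1 or C_out
        bias = np.asarray(bias, dtype=np.float32) if len(bias) else np.zeros_like(scale)
        if scale.size == 1:
            return q * scale[0] + bias[0]
        return q.reshape(scale.size, -1) * scale[:, None] + bias[:, None]

    # e.g. bytes([0, 128, 255]) with scale=[2.0 / 255], bias=[-1.0] maps back to roughly [-1, 0, 1]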
+ * If convolution (``isDeconvolution == false``), weights have the shape + * ``[outputChannels, kernelChannels, kernelHeight, kernelWidth]``, where kernelChannels == inputChannels / nGroups + * If deconvolution (``isDeconvolution == true``) weights have the shape + * ``[kernelChannels, outputChannels / nGroups, kernelHeight, kernelWidth]``, where kernelChannels == inputChannels + */ + WeightParams weights = 90; + WeightParams bias = 91; /// Must be of size [outputChannels]. + + /** + * The output shape, which has length 2 ``[H_out, W_out]``. + * This is used only for deconvolution (``isDeconvolution == true``). + * If not set, the deconvolution output shape is calculated + * based on ``ConvolutionPaddingType``. + */ + repeated uint64 outputShape = 100; + +} + +/** + * A layer that performs a 3-dimensional convolution. + * + * .. code:: + * + * y = Convolution3DLayer(x) + * + * Input + * A blob of rank 5. + * The input blob's shape should be ``[batch, channels, depth, height, width]``. + * + * Fields + * The bias field, if set, should have shape of ``[channelsOut]``. + * + * Output + * A blob of rank 5. + * The output blob's shape is ``[batch, channelsOut, depthOut, heightOut, widthOut]``. + * + * Type of padding can be ``custom``, ``valid``, or ``same``. Padded values are all zeros. + * Output spatial dimensions depend on the the type of padding. For details, refer to the + * descriptions of the ``PaddingType`` field of this ``Convolution3DLayerParams`` message. + * + * Example + * For example, given an input of size ``[1, 3, 3, 8, 8]``, a stride of 2 in each dimension, + * a kernel of 3 in each dimension, 2 output channels, and ``same`` padding, this layer will + * compute the total padding applied in the depth, height, and width dimensions to be 2, 1, and 1, + * respectively. The depth padding is even and will be applied equally to both sides of the depth + * dimension. Since the height and width padding values are odd, they'll be applied to the + * bottom/right of the height/width dimensions. Thus, the padding applied to the input will be + * ``[1, 1, 0, 1, 0, 1]`` (front, back, top, bottom, left, right). Finally, the output produced + * will have size ``[1, 2, 2, 4, 4]``. + * + */ +message Convolution3DLayerParams { + + /** + * The number of channels in the output (channelsOut). Must be a positive integer. + */ + int32 outputChannels = 1; + + /** + * The number of channels in the input (channels). Must be a positive integer. + */ + int32 inputChannels = 2; + + /** + * Group convolution, i.e., weight reuse along the channel axis. + * It must evenly divide both the number of input and output channels and be at most the number + * of input channels (a depthwise convolution). + * Input and kernels are divided into g groups and convolution is applied within the groups + * independently. + */ + int32 nGroups = 10; + + /* Depth of the convolution kernel. Must be a positive integer. + */ + int32 kernelDepth = 20; + + /* Height of the convolution kernel. Must be a positive integer. + */ + int32 kernelHeight = 21; + + /* Width of the convolution kernel. Must be a positive integer. + */ + int32 kernelWidth = 22; + + /* Stride along the depth direction. Must be a positive integer. + */ + int32 strideDepth = 31; + + /* Stride along the height direction. Must be a positive integer. + */ + int32 strideHeight = 32; + + /* Stride along the width direction. Must be a positive integer. + */ + int32 strideWidth = 33; + + /* Dilation along the depth direction. Must be a positive integer. 
+ */ + int32 dilationDepth = 40; + + /* Dilation along the height direction. Must be a positive integer. + */ + int32 dilationHeight = 41; + + /* Dilation along the width direction. Must be a positive integer. + */ + int32 dilationWidth = 42; + + /** + * Flag to specify whether a bias is to be added or not. + * If false, then no bias is added. + */ + bool hasBias = 50; + + /** + * Weights associated with this layer. + * Weights have the shape + * if deconvolution == False + * ``[outputChannels, kernelChannels, kernelDepth, kernelHeight, kernelWidth]``, where + * kernelChannels == inputChannels / nGroups + * else if deconvolution == True + * ``[outputChannels / nGroups, kernelChannels, kernelDepth, kernelHeight, kernelWidth]``, where + */ + WeightParams weights = 60; + + /** + * Must be of size ``[outputChannels]``. + */ + WeightParams bias = 61; + + + /** + * The type of padding. + * All padding types pad the input shape with zeros. + * CUSTOM padding will add the custom padding values specified below to their respective + * dimensions, e.g., `customPaddingFront` number of zeros will be added to one side of the + * input's depth dimension and `customPaddingBack` number of zeros will be added to the other + * side of the input's depth dimension. + * VALID padding adds no padding to any dimension. In this case, the last convolution along + * each dimension will be dropped if the input dimension and the kernel size, stride, and + * dilation do not match. + * SAME padding adds enough padding to each dimension such that the output of the convolution + * has size ``Ceiling(inputShape / stride)``. Padding is added evenly to both sides of each + * dimension unless the total padding to add is odd, in which case it is added to the + * back/bottom/right side of the respective dimension. For example, if the total padding needed + * in the depth dimension is 3, 1 zero will be added to the front side of the depth dimension + * and 2 zeros will be added to the back side. + */ + enum PaddingType { + CUSTOM = 0; + VALID = 1; + SAME = 2; + } + PaddingType paddingType = 70; + + /* Padding before the input in the depth direction. Must be zero or a positive integer. + * Used when the `PaddingType` is `CustomPadding`, otherwise ignored by other padding types. + */ + int32 customPaddingFront = 80; + + /* Padding after the input in the depth direction. Must be zero or a positive integer. + * Used when the `PaddingType` is `CustomPadding`, otherwise ignored by other padding types. + */ + int32 customPaddingBack = 81; + + /* Padding before the input in the height direction. Must be zero or a positive integer. + * Used when the `PaddingType` is `CustomPadding`, otherwise ignored by other padding types. + */ + int32 customPaddingTop = 82; + + /* Padding after the input in the height direction. Must be zero or a positive integer. + * Used when the `PaddingType` is `CustomPadding`, otherwise ignored by other padding types. + */ + int32 customPaddingBottom = 83; + + /* Padding before the input in the width direction. Must be zero or a positive integer. + * Used when the `PaddingType` is `CustomPadding`, otherwise ignored by other padding types. + */ + int32 customPaddingLeft = 84; + + /* Padding after the input in the width direction. Must be zero or a positive integer. + * Used when the `PaddingType` is `CustomPadding`, otherwise ignored by other padding types. + */ + int32 customPaddingRight = 85; + + /* Flag to specify if this is Convolution Transpose or not. 
+ */ + bool isDeconvolution = 86; + + /* + * The output shape, which has length 3 ``[D_out, H_out, W_out]``. + * This is used only for deconvolution (``isDeconvolution == true``). + * If not set, the deconvolution output shape is calculated + * based on ``PaddingType``. + */ + repeated uint64 outputShape = 87; + +} + +/** + * A layer that performs a matrix-vector or matrix-matrix product. + * This is equivalent to a fully-connected, or dense layer. + * The weight parameters correspond to a matrix of dimensions (inputChannels, outputChannels) i.e. (C_in, C_out) + * + * .. code:: + * + * y = InnerProductLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * Input can have rank 1 to rank 5. This is how it is reshaped in to the matrix (for rank > 1): + * rank 1 (x1) : in this case, the layer corresponds to a matrix-vector product. x1 must be equal to C_in + * rank 2 (x1, x2): x2 must be equal to C_in + * rank 3 (x1, x2, x3) --> (x1 * x2, x3). x3 must be equal to C_in + * rank 4 (x1, x2, x3, x4) ---> (x1, x2 * x3 * x4). x2 * x3 * x4 must be equal to C_in + * rank 5 (x1, x2, x3, x4, x5) ---> (x1 * x2, x3 * x4 * x5). x3 * x4 * x5 must be equal to C_in + * + * Output + * Output rank is same as the input rank + * rank 1: (C_out) + * rank 2: (x1, C_out) + * rank 3: (x1, x2, C_out) + * rank 4: (x1, C_out, 1, 1) + * rank 5: (x1, x2, C_out, 1, 1) + * + */ +message InnerProductLayerParams { + + uint64 inputChannels = 1; /// Input size: C_in. + uint64 outputChannels = 2; /// Output size: C_out. + + bool hasBias = 10; /// Whether a bias is added or not. + + WeightParams weights = 20; /// Weight matrix [C_out, C_in]. + WeightParams bias = 21; /// Bias vector [C_out]. + + /** + * If set, this layer, at runtime, quantizes the floating point input blob to int8 before applying an + * inner product using INT8 weight matrix parameters, as provided in weights->int8RawValue. The + * result is then dequantized. + * Requires: + * * hasBias == false + * * QuantizationType == LinearQuantizationParams, such that + * * size of the "scale" field is 1 and "bias" field is empty in "LinearQuantizationParams" + * * numberOfBits == 8 + * * weights->rawValue_size to be empty + */ + bool int8DynamicQuantize = 22; + +} + +/** + * A layer that performs a matrix lookup and optionally adds a bias. + * The weights matrix is stored with dimensions [outputChannels, inputDim]. + * + * .. code:: + * + * y = EmbeddingLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * Input values must be in the range ``[0, inputDim - 1]``. + * + * Input must have rank equal to 4 or 5, such that the last 3 dimensions are all 1. + * rank 4: shape (x1, 1, 1, 1). x1 is effectively the batch/sequence length. + * rank 5: shape (x1, x2 , 1, 1, 1). x1 * x2 is effectively the combined batch/sequence length. + * + * Output + * Output rank is same as the input rank. Please see input description above. + * rank 4: shape (x1, outputChannels, 1, 1) + * rank 5: shape (x1, x2, outputChannels, 1, 1) + * + */ +message EmbeddingLayerParams { + + uint64 inputDim = 1; /// Size of the input dictionary. + uint64 outputChannels = 2; /// Size of the output vectors. + + bool hasBias = 10; /// Whether a bias is added or not. + + WeightParams weights = 20; /// 2-D weights of dimensions [outputChannels, inputDim]. + WeightParams bias = 21; /// Bias of size [outputChannels]. + +} + +/** + * A layer that performs a matrix lookup and optionally adds a bias. + * The weights matrix is stored with dimensions [embeddingSize, vocabSize]. 
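+ *
+ * As a point of reference, the lookup implied by this storage layout can be sketched in
+ * NumPy as follows (illustrative only; the sizes and index values are made up):
+ *
+ * .. code::
+ *
+ *     import numpy as np
+ *
+ *     vocab_size, embedding_size = 100, 8
+ *     W = np.random.randn(embedding_size, vocab_size).astype(np.float32)  # [embeddingSize, vocabSize]
+ *     b = np.zeros(embedding_size, dtype=np.float32)
+ *
+ *     ids = np.array([[3], [42], [7]])  # a rank-2 input of shape (x1, 1), values in [0, vocabSize - 1]
+ *     y = W[:, ids[..., 0]].T + b       # gather columns, then add the bias -> shape (x1, embeddingSize)
+ *     print(y.shape)                    # (3, 8)
+ *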
+ * + * .. code:: + * + * y = EmbeddingNDLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * Input values must be in the range ``[0, vocabSize - 1]``. + * Input must have rank at least 2. The last dimension must always be 1. + * rank 2: shape (x1, 1). x1 is the batch/sequence length. + * rank 3: shape (x1, x2, 1). x1 * x2 is effectively the combined batch/sequence length. + * rank 4: shape (x1, x2, x3, 1). x1 * x2 * x2 is effectively the combined batch/sequence length. + * rank 5: shape (x1, x2 , x3, x4, 1). x1 * x2 * x3 * x4 is effectively the combined batch/sequence length. + * + * Output + * Output rank is same as the input rank. Please see input description above. + * rank 2: shape (x1, embeddingSize) + * rank 3: shape (x1, x2, embeddingSize) + * rank 4: shape (x1, x2, x3, embeddingSize) + * rank 5: shape (x1, x2, x3, x4, embeddingSize) + * + */ +message EmbeddingNDLayerParams { + + uint64 vocabSize = 1; /// Size of the input dictionary. + uint64 embeddingSize = 2; /// Size of the output vectors. + bool hasBias = 3; /// Whether a bias is added or not. + WeightParams weights = 20; /// 2-D weights of dimensions [embeddingSize, vocabSize]. + WeightParams bias = 21; /// Bias of size [embeddingSize]. + +} + +/** + * A layer that performs batch normalization, + * which is performed along axis = -3, + * and repeated along the other axes, if present. + * + * .. code:: + * + * y = BatchnormLayer(x) + * + * Requires 1 input and produces 1 output. + * + * This operation is described by the following formula: + * + * .. math:: + * y_i = \gamma_i \dfrac{ (x_i - \mu_i)}{\sqrt{\sigma_i^2 + \epsilon}} + \beta_i \;,\;i=1,....,C + * + * Input + * A blob with rank greater than equal to 3. + * Example: Rank 4 blob represents [Batch, channels, height, width] + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * + * Output + * A blob with the same shape as the input. + */ +message BatchnormLayerParams { + + uint64 channels = 1; /// Size of the channel dimension in the input. + + /** + * If ``computeMeanVar == true``, + * the mean and variance are calculated from either + * the single input instance, if ``instanceNormalization == true``, + * or the whole batch, if ``instanceNormalization = false``. + * and the values provided in parameters "mean" and "variance" are ignored. + */ + bool computeMeanVar = 5; + bool instanceNormalization = 6; + + /** + * A small constant to avoid division by 0 while normalizing by variance. + * Defaults to ``1e-5`` if not set or set to ``0``. + */ + float epsilon = 10; + + WeightParams gamma = 15; /// Parameter of length [channels] + WeightParams beta = 16; /// Parameter of length [channels] + WeightParams mean = 17; /// Parameter of length [channels] + WeightParams variance = 18; /// Parameter of length [channels] + +} + +/** + * A spatial pooling layer. + * + * .. code:: + * + * y = PoolingLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank greater than equal to 4. + * Rank 4 blob represents [Batch, channels, height, width] + * For ranks greater than 4, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * + * Output + * Rank is same as the input. 
e.g.: for rank 4 input, output shape is [B, C, H_out, W_out] + * + * Padding options are similar to ``ConvolutionLayerParams`` + * with the additional option of ``ValidCompletePadding`` (``includeLastPixel``), + * which ensures that the last application of the kernel + * always includes the last pixel of the input image, if there is padding. + * + * .. code:: + * + * H_out = ceil(float(H_in + 2 * paddingAmounts[0] - kernelSize[0])/float(Stride[0])) + 1 + * if (paddingAmounts[0] > 0 or paddingAmounts[1] > 0) + * if ((H_out - 1) * Stride >= H_in + paddingAmounts[0]) { + * H_out = H_out - 1 + * } + * } + * + * The equivalent expressions hold true for ``W_out`` as well. + * Only symmetric padding is supported with this option. + */ +message PoolingLayerParams { + + enum PoolingType { + + MAX = 0; + AVERAGE = 1; + L2 = 2; + + } + PoolingType type = 1; /// Type of pooling operation. + + /** + * Must be length 2 in the order ``[H, W]``. + * If not set, default value ``[3, 3]`` is used. + */ + repeated uint64 kernelSize = 10; + + /** + * Must be length 2 in the order ``[H, W]``. + * If not set, default value ``[1, 1]`` is used. + */ + repeated uint64 stride = 20; + + message ValidCompletePadding { + + /** + * Must be length 2 in order ``[H, W]``. + * If not set, value ``[0, 0]`` is used. + */ + repeated uint64 paddingAmounts = 10; + + } + + oneof PoolingPaddingType { + ValidPadding valid = 30; + SamePadding same = 31; + ValidCompletePadding includeLastPixel = 32; + } + + /** + * If true, padded values are excluded from the count (denominator) + * when computing average pooling. + */ + bool avgPoolExcludePadding = 50; + + /** + * If true, global pooling is performed. + * Kernel size is inferred from the input data spatial dimensions. + */ + bool globalPooling = 60; + +} + +/* + * A layer to pool three spatial dimensions + * + * Input + * A blob with rank equal to 5, representing [Batch, channels, depth, height, width]. + * + * Output + * Rank is same as the input: A blob with rank equal to 5, representing [Batch, channels, depth, height, width]. + * + * Requires 1 input and produces 1 output. + * + * For example, given an input of shape (1,1,2,3,3): + * +----+----+----+ + * / | 10 | 11 | 12 | + * / +----+----+----+ + * / | 13 | 14 | 15 | + * / +----+----+----+ + * / | 16 | 17 | 18 | + * / +----+----+----+ + * +----+----+----+ / + * | 1 | 2 | 3 | / + * +----+----+----+ / + * | 4 | 5 | 6 | / + * +----+----+----+ / + * | 7 | 8 | 9 | / + * +----+----+----+ + * + * And applying MAX pooling using: + * Kernel: 2x2x2 + * Stride: 1x1x1 + * Valid Padding + * We expect to get an output with shape: (1,1,1,2,2) and value: + * +----+----+ + * | 14 | 15 | + * +----+----+ + * | 17 | 18 | + * +----+----+ + */ +message Pooling3DLayerParams { + + enum PoolingType3D { + MAX = 0; + AVERAGE = 1; + } + + // Whether to use Max or Average + PoolingType3D type = 1; + + // Depth of the pooling region. + int32 kernelDepth = 2; + + // Height of the pooling region. + int32 kernelHeight = 3; + + // Width of the pooling region. + int32 kernelWidth = 4; + + // Stride along the depth direction + int32 strideDepth = 5; + + // Stride along the height direction + int32 strideHeight = 6; + + // Stride along the width direction + int32 strideWidth = 7; + + /** + * The type of padding. + * All padding types pad the input shape with zeros. 
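+ *
+ * (A minimal NumPy sketch that reproduces the ``VALID``-padding MAX-pooling example shown
+ * above; illustrative only. The individual padding types are described next.)
+ *
+ * .. code::
+ *
+ *     import numpy as np
+ *
+ *     x = np.arange(1, 19, dtype=np.float32).reshape(1, 1, 2, 3, 3)  # the (1,1,2,3,3) input above
+ *
+ *     out = np.empty((1, 1, 1, 2, 2), dtype=np.float32)              # kernel 2x2x2, stride 1, VALID
+ *     for d in range(1):
+ *         for i in range(2):
+ *             for j in range(2):
+ *                 out[0, 0, d, i, j] = x[0, 0, d:d + 2, i:i + 2, j:j + 2].max()
+ *     print(out[0, 0, 0])                                            # [[14. 15.] [17. 18.]]
+ *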
+ * CUSTOM padding will add the custom padding values specified below to their respective + * dimensions, e.g., `customPaddingFront` number of zeros will be added to one side of the + * input's depth dimension and `customPaddingBack` number of zeros will be added to the other + * side of the input's depth dimension. + * VALID padding adds no padding to any dimension. In this case, the last pool along + * each dimension will be dropped if the input dimension and the kernel size, and stride do not match. + * SAME padding adds enough padding to each dimension such that the output + * has the same spatial dimensions as the input. Padding is added evenly to both + * sides of each dimension unless the total padding to add is odd, in which case the extra padding + * is added to the back/bottom/right side of the respective dimension. For example, if the the + * total horizontal padding is 3, then there will be 1 padding on the left, and 2 padding on the right. + */ + enum Pooling3DPaddingType { + CUSTOM = 0; + VALID = 1; + SAME = 2; + } + Pooling3DPaddingType paddingType = 15; + + // Padding before the input in the depth direction. + int32 customPaddingFront = 8; + + // Padding after the input in the depth direction. + int32 customPaddingBack = 9; + + // Padding before the input in the height direction. + int32 customPaddingTop = 10; + + // Padding after the input in the height direction. + int32 customPaddingBottom = 11; + + // Padding before the input in the width direction. + int32 customPaddingLeft = 12; + + // Padding after the input in the width direction. + int32 customPaddingRight = 13; + + // If true, exclude zeros from padding in Average pooling. Meaningless in Max Pooling. + bool countExcludePadding = 14; +} + +/* + * A layer to pool three spatial dimensions down to one value. + * This behaves like a special case of Pooling3DLayerParams in which + * the Kernel is the size of the input and there is no padding. + * + * Input + * A blob with rank equal to 5, representing [Batch, channels, depth, height, width]. + * + * Output + * Rank is same as the input: A blob with rank equal to 5, representing [Batch, channels, depth, height, width]. + * Depth, height, and width of the output will always be 1. + * + * Requires 1 input and produces 1 output. + * + * For example, given an input of shape (1,1,2,3,3): + * +----+----+----+ + * / | 10 | 11 | 12 | + * / +----+----+----+ + * / | 13 | 14 | 15 | + * / +----+----+----+ + * / | 16 | 17 | 18 | + * / +----+----+----+ + * +----+----+----+ / + * | 1 | 2 | 3 | / + * +----+----+----+ / + * | 4 | 5 | 6 | / + * +----+----+----+ / + * | 7 | 8 | 9 | / + * +----+----+----+ + * + * And applying MAX global 3d pooling, we expect to get an output with shape: (1,1,1,1,1) and value: + * +----+ + * | 18 | + * +----+ + */ +message GlobalPooling3DLayerParams { + + enum GlobalPoolingType3D { + MAX = 0; + AVERAGE = 1; + } + + // Whether to use Max or Average + GlobalPoolingType3D type = 1; +} + +/** + * A layer that performs padding along spatial dimensions. + * + * .. code:: + * + * y = PaddingLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank at least 2. + * e.g.: blob with shape ``[H_in, W_in]``. + * For ranks greater than 2, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch + * i.e. Padding is applied on last two dimensions. + * + * Output + * Same rank as the input. + * e.g.: blob with shape ``[H_out, W_out]``. + * + * Output dimensions are calculated as follows: + * + * .. 
code:: + * + * H_out = H_in + topPaddingAmount + bottomPaddingAmount + * W_out = W_in + leftPaddingAmount + rightPaddingAmount + * + * topPaddingAmount == Height startEdgeSize == borderAmounts[0].startEdgeSize + * bottomPaddingAmount == Height endEdgeSize == borderAmounts[0].endEdgeSize + * leftPaddingAmount == Width startEdgeSize == borderAmounts[1].startEdgeSize + * rightPaddingAmount == Width endEdgeSize == borderAmounts[1].endEdgeSize + * + * There are three types of padding: + * + * - ``PaddingConstant``, which fills a constant value at the border. + * - ``PaddingReflection``, which reflects the values at the border. + * - ``PaddingReplication``, which replicates the values at the border. + * + * Given the following input: + * + * .. code:: + * + * [1, 3, 4] : 1 2 3 4 + * 5 6 7 8 + * 9 10 11 12 + * + * Here is the output of applying the padding + * ``(top=2, left=2, bottom=0, right=0)`` + * with each of the supported types: + * + * - ``PaddingConstant`` (``value = 0``): + * .. code:: + * + * [1, 5, 6] : 0 0 0 0 0 0 + * 0 0 0 0 0 0 + * 0 0 1 2 3 4 + * 0 0 5 6 7 8 + * 0 0 9 10 11 12 + * + * - ``PaddingReflection``: + * .. code:: + * + * [1, 5, 6] : 11 10 9 10 11 12 + * 7 6 5 6 7 8 + * 3 2 1 2 3 4 + * 7 6 5 6 7 8 + * 11 10 9 10 11 12 + * + * - ``PaddingReplication``: + * .. code:: + * + * [1, 5, 6] : 1 1 1 2 3 4 + * 1 1 1 2 3 4 + * 1 1 1 2 3 4 + * 5 5 5 6 7 8 + * 9 9 9 10 11 12 + */ +message PaddingLayerParams { + + /** + * Fill a constant value in the padded region. + */ + message PaddingConstant { + float value = 1; + } + + /** + * Reflect the values at the border for padding. + */ + message PaddingReflection { + } + + /** + * Replicate the values at the border for padding. + */ + message PaddingReplication { + } + + oneof PaddingType { + PaddingConstant constant = 1; + PaddingReflection reflection = 2; + PaddingReplication replication = 3; + } + + BorderAmounts paddingAmounts = 10; /// Amounts to be padded to the input. + +} + +/** + * A layer that concatenates along the axis = -3 or -5. + * For general concatenation along any axis, see ConcatNDLayer. + * + * .. code:: + * + * y = ConcatLayer(x1,x2,....) + * + * Requires more than 1 input and produces 1 output. + * + * Input + * All input blobs must have same rank. + * If "sequenceConcat" = False, rank must be greater than equal to 3. In this case concatenation is along axis = -3 + * If "sequenceConcat" = True, rank must be greater than equal to 5. In this case concatenation is along axis = -5 + * + * Output + * Same rank as the input. + * + */ +message ConcatLayerParams { + + /** + * If true, concatenate along the axis = -5 instead of axis = -3. + */ + bool sequenceConcat = 100; + +} + +/** + * A layer that performs local response normalization (LRN). + * + * .. code:: + * + * y = LRNLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank greater than equal to 3. + * Example: Rank 4 blob represents [Batch, channels, height, width] + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * Output + * A blob with the same shape as the input. + * + * This layer is described by the following formula: + * + * .. math:: + * x_i \leftarrow \dfrac{x_i}{\left ( k + \dfrac{\alpha}{C} \sum_j x_j^2 \right )^\beta} + * + * where the summation is done over a ``(localSize, 1, 1)`` neighborhood --- + * that is, over a window "across" channels in 1x1 spatial neighborhoods. 
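+ *
+ * A minimal NumPy sketch of this normalization (illustrative only; it assumes the
+ * ``\alpha/C`` factor divides by the window size ``localSize``):
+ *
+ * .. code::
+ *
+ *     import numpy as np
+ *
+ *     def lrn(x, local_size, alpha, beta, k=1.0):
+ *         """x: [C, H, W]; normalize each channel over a window of `local_size` channels."""
+ *         C = x.shape[0]
+ *         y = np.empty_like(x)
+ *         for c in range(C):
+ *             lo, hi = max(0, c - local_size // 2), min(C, c + local_size // 2 + 1)
+ *             denom = (k + (alpha / local_size) * (x[lo:hi] ** 2).sum(axis=0)) ** beta
+ *             y[c] = x[c] / denom
+ *         return y
+ *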
+ */ +message LRNLayerParams { + + float alpha = 1; + float beta = 2; + uint64 localSize = 3; /// Number of channels in the normalization window. + float k = 4; /// Defaults to 1 if not set or 0. Must be strictly positive. + +} + +/** + * Softmax Normalization Layer + * + * A layer that performs softmax normalization. + * Normalization is applied along axis = -3 or N-3 (where N is the rank of the input) + * For softmax layer that can operate on any axis, see SoftmaxNDLayer. + * + * + * .. code:: + * + * y = SoftmaxLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * Must be a blob with rank >= 3. + * Output + * A blob with the same shape as the input. + * + * This layer is described by the following formula: + * + * .. math:: + * x_i \leftarrow \dfrac{e^{x_i}}{\sum_i{e^{x_i}}} + */ +message SoftmaxLayerParams { + +} + +/** + * A layer that uniformly splits across axis = -3 to produce a specified number of outputs. + * For general split operation along any axis, see SplitNDLayer. + * + * .. code:: + * + * (y1,y2,...yN) = SplitLayer(x), where N = nOutputs + * + * Requires 1 input and produces multiple outputs. + * + * Input + * A blob with rank at least 3. + * e.g.: blob with shape ``[C, H, W]`` + * Output + * ``nOutputs`` blobs each with same rank as the input. + * e.g.: For input that is of shape ``[C, H, W]``, output shapes will be ``[C/nOutputs, H, W]`` + */ +message SplitLayerParams { + + uint64 nOutputs = 1; /// The number of outputs. + +} + +/** + * A layer that performs elementwise addition. + * This layer has limited broadcasting support. For general broadcasting see AddBroadcastableLayer. + * + * .. code:: + * + * y = AddLayer(x1,x2,...) + * + * Requires 1 or more than 1 input and produces 1 output. + * + * Input + * In general, there are no rank constraints. + * However, only certain set of shapes are broadcastable. For example: + * [B, 1, 1, 1], [B, C, 1, 1], [B, 1, H, W], [B, C, H, W] + * Output + * A blob with shape equal to the input blob. + * + * If only one input is provided, scalar addition is performed: + * + * .. math:: + * y = x + \alpha + * + */ +message AddLayerParams { + + /** + * Scalar to be added to the input. + * Only used if there is a single input. + */ + float alpha = 1; + +} + +/** + * A layer that performs elementwise multiplication. + * This layer has limited broadcasting support. For general broadcasting see MultiplyBroadcastableLayer. + * + * .. code:: + * + * y = MultiplyLayer(x1,x2,...) + * + * Requires 1 or more than 1 input and produces 1 output. + * + * Input + * In general, there are no rank constraints. + * However, only certain set of shapes are broadcastable. For example: + * [B, 1, 1, 1], [B, C, 1, 1], [B, 1, H, W], [B, C, H, W] + * Output + * A blob with shape equal to the first input blob. + * + * If only one input is provided, scalar multiplication is performed: + * + * .. math:: + * y = \alpha x + * + */ +message MultiplyLayerParams { + + /** + * Scalar to be multiplied with the input. + * Only used if there is a single input. + */ + float alpha = 1; + +} + +/** + * A layer that applies a unary function. + * + * .. code:: + * + * y = UnaryFunctionLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with no rank constraints. + * Output + * A blob with the same shape as the input. + * + * The input is first modified by shifting and scaling: + * + * .. math:: + * x \leftarrow \text{scale} \cdot x + \text{shift} + */ +message UnaryFunctionLayerParams { + + /** + * A unary operator. 
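+ *
+ * As an illustration of how ``scale``, ``shift``, ``alpha``, and ``epsilon`` combine with the
+ * operations listed next, a minimal NumPy sketch (not part of the specification):
+ *
+ * .. code::
+ *
+ *     import numpy as np
+ *
+ *     def unary(x, op, alpha=1.0, epsilon=1e-6, shift=0.0, scale=1.0):
+ *         x = scale * x + shift  # the shift/scale pre-processing described above
+ *         ops = {
+ *             "SQRT":      np.sqrt,
+ *             "RSQRT":     lambda v: 1.0 / np.sqrt(v + epsilon),
+ *             "INVERSE":   lambda v: 1.0 / (v + epsilon),
+ *             "POWER":     lambda v: v ** alpha,
+ *             "EXP":       np.exp,
+ *             "LOG":       np.log,
+ *             "ABS":       np.abs,
+ *             "THRESHOLD": lambda v: np.maximum(alpha, v),
+ *         }
+ *         return ops[op](x)
+ *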
+ * + * The following functions are supported: + * + * ``SQRT`` + * .. math:: f(x) = \sqrt{x} + * + * ``RSQRT`` + * .. math:: f(x) = \dfrac{1}{\sqrt{x + \epsilon}} + * + * ``INVERSE`` + * .. math:: f(x) = \dfrac{1}{x + \epsilon} + * + * ``POWER`` + * .. math:: f(x) = x^\alpha + * + * ``EXP`` + * .. math:: f(x) = e^x + * + * ``LOG`` + * .. math:: f(x) = \log x + * + * ``ABS`` + * .. math:: f(x) = |x| + * + * ``THRESHOLD`` + * .. math:: f(x) = \text{max}(\alpha, x) + */ + enum Operation { + SQRT = 0; + RSQRT = 1; + INVERSE = 2; + POWER = 3; + EXP = 4; + LOG = 5; + ABS = 6; + THRESHOLD = 7; + } + Operation type = 1; /// The type of unary function. + + /** + * A constant used in ``POWER`` and ``THRESHOLD`` functions. + */ + float alpha = 2; + + /** + * A small constant to avoid division by 0 while normalizing variance. + * Defaults to ``1e-6`` if not set or set to ``0``. + */ + float epsilon = 3; + + /** + * Input is shifted by this amount + * before the unary function is applied. + * Defaults to ``0.0`` if not set. + */ + float shift = 4; + + /** + * Input is scaled by this amount + * before the unary function is applied. + * Defaults to ``1.0`` if not set or set to ``0``. + */ + float scale = 5; + +} + +/** + * A layer that scales up spatial dimensions. + * It supports two modes: nearest neighbour (default) and bilinear. + * + * .. code:: + * + * y = UpsampleLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank at least 3. + * e.g.: blob with shape ``[C, H, W]``. + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * + * Output + * Same rank as the input. + * e.g.: blob with shape ``[C, scalingFactor[0] * H, scalingFactor[1] * W]`` + */ +message UpsampleLayerParams { + + /** + * Scaling Factor. Mutually exclusive with fractionalScalingFactor. + * Must be length 2 in order ``[H, W]``. + * If not set, default value ``[1, 1]`` is used. + */ + repeated uint64 scalingFactor = 1; + + /** + * Fractional scaling factor. Mutually exclusive with scalingFactor. + * Must be length 2 in order ``[H, W]``. + * If not set, default value ``[1.0, 1.0]`` is used. + */ + repeated float fractionalScalingFactor = 7; + + /* + * Overall mode for interpolating new elements when upsampling. + * NN - Nearest Neighbors - simply pick the nearest true value for interpolated values. + * BILINEAR - Use bilinear interpolation. See LinearUpsamplingMode for behavior. + */ + enum InterpolationMode { + + NN = 0; /// Nearest Neighbour + BILINEAR = 1; /// Bilinear + + } + + InterpolationMode mode = 5; + + /** + * LinearUpsampleMode specifies the behavior for linear upsampling. Only valid when Interpolation Mode is BILINEAR. 
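+ *
+ * The three modes spelled out below compute sample positions as in this minimal Python
+ * sketch (illustrative only):
+ *
+ * .. code::
+ *
+ *     def grid_points(x_in, x_out, mode):
+ *         """Sample positions along one axis for the linear-upsample modes defined below."""
+ *         if mode == "ALIGN_CORNERS_TRUE":
+ *             spacing, offset = (x_in - 1) / (x_out - 1), 0.0
+ *         elif mode == "ALIGN_CORNERS_FALSE":
+ *             spacing = x_in / x_out
+ *             offset = 0.5 * spacing - 0.5
+ *         else:  # DEFAULT
+ *             spacing, offset = (x_in - x_in / x_out) / (x_out - 1), 0.0
+ *         return [min(x_in - 1, max(0.0, i * spacing + offset)) for i in range(x_out)]
+ *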
+ * If input grid is [0, Xin-1] (corresponding to an input size of Xin), and if the output size is Xout, + * then the grid points are sampled in the following manner: + * DEFAULT: + * spacing = (Xin-Xin/Xout) / (Xout-1) + * grid_point[i] = min(Xin-1, max(0, i * spacing)), for i = 0,1,2,….,Xout-1 + * ALIGN_CORNERS_TRUE: + * spacing = (Xin-1) / (Xout-1) + * grid_point[i] = min(Xin-1, max(0, i * spacing)), for i = 0,1,2,….,Xout-1 + * ALIGN_CORNERS_FALSE: + * spacing = Xin / Xout + * grid_point[i] = min(Xin-1, max(0, i * spacing + 0.5 * spacing - 0.5)), for i = 0,1,2,….,Xout-1 + */ + enum LinearUpsampleMode { + + DEFAULT = 0; + ALIGN_CORNERS_TRUE = 1; + ALIGN_CORNERS_FALSE = 2; + + } + + LinearUpsampleMode linearUpsampleMode = 6; + +} + +/** +* A layer that resizes the input to a pre-specified spatial size using bilinear interpolation. +* +* .. code:: +* +* y = ResizeBilinearLayer(x) +* +* Requires 1 input and produces 1 output. +* +* Input +* A blob with rank at least 3. +* e.g.: blob with shape ``[C, H_in, W_in]``. +* For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. +* +* Output +* Same rank as the input. +* e.g.: blob with shape ``[C, H_out, W_out]``. +* +*/ +message ResizeBilinearLayerParams { + + /** + * Target Spatial Size. + * Must be length 2 in order ``[Height, Width]``, i.e. ``[H_out, W_out]``. + * If not set, default value ``[1, 1]`` is used. + */ + repeated uint64 targetSize = 1; + + /** + * Mode used to compute the grid on which the spatial output values are evaluated. + * Same mode is applied to both the height and width axes. + */ + SamplingMode mode = 2; + +} + +/** +* A layer that extracts cropped spatial patches or RoIs (regions of interest) from the input and resizes them to a pre-specified size using +* bilinear interpolation. +* Note that RoI Align layer can be implemented with this layer followed by a pooling layer. +* +* .. code:: +* +* y = CropResizeLayer(x) +* +* Requires 2 inputs and produces 1 output. +* +* Input +* There are two inputs. +* First input represents an image feature map. +* Second input represents the bounding box coordinates for N patches or RoIs (region of interest). +* +* First input is rank 5: [1, Batch, C, H_in, W_in]. +* Second input is rank 5. Its shape can be either [N, 1, 4, 1, 1] or [N, 1, 5, 1, 1]. +* +* N: number of patches/RoIs to be extracted +* +* If RoI shape = ``[N, 1, 4, 1, 1]`` +* The axis=-3 corresponds to the four coordinates specifying the bounding box. +* All the N RoIs are extracted from all the batches of the input. +* +* If RoI shape = ``[N, 1, 5, 1, 1]`` +* The first element of the axis=-3 specifies the input batch id from which to extract the RoI and +* must be in the interval ``[0, Batch - 1]``. That is, n-th RoI is extracted from the RoI[n,0,0,0,0]-th +* input batch id. The last four elements of the axis=-3 specify the bounding box coordinates. +* +* Output +* A blob with rank 5. +* - Shape is [N, Batch, C, H_out, W_out] if input RoI shape is [N, 1, 4, 1, 1] +* - Shape is [N, 1, C, H_out, W_out] if input RoI shape is [N, 1, 5, 1, 1] +* +*/ +message CropResizeLayerParams { + + /** + * Target Spatial Size. + * Must be length 2 in order ``[Height, Width]``, i.e. ``[H_out, W_out]``. + * If not set, default value ``[1, 1]`` is used. + */ + repeated uint64 targetSize = 1; + + /** + * If true the bounding box coordinates must be in the interval [0, 1]. + * They are scaled by (H_in - 1), (W_in - 1), i.e. based on the input spatial dimensions. 
+ * If false the bounding box coordinates must be in the interval + * [0, H_in -1] and [0, W_in - 1], respectively for height and width dimensions. + */ + bool normalizedCoordinates = 2; + + /** + * Mode used to compute the grid on which the spatial output values are evaluated. + * Same mode is applied to both the height and width axes. + */ + SamplingMode mode = 3; + + /** + * Representation used to express the bounding box coordinates. + * It determines how the values of the second input are interpreted. + */ + BoxCoordinatesMode boxIndicesMode = 4; + + /** + * Additional spatial scale that multiplies the bounding box coordinates. + * Generally used while implementing the RoI Align layer, + * which uses unnormalized RoI coordinates along with a spatial scale less than or equal to 1. + */ + float spatialScale = 5; + +} + +/** + * A layer that performs elementwise addition of a bias, + * which is broadcasted to match the input shape. + * + * .. code:: + * + * y = BiasLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank at least 3. + * e.g.: blob with shape ``[C, H, W]``. + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * Output + * A blob with the same shape as the input. + */ +message BiasLayerParams { + + /** + * The shape of the bias. + * Must be one of the following: + * ``[1]``, ``[C]``, ``[1, H, W]`` or ``[C, H, W]``. + */ + repeated uint64 shape = 1; + + /** + * The bias values. + * The size must be equal to the product of the ``shape`` dimensions. + */ + WeightParams bias = 2; + +} + +/** + * A layer that performs elmentwise multiplication by a scale factor + * and optionally adds a bias; + * both the scale and bias are broadcasted to match the input shape. + * + * .. code:: + * + * y = ScaleLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank at least 3. + * e.g.: blob with shape ``[C, H, W]``. + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * Output + * A blob with the same shape as the input. + */ +message ScaleLayerParams { + + /** + * The shape of the scale. + * Must be one of the following: + * ``[1]``, ``[C]``, ``[1, H, W]`` or ``[C, H, W]``. + */ + repeated uint64 shapeScale = 1; + + /** + * The scale values. + * The size must be equal to the product of the ``shape`` dimensions. + */ + WeightParams scale = 2; /// Scale values. Size must be equal to the product of dimensions specified in shapeScale. + + bool hasBias = 3; /// If true, a bias is added after scaling. + + /** + * The shape of the bias. + * Must be one of the following: + * ``[1]``, ``[C]``, ``[1, H, W]`` or ``[C, H, W]``. + */ + repeated uint64 shapeBias = 4; + + /** + * The bias values. + * The size must be equal to the product of the ``shape`` dimensions. + */ + WeightParams bias = 5; + +} + +/** + * A layer that loads data as a parameter and provides it as an output. + * The output is rank 5. For general rank, see LoadConstantNDLayer. + * + * .. code:: + * + * y = LoadConstantLayer() + * + * Requires no input and produces 1 output. + * + * Output: + * A blob with rank 5 and shape ``[1, 1, C, H, W]`` + */ +message LoadConstantLayerParams { + + /** + * The shape of the constant to be loaded, + * which must be``[C, H, W]``, that is length 3. + */ + repeated uint64 shape = 1; + + /** + * The data values, + * of size ``C * H * W``. 
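+ *
+ * A minimal NumPy sketch of how the flat values map to the rank-5 output (illustrative
+ * only; the shape used here is made up):
+ *
+ * .. code::
+ *
+ *     import numpy as np
+ *
+ *     shape = (3, 4, 5)                                     # an illustrative [C, H, W]
+ *     data = np.arange(np.prod(shape), dtype=np.float32)    # the C * H * W raw values
+ *     output = data.reshape(shape)[np.newaxis, np.newaxis]  # rank-5 output [1, 1, C, H, W]
+ *     print(output.shape)                                   # (1, 1, 3, 4, 5)
+ *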
+ */ + WeightParams data = 2; + +} + +/** + * A layer that performs L2 normalization, i.e. divides by the + * the square root of the sum of squares of all elements of input. + * + * .. code:: + * + * y = L2NormalizeLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank greater than equal to 3. + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * Output + * A blob with the same shape as the input. + * + * This layer is described by the following formula: + * + * .. math:: + * x_i \leftarrow \dfrac{x_i}{\sqrt{\sum{x_i^2} + \epsilon}} + */ +message L2NormalizeLayerParams { + + /** + * A small constant to avoid division by 0 while normalizing variance. + * Defaults to ``1e-6`` if not set or set to ``0``. + */ + float epsilon = 1; + +} + +/// Data Reorganization Layers +/// -------------------------- + +/** + * A layer that flattens the input. + * + * .. code:: + * + * y = FlattenLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank greater than equal to 3. + * e.g.: Rank 4 blob represents [Batch, C, H, W] + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * Output + * Same rank as the input, such that last two dimensions are both 1. + * e.g.: For rank 4 input, output shape is ``[Batch, C * H * W, 1, 1]`` + * + * There are two X orders: ``CHANNEL_FIRST`` and ``CHANNEL_LAST``. + * ``CHANNEL_FIRST`` does not require data to be rearranged, + * because row major ordering is used by internal storage. + * ``CHANNEL_LAST`` requires data to be rearranged. + */ +message FlattenLayerParams { + + enum FlattenOrder { + + CHANNEL_FIRST = 0; + CHANNEL_LAST = 1; + + } + FlattenOrder mode = 1; + +} + +/** + * A layer that recasts the input into a new shape. + * + * .. code:: + * + * y = ReshapeLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank 5. + * e.g.: ``[1, 1, C, H, W]`` or ``[Seq, 1, C, H, W]``. + * Output + * A blob with rank 5. + * e.g.: ``[1, 1, C_out, H_out, W_out]`` or ``[Seq_out, 1, C_out, H_out, W_out]``. + * + * There are two reshape orders: ``CHANNEL_FIRST`` and ``CHANNEL_LAST``. + * ``CHANNEL_FIRST`` is equivalent to + * flattening the input to ``[Seq, 1, C * H * W, 1, 1]`` in channel first order + * and then reshaping it to the target shape; + * no data rearrangement is required. + * ``CHANNEL_LAST`` is equivalent to + * flattening the input to ``[Seq, 1, H * W * C, 1, 1]`` in channel last order, + * reshaping it to ``[Seq_out, 1, H_out, W_out, C_out]`` (it is now in "H_out-major"" order), + * and then permuting it to ``[C_out, H_out, W_out]``; + * both the flattening and permuting requires the data to be rearranged. + */ +message ReshapeLayerParams { + + /** + * The shape of the output. + * Must be of length 3 or 4. + * If set to 3, ``targetShape`` is interpreted as + * ``[1, 1, C_out, H_out, W_out]``, and sequence length of the input is preserved. + * If set to 4, ``targetShape`` is interpreted as + * ``[Seq_out, 1, C_out, H_out, W_out]``, + * where ``Seq_out`` is the new sequence length. + */ + repeated int64 targetShape = 1; + + enum ReshapeOrder { + + CHANNEL_FIRST = 0; + CHANNEL_LAST = 1; + + } + ReshapeOrder mode = 2; + +} + +/** + * A layer that rearranges the dimensions and data of an input. + * For generic transpose/permute operation see TransposeLayer. + * + * .. 
code:: + * + * y = PermuteLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * Must be a rank 5 blob. + * e.g.: shape ``[Seq, B, C, H, W]``. + * Output + * Rank 5 blob. Transposed version of the input, such that dimensions at axis=1 or axis=-4 is unchanged. + * + * + * Examples: + * + * Assume input shape is [Seq, B, C, H, W] + * + * - If ``axis`` is set to ``[0, 3, 1, 2]``, + * then the output has shape ``[Seq, B, W, C, H]`` + * + * - If ``axis`` is set to ``[3, 1, 2, 0]``, + * then the output has shape ``[W, B, C, H, Seq]`` + * + * - If ``axis`` is set to ``[0, 3, 2, 1]``, + * then the output has shape ``[Seq, B, W, H, C]`` + * + * - If ``axis`` is not set, or is set to ``[0, 1, 2, 3]``, + * the output is the same as the input. + */ +message PermuteLayerParams { + + /** + * The order in which to permute the dimensions. + * Must have length 4 and a permutation of ``[0, 1, 2, 3]``. + */ + repeated uint64 axis = 1; + +} + +/** + * A layer that reorganizes data in the input in specific ways. + * + * .. code:: + * + * y = ReorganizeDataLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank at least 3. + * e.g.: blob with shape ``[C, H, W]``. + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * Output + * Same rank as the input. + * e.g.: blob with shape ``[C_out, H_out, W_out]``. + * + * mode == SPACE_TO_DEPTH + * ``[C_out, H_out, W_out]`` : ``[C * blockSize * blockSize, H/blockSize, W/blockSize]``. + * blockSize must divide H and W. + * Data is moved from the spatial dimensions to the channel dimension. Input is spatially divided into + * non-overlapping blocks of size blockSize X blockSize and data from each block is moved into the + * channel dimension. + * + * mode == DEPTH_TO_SPACE + * ``[C_out, H_out, W_out]`` : ``[C/(blockSize * blockSize), H * blockSize, W * blockSize]``. + * Square of blockSize must divide C. + * Reverse of SPACE_TO_DEPTH. Data is moved from the channel dimension to the spatial dimensions. + * + * mode == PIXEL_SHUFFLE + * ``[C_out, H_out, W_out]`` : ``[C/(blockSize * blockSize), H * blockSize, W * blockSize]``. + * Square of blockSize must divide C. + * Similar to DEPTH_TO_SPACE, but using the pixel-shuffle semantics for channel order in the output space. + * In both modes, elements along the channel dimension are collapsed into + * blocks in the spatial dimensions. The difference is in the arrangement of + * the input-channels' data in the output space. See below example for more + * detail. + * (Only available in Core ML Specification >= 5 (iOS >= 14, macOS >= 11.0) + * + * + * Examples: + * + * Assume input is the following [C = 8, H = 1, W = 2] tensor: + * + * .. code:: + * + * [[[1 2]] [[3 4]] [[5 6]] [[7 8]] [[9 10]] [[11 12]] [[13 14]] [[15 16]]] + * + * If block_size == 2 and mode == DEPTH_TO_SPACE, output will be the following + * [C = 2, H = 2, W = 4] tensor: + * + * .. code:: + * + * [[[ 1 5 2 6] + * [ 9 13 10 14]] + * + * [[ 3 7 4 8] + * [11 15 12 16]]] + * + * For mode == SPACE_TO_DEPTH, the behavior is the same as mode == + * DEPTH_TO_SPACE, but with the input and output swapped. + * + * If block_size == 2 and mode == PIXEL_SHUFFLE, output will be the following + * [C = 2, H = 2, W = 4] tensor: + * + * .. 
code:: + * + * [[[ 1 3 2 4] + * [ 5 7 6 8]] + * + * [[ 9 11 10 12] + * [13 15 14 16]]] + * + */ +message ReorganizeDataLayerParams { + + enum ReorganizationType { + + SPACE_TO_DEPTH = 0; + DEPTH_TO_SPACE = 1; + PIXEL_SHUFFLE = 2; + + } + ReorganizationType mode = 1; + uint64 blockSize = 2; /// must be greater than 1 + +} + +/** + * A layer that slices the input data along axis = -1 or -2 or -3. + * For general slice along any axis, please see SliceStaticLayer/SliceDynamicLayer. + * + * .. code:: + * + * y = SliceLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob that can, in general, have any rank. However, depending on the value of "axis" , + * there may be additional rank constraints. + * Output + * A blob with the same rank as the input. + * + * Sliced section is taken from the interval ``[startIndex, endIndex)``, i.e. + * startIndex is inclusive while endIndex is exclusive. + * stride must be positive and represents the step size for slicing. + * Negative indexing is supported for startIndex and endIndex. + * -1 denotes N-1, -2 denotes N-2 and so on, where N is the length of the dimension to be sliced. + * + */ +message SliceLayerParams { + + int64 startIndex = 1; /// start of the sliced section. Inclusive. + int64 endIndex = 2; /// end of sliced section. Exclusive. + uint64 stride = 3; /// The step size. Must be positive. + + enum SliceAxis { + + CHANNEL_AXIS = 0; + HEIGHT_AXIS = 1; + WIDTH_AXIS = 2; + + } + // The following mapping is used for interpreting this parameter: + // CHANNEL_AXIS => axis = -3, input must have rank at least 3. + // HEIGHT_AXIS => axis = -2, input must have rank at least 2. + // WIDTH_AXIS => axis = -1 + SliceAxis axis = 4; + +} + +/** + * A layer that reduces the input using a specified operation. + * + * .. code:: + * + * y = ReduceLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob that can, in general, have any rank. However, depending on the value of "axis" , + * there may be additional rank constraints. + * Output + * A blob with the same rank as the input, which has 1s on the dimensions specified in the parameter "axis" + * + * Values supported for axis are [-1], [-2], [-3], [-2,-1], [-3,-2,-1] + * and the equivalent positive values (depending on the rank of the input) + * For mode == 'ArgMax', axis must be [-1] or [-2] or [-3]. + */ +message ReduceLayerParams { + + /* + * The following reduction operations are supported + * and are applied on the specified axis of the input array: + * + * ``SUM`` + * Sum of all elements + * + * .. math:: \sum{x_i} + * + * ``AVG`` + * Sum of all elements divided by the number of elements + * + * .. math:: \dfrac{\sum^n{x_i}}{n} + * + * ``PROD`` + * Product of all elements + * + * .. math:: \prod{x_i} + * + * ``LOGSUM`` + * Sum of the natural logarithm of all elements + * + * .. math:: \sum{\ln{(x_i + \epsilon)}} + * + * ``SUMSQUARE`` + * Sum of squares of all elements + * + * .. math:: \sum{x^2} + * + * ``L1`` + * L1 normalization of all elements + * + * .. math:: ||x||_1 = \sum{|x_i|} + * + * ``L2`` + * L2 normalization of all elements + * + * .. math:: ||x||_2 = \sqrt{\sum{x_i^2}} + * + * ``MAX`` + * Maximum of all elements + * + * .. math:: \text{max}(x_i) + * + * ``MIN`` + * Minumum of all elements + * + * .. math:: \text{min}(x_i) + * + * ``ARGMAX`` + * Argument of the maximum of all elements + * + * .. 
math:: \text{argmax}(x_i) + * + */ + enum ReduceOperation { + + SUM = 0; + AVG = 1; + PROD = 2; + LOGSUM = 3; + SUMSQUARE = 4; + L1 = 5; + L2 = 6; + MAX = 7; + MIN = 8; + ARGMAX = 9; /// only supported with axis = C, H or W. + + } + ReduceOperation mode = 1; /// Specifies function used to reduce. + + /** + * Used if mode is ``LOGSUM``. + * Defaults to ``1e-6`` if not set or is set to ``0``. + */ + float epsilon = 2; + + enum ReduceAxis { + + CHW = 0; + HW = 1; + C = 2; + H = 3; + W = 4; + + } + + // The following mapping is used for interpreting this parameter: + // CHW = axis [-3, -2, -1], input must have rank at least 3. + // HW = axis [-2, -1], input must have rank at least 2. + // C = axis [-3] + // H = axis [-2] + // W = axis [-1] + ReduceAxis axis = 3; + +} + +/** + * A layer that crops the spatial dimensions of an input. + * If two inputs are provided, the shape of the second input is used as the reference shape. + * + * .. code:: + * + * y = CropLayer(x1) or y = CropLayer(x1,x2) + * + * Requires 1 or 2 inputs and produces 1 output. + * + * Input + * 1 or 2 tensors, each with rank at least 3, both inputs must have equal rank. + * Example: + * - 1 input case: A blob with shape ``[C, H_in, W_in]``. + * - 2 input case: 1st blob with shape ``[C, H_in, W_in]``, 2nd blob with shape ``[C, H_out, W_out]``. + * + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * + * Output + * Same rank as the inputs. + * e.g.: A blob with shape ``[C, H_out, W_out]``. + * + * If one input is used, output is computed as follows: + * + * .. code:: + * + * y = x1[:, topCropAmount:H_in - bottomCropAmount, leftCropAmount:W_in - rightCropAmount] + * + * topCropAmount == Height startEdgeSize == borderAmounts[0].startEdgeSize + * bottomCropAmount == Height endEdgeSize == borderAmounts[0].endEdgeSize + * leftCropAmount == Width startEdgeSize == borderAmounts[1].startEdgeSize + * rightCropAmount == Width endEdgeSize == borderAmounts[1].endEdgeSize + * + * H_out = H_in - topCropAmount - bottomCropAmount + * W_out = W_in - leftCropAmount - rightCropAmount + * + * If two inputs are used, output is computed as follows: + * + * .. code:: + * + * y = x1[:, offset[0]:offset[0] + H_out, offset[1]:offset[1] + W_out] + */ +message CropLayerParams { + + /** + * The amounts to be cropped from the input. + * Used only if a single input is provided. + */ + BorderAmounts cropAmounts = 1; + + /** + * The offset amounts. + * Used only if two inputs are provided. + * Must be of length 2, in order ``[H, W]``. + */ + repeated uint64 offset = 5; + +} + +/** + * A layer that computes the elementwise average of the inputs. + * This layer has limited broadcasting support. For general broadcasting see AddBroadcastableLayer. + * + * .. code:: + * + * y = AverageLayer(x1,x2,...) + * + * Requires multiple inputs and produces 1 output. + * + * Input + * In general, there are no rank constraints. + * However, only certain set of shapes are broadcastable. For example: + * [B, 1, 1, 1], [B, C, 1, 1], [B, 1, H, W], [B, C, H, W] + * Output + * A blob with the same shape as each input. + */ +message AverageLayerParams { + +} + +/** + * A layer that computes the elementwise maximum over the inputs. + * + * .. code:: + * + * y = MaxLayer(x1,x2,...) + * + * Requires multiple inputs and produces 1 output. + * + * Input + * In general, there are no rank constraints. + * However, only certain set of shapes are broadcastable. 
For example: + * [B, C, 1, 1], [B, C, H, W] + * Output + * A blob with the same shape as each input. + */ +message MaxLayerParams { + +} + +/** + * A layer that computes the elementwise minimum over the inputs. + * + * .. code:: + * + * y = MinLayer(x1,x2,...) + * + * Requires multiple inputs and produces 1 output. + * + * Input + * In general, there are no rank constraints. + * However, only certain set of shapes are broadcastable. For example: + * [B, C, 1, 1], [B, C, H, W] + * Output + * A blob with the same shape as each input. + */ +message MinLayerParams { + +} + +/** + * A layer that computes the dot product of two vectors. + * + * .. code:: + * + * y = DotProductLayer(x1,x2) + * + * Requires 2 inputs and produces 1 output. + * + * Input + * Two blobs with rank at least 3, such that the last two dimensions must be 1. + * e.g.: blobs with shape ``[B, C, 1, 1]``. + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * + * Output + * Same rank as the input. + * e.g. for rank 4 inputs, output shape: [B, 1, 1, 1] + */ +message DotProductLayerParams { + + /** + * If true, inputs are normalized first, + * thereby computing the cosine similarity. + */ + bool cosineSimilarity = 1; + +} + +/** + * A layer that performs mean variance normalization, along axis = -3. + * + * .. code:: + * + * y = MeanVarianceNormalizeLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank greater than equal to 3. + * Example: Rank 4 blob represents [Batch, channels, height, width] + * For ranks greater than 3, the leading dimensions, starting from 0 to -4 (inclusive), are all treated as batch. + * + * Output + * A blob with the same shape as the input. + * + * If ``acrossChannels == true`` + * normalization is performed on flattened input, i.e. the input is reshaped to (Batch,C), where "Batch" contains + * all dimensions from 0 to -4 (inclusive), and C contains dimensions -1, -2, -3. + * + * If ``acrossChannels == false`` + * normalization is performed within a channel, + * across spatial dimensions (i.e. last two dimensions). + */ +message MeanVarianceNormalizeLayerParams { + + /** + * If true, mean and variance are computed across channels. + */ + bool acrossChannels = 1; + + /** + * If false, only mean is subtracted. + */ + bool normalizeVariance = 2; + + /** + * A small constant to avoid division by 0 while normalizing variance. + * Defaults to ``1e-6`` if not set or set to ``0``. + */ + float epsilon = 3; + +} + +/** + * A layer that repeats a sequence or the dimension sitting at axis = -5 + * + * .. code:: + * + * y = SequenceRepeatLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A blob with rank at least 5. + * e.g: shape ``[Seq, B, C, H, W]`` + * Output + * A blob with the same rank as the input. + * e.g.: for input shape ``[Seq, B, C, H, W]``, output shape is ``[nRepetitions * Seq, B, C, H, W]``. + */ +message SequenceRepeatLayerParams { + + /** + * Number of repetitions. + * Defaults to ``1`` if not set or set to ``0``. + */ + uint64 nRepetitions = 1; + +} + +/// Recurrent Layers +/// ---------------- + +/* + * The following activations are supported with recurrent layers: + * - Linear + * - Sigmoid + * - Tanh + * - ReLU + * - Scaled Hyperbolic Tangent: alpha * tanh(beta * x), currently only supported for alpha = 1.7159, beta = 2/3 + * - Hard Sigmoid: min(max(alpha * x + beta, 0), 1), currently only supported for alpha = 0.2, beta = 0.5 + */ + +/** + * A simple recurrent layer. 
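+ *
+ * For reference, the two constrained activations listed above can be sketched as follows
+ * (illustrative only):
+ *
+ * .. code::
+ *
+ *     import numpy as np
+ *
+ *     def scaled_tanh(x, alpha=1.7159, beta=2.0 / 3.0):  # only these constants are supported
+ *         return alpha * np.tanh(beta * x)
+ *
+ *     def hard_sigmoid(x, alpha=0.2, beta=0.5):          # only these constants are supported
+ *         return np.minimum(np.maximum(alpha * x + beta, 0.0), 1.0)
+ *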
+ * + * .. code:: + * + * y_t = SimpleRecurrentLayer(x_t, y_{t-1}) + * + * Input + * A blob of rank 5, with shape `[Seq, Batch, inputVectorSize, 1, 1]``. + * This represents a sequence of vectors of size ``inputVectorSize``. + * Output + * Same rank as the input. + * Represents a vector of size ``outputVectorSize``. It is either the final output or a sequence of outputs at all time steps. + * + * - Output Shape: ``[1, Batch, outputVectorSize, 1, 1]`` , if ``sequenceOutput == false`` + * - Output Shape: ``[Seq, Batch, outputVectorSize, 1, 1]`` , if ``sequenceOutput == true`` + * + * This layer is described by the following equation: + * + * .. math:: + * \boldsymbol{y_t} = f(\mathrm{clip}(W \boldsymbol{x_t} + \ + * R \boldsymbol{y_{t-1}} + b)) + * + * - ``W`` is a 2-dimensional weight matrix + * (``[outputVectorSize, inputVectorSize]``, row-major) + * - ``R`` is a 2-dimensional recursion matrix + * (``[outputVectorSize, outputVectorSize]``, row-major) + * - ``b`` is a 1-dimensional bias vector (``[outputVectorSize]``) + * - ``f()`` is an activation + * - ``clip()`` is a function that constrains values between ``[-50.0, 50.0]`` + */ +message SimpleRecurrentLayerParams { + + uint64 inputVectorSize = 1; /// The size of the input vectors. + uint64 outputVectorSize = 2; /// The size of the output vectors. + + /** + * Activations supported are Linear, Sigmoid, Tanh, ReLU, Scaled Tanh (alpha = 1.71, beta = 2/3), Hard sigmoid (alpha = 0.2, beta = 0.5) + */ + ActivationParams activation = 10; /// The activation function. + + /** + If false output is just the result after final state update. + If true, output is a sequence, containing outputs at all time steps. + */ + bool sequenceOutput = 15; + + bool hasBiasVector = 20; /// If false, no bias is added. + + WeightParams weightMatrix = 30; /// Weight matrix W. + WeightParams recursionMatrix = 31; /// Recursion Weight matrix R. + WeightParams biasVector = 32; /// Bias vector b. + + bool reverseInput = 100; + // If true, then the node processes the input sequence from right to left + +} + +/** + * Gated-Recurrent Unit (GRU) Layer + * + * .. code:: + * + * y_t = GRULayer(x_t, y_{t-1}) + * + * Input + * A blob of rank 5, with shape `[Seq, Batch, inputVectorSize, 1, 1]``. + * This represents a sequence of vectors of size ``inputVectorSize``. + * Output + * Same rank as the input. + * Represents a vector of size ``outputVectorSize``. It is either the final output or a sequence of outputs at all time steps. + * + * - Output Shape: ``[1, Batch, outputVectorSize, 1, 1]`` , if ``sequenceOutput == false`` + * - Output Shape: ``[Seq, Batch, outputVectorSize, 1, 1]`` , if ``sequenceOutput == true`` + * + * This layer is described by the following equations: + * + * Update Gate + * .. math:: + * \boldsymbol{z_t} = \ + * f(\mathrm{clip}(W_z \boldsymbol{x_t} + \ + * R_z \boldsymbol{y_{t-1}} + b_z) + * + * Reset Gate + * .. math:: + * \boldsymbol{r_t} = \ + * f(\mathrm{clip}(W_r \boldsymbol{x_t} + \ + * R_r \boldsymbol{y_{t-1}} + b_r)) + * + * Cell Memory State + * .. math:: + * \boldsymbol{c_t} = \ + * \boldsymbol{y_{t-1}} \odot \boldsymbol{r_t} + * + * Output Gate + * .. math:: + * \boldsymbol{o_t} = \ + * g(\mathrm{clip}(W_o \boldsymbol{x_t} + \ + * R_o \boldsymbol{c_t} + b_o)) + * + * Output + * .. 
math:: + * \boldsymbol{y_t} = \ + * (1 - \boldsymbol{z_t}) \odot \boldsymbol{o_t} + \ + * \boldsymbol{z_t} \odot \boldsymbol{y_{t-1}} + * + * - ``W_z``, ``W_r``, ``W_o`` are 2-dimensional input weight matrices + * (``[outputVectorSize, inputVectorSize]``, row-major) + * - ``R_z``, ``R_r``, ``R_o`` are 2-dimensional recursion matrices + * (``[outputVectorSize, outputVectorSize]``, row-major) + * - ``b_z``, ``b_r``, ``b_o`` are 1-dimensional bias vectors + * (``[outputVectorSize]``) + * - ``f()``, ``g()`` are activations + * - ``clip()`` is a function that constrains values between ``[-50.0, 50.0]`` + * - ``⊙`` denotes the elementwise product of matrices + */ +message GRULayerParams { + + uint64 inputVectorSize = 1; /// Size of the input vectors. + uint64 outputVectorSize = 2; /// Size of the output vectors. + + /** + * 2 element array representing activations [f(), g()] in that order. + * Typical values used = [sigmoid, tanh]. + * Activations supported are Linear, Sigmoid, Tanh, ReLU, Scaled Tanh (alpha = 1.71, beta = 2/3), Hard sigmoid (alpha = 0.2, beta = 0.5) + */ + repeated ActivationParams activations = 10; + + /** + * If false output is just the result after final state update. + * If true, output is a sequence, containing outputs at all time steps. + */ + bool sequenceOutput = 15; + + /** + * If false, no biases (``b_z``, ``b_r``, ``b_o``) are added. + */ + bool hasBiasVectors = 20; + + WeightParams updateGateWeightMatrix = 30; /// Weight Matrix W_z. + WeightParams resetGateWeightMatrix = 31; /// Weight Matrix W_r. + WeightParams outputGateWeightMatrix = 32; /// Weight Matrix W_o. + + WeightParams updateGateRecursionMatrix = 50; /// Recursion Weight Matrix R_z. + WeightParams resetGateRecursionMatrix = 51; /// Recursion Weight Matrix R_r. + WeightParams outputGateRecursionMatrix = 52; /// Recursion Weight Matrix R_o. + + WeightParams updateGateBiasVector = 70; /// Bias vector b_z. + WeightParams resetGateBiasVector = 71; /// Bias vector b_r. + WeightParams outputGateBiasVector = 72; /// Bias vector b_o. + + /// If true, then the node processes the input sequence from right to left + bool reverseInput = 100; + +} + +/** + * Long short-term memory (LSTM) parameters. + * + * This is described by the following equations: + * + * Input Gate + * .. math:: + * \boldsymbol{i_t} = \ + * f(\mathrm{clip}(W_i \boldsymbol{x_t} + \ + * R_i \boldsymbol{y_{t-1}} + \ + * p_i \odot c_{t-1} + b_i)) + * + * Forget Gate + * .. math:: + * \boldsymbol{f_t} = \ + * f(\mathrm{clip}(W_f \boldsymbol{x_t} + \ + * R_f \boldsymbol{y_{t-1}} + \ + * p_f \odot c_{t-1} + b_f)) + * + * Block Input + * .. math:: + * \boldsymbol{z_t} = \ + * g(\mathrm{clip}(W_z \boldsymbol{x_t} + \ + * R_z \boldsymbol{y_{t-1}} + b_z)) + * + * Cell Memory State + * .. math:: + * \boldsymbol{c_t} = \ + * \boldsymbol{c_{t-1}} \odot \boldsymbol{f_t} + \ + * \boldsymbol{i_t} \odot \boldsymbol{z_t} + * + * Output Gate + * .. math:: + * \boldsymbol{o_t} = \ + * f(\mathrm{clip}(W_o \boldsymbol{x_t} + \ + * R_o \boldsymbol{y_{t-1}} + \ + * p_o \odot c_t + b_o)) + * + * Output + * .. 
math:: + * \boldsymbol{y_t} = \ + * h(\boldsymbol{c_t}) \odot \boldsymbol{o_t} + * + * - ``W_i``, ``W_f``, ``W_z``, ``W_o`` are 2-dimensional input weight matrices + * (``[outputVectorSize, inputVectorSize]``, row-major) + * - ``R_i``, ``R_f``, ``R_z``, ``R_o`` are 2-dimensional recursion matrices + * (``[outputVectorSize, outputVectorSize]``, row-major) + * - ``b_i``, ``b_f``, ``b_z``, ``b_o`` are 1-dimensional bias vectors + * (``[outputVectorSize]``) + * - ``p_``, ``p_f``, ``p_o`` are 1-dimensional peephole vectors + * (``[outputVectorSize]``) + * - ``f()``, ``g()``, ``h()`` are activations + * - ``clip()`` is a function that constrains values between ``[-50.0, 50.0]`` + * - ``⊙`` denotes the elementwise product of matrices + */ +message LSTMParams { + + /** + * If true, output is a sequence, containing outputs at all time steps. + * If false, output is just the result after final state update. + */ + bool sequenceOutput = 10; + + /** + * If false, no biases (``b_i``, ``b_f``, ``b_z``, ``b_o``) are added. + */ + bool hasBiasVectors = 20; + + /** + * If true, a vector of ``1`` values is added to ``b_f``. + */ + bool forgetBias = 30; + + /** + * If true, peephole vectors are included. + */ + bool hasPeepholeVectors = 40; + + /** + * If the coupled Input and Forget flag is on, the behaviour of + * ``c_t`` is changed to the following (i.e. forget gate is not used): + * + * .. math:: + * \boldsymbol{c_t} = \ + * \boldsymbol{c_{t-1}} \odot (1 - \boldsymbol{i_t}) + \ + * \boldsymbol{i_t} \odot \boldsymbol{z_t} + * + */ + bool coupledInputAndForgetGate = 50; + + /** + * Places a limit on the maximum and minimum values of ``c_t``. + * c_t = min(c_t, cellClipThreshold) + * c_t = max(c_t, -cellClipThreshold) + * If 0, it is set to its default value = 50.0. + */ + float cellClipThreshold = 60; + +} + +/** + * Weights for long short-term memory (LSTM) layers + */ +message LSTMWeightParams { + + WeightParams inputGateWeightMatrix = 1; /// Weight Matrix W_i. + WeightParams forgetGateWeightMatrix = 2; /// Weight Matrix W_f. + WeightParams blockInputWeightMatrix = 3; /// Weight Matrix W_z. + WeightParams outputGateWeightMatrix = 4; /// Weight Matrix W_o. + + WeightParams inputGateRecursionMatrix = 20; /// Recursion Weight Matrix R_i. + WeightParams forgetGateRecursionMatrix = 21; /// Recursion Weight Matrix R_f. + WeightParams blockInputRecursionMatrix = 22; /// Recursion Weight Matrix R_z. + WeightParams outputGateRecursionMatrix = 23; /// Recursion Weight Matrix R_o. + + //biases: + WeightParams inputGateBiasVector = 40; /// Bias vector b_i. + WeightParams forgetGateBiasVector = 41; /// Bias vector b_f. + WeightParams blockInputBiasVector = 42; /// Bias vector b_z. + WeightParams outputGateBiasVector = 43; /// Bias vector b_o. + + //peepholes: + WeightParams inputGatePeepholeVector = 60; /// Peephole vector p_i. + WeightParams forgetGatePeepholeVector = 61; /// Peephole vector p_f. + WeightParams outputGatePeepholeVector = 62; /// Peephole vector p_o. + +} + +/** + * A unidirectional long short-term memory (LSTM) layer. + * + * .. code:: + * + * (y_t, c_t) = UniDirectionalLSTMLayer(x_t, y_{t-1}, c_{t-1}) + * + * Input + * A blob of rank 5, with shape `[Seq, Batch, inputVectorSize, 1, 1]``. + * This represents a sequence of vectors of size ``inputVectorSize``. + * Output + * Same rank as the input. + * Represents a vector of size ``outputVectorSize``. It is either the final output or a sequence of outputs at all time steps. 
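+ *
+ * A compact NumPy sketch of one time step, following the LSTM equations above (no
+ * peepholes; passing the weights as dictionaries keyed by gate is purely illustrative and
+ * is not the storage layout used by ``LSTMWeightParams``):
+ *
+ * .. code::
+ *
+ *     import numpy as np
+ *
+ *     def sigmoid(v):
+ *         return 1.0 / (1.0 + np.exp(-v))
+ *
+ *     def lstm_step(x, y_prev, c_prev, W, R, b, clip=50.0):
+ *         def gate(name, act):
+ *             pre = W[name] @ x + R[name] @ y_prev + b[name]
+ *             return act(np.clip(pre, -clip, clip))
+ *         i = gate("i", sigmoid)    # input gate,  f()
+ *         f = gate("f", sigmoid)    # forget gate, f()
+ *         z = gate("z", np.tanh)    # block input, g()
+ *         c = c_prev * f + i * z    # cell state
+ *         o = gate("o", sigmoid)    # output gate, f()
+ *         return np.tanh(c) * o, c  # y_t = h(c_t) * o_t, with h() = tanh
+ *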
+ * + * - Output Shape: ``[1, Batch, outputVectorSize, 1, 1]`` , if ``sequenceOutput == false`` + * - Output Shape: ``[Seq, Batch, outputVectorSize, 1, 1]`` , if ``sequenceOutput == true`` + * + */ +message UniDirectionalLSTMLayerParams { + + uint64 inputVectorSize = 1; /// Size of the input vectors. + uint64 outputVectorSize = 2; /// Size of the output vectors. + + /** + * 3 element array representing activations [f(),g(),h()] in that order. + * Typical values used = [sigmoid, tanh, tanh]. + * Activations supported are Linear, Sigmoid, Tanh, ReLU, Scaled Tanh (alpha = 1.71, beta = 2/3), Hard sigmoid (alpha = 0.2, beta = 0.5) + */ + repeated ActivationParams activations = 10; + + LSTMParams params = 15; + + LSTMWeightParams weightParams = 20; /// Weights, biases and peepholes. + + /// If true, then the node processes the input sequence from right to left + bool reverseInput = 100; + +} + +/** + * Bidirectional long short-term memory (LSTM) layer + * + * .. code:: + * + * (y_t, c_t, y_t_reverse, c_t_reverse) = BiDirectionalLSTMLayer(x_t, y_{t-1}, c_{t-1}, y_{t-1}_reverse, c_{t-1}_reverse) + * + * Input + * A blob of rank 5, with shape `[Seq, Batch, inputVectorSize, 1, 1]``. + * This represents a sequence of vectors of size ``inputVectorSize``. + * Output + * Same rank as the input. + * Represents a vector of size ``2 * outputVectorSize``. It is either the final output or a sequence of outputs at all time steps. + * + * - Output Shape: ``[1, Batch, 2 * outputVectorSize, 1, 1]`` , if ``sequenceOutput == false`` + * - Output Shape: ``[Seq, Batch, 2 * outputVectorSize, 1, 1]`` , if ``sequenceOutput == true`` + * + * + * The first LSTM operates on the input sequence in the forward direction. + * The second LSTM operates on the input sequence in the reverse direction. + * + * Example: given the input sequence ``[x_1, x_2, x_3]``, + * where ``x_i`` are vectors at time index ``i``: + * + * The forward LSTM output is ``[yf_1, yf_2, yf_3]``, + * + * where ``yf_i`` are vectors of size ``outputVectorSize``: + * + * - ``yf_1`` is the output at the end of sequence {``x_1``} + * - ``yf_2`` is the output at the end of sequence {``x_1``, ``x_2``} + * - ``yf_3`` is the output at the end of sequence {``x_1``, ``x_2``, ``x_3``} + * + * The backward LSTM output: ``[yb_1, yb_2, yb_3]``, + * + * where ``yb_i`` are vectors of size ``outputVectorSize``: + * + * - ``yb_1`` is the output at the end of sequence {``x_3``} + * - ``yb_2`` is the output at the end of sequence {``x_3``, ``x_2``} + * - ``yb_3`` is the output at the end of sequence {``x_3``, ``x_2``, ``x_1``} + * + * Output of the bi-dir layer: + * + * - if ``sequenceOutput = True`` : { ``[yf_1, yb_3]``, ``[yf_2, yb_2]``, ``[yf_3, yb_1]`` } + * - if ``sequenceOutput = False`` : { ``[yf_3, yb_3]`` } + */ +message BiDirectionalLSTMLayerParams { + + /** + * Size of the input vectors. + */ + uint64 inputVectorSize = 1; + /** + * Size of the outputs vectors. + * It is same for both forward and backward LSTMs. + */ + uint64 outputVectorSize = 2; + + /** + * 3 element array representing activations [f(),g(),h()] in that order. + * Typical values used = [sigmoid, tanh, tanh]. + * Activations supported are Linear, Sigmoid, Tanh, ReLU, Scaled Tanh (alpha = 1.71, beta = 2/3), Hard sigmoid (alpha = 0.2, beta = 0.5) + */ + repeated ActivationParams activationsForwardLSTM = 10; + /** + * Currently, backward LSTM activations + * must be same as the ones for the forward LSTM. 
+ */ + repeated ActivationParams activationsBackwardLSTM = 11; + + /** + * Common parameters shared by the forward and backward LSTMs. + */ + LSTMParams params = 15; + + /** + * Weights and biases. + * Must be a length 2 message, + * for the forward and backward LSTM respectively. + */ + repeated LSTMWeightParams weightParams = 20; + +} + +message CustomLayerParams { + + message CustomLayerParamValue { + oneof value { + double doubleValue = 10; + string stringValue = 20; + int32 intValue = 30; + int64 longValue = 40; + bool boolValue = 50; + } + } + + string className = 10; // The name of the class (conforming to MLCustomLayer) corresponding to this layer + repeated WeightParams weights = 20; // Any weights -- these are serialized in binary format and memmapped at runtime + map<string, CustomLayerParamValue> parameters = 30; // these may be handled as strings, so this should not be large + string description = 40; // An (optional) description of the layer provided by the model creator. This information is displayed when viewing the model, but does not affect the model's execution on device. + +} + +/** + * A layer that rearranges the dimensions and data of an input. + * + * .. code:: + * + * y = TransposeLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * A N-Dimensional tensor. + * Output + * A N-Dimensional tensor of the same rank but with dimensions and data permuted according to axes. + * Shape: ``[InputShape[axis[0]], InputShape[axis[1]], ... , InputShape[axis[N-1]]]`` + * + * Examples: + * + * - If ``axes`` is set to ``[3, 1, 2, 0]`` and the input shape is ``[6,7,8,9]``, + * then the output has shape ``[9,7,8,6]`` + */ + +message TransposeLayerParams { + + /** + * Length of "axes" should match the rank of input & output tensor + * "axes" should be a permutation of "[0,1,2,...,N-1]" where N is the rank. + */ + repeated uint64 axes = 1; // + +} + +/** + * A layer that computes the matrix multiplication of two tensors with numpy-like broadcasting + * where the matrices reside in the last two indices of the tensor. + * + * .. code:: + * + * y = BatchedMatMul(a,b) + * + * Requires 1 or 2 inputs and produces 1 output. + * + * The first tensor, "a", must be provided as an input. The second tensor can either be an input or provided as a weight matrix parameter. + * + * Input + * - a: First N-Dimensional tensor + * - b: Second N-Dimensional tensor (either a rank-N input or a matrix, i.e. N=2, provided as a layer parameter) + * + * Output + * A tensor containing the matrix product of two tensors. + * When there are two inputs: rank is max(2, rank(a), rank(b)) + * When there is one input: rank is same as that of the input. + * + * This operation behaves as following: + * + * When there are two inputs: + * - If N >= 2 for both tensors, it is treated as a batch of matrices residing in the last two indices. + * All the indices, except for the last two, are broadcasted using conventional rules. + * - If the first tensor is 1-D, it is converted to a 2-D tensor by prepending a 1 to its shape. Eg. (D) -> (1,D) + * - If the second tensor is 1-D, it is converted to a 2-D tensor by appending a 1 to its shape. Eg. (D) -> (D,1) + * + * When there is one input: + * - The weight matrix corresponds to a matrix, of shape (X1, X2). Values of X1, X2 must be provided as layer parameters. + * - The input, "a", is reshaped into a matrix by combining all the leading dimensions, except the last, into a batch dimension. eg: + * - if "a" is rank 1 (X1,) --> (1, X1). 
Output shape will be (X2,) + * - if "a" is rank 2 (B1, X1) --> no need to reshape. Output shape will be (B1, X2) + * - if "a" is rank 3 (B1, B2, X1) --> (B1 * B2, X1). Output shape will be (B1, B2, X2) + * - etc + */ +message BatchedMatMulLayerParams { + + /** + * If transposeA is true, it transposes the left matrix on the fly before matrix multiplication. + * (is ignored when there is one input) + */ + bool transposeA = 1; + /** + * If transposeB is true, it transposes the right matrix on the fly before matrix multiplication. + * (is ignored when there is one input) + */ + bool transposeB = 2; + + /* + * Following parameters are ignored when there are two inputs. + */ + + uint64 weightMatrixFirstDimension = 5; /// X1: same as the last dimension of the input tensor + uint64 weightMatrixSecondDimension = 6; /// X2: same as the last dimension of the output tensor + + bool hasBias = 7; /// Whether a bias is added or not. Supported only when there is one input. + + /* + * Weight matrix representing shape [X1, X2]. + * Values are however stored in column major order, + * in the "repeated float" or "bytes" fields of the message "WeightParams" + */ + WeightParams weights = 8; + WeightParams bias = 9; /// Bias vector [X2]. Supported only when there is one input. + + /** + * If set, this layer, at runtime, quantizes the floating point input blob to int8 before applying the + * matrix multiplication using the INT8 weight parameters provided in weights->int8RawValue. The + * result is then dequantized. + * Requires: + * * number of inputs to be 1 + * * hasBias == false + * * QuantizationType == LinearQuantizationParams, such that + * * size of the "scale" field is 1 and "bias" field is empty in "LinearQuantizationParams" + * * numberOfBits == 8 + * * weights->rawValue_size to be empty + */ + bool int8DynamicQuantize = 10; + +} + +/** + * A layer that concatenates a list of tensors along a specified axis. + * + * .. code:: + * + * y = ConcatNDLayer(x1,x2,....) + * + * Requires at least 2 input and produces 1 output. + * + * Input + * The rank of the input tensors must match and all dimensions also must match, except for the dimension 'axis'. + * + * + * Output + * Same rank as the input. The dimension along "axis", is the sum of the dimensions of the inputs. + * + * example: + * + * in1 : shape (3, 2), value = [[1, 2], [3, 4], [5, 6]] + * in2 : shape (3, 2), value = [[7, 8], [9, 10], [11, 12]] + * axis = 0 + * + * if interleave = False (default) + * output : shape (6, 2) + * output[0:3, :] = in1 + * output[3:6, :] = in2 + * value = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12]] + * + * if interleave = True + * output : shape (6, 2) + * output[0::2, :] = in1 + * output[1::2, :] = in2 + * value = [[1, 2], [7, 8], [3, 4], [9, 10], [5, 6], [11, 12]] + * + */ +message ConcatNDLayerParams { + + /** + * Dimension along which to concatenate. Supports negative values of the parameter 'axis'. + */ + int64 axis = 1; + + /** + * (Only available in Core ML Specification >= 5 (iOS >= 14, macOS >= 11.0) + * Interleave option. If True, concatenation is done via interleaving the inputs. + * This requires all inputs to have the exact same shape. + */ + bool interleave = 2; + + +} + +/** + * A layer that performs softmax normalization along a specified axis. + * + * .. code:: + * + * y = SoftmaxNDLayer(x) + * + * Requires 1 input and produces 1 output. + * + * Output shape is same as the input. + */ +message SoftmaxNDLayerParams { + + /** + * Dimension on which the softmax would be performed. 
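As an informal illustration of softmax taken along an arbitrary (possibly negative) axis, a small numpy sketch; the helper name is illustrative and not part of the specification:

    import numpy as np

    def softmax_nd(x, axis=-1):
        # Subtract the per-slice max for numerical stability, then normalize along `axis`.
        e = np.exp(x - np.max(x, axis=axis, keepdims=True))
        return e / np.sum(e, axis=axis, keepdims=True)

    y = softmax_nd(np.random.rand(2, 3, 4), axis=-1)   # slices along the last axis sum to 1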
Supports negative values of the parameter 'axis'. + */ + int64 axis = 1; + +} + +/** + * A layer that reverses specific dimensions of the input tensor. + * It is similar in functionality to the numpy.flip method. + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + */ +message ReverseLayerParams { + + /** + * Reverses each dimension of the input tensor for which corresponding reverseDim is set to True. + * Requires len(reverseDim) == rank(inputTensor) + */ + repeated bool reverseDim = 1; + +} + +/** + * A layer that reverses variable length slices. + * + * Requires 2 inputs and produces 1 output. + * + * 2 inputs, in order are denoted by "data", "seq_lengths". + * "seq_lenghts" must be a rank 1 tensor, i.e. seq_lengths.shape = (B,) + * which contains the lengths of the amount of sequence to be reversed, for each element of the batch. + * Dimension "batchAxis" in "data" must be equal to B, i.e, + * data.shape[batchAxis] = B. + * + * According to the batch axis, input "data" is first divided into a batch of B inputs, + * each of which is flipped along the dimension "sequenceAxis", by the amount specified in + * "seq_lengths", the second input. + * + * e.g.: + * + * data [shape = (2,4)]: + * [0 1 2 3] + * [4 5 6 7] + * seq_lengths [shape = (2,)]: + * [3, 0] + * batchAxis = 0 + * sequenceAxis = 1 + * + * output [shape = (2,4)]: + * [2 1 0 3] + * [4 5 6 7] + * + * + * data [shape = (2,3,2)]: + * [0 1] + * [2 3] + * [4 5] (slice = 0) + * [6 7] + * [8 9] + * [10 11] (slice = 1) + * seq_lengths [shape = (2,)]: + * [2, 3] + * batchAxis = 0 + * sequenceAxis = 1 + * + * output [shape = (2,3,2)]: + * [2 3] + * [0 1] + * [4 5] (slice = 0) + * [10 11] + * [8 9] + * [6 7] (slice = 1) + * + * Output shape is same as the input. + */ +message ReverseSeqLayerParams { + + int64 batchAxis = 1; // batch axis has to be strictly less than seq_axis + int64 sequenceAxis = 2; + +} + +/** + * A layer that loads data as a parameter and provides it as an output. + * + * .. code:: + * + * y = LoadConstantNDLayer() + * + * Requires no input and produces 1 output. + * + * Output: A tensor with shape as provided in the parameter "shape" + */ +message LoadConstantNDLayerParams { + + /** + * The shape of the constant to be loaded. + */ + repeated uint64 shape = 1; + WeightParams data = 2; + +} + +/** + * A layer that generates an output tensor with a constant value. + * Input is only used to determine the shape of the output. + * This layer is used to allocate a tensor with a dynamic shape (that of the input) and constant value. + * + * Requires 1 input and produces 1 output. + * + * .. code:: + * + * y = FillLikeLayer(x) + * + * Input + * A N-Dimensional tensor, whose values are ignored. Only the shape is used to + * infer the shape of the output. + * + * Output + * A N-Dimensional tensor with the same shape as the input tensor. + * + */ +message FillLikeLayerParams { + + float value = 1; + +} + +/** + * A layer that generates an output tensor with a constant value. + * This layer is used to allocate a tensor with a static shape and constant value. + * + * Requires no input and produces 1 output. + * + * .. code:: + * + * y = FillStaticLayer(x) + * + * Output + * A N-Dimensional tensor of shape "targetShape". + * + */ +message FillStaticLayerParams { + + float value = 1; + repeated uint64 targetShape = 2; + +} + +/** + * A layer that generates an output tensor with a constant value. 
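Referring back to the ReverseSeq examples above, here is a rough numpy sketch of that behaviour; the helper name and argument defaults are illustrative only, not part of the specification:

    import numpy as np

    def reverse_seq(data, seq_lengths, batch_axis=0, sequence_axis=1):
        # Flip the first seq_lengths[b] entries of each batch slice along sequence_axis.
        out = np.array(data, copy=True)
        view = np.moveaxis(out, (batch_axis, sequence_axis), (0, 1))
        for b, n in enumerate(seq_lengths):
            view[b, :n] = view[b, :n][::-1].copy()
        return out

    data = np.array([[0, 1, 2, 3], [4, 5, 6, 7]])
    print(reverse_seq(data, [3, 0]))   # expected: [[2 1 0 3], [4 5 6 7]]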
+ * This layer is used to allocate a tensor with a dynamic shape (as specified by the input) and constant value. + * + * Requires 1 input and produces 1 output. + * + * .. code:: + * + * y = FillDynamicLayer(x) + * + * Input + * A rank 1 tensor specifying the shape of the output + * + * Output + * An N-Dimensional tensor with the shape specified by the values in the input tensor. + * + */ +message FillDynamicLayerParams { + + float value = 1; + +} + +/** + * A layer that returns the elements either from tensor x or tensor y, + * depending on the value in the condition tensor. + * It is similar in functionality to the numpy.where method with 3 inputs. + * + * Requires 3 inputs and produces 1 output. + * Inputs, in order, are the condition tensor, x and y. + * + * for each vector index (i,...,j): + * output[i,...,j] = x[i,...,j] if condition[i,...,j] = True + * y[i,...,j] if condition[i,...,j] = False + * + * All the 3 inputs are first broadcasted to a common shape. + * (the shapes must be broadcastable) + * + * output.rank = max(input[0].rank, input[1].rank, input[2].rank) + * + */ +message WhereBroadcastableLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric sine function. + * + * + * .. code:: + * + * y = SinLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message SinLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric cosine function. + * + * + * .. code:: + * + * y = CosLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message CosLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric tangent function. + * + * + * .. code:: + * + * y = TanLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message TanLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric arcsine function. + * + * + * .. code:: + * + * y = AsinLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message AsinLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric arccosine function. + * + * + * .. code:: + * + * y = AcosLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message AcosLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric arctangent function. + * + * + * .. code:: + * + * y = AtanLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message AtanLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric hyperbolic sine function. + * + * + * .. code:: + * + * y = SinhLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message SinhLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric hyperbolic cosine function. + * + * + * .. code:: + * + * y = CoshLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message CoshLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric hyperbolic tangent function. + * + * + * .. code:: + * + * y = TanhLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message TanhLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric hyperbolic arcsine function. + * + * + * .. 
code:: + * + * y = AsinhLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message AsinhLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric hyperbolic arccosine function. + * + * + * .. code:: + * + * y = AcoshLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message AcoshLayerParams { + +} + +/** + * A layer that computes elementwise trigonometric hyperbolic arctangent function. + * + * + * .. code:: + * + * y = AtanhLayer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message AtanhLayerParams { + +} +/** + * A layer that raises each element in first tensor to the power of + * corresponding element in the second tensor. + * Supports conventional numpy-like broadcasting. + * + * .. code:: + * + * y = PowBroadcastableLayer(x) + * + * Requires 2 inputs and produces 1 output. + * + * Input + * - First N-Dimensional tensor + * - Second N-Dimensional tensor + * + * Output + * An N-Dimensional tensor with the broadcast shape. + * + */ +message PowBroadcastableLayerParams { + +} + +/** + * A layer that computes the exponential of all elements in the input tensor, with the base 2. + * + * + * .. code:: + * + * y = Exp2Layer(x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message Exp2LayerParams { + +} + +/** + * A layer that returns a tensor containing the indices of all non-zero + * elements of input tensor. + * It is similar in functionality to the numpy.where method with 1 input. + * + * Requires 1 input and produces 1 output. + * Output is of rank 2, of shape (N,R), + * where N is the number of non-zero elements in the input and R is the rank of the input. + * + * Output contains indices represented in the multi-index form + * + * e.g.: + * input {shape = (4,)}: + * [0 1 0 2] + * output {shape = (2,1)}: + * [1] + * [3] + * + * + * input {shape = (3, 3)}: + * [1 2 1] + * [0 2 2] + * [2 1 0] + * output {shape = (7,1)}: + * [0. 0.] + * [0. 1.] + * [0. 2.] + * [1. 1.] + * [1. 2.] + * [2. 0.] + * [2. 1.] + * + */ +message WhereNonZeroLayerParams { + +} + +/** + * A layer that copies a tensor setting everything outside a central band in + * each inner-most matrix to zero. + * + * Requires 1 input and produces 1 output. + * + * Parameters for matrix_band_part layer + * band(m, n) = (num_lower < 0 || (m-n) <= num_lower) && (num_upper < 0 || (n-m) <= num_upper). + * output[i, j, k, ..., m, n] = band(m, n) * input[i, j, k, ..., m, n] + * + * + * Output shape is same as the input shape. + * Rank of the input must be at least 2. + * For rank higher than 2, the last 2 dimensions are treated as the matrix, while the rest are treated as batch. + */ +message MatrixBandPartLayerParams { + + int64 numLower = 1; + int64 numUpper = 2; + +} + +/** + * A layer that copies a tensor setting everything outside upper triangular to zero. + * + * Requires 1 input and produces 1 output. + * + * Output shape is same as the input shape. + * Rank of the input must be at least 2. + * For rank higher than 2, the last 2 dimensions are treated as the matrix, while the rest are treated as batch. + */ +message UpperTriangularLayerParams { + + int64 k = 1; // Diagonal below which to zero elements. k = 0 (the default) is the main diagonal, k < 0 is below it and k > 0 is above + +} + +/** + * A layer that copies a tensor setting everything outside lower triangular to zero. 
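A compact numpy sketch of the ``band(m, n)`` rule defined for MatrixBandPartLayerParams above; the one-sided calls correspond to the upper and lower triangular layers with ``k = 0``. The helper name is illustrative, not part of the specification:

    import numpy as np

    def matrix_band_part(x, num_lower, num_upper):
        # Keep band(m, n) as defined above; a negative bound leaves that side unrestricted.
        rows, cols = x.shape[-2], x.shape[-1]
        m = np.arange(rows)[:, None]
        n = np.arange(cols)[None, :]
        keep = ((num_lower < 0) | (m - n <= num_lower)) & ((num_upper < 0) | (n - m <= num_upper))
        return x * keep

    x = np.arange(16.0).reshape(4, 4)
    upper = matrix_band_part(x, 0, -1)   # upper triangular, k = 0
    lower = matrix_band_part(x, -1, 0)   # lower triangular, k = 0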
+ * + * Requires 1 input and produces 1 output. + * + * Output shape is same as the input shape. + * Rank of the input must be at least 2. + * For rank higher than 2, the last 2 dimensions are treated as the matrix, while the rest are treated as batch. + */ +message LowerTriangularLayerParams { + + int64 k = 1; // Diagonal above which to zero elements. k = 0 (the default) is the main diagonal, k < 0 is below it and k > 0 is above + +} + +/** + * + * A layer that broadcasts a tensor to a new shape. + * + * Requires 2 inputs and produces 1 output. + * + * First input is broadcast to produce the output, while the second input is only + * used to determine the shape of the output. Values of second input are not used. + * + * Output is a tensor with the same shape as the second input. + * + */ +message BroadcastToLikeLayerParams { + +} + +/** + * + * A layer that broadcasts a tensor to a new shape. + * + * Requires 1 input and produces 1 output. + * + * Output tensor is the broadcasted version of the input and has shape as specified in the + * parameter "targetShape". + */ +message BroadcastToStaticLayerParams { + + repeated uint64 targetShape = 1; + +} + +/** + * + * A layer that broadcasts a tensor to a new shape. + * + * Requires 2 inputs and produces 1 output. + * + * First input is the one that is broadcasted to produce the output. + * Second input is a rank 1 tensor specifying the shape of the output. + * Output tensor has shape as specified by the values in the 2nd input tensor. + */ +message BroadcastToDynamicLayerParams { + +} + +/** + * A layer that performs element-wise addition operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message AddBroadcastableLayerParams { + +} + +/** + * A layer that performs element-wise maximum operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message MaxBroadcastableLayerParams { + +} + +/** + * A layer that performs element-wise minimum operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message MinBroadcastableLayerParams { + +} + +/** + * A layer that performs element-wise modular operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message ModBroadcastableLayerParams { + +} + +/** + * A layer that performs element-wise floor division operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message FloorDivBroadcastableLayerParams { + +} + +/** + * A layer that performs element-wise subtract operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message SubtractBroadcastableLayerParams { + +} + +/** + * A layer that performs element-wise multiply operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message MultiplyBroadcastableLayerParams { + +} + +/** + * A layer that performs element-wise division operation with broadcast support. + * + * Requires 2 inputs and produces 1 output. + */ +message DivideBroadcastableLayerParams { + +} + +/** + * Gather layer that gathers elements from the first input, along a specified axis, + * at indices specified in the second input. + * It is similar in functionality to the numpy.take method. + * + * Requires 2 inputs and produces 1 output. + * + * Given two inputs, 'data' and 'indices', gather the slices of 'data' + * and store into output. + * e.g. 
+ * for i in [0, length(indices) - 1] + * output[i] = data[indices[i]] (1-D case, axis=0) + * + * if axis = 0: + * for each vector index (i,...,j) + * output[i,...,j,:,..,:] = data[indices[i,...,j],:,..,:] + * + * output.rank = (data.rank - 1) + indices.rank + * + * Negative indices and negative axis are supported. + * + * e.g: + * + * data shape = (2, 3) + * indices shape = (6, 8) + * axis = 0 + * output shape = (6, 8) + (3,) = (6, 8, 3) + * + * data shape = (2, 3, 5) + * indices shape = (6, 8) + * axis = 1 + * output shape = (2,) + (6, 8) + (5,) = (2, 6, 8, 5) + * + */ +message GatherLayerParams { + + int64 axis = 1; + +} + +/* + * Scatter accumulation mode. + */ +enum ScatterMode { + + SCATTER_UPDATE = 0; + SCATTER_ADD = 1; /// add + SCATTER_SUB = 2; /// subtract + SCATTER_MUL = 3; /// multiply + SCATTER_DIV = 4; /// divide + SCATTER_MAX = 5; /// maximum + SCATTER_MIN = 6; /// minimum + +} + +/* + * A layer that scatters data into a new tensor according to indices from the input. + * This is the inverse operation of Gather. + * + * Requires 3 inputs and produces 1 output. + * + * Output is initialized with the first input. + * Then updated with the values in the third input, at indices specified by the second input. + * + * An example when axis=0: + * Given three inputs, in order, "container", "indices", "updates", where + * + * - "container" is a rank R+1 tensor of shape [D_0, D_1, ..., D_R], which + * contains D_0 number of tensors, each with shape [D_1, ..., D_R]. + * + * - "indices" is a rank 1 tensor with shape [N], where N is the number of updates. + * The values in this tensor must be in the range [0, D_0 - 1]. (negative indexing is supported) + * + * - "updates" is a rank R+1 tensor with shape [N, D_1, ..., D_R], which represents + * a total number of N tensors, each of shape [D_1, ..., D_R]. + * + * The effect of this operation is as follows: + * + * output = container; + * For each i in 0, ..., N - 1 + * output[indices[i], :, ..., :] = updates[i, :, ..., :] // if mode == "SCATTER_UPDATE" + * + * or + * For each i in 0, ..., N - 1 + * output[indices[i], :, ..., :] += updates[i, :, ..., :] // if mode == "SCATTER_ADD" + * + * etc + * + * When "indices" is a tensor of rank greater than 1, the equation becomes (for axis=0): + * For each vector index (i,...,j) + * output[indices[i,...,j],...] -= updates[i,...,j,...] // if mode == "SCATTER_SUB" + * + * + * The output has the same shape as the first input. + * "indices" input must have rank less than or equal to the "updates" input and its shape + * must be a subset of the the shape of the "updates" input. + * + * e.g: + * + * container shape = (4, 3) + * indices shape = (5, 2, 3) + * updates shape = (4, 5, 2, 3) + * axis = 1 + * output shape = (4, 3) + * + * container shape = (4, 4, 3) + * indices shape = (6,) + * updates shape = (4, 6, 3) + * axis = -2 + * output shape = (4, 4, 3) + * + * container shape = (5,) + * indices shape = (5, 7, 5, 6) + * updates shape = (5, 7, 5, 6) + * axis = -1 + * output shape = (5,) + */ + +message ScatterLayerParams { + + int64 axis = 1; + ScatterMode mode = 2; /// mode of accumulation. + +} + +/** + * A layer that gathers elements from the first input, 'params', at the multi-indices specified + * by the second input, 'indices'. + * + * Requires 2 inputs and produces 1 output. + * + * 'params' = input[0], 'indices' = input[1] + * + * 'indices' is a rank K+1 tensor of shape [I_0, I_1, .., I_(K-1), I_K] which is viewed as a collection of + * indices of (I_0 * I_1 * ... 
* I_(K-1)) points in the I_K dimensional space. For instance, the multi-index of the first point + * is indices[0,0,...,0,:]. + * + * Here is how the output is constructed: + * + * for i = 0,1,...,(I_0-1) + * ... + * for j = 0,1,....,(I_(K-1)-1) + * output[i,....,j,:,:,..,:] = params[indices[i,...,j,:], :,:,..,:] + * + * Hence, output shape is [I_0, I_1,...,I(K-1)] + params.shape[I_K:] + * + * output.rank = indices.rank - 1 + params.rank - indices.shape[-1] + * + * e.g: + * + * input[0] shape = (4, 2, 3, 4) + * input[1] shape = (6, 2) + * output shape = (6,) + (3, 4) = (6, 3, 4) + * + * input[0] shape = (3, 3, 3, 4, 7) + * input[1] shape = (3, 5) + * output shape = (3,) + () = (3,) + * + * input[0] shape = (5, 3, 2, 5) + * input[1] shape = (2, 7, 3, 2) + * output shape = (2, 7, 3) + (2, 5) = (2, 7, 3, 2, 5) + * + */ +message GatherNDLayerParams { + +} + +/* + * A layer that scatters data into a new tensor according to multi-indices from the input. + * This is the inverse operation of GatherND. + * + * Requires 3 inputs and produces 1 output. + * 3 inputs, in order are denoted as "container", "indices", "updates". + * + * 'indices' is a rank K+1 tensor of shape [I_0, I_1, .., I_(K-1), I_K] which is viewed as a collection of + * indices of (I_0 * I_1 * ... * I_(K-1)) points in the I_K dimensional space. For instance, the multi-index of the first point + * is indices[0,0,...,0,:]. + * + * container.rank >= I_K + * updates.rank = K + (container.rank - I_K) + * shape of 'updates' = [I_0, I_1,...,I(K-1)] + container.shape[I_K:] + * + * output = container + * For each vector index (i,...,j) s.t. 0<=i shape: (3,) + * reps = N/A [Ignored] + * output shape = (2, 8, 12) + * + */ +message TileLayerParams { + + repeated uint64 reps = 1; + +} + +/** + * A layer that returns the shape of an input tensor. + * + * Requires 1 input and produces 1 output. + * + * Input: a tensor. + * Output: a vector of length R, where R is the rank of the input tensor + * Output is always a rank 1 tensor. + */ +message GetShapeLayerParams { + +} + +/** + * A layer that computes the Gauss error function, + * which is defined as: + * + * .. math:: + * f(x) = \dfrac{1}{\sqrt{\pi}}\int_{-x}^{x}{e^{-t^2}dt} + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + */ +message ErfLayerParams { + +} + +/** + * A layer that evaluates the Gaussian Error Linear Unit (GELU) activation. + * Following equations are used to compute the activation based on the value of the "mode" parameter: + * + * mode == 'EXACT': + * .. math:: + * f(x) = 0.5x\left ( 1+\rm{erf}\left ( \frac{x}{\sqrt{2}} \right ) \right ) + * + * mode == 'TANH_APPROXIMATION': + * .. math:: + * f(x) = 0.5x\left ( 1+\rm{tanh}\left ( \sqrt{2/\pi}\left ( x + 0.044715x^3 \right ) \right ) \right ) + * + * mode == 'SIGMOID_APPROXIMATION': + * .. math:: + * f(x) = x*\rm{sigmoid}(1.702x) + * + * Requires 1 input and produces 1 output. + * Output shape is same as the input. + * + */ +message GeluLayerParams { + + enum GeluMode { + + EXACT = 0; + TANH_APPROXIMATION = 1; + SIGMOID_APPROXIMATION = 2; + + } + + GeluMode mode = 1; /// mode of GELU operation. + +} + +/** + * RangeStatic layer that returns a tensor that contains evenly spaced values. + * It is similar in functionality to the numpy.arange method. + * + * Requires no input and produces 1 output. + * Output is a rank 1 tensor. 
+ */ +message RangeStaticLayerParams { + + float endValue = 1; + float startValue = 2; + float stepSizeValue = 3; + +} + +/** + * A layer that returns a tensor that contains evenly spaced values. + * Its functionality is similar to the numpy.arange method. + * + * Requires at least 1 input, up to a maximum of 3 inputs. + * Produces 1 output, which is a rank 1 tensor. + * + * Each input must be a scalar, or rank 1 and shape (1,). + * + * The first input represents the "endValue". + * The second input, if present, corresponds to "startValue". In this case the value of the "startValue" parameter is ignored. + * The third input, if present, corresponds to "stepSizeValue". In this case the value of the "stepSizeValue" parameter is ignored. + * + */ +message RangeDynamicLayerParams { + + float startValue = 2; + float stepSizeValue = 3; + +} + +/** + * A layer that returns a tensor containing all windows of size ``windowSize`` + * separated by ``step`` along the dimension ``axis``. + * + * .. code:: + * + * y = SlidingWindows(x) + * + * Requires 1 input and produces 1 output. + * + * Input + * An N-Dimensional tensor. + * + * Output + * An (N+1)-Dimensional tensor. + * + * This operation behaves as following: + * - if axis = 0 & input is rank 1 (L,). Output shape will be (M, W). + * - if axis = 1 & input is rank 3 (B1, L, C1). Output shape will be (B1, M, W, C1) + * - if axis = 2 & input is rank 5 (B1, B2, L, C1, C2) --> (B1 * B2, L, C1 * C2) --> (B1 * B2, M, W, C1 * C2). Output shape will be (B1, B2, M, W, C1, C2) + * - etc. + * where + * - L, C, B refer to input length, feature dimension length & batch size respectively + * - W is the window size. + * - M is the number of windows/slices calculated as M = (L - W) / step + 1 + */ +message SlidingWindowsLayerParams { + + int64 axis = 1; + uint64 windowSize = 2; + uint64 step = 3; + +} + +/** + * A layer that applies layer normalization over the input tensor. + * + * Requires 1 input and produces 1 output. + * + * output = gamma * (input - computed_mean) / (sqrt(computed_variance + eps)) + beta + * + * Parameters + * normalizedShape: subset of the input shape, along with layer norm is performed, rest of the input shape is treated as the batch dimension. The mean and variance are computed for the input, over the last few dimensions as specified by the normalizedShape parameter. + * gamma: must have shape = "normalizedShape" + * beta: must have shape = "normalizedShape" + * eps: small constant to avoid division by 0 + * + * Output shape is same as the input. + * + * e.g.: + * input shape = (10,5) + * normalized shape = (5,) or (10,5) + * + * input shape = (10,5,6,7) + * normalized shape = (7,) or (6,7) or (5,6,7) or (10,5,6,7) + */ +message LayerNormalizationLayerParams { + + repeated int64 normalizedShape = 1; + float eps = 2; + WeightParams gamma = 3; + WeightParams beta = 4; + +} + +/** + * Non maximum suppression (NMS) layer. + * Applies the non maximum suppression algorithm to input bounding box coordinates. + * The effect of this layer is similar to the functionality of the "NonMaximumSuppression" + * model type (for details please see NonMaximumSuppression.proto) with a couple of differences. + * One, this is a layer in a neural network model, whereas that is a different model type. Second, + * this layer supports a batch of bounding boxes. + * + * The NMS layer requires at least 2 inputs, and up to a maximum of 5 inputs. It produces 4 outputs. 
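Referring back to the layer normalization formula above, a minimal numpy sketch, assuming ``normalizedShape`` covers the trailing dimensions of the input; the names are illustrative only:

    import numpy as np

    def layer_norm(x, gamma, beta, normalized_shape, eps=1e-5):
        # Normalize over the trailing dimensions given by normalized_shape.
        axes = tuple(range(x.ndim - len(normalized_shape), x.ndim))
        mean = x.mean(axis=axes, keepdims=True)
        var = x.var(axis=axes, keepdims=True)
        return gamma * (x - mean) / np.sqrt(var + eps) + beta

    x = np.random.rand(10, 5)
    y = layer_norm(x, gamma=np.ones(5), beta=np.zeros(5), normalized_shape=(5,))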
+ * Following is the description of inputs and outputs: + * + * input 1, shape (B,N,4): coordinates of N boxes, for a batch size B. + * input 2, shape (B,N,C): class scores for each box. C can be 1 when there is only 1 score per box, i.e., no class specific score. + * + * input 3, optional, shape (1,): IoU threshold. When present, it overwrites the value provided in layer parameter "iouThreshold". + * input 4, optional, shape (1,): Score threshold. When present, it overwrites the value provided in layer parameter "scoreThreshold". + * input 5, optional, shape (1,): Maximum number of boxes. When present, it overwrites the value provided in layer parameter "maxBoxes". + * + * output 1, shape (B,maxBoxes,4): box coordinates, corresponding to the surviving boxes. + * output 2, shape (B,maxBoxes,C): box scores, corresponding to the surviving boxes. + * output 3, shape (B,maxBoxes): indices of the surviving boxes. Hence it will have values in the range [0,N-1], except for padding. + * output 4, shape (B,): number of boxes selected after the NMS algorithm, for each batch. + * + * When surviving boxes are less than "maxBoxes", the first 3 outputs are padded. + * For the first two outputs, the padding is done using values 0, whereas for the third output the + * padding value used is -1, since the output values represent indices. + * + * If no box survives, that is, all the scores are below the "scoreThreshold", + * then for that batch, number of boxes (value of the fourth output) will be 1. The first 3 outputs will + * correspond to the box with the highest score. This is to avoid generating an "empty" output. + * + * The four values that describe the box dimensions are (in order): + * + * - x (center location of the box along the horizontal axis) + * - y (center location of the box along the vertical axis) + * - width (size of box along the horizontal axis) + * - height (size of box on along the vertical axis) + * + * In each batch, + * the N scores for N boxes, used for suppression, are generated by taking the max of the matrix (N,C) + * along the columns. + * If "perClassSuppression" flag is false, suppression happens across all classes. + * If "perClassSuppression" flag is true, each box is assigned to the class with the highest + * score and then the suppression happens separately for boxes within the same class. + * + * Note that the 4th output can be used to dynamically slice the first 3 outputs, in case + * the padded outputs are not required. + * + */ +message NonMaximumSuppressionLayerParams { + /** + * The intersection over union (IoU) threshold over which boxes are suppressed. + */ + float iouThreshold = 1; + + /** + * Before IoU suppression is performed, boxes with class scores below this threshold are rejected. + */ + float scoreThreshold = 2; + + /** + * The maximum number of boxes to be given out as output. + * If the number of surviving boxes are less, output is padded up to this number. + */ + uint64 maxBoxes = 3; + + /** + * If true, suppression is performed independently within boxes of each class. + */ + bool perClassSuppression = 4; +} + +/** + * A layer that performs element-wise clamped ReLU operation. + * + * Requires 1 input and produces 1 output. + * + * This function has the following formula: + * + * .. math:: + * f(x) = \begin{cases} + * \text{min}(\text{beta},x) \;\; \text{if} \;\; x \geq 0\\ + * \text{min}(\text{beta} ,\text{alpha}\cdot x) \;\; \text{if} \;\; x<0 + * \end{cases} + * + * Output shape is same as the input. 
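A small numpy sketch of the clamped ReLU formula above; the function name and sample values are illustrative only:

    import numpy as np

    def clamped_relu(x, alpha, beta):
        # min(beta, x) for x >= 0; min(beta, alpha * x) for x < 0
        return np.minimum(beta, np.where(x >= 0, x, alpha * x))

    print(clamped_relu(np.array([-2.0, -0.5, 0.5, 3.0]), alpha=0.1, beta=1.0))
    # expected: [-0.2 -0.05 0.5 1.]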
+ * + * Available (iOS >= 14, macOS >= 11.0, watchOS >= 7) + */ +message ClampedReLULayerParams { + + float alpha = 1; + float beta = 2; + +} + +/** +* A layer that returns the indices that would sort the input tensor, along a specified axis. +* +* Requires 1 input and produces 1 output. +* +* Output has the same rank and shape as the input. +* +* Value of "axis" must be positive and less than the rank of the input. +* +* e.g.: +* +* input shape = (5,) +* axis = 0 +* input values = [3.1, 5.4, 32.9, 3.2, 77.0] +* output shape = (5,) +* output values = [0, 3, 1, 2, 4], descending = False +* output values = [4, 2, 1, 3, 0], descending = True +* +* input shape = (2,3) +* axis = 1 +* input values = [[3, 5, 32], [3, 77, 6]] +* output shape = (2,3) +* output values = [[0, 1, 2], [0, 2, 1]], descending = False +* output values = [[2, 1, 0], [1, 2, 0]], descending = True +* +*/ +message ArgSortLayerParams { + + int64 axis = 1; /// must be between [0, input_rank - 1] + bool descending = 2; + +} + +/** + * A layer that does slice operation by providing size to be extracted + * from the given input tensor. + * + * Requires 2 inputs and produces 1 output. + * Rank of the output is same as the rank of the first input. + * + * The 1st input represents the tensor to be sliced. + * The 2nd input represents the beginning index to be sliced from. + * + * Example: + * Input 1: x (x.shape = (2, 3, 4)) + * Input 2: begin + * size: 2 + * axis: 1 + * + * Output: x[:, begin:begin+2, :] + * + */ +message SliceBySizeLayerParams { + + int64 size = 2; + int64 axis = 3; + +} + + +/// Neural Network Specializations +/// ------------------------------ + +/** + * A neural network specialized as a classifier. + */ +message NeuralNetworkClassifier { + + repeated NeuralNetworkLayer layers = 1; + repeated NeuralNetworkPreprocessing preprocessing = 2; + + // use this enum value to determine the input tensor shapes to the neural network, for multiarray inputs + NeuralNetworkMultiArrayShapeMapping arrayInputShapeMapping = 5; + + // use this enum value to determine the input tensor shapes to the neural network, for image inputs + NeuralNetworkImageShapeMapping imageInputShapeMapping = 6; + + NetworkUpdateParameters updateParams = 10; + + // The set of labels for every possible class. + oneof ClassLabels { + StringVector stringClassLabels = 100; + Int64Vector int64ClassLabels = 101; + } + + // The name of the output blob containing the probability of each class. + // In other words, the score vector. Must be a 1-D tensor with the same + // number and order of elements as ClassLabels. + string labelProbabilityLayerName = 200; +} + + +/** + * A layer that computes the one hot representation of the input. + * + * Requires 1 or 2 inputs and produces 1 output. + * Rank of the output is one more than the first input. + * If the second input is present, it is used to determine the value of "oneHotVectorSize" and the parameter "oneHotVectorSize" is ignored. + * + * Input values correspond to indices and should typically be in the range [0,"oneHotVectorSize" -1]. If it is outside this range, a vector of all "offValue" will be chosen. + * + * Typically one hot vectors contain 0s everywhere, except 1 at the index that the input corresponds to. + * However, instead of 0, any float value could be generated by using the "offValue" parameter. + * Similarly, instead of 1, any other value can be used by employing the "onValue" parameter. 
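Before the spec's own shape examples below, a small numpy sketch of the ``onValue``/``offValue`` behaviour for ``axis = -1``, where out-of-range indices yield an all-``offValue`` row; the helper name is illustrative and not part of the specification:

    import numpy as np

    def one_hot(indices, size, on_value=1.0, off_value=0.0):
        # Encode along a new trailing axis; out-of-range indices give an all-off_value row.
        indices = np.asarray(indices)
        out = np.full(indices.shape + (size,), off_value, dtype=float)
        valid = (indices >= 0) & (indices < size)
        out[np.nonzero(valid) + (indices[valid],)] = on_value
        return out

    print(one_hot([2, -1], size=4, on_value=5, off_value=-1))
    # expected: [[-1. -1. 5. -1.], [-1. -1. -1. -1.]]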
+ * + * e.g.: + * input shape: (10,), "oneHotVectorSize" : 32, axis=-1, then output shape will be (10,32) + * input shape: (10,23), "oneHotVectorSize" : 32, axis=1, then output shape will be (10,32,23) + * input shape: (10,), "oneHotVectorSize" : 32, axis=0, then output shape will be (32,10) + * + * input shape: (2,), "oneHotVectorSize" : 4, axis=-1, then output shape will be (2,4) + * say input values = [2, 0], and "onValue" = 5, and "offValue" = -1, then output will be: + * [-1, -1, 5, -1 + * 5, -1, -1, -1] + * + * say input values = [2, -1], and "onValue" = 5, and "offValue" = -1, then output will be: + * [-1, -1, 5, -1 + * -1, -1, -1, -1] + * + * Available (iOS >= 14, macOS >= 11.0, watchOS >= 7) + */ + +message OneHotLayerParams { + + uint64 oneHotVectorSize = 1; /// size of the one hot vector + int64 axis = 2; /// negative indexing is supported. It refers to the axis in the output tensor. + float onValue = 3; + float offValue = 4; +} + + +/** + * A layer that computes the cumsum values of the input along a given axis. + * + * Requires 1 or 2 inputs and produces 1 output. + * + * Output shape and rank is same as the first input. + * If the second input is present, it is used to determine the value of "axis" and the parameter "axis" is ignored. + * + * e.g.: + * Input shape = (3,), values it has: [4, 6, 7] + * + * Then output values will be: + * + * if "excludeFinalSum" = False and "reverse" = False: + * output values : [4, 10, 17] + * + * if "excludeFinalSum" = True and "reverse" = False: + * output values : [0, 4, 10] + * + * if "excludeFinalSum" = False and "reverse" = True: + * output values : [17, 13, 7] + * + * if "excludeFinalSum" = True and "reverse" = True: + * output values : [13, 7, 0] + * + * + * Available (iOS >= 14, macOS >= 11.0, watchOS >= 7) + */ + + +message CumSumLayerParams { + + int64 axis = 1; /// negative indexing is supported + + /// if true, the first element of the output is 0, and the last element contains the sum of the input up to the penultimate value + /// if false, the first element of the output is same as the input and the last element is the sum of all the input values + /// (this behavior is reversed when "reverse" flag is True) + bool excludeFinalSum = 2; + + bool reverse = 3; /// if true, cumsum is performed in the opposite direction +} + + +/** + * A neural network specialized as a regressor. + */ +message NeuralNetworkRegressor { + + repeated NeuralNetworkLayer layers = 1; + repeated NeuralNetworkPreprocessing preprocessing = 2; + + // use this enum value to determine the input tensor shapes to the neural network, for multiarray inputs + NeuralNetworkMultiArrayShapeMapping arrayInputShapeMapping = 5; + + // use this enum value to determine the input tensor shapes to the neural network, for image inputs + NeuralNetworkImageShapeMapping imageInputShapeMapping = 6; + + NetworkUpdateParameters updateParams = 10; + +} + +/// --------------------------------------------------------- +/// On-device Training related messages +/// --------------------------------------------------------- + +/** + * Details on how the network will be updated + */ +message NetworkUpdateParameters { + + repeated LossLayer lossLayers = 1; + Optimizer optimizer = 2; + Int64Parameter epochs = 3; + + /** + * Describes whether to shuffle the batch of data between epochs. + */ + BoolParameter shuffle = 10; + + /** + * The seed to be used in an associated random number generator. 
+ */ + Int64Parameter seed = 20; +} + +/** + * Loss layer - categorical cross entropy and mean squared error are the only supported loss functions currently + */ +message LossLayer { + + string name = 1; + oneof LossLayerType { + + CategoricalCrossEntropyLossLayer categoricalCrossEntropyLossLayer = 10; + MeanSquaredErrorLossLayer meanSquaredErrorLossLayer = 11; + + } + +} + +/** + * Categorical cross entropy loss layer + * Categorical cross entropy is used for single label categorization (only one category is applicable for each data point). + * + * The input is a vector of length N representing the distribution over N categories. It must be the output of a softmax. + * + * The target is a single value representing the true category or class label. If the target is the predictedFeatureName of a neural network classifier it will be inverse mapped to the corresponding categorical index for you. + * + * math: + * Loss_{CCE}(input, target) = -\sum_{i=1}^{N} (target == i) log( input[i] ) = - log (input[target]) + */ +message CategoricalCrossEntropyLossLayer { + + string input = 1; + string target = 2; + +} + +/** + * Mean squared error loss layer, + * specifying input and target + */ +message MeanSquaredErrorLossLayer { + + string input = 1; + string target = 2; + +} + +/** + * Optimizer - stochastic gradient descent and adam are the only supported optimizers currently + */ +message Optimizer { + + oneof OptimizerType { + + SGDOptimizer sgdOptimizer = 10; + AdamOptimizer adamOptimizer = 11; + + } + +} + +/** + * Stochastic gradient descent optimizer, + * specifying configurable learning rate, mini batch size, and momentum + */ +message SGDOptimizer { + + DoubleParameter learningRate = 1; + Int64Parameter miniBatchSize = 2; + DoubleParameter momentum = 3; + +} + +/** + * Adam optimizer, + * specifying configurable learning rate, mini batch size, betas, and eps + */ +message AdamOptimizer { + + DoubleParameter learningRate = 1; + Int64Parameter miniBatchSize = 2; + DoubleParameter beta1 = 3; + DoubleParameter beta2 = 4; + DoubleParameter eps = 5; + +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/NonMaximumSuppression.proto b/onnxruntime/core/providers/coreml/mlmodel_format/NonMaximumSuppression.proto new file mode 100644 index 0000000000000..c98949a0c2e21 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/NonMaximumSuppression.proto @@ -0,0 +1,187 @@ +// Copyright (c) 2018, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/* +* Non-maximum suppression of axis-aligned bounding boxes. +* +* This is used primarily for object detectors that tend to produce multiple +* boxes around a single object. This is a byproduct of the detector's +* robustness to spatial translation. If there are two or more bounding boxes +* that are very similar to one another, the algorithm should return only a +* single representative. +* +* Similarity between two bounding boxes is measured by intersection-over-union +* (IOU), the fraction between the area of intersection and area of the union. 
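For concreteness, a plain Python sketch of the IOU computation for two axis-aligned boxes, using the center/size ``(x, y, width, height)`` convention described later in this file; the helper names are illustrative only:

    def iou(box_a, box_b):
        # IOU of two axis-aligned boxes given as (x_center, y_center, width, height).
        def corners(b):
            x, y, w, h = b
            return x - w / 2, y - h / 2, x + w / 2, y + h / 2

        ax0, ay0, ax1, ay1 = corners(box_a)
        bx0, by0, bx1, by1 = corners(box_b)
        iw = max(0.0, min(ax1, bx1) - max(ax0, bx0))
        ih = max(0.0, min(ay1, by1) - max(ay0, by0))
        inter = iw * ih
        union = (ax1 - ax0) * (ay1 - ay0) + (bx1 - bx0) * (by1 - by0) - inter
        return inter / union if union > 0 else 0.0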
+* Here is an example where the areas can be calculated by hand by counting glyphs:: +* +* +-------+ +-------+ +* | | | | +* | +------+ +--+ | +---+ +* | | | | | | | | +* +-------+ | +--+ +----+ | +* | | | | +* +------+ +------+ +* Intersection Union +* IOU: 0.16 = 12 / 73 +* +* All IOU scores are fractions betwen 0.0 (fully disjoint) and 1.0 (perfect +* overlap). The standard algorithm (PickTop) is defined as follows: +* +* 1. Sort boxes by descending order of confidence +* 2. Take the top one and mark it as keep +* 3. Suppress (mark it as discard) all boxes within a fixed IOU radius of the +* keep box +* 4. Go to 2 and repeat on the subset of boxes not already kept or discarded +* 5. When all boxes are processed, output only the ones marked as keep +* +* Before the algorithm, boxes that fall below the confidence threshold are +* discarded. +*/ +message NonMaximumSuppression { + // Suppression methods: + /* + * Pick the bounding box of the top confidence, suppress all within a radius. + */ + message PickTop { + /* + * Suppression is only done among predictions with the same label + * (argmax of the confidence). + */ + bool perClass = 1; + } + + /* + * Choose which underlying suppression method to use + */ + oneof SuppressionMethod { + PickTop pickTop = 1; + } + + /* + * Optional class label mapping. + */ + oneof ClassLabels { + StringVector stringClassLabels = 100; + Int64Vector int64ClassLabels = 101; + } + + /* + * This defines the radius of suppression. A box is considered to be within + * the radius of another box if their IOU score is less than this value. + */ + double iouThreshold = 110; + + /* + * Remove bounding boxes below this threshold. The algorithm run-time is + * proportional to the square of the number of incoming bounding boxes + * (O(N^2)). This threshold is a way to reduce N to make the algorithm + * faster. The confidence threshold can be any non-negative value. Negative + * confidences are not allowed, since if the output shape is specified to be + * larger than boxes after suppression, the unused boxes are filled with + * zero confidence. If the prediction is handled by Core Vision, it is also + * important that confidences are defined with the following semantics: + * + * 1. Confidences should be between 0 and 1 + * 2. The sum of the confidences for a prediction should not exceed 1, but is + * allowed to be less than 1 + * 3. The sum of the confidences will be interpreted as the confidence of + * any object (e.g. if the confidences for two classes are 0.2 and 0.4, + it means there is a 60% (0.2 + 0.4) confidence that an object is + present) + */ + double confidenceThreshold = 111; + + /* + * Set the name of the confidence input. + * + * The input should be a multi-array of type double and shape N x C. N is + * the number of boxes and C the number of classes. Each row describes the + * confidences of each object category being present at that particular + * location. Confidences should be nonnegative, where 0.0 means the highest + * certainty the object is not present. + * + * Specifying shape is optional. + */ + string confidenceInputFeatureName = 200; + + /* + * Set the name of the coordinates input. + * + * The input should be a multi-array of type double and shape N x 4. The + * rows correspond to the rows of the confidence matrix. 
The four values + * describe (in order): + * + * - x (center location of the box along the horizontal axis) + * - y (center location of the box along the vertical axis) + * - width (size of box along the horizontal axis) + * - height (size of box on along the vertical axis) + * + * Specifying shape is optional. + */ + string coordinatesInputFeatureName = 201; + + /* + * The iouThreshold can be optionally overridden by specifying this string + * and providing a corresponding input of type double. This allows changing + * the value of the parameter during run-time. + * + * The input should be a scalar double between 0.0 and 1.0. Setting it to 1.0 + * means there will be no suppression based on IOU. + */ + string iouThresholdInputFeatureName = 202; + + /* + * The confidenceThreshold can be optionally overridden by specifying this + * string and providing a corresponding input. This allows changing the + * value of the parameter during run-time, which can aid setting it just + * right for a particular use case. + * + * The input should be a scalar double with nonnegative value. + */ + string confidenceThresholdInputFeatureName = 203; + + /* + * Set the name of the confidence output. The output will be the same type + * and shape as the corresponding input. The only difference is that the + * number of rows may have been reduced. + * + * Specifying shape is optional. One reason to specify shape is to limit + * the number of output boxes. This can be done is several ways: + * + * Fixed shape: + * The output can be pinned to a fixed set of boxes. If this number is larger + * than the number of boxes that would have been returned, the output is padded + * with zeros for both confidence and coordinates. Specifying a fixed shape + * can be done by setting either shape (deprecated) or allowedShapes set to + * fixedsize. + * + * Min/max: + * It is also possible to set both a minimum and a maximum. The same zero-padding + * as for fixed shape is applied when necessary. Setting min/max is done by defining + * two allowedShapes, where the first dimension uses a rangeofsizes defining lowerbound + * and upperbound. + */ + string confidenceOutputFeatureName = 210; + + /* + * Set the name of the coordinates output. The output will be the same type + * and shape as the corresponding input. The only difference is that the + * number of rows may have been reduced. + * + * Specifying shape is optional. See confidence output for a more detailed + * description. Note that to achieve either fixed shape output or a + * constraint range of boxes, only one of confidence or coordinates need to + * set a shape. Both shapes are allowed to be defined, but in such case they + * have to be consistent along dimension 0. + */ + string coordinatesOutputFeatureName = 211; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/Normalizer.proto b/onnxruntime/core/providers/coreml/mlmodel_format/Normalizer.proto new file mode 100644 index 0000000000000..627f7e2e3afd7 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/Normalizer.proto @@ -0,0 +1,38 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** + * A normalization preprocessor. 
+ */ +message Normalizer { + /** + * There are three normalization modes, + * which have the corresponding formulas: + * + * Max + * .. math:: + * max(x_i) + * + * L1 + * .. math:: + * z = ||x||_1 = \sum_{i=1}^{n} |x_i| + * + * L2 + * .. math:: + * z = ||x||_2 = \sqrt{\sum_{i=1}^{n} x_i^2} + */ + enum NormType { + LMax = 0; + L1 = 1; + L2 = 2; + } + + NormType normType = 1; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/OneHotEncoder.proto b/onnxruntime/core/providers/coreml/mlmodel_format/OneHotEncoder.proto new file mode 100644 index 0000000000000..f47cf28166222 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/OneHotEncoder.proto @@ -0,0 +1,41 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/** + * Transforms a categorical feature into an array. The array will be all + * zeros expect a single entry of one. + * + * Each categorical value will map to an index, this mapping is given by + * either the ``stringCategories`` parameter or the ``int64Categories`` + * parameter. + */ +message OneHotEncoder { + enum HandleUnknown { + ErrorOnUnknown = 0; + IgnoreUnknown = 1; // Output will be all zeros for unknown values. + } + + /** + * Mapping to be used for the encoding. The position of the category in + * the below vector determines where the single one entry will be in the + * output. + */ + oneof CategoryType { + StringVector stringCategories = 1; + Int64Vector int64Categories = 2; + } + + // Output can be a dictionary with only one entry, instead of an array. + bool outputSparse = 10; + + HandleUnknown handleUnknown = 11; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/Parameters.proto b/onnxruntime/core/providers/coreml/mlmodel_format/Parameters.proto new file mode 100644 index 0000000000000..ed1ebe525181f --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/Parameters.proto @@ -0,0 +1,52 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/** + * Int64 parameter, + * consisting of a default int64 value, and allowed range or set of values + * value is unbounded if AllowedValues is not set. + */ +message Int64Parameter { + int64 defaultValue = 1; + oneof AllowedValues { + Int64Range range = 10; + Int64Set set = 11; + } +} + +/** + * Double parameter, + * consisting of a default double value, and allowed range of values + * value is unbounded if AllowedValues is not set. 
+ */ +message DoubleParameter { + double defaultValue = 1; + oneof AllowedValues { + DoubleRange range = 10; + } +} + +/** + * String parameter, + * A default string value must be provided + */ +message StringParameter { + string defaultValue = 1; +} + +/** + * String parameter, + * A default bool value must be provided + */ +message BoolParameter { + bool defaultValue = 1; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/README.md b/onnxruntime/core/providers/coreml/mlmodel_format/README.md new file mode 100644 index 0000000000000..e5eba65f982ad --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/README.md @@ -0,0 +1,16 @@ +# Core ML Model Format Specification +This directory contains the protobuf message definitions that comprise the Core ML model document (``.mlmodel``) format. + +The top-level message is ``Model``, which is defined in ``Model.proto``. +Other message types describe data structures, feature types, feature engineering model types, and predictive model types. + +# Update the Core ML Model Format Specification +Please do not modify protobuf message definitions, they are copied directly from [Core ML Tools](https://github.com/apple/coremltools) repository. + +To update the Core ML Model Format Schema schema files to a more recent version: +1. Delete all the protobuf message definitions (`.proto`) from this directory. +2. Copy the new version of protobuf message definitions (`.proto`) from the `mlmodel/format/` directory of preferred coremltools release branch. + +# Core ML Model Format Schema version history +## [coremltools 4.0](https://github.com/apple/coremltools/releases/tag/4.0) +[Core ML Model Format Specification](https://github.com/apple/coremltools/tree/4.0/mlmodel/format) diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/SVM.proto b/onnxruntime/core/providers/coreml/mlmodel_format/SVM.proto new file mode 100644 index 0000000000000..932a4ec216682 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/SVM.proto @@ -0,0 +1,195 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/// Kernel Definitions +/// ------------------ + +/** + * A linear kernel. + * + * This function has the following formula: + * + * .. math:: + * K(\boldsymbol{x}, \boldsymbol{x'}) = \boldsymbol{x}^T \boldsymbol{x'} + */ +message LinearKernel { +} + +/** + * A Gaussian radial basis function (RBF) kernel. + * + * This function has the following formula: + * + * .. math:: + * K(\boldsymbol{x}, \boldsymbol{x'}) = \ + * \exp(-\gamma || \boldsymbol{x} - \boldsymbol{x'} ||^2 ) + * + */ +message RBFKernel { + double gamma = 1; +} + +/** + * A polynomial kernel. + * + * This function has the following formula: + * + * .. math:: + * K(\boldsymbol{x}, \boldsymbol{x'}) = \ + * (\gamma \boldsymbol{x}^T \boldsymbol{x'} + c)^{degree} + */ +message PolyKernel { + int32 degree = 1; + double c = 2; + double gamma = 3; +} + +/** + * A sigmoid kernel. + * + * This function has the following formula: + * + * .. math:: + * K(\boldsymbol{x}, \boldsymbol{x'}) = \ + * \tanh(\gamma \boldsymbol{x}^T \boldsymbol{x'} + c) + */ +message SigmoidKernel { + double gamma = 1; + double c = 2; +} + +/** + * A kernel. 
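+ *
+ * The concrete kernel is selected via the oneof below; for example
+ * (illustrative only), choosing rbfKernel with gamma = 0.1 corresponds to
+ * K(x, x') = exp(-0.1 * ||x - x'||^2).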
+ */ +message Kernel { + oneof kernel { + LinearKernel linearKernel = 1; + RBFKernel rbfKernel = 2; + PolyKernel polyKernel = 3; + SigmoidKernel sigmoidKernel = 4; + } +} + + +/// Support Vector Definitions +/// -------------------------- + +/** + * A sparse node. + */ +message SparseNode { + int32 index = 1; // 1-based indexes, like libsvm + double value = 2; +} + +/** + * A sparse vector. + */ +message SparseVector { + repeated SparseNode nodes = 1; +} + +/** + * One or more sparse support vectors. + */ +message SparseSupportVectors { + repeated SparseVector vectors = 1; +} + +/** + * A dense vector. + */ +message DenseVector { + repeated double values = 1; +} + +/** + * One or more dense support vectors. + */ +message DenseSupportVectors { + repeated DenseVector vectors = 1; +} + +/** + * One or more coefficients. + */ +message Coefficients { + repeated double alpha = 1; +} + +/** + * A support vector regressor. + */ +message SupportVectorRegressor { + Kernel kernel = 1; + + // Support vectors, either sparse or dense format + oneof supportVectors { + SparseSupportVectors sparseSupportVectors = 2; + DenseSupportVectors denseSupportVectors = 3; + } + + // Coefficients, one for each support vector + Coefficients coefficients = 4; + + double rho = 5; +} + +/** + * A support vector classifier + */ +message SupportVectorClassifier { + Kernel kernel = 1; + + /** + * The number of support vectors for each class. + */ + repeated int32 numberOfSupportVectorsPerClass = 2; + + /** + * The support vectors, in either sparse or dense format. + */ + oneof supportVectors { + SparseSupportVectors sparseSupportVectors = 3; + DenseSupportVectors denseSupportVectors = 4; + } + + /** + * The coefficients, essentially a two dimensional array of + * size: (numberOfClasses-1) by (total number of support vectors) + */ + repeated Coefficients coefficients = 5; + + /** + * Constants for decision function, + * with K*(K-1) / 2 elements, + * where K is the number of classes. + */ + repeated double rho = 6; + + /** + * Pairwise probability information for A vs B classifier. + * Total of K*(K-1)/2 elements where K is the number of classes. + * These fields are optional, + * and only required if you want probabilities or multi class predictions. + */ + repeated double probA = 7; + repeated double probB = 8; + + /** + * Class label mapping. + */ + oneof ClassLabels { + StringVector stringClassLabels = 100; + Int64Vector int64ClassLabels = 101; + } +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/Scaler.proto b/onnxruntime/core/providers/coreml/mlmodel_format/Scaler.proto new file mode 100644 index 0000000000000..f0e13d54be2e8 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/Scaler.proto @@ -0,0 +1,34 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification; + +/** + * A scaling operation. + * + * This function has the following formula: + * + * .. math:: + * f(x) = scaleValue \cdot (x + shiftValue) + * + * If the ``scaleValue`` is not given, the default value 1 is used. + * If the ``shiftValue`` is not given, the default value 0 is used. + * + * If ``scaleValue`` and ``shiftValue`` are each a single value + * and the input is an array, then the scale and shift are applied + * to each element of the array. 
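+ *
+ * For example (illustrative only), with shiftValue = [1.0] and
+ * scaleValue = [2.0], an input value of 3.0 is mapped to
+ * 2.0 * (3.0 + 1.0) = 8.0.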
+ * + * If the input is an integer, then it is converted to a double to + * perform the scaling operation. If the output type is an integer, + * then it is cast to an integer. If that cast is lossy, then an + * error is generated. + */ +message Scaler { + repeated double shiftValue = 1; + repeated double scaleValue = 2; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/SoundAnalysisPreprocessing.proto b/onnxruntime/core/providers/coreml/mlmodel_format/SoundAnalysisPreprocessing.proto new file mode 100644 index 0000000000000..05bb744a9af94 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/SoundAnalysisPreprocessing.proto @@ -0,0 +1,60 @@ +// Copyright (c) 2019, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification.CoreMLModels; + +/** +* A model which takes audio signal samples as input and outputs an array of +* preprocessed samples according to the specified preprocessing types +*/ +message SoundAnalysisPreprocessing { + + // Specific preprocessing types for sound analysis + + /* Vggish preprocesses input audio samples and makes them ready to + be fed to Vggish feature extractor. + c.f. https://arxiv.org/pdf/1609.09430.pdf + + The preprocessing takes input a single channel (monophonic) audio samples + 975 miliseconds long, sampled at 16KHz, i.e., 15600 samples 1D multiarray + and produces preprocessed samples in multiarray of shape [1, 96, 64] + + (1) Splits the input audio samples into overlapping frames, where each + frame is 25 milliseconds long and hops forward by 10 milliseconds. + Any partial frames at the end are dropped. + + (2) Hann window: apply a periodic Hann with a window_length of + 25 milliseconds, which translates to 400 samples in 16KHz sampling rate + + w(n) = 0.5 - 0.5 * cos(2*pi*n/window_length_sample), + where 0 <= n <= window_lenth_samples - 1 and window_lenth_samples = 400 + + Then, the Hann window is applied to each frame as below + + windowed_frame(n) = frame(n) * w(n) + where 0 <= n <= window_lenth_samples - 1 and window_lenth_samples = 400 + + (3) Power spectrum: calculate short-time Fourier transfor magnitude, with + an FFT length of 512 + + (4) Log Mel filter bank: calculates a log magnitude mel-frequency + spectrogram minimum frequency of 125Hz and maximum frequency of 7500Hz, + number of mel bins is 64, log_offset is 0.01, number of spectrum bins + is 64. + */ + + message Vggish { + // no specific parameter + } + + // Vision feature print type + oneof SoundAnalysisPreprocessingType { + Vggish vggish = 20; + } + +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/TextClassifier.proto b/onnxruntime/core/providers/coreml/mlmodel_format/TextClassifier.proto new file mode 100644 index 0000000000000..bf6d3c7f7f3e5 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/TextClassifier.proto @@ -0,0 +1,43 @@ +// Copyright (c) 2018, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification.CoreMLModels; + +/** +* A model which takes a single input string and outputs a +* label for the input. 
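+*
+* For example (illustrative only), a sentiment classifier could declare
+* stringClassLabels containing "positive" and "negative" and would then
+* output one of those two labels for each input string.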
+*/ +message TextClassifier { + + /* + * Stores the resivion number for the model, revision 1 is available on + * iOS, tvOS 12.0+, macoOS 10.14+ + */ + uint32 revision = 1; + + /* + * Stores the language of the model, as specified in BCP-47 format, + * e.g. "en-US". See https://tools.ietf.org/html/bcp47 + */ + string language = 10; + + /* + * Stores the byte representation of learned model parameters + */ + bytes modelParameterData = 100; + + /* + * Stores the set of output class labels + */ + oneof ClassLabels { + StringVector stringClassLabels = 200; + } + +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/TreeEnsemble.proto b/onnxruntime/core/providers/coreml/mlmodel_format/TreeEnsemble.proto new file mode 100644 index 0000000000000..defebee98852c --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/TreeEnsemble.proto @@ -0,0 +1,161 @@ +// Copyright (c) 2017, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +/** + * Each tree is a collection of nodes, + * each of which is identified by a unique identifier. + * + * Each node is either a branch or a leaf node. + * A branch node evaluates a value according to a behavior; + * if true, the node identified by ``true_child_node_id`` is evaluated next, + * if false, the node identified by ``false_child_node_id`` is evaluated next. + * A leaf node adds the evaluation value to the base prediction value + * to get the final prediction. + * + * A tree must have exactly one root node, + * which has no parent node. + * A tree must not terminate on a branch node. + * All leaf nodes must be accessible + * by evaluating one or more branch nodes in sequence, + * starting from the root node. + */ + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification; + +/** + * A tree ensemble post-evaluation transform. + */ +enum TreeEnsemblePostEvaluationTransform { + NoTransform = 0; + Classification_SoftMax = 1; + Regression_Logistic = 2; + Classification_SoftMaxWithZeroClassReference = 3; +} + +/** + * Tree ensemble parameters. + */ +message TreeEnsembleParameters { + message TreeNode { + uint64 treeId = 1; + uint64 nodeId = 2; + + enum TreeNodeBehavior { + BranchOnValueLessThanEqual = 0; + BranchOnValueLessThan = 1; + BranchOnValueGreaterThanEqual = 2; + BranchOnValueGreaterThan = 3; + BranchOnValueEqual = 4; + BranchOnValueNotEqual = 5; + LeafNode = 6; + } + + /** + * The branch mode parameters. + * + * If branch is false, + * then the parameters in this section must be filled in + * to determine how the branching functions. + */ + TreeNodeBehavior nodeBehavior = 3; + + /** + * If the node behavior mode is a branch mode, + * then these values must be filled in. + */ + uint64 branchFeatureIndex = 10; + double branchFeatureValue = 11; + uint64 trueChildNodeId = 12; + uint64 falseChildNodeId = 13; + bool missingValueTracksTrueChild = 14; + + /** + * The leaf mode. + * + * If ``nodeBahavior`` == ``LeafNode``, + * then the evaluationValue is added to the base prediction value + * in order to get the final prediction. + * To support multiclass classification + * as well as regression and binary classification, + * the evaluation value is encoded here as a sparse vector, + * with evaluationIndex being the index of the base vector + * that evaluation value is added to. 
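+ *
+ * For example (illustrative only), in a three-class classifier
+ * (numPredictionDimensions = 3), a leaf contributing 0.7 to class index 1
+ * would carry a single EvaluationInfo with evaluationIndex = 1 and
+ * evaluationValue = 0.7.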
+ * In the single class case, + * it is expected that evaluationIndex is exactly 0. + */ + message EvaluationInfo { + uint64 evaluationIndex = 1; + double evaluationValue = 2; + } + + repeated EvaluationInfo evaluationInfo = 20; + + /** + * The relative hit rate of a node for optimization purposes. + * + * This value has no effect on the accuracy of the result; + * it allows the tree to optimize for frequent branches. + * The value is relative, + * compared to the hit rates of other branch nodes. + * + * You typically use a proportion of training samples + * that reached this node + * or some similar metric to derive this value. + */ + double relativeHitRate = 30; + } + + repeated TreeNode nodes = 1; + + /** + * The number of prediction dimensions or classes in the model. + * + * All instances of ``evaluationIndex`` in a leaf node + * must be less than this value, + * and the number of values in the ``basePredictionValue`` field + * must be equal to this value. + * + * For regression, + * this is the dimension of the prediction. + * For classification, + * this is the number of classes. + */ + uint64 numPredictionDimensions = 2; + + /** + * The base prediction value. + * + * The number of values in this must match + * the default values of the tree model. + */ + repeated double basePredictionValue = 3; +} + +/** + * A tree ensemble classifier. + */ +message TreeEnsembleClassifier { + TreeEnsembleParameters treeEnsemble = 1; + TreeEnsemblePostEvaluationTransform postEvaluationTransform = 2; + + // Required class label mapping + oneof ClassLabels { + StringVector stringClassLabels = 100; + Int64Vector int64ClassLabels = 101; + } +} + +/** + * A tree ensemble regressor. + */ +message TreeEnsembleRegressor { + TreeEnsembleParameters treeEnsemble = 1; + TreeEnsemblePostEvaluationTransform postEvaluationTransform = 2; +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/VisionFeaturePrint.proto b/onnxruntime/core/providers/coreml/mlmodel_format/VisionFeaturePrint.proto new file mode 100644 index 0000000000000..cd13d290e421e --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/VisionFeaturePrint.proto @@ -0,0 +1,63 @@ +// Copyright (c) 2018, Apple Inc. All rights reserved. 
+// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +package CoreML.Specification.CoreMLModels; + +/** +* A model which takes an input image and outputs array(s) of features +* according to the specified feature types +*/ +message VisionFeaturePrint { + + // Specific vision feature print types + + // Scene extracts features useful for identifying contents of natural images + // in both indoor and outdoor environments + message Scene { + enum SceneVersion { + SCENE_VERSION_INVALID = 0; + // VERSION_1 is available on iOS,tvOS 12.0+, macOS 10.14+ + // It uses a 299x299 input image and yields a 2048 float feature vector + SCENE_VERSION_1 = 1; + } + + SceneVersion version = 1; + } + + // Objects extracts features useful for identifying and localizing + // objects in natural images + message Objects { + enum ObjectsVersion { + OBJECTS_VERSION_INVALID = 0; + // VERSION_1 is available on iOS,tvOS 14.0+, macOS 11.0+ + // It uses a 299x299 input image and yields two multiarray + // features: one at high resolution of shape (288, 35, 35) + // the other at low resolution of shape (768, 17, 17) + OBJECTS_VERSION_1 = 1; + } + + ObjectsVersion version = 1; + + /* + * Stores the names of the output features according to the + * order of them being computed from the neural network, i.e., + * the first element in the output is the earliest being + * computed, while the last is the latest being computed. In + * general, the order reflects the resolution of the feature. + * The earlier it is computed, the higher the feature resolution. + */ + repeated string output = 100; + } + + // Vision feature print type + oneof VisionFeaturePrintType { + Scene scene = 20; + Objects objects = 21; + } + +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/WordEmbedding.proto b/onnxruntime/core/providers/coreml/mlmodel_format/WordEmbedding.proto new file mode 100644 index 0000000000000..ec11a67ca5294 --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/WordEmbedding.proto @@ -0,0 +1,35 @@ +// Copyright (c) 2019, Apple Inc. All rights reserved. +// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification.CoreMLModels; + +/** +* A model which maps a set of strings into a finite-dimensional real vector space. +*/ +message WordEmbedding { + + /* + * Stores the revision number for the model, revision 2 is available on + * iOS, tvOS 13.0+, macOS 10.15+ + */ + uint32 revision = 1; + + /* + * Stores the language of the model, as specified in BCP-47 format, + * e.g. "en-US". See https://tools.ietf.org/html/bcp47 + */ + string language = 10; + + /* + * Stores efficient representation of emebedding as encoded by the Natural Language Framework + */ + bytes modelParameterData = 100; + +} diff --git a/onnxruntime/core/providers/coreml/mlmodel_format/WordTagger.proto b/onnxruntime/core/providers/coreml/mlmodel_format/WordTagger.proto new file mode 100644 index 0000000000000..8523e05df2c0b --- /dev/null +++ b/onnxruntime/core/providers/coreml/mlmodel_format/WordTagger.proto @@ -0,0 +1,75 @@ +// Copyright (c) 2018, Apple Inc. All rights reserved. 
+// +// Use of this source code is governed by a BSD-3-clause license that can be +// found in LICENSE.txt or at https://opensource.org/licenses/BSD-3-Clause + +syntax = "proto3"; +option optimize_for = LITE_RUNTIME; + +import public "DataStructures.proto"; + +package CoreML.Specification.CoreMLModels; + +/** +* A model which takes a single input string and outputs a +* sequence of tokens, tags for tokens, along with their +* locations and lengths, in the original string. +*/ +message WordTagger { + + /* + * Stores the resivion number for the model, revision 1 is available on + * iOS, tvOS 12.0+, macoOS 10.14+ + */ + uint32 revision = 1; + + /* + * Stores the language of the model, as specified in BCP-47 format, + * e.g. "en-US". See https://tools.ietf.org/html/bcp47 + */ + string language = 10; + + /* + * Stores the name of tokens output. The output will be + * a sequence of strings that contains the tokens in the + * input string + */ + string tokensOutputFeatureName = 20; + + /* + * Stores the name of token tags output. The output will be + * a sequence of strings that contains the tags for each + * token in the input string + */ + string tokenTagsOutputFeatureName = 21; + + /* + * Stores the name of token locations output. The output will be + * a sequence of integers that contains the locations (indices) + * for each token in the input string, location starts from 0 + */ + string tokenLocationsOutputFeatureName = 22; + + /* + * Stores the name of token lengths output. The output will be + * a sequence of integers that contains the lengths for each + * token in the input string + */ + string tokenLengthsOutputFeatureName = 23; + + /* + * Stores the byte representation of learned model parameters + */ + bytes modelParameterData = 100; + + /* + * Stores the set of output tags + */ + oneof Tags { + StringVector stringTags = 200; + } + + + +} + diff --git a/onnxruntime/core/providers/cpu/math/element_wise_ops.cc b/onnxruntime/core/providers/cpu/math/element_wise_ops.cc index 5224c7fef5735..73978d61a1885 100644 --- a/onnxruntime/core/providers/cpu/math/element_wise_ops.cc +++ b/onnxruntime/core/providers/cpu/math/element_wise_ops.cc @@ -1,8 +1,10 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. -#include "core/framework/data_types_internal.h" #include "core/providers/cpu/math/element_wise_ops.h" + +#include "core/framework/data_types_internal.h" +#include "core/framework/math.h" #include "core/providers/cpu/tensor/utils.h" #include "core/providers/op_kernel_type_control.h" #include diff --git a/onnxruntime/core/providers/cpu/math/sign.cc b/onnxruntime/core/providers/cpu/math/sign.cc index c7ddda013c904..afeff1073384e 100644 --- a/onnxruntime/core/providers/cpu/math/sign.cc +++ b/onnxruntime/core/providers/cpu/math/sign.cc @@ -8,10 +8,10 @@ #include "core/common/common.h" #include "core/framework/data_types.h" #include "core/framework/element_type_lists.h" +#include "core/framework/math.h" #include "core/framework/op_kernel.h" #include "core/providers/op_kernel_type_control.h" #include "core/util/math.h" -#include "core/util/math_cpuonly.h" using namespace ::onnxruntime::common; using namespace ONNX_NAMESPACE; diff --git a/onnxruntime/core/providers/cpu/math/softmax_shared.cc b/onnxruntime/core/providers/cpu/math/softmax_shared.cc index f6e246e1b4ebd..b8eb8dae0977b 100644 --- a/onnxruntime/core/providers/cpu/math/softmax_shared.cc +++ b/onnxruntime/core/providers/cpu/math/softmax_shared.cc @@ -18,9 +18,12 @@ * limitations under the License. 
*/ +#include "core/providers/cpu/math/softmax_shared.h" + #include #include -#include "core/providers/cpu/math/softmax_shared.h" +#include + #include "core/util/math.h" #include "core/util/math_cpuonly.h" #include "core/mlas/inc/mlas.h" diff --git a/onnxruntime/core/providers/cpu/nn/shrink.cc b/onnxruntime/core/providers/cpu/nn/shrink.cc index 0c336da99e24e..7a18e450b06c4 100644 --- a/onnxruntime/core/providers/cpu/nn/shrink.cc +++ b/onnxruntime/core/providers/cpu/nn/shrink.cc @@ -4,9 +4,9 @@ #include "core/providers/cpu/nn/shrink.h" #include "core/framework/element_type_lists.h" +#include "core/framework/math.h" #include "core/framework/utils.h" #include "core/providers/op_kernel_type_control.h" -#include "core/util/math_cpuonly.h" #include "core/util/math.h" namespace onnxruntime { diff --git a/onnxruntime/core/providers/cpu/rnn/rnn_helpers.h b/onnxruntime/core/providers/cpu/rnn/rnn_helpers.h index 5551ad1cad0d5..95a8d87cc7665 100644 --- a/onnxruntime/core/providers/cpu/rnn/rnn_helpers.h +++ b/onnxruntime/core/providers/cpu/rnn/rnn_helpers.h @@ -10,6 +10,7 @@ #include "core/common/common.h" #include "core/common/logging/logging.h" #include "core/framework/allocator.h" +#include "core/framework/tensor.h" #include "core/util/math.h" #include "core/util/math_cpuonly.h" #include "core/util/qmath.h" diff --git a/onnxruntime/core/providers/cpu/tensor/isinf.cc b/onnxruntime/core/providers/cpu/tensor/isinf.cc index 782cfeb9bee92..9b455ac7a9587 100644 --- a/onnxruntime/core/providers/cpu/tensor/isinf.cc +++ b/onnxruntime/core/providers/cpu/tensor/isinf.cc @@ -5,10 +5,10 @@ #include "core/common/common.h" #include "core/framework/data_types_internal.h" +#include "core/framework/math.h" #include "core/framework/op_kernel.h" #include "core/framework/tensor.h" #include "core/providers/op_kernel_type_control.h" -#include "core/util/math_cpuonly.h" namespace onnxruntime { // https://github.com/onnx/onnx/blob/master/docs/Operators.md#IsInf diff --git a/onnxruntime/core/providers/cpu/tensor/isnan.cc b/onnxruntime/core/providers/cpu/tensor/isnan.cc index a4ac251e54fe3..9fc784ed30823 100644 --- a/onnxruntime/core/providers/cpu/tensor/isnan.cc +++ b/onnxruntime/core/providers/cpu/tensor/isnan.cc @@ -1,9 +1,10 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. 
-#include "isnan.h" -#include "core/util/math_cpuonly.h" +#include "core/providers/cpu/tensor/isnan.h" + #include "core/common/common.h" +#include "core/framework/math.h" #include "core/framework/tensor.h" #include "Eigen/src/Core/arch/Default/Half.h" diff --git a/onnxruntime/core/providers/cpu/tensor/upsample.cc b/onnxruntime/core/providers/cpu/tensor/upsample.cc index 81ac2cd32552c..61e4d28cf0ddb 100644 --- a/onnxruntime/core/providers/cpu/tensor/upsample.cc +++ b/onnxruntime/core/providers/cpu/tensor/upsample.cc @@ -571,8 +571,6 @@ void UpsampleBilinear(int64_t batch_size, p.dx1[x] * p.dy1[y] * X22); } } - Xdata += input_height * input_width; - Ydata += output_width * output_height; }); } } diff --git a/onnxruntime/core/providers/cuda/cu_inc/binary_elementwise_impl.cuh b/onnxruntime/core/providers/cuda/cu_inc/binary_elementwise_impl.cuh index 069cf0658dc84..1f76a6c096982 100644 --- a/onnxruntime/core/providers/cuda/cu_inc/binary_elementwise_impl.cuh +++ b/onnxruntime/core/providers/cuda/cu_inc/binary_elementwise_impl.cuh @@ -188,15 +188,24 @@ void BinaryElementWiseNoBroadcastImpl( size_t count) { if (count == 0) // special case where there's a dim value of 0 in the output shape return; + + #ifdef USE_ROCM + const int num_elements_per_thread = 2; + const int num_threads_per_block = 512; + #else + const int num_elements_per_thread = GridDim::maxElementsPerThread; + const int num_threads_per_block = GridDim::maxThreadsPerBlock; + #endif - int blocksPerGrid = static_cast(CeilDiv(count, GridDim::maxThreadsPerBlock * GridDim::maxElementsPerThread)); + int blocksPerGrid = static_cast(CeilDiv(count, num_threads_per_block * num_elements_per_thread)); CUDA_LONG N = static_cast(count); - _BinaryElementWiseSimple<<>>( + _BinaryElementWiseSimple<<>>( lhs_data, rhs_data, output_data, func, N); + } template @@ -216,32 +225,39 @@ void BinaryElementWiseImpl( if (count == 0) // special case where there's a dim value of 0 in the output shape return; - int blocksPerGrid = static_cast(CeilDiv(count, GridDim::maxThreadsPerBlock * GridDim::maxElementsPerThread)); + #ifdef USE_ROCM + const int num_elements_per_thread = 2; + const int num_threads_per_block = 512; + #else + const int num_elements_per_thread = GridDim::maxElementsPerThread; + const int num_threads_per_block = GridDim::maxThreadsPerBlock; + #endif + + int blocksPerGrid = static_cast(CeilDiv(count, num_threads_per_block * num_elements_per_thread)); CUDA_LONG N = static_cast(count); if (output_rank_or_simple_broadcast == static_cast(SimpleBroadcast::NoBroadcast)) { - _BinaryElementWiseSimple<<>>( + _BinaryElementWiseSimple<<>>( lhs_data, rhs_data, output_data, func, N); } else if (output_rank_or_simple_broadcast == static_cast(SimpleBroadcast::LeftScalar)) { - _BinaryElementWiseSimple<<>>( + _BinaryElementWiseSimple<<>>( lhs_data, rhs_data, output_data, func, N); } else if (output_rank_or_simple_broadcast == static_cast(SimpleBroadcast::RightScalar)) { - _BinaryElementWiseSimple<<>>( + _BinaryElementWiseSimple<<>>( lhs_data, rhs_data, output_data, func, N); } else if (output_rank_or_simple_broadcast == static_cast(SimpleBroadcast::RightPerChannelBatch1)) { - _BinaryElementWiseRhsPerChannelBatch1<<>>( + _BinaryElementWiseRhsPerChannelBatch1<<>>( lhs_data, rhs_data, fdm_H, @@ -249,7 +265,7 @@ void BinaryElementWiseImpl( func, N); } else if (output_rank_or_simple_broadcast == static_cast(SimpleBroadcast::RightPerChannelBatchN)) { - _BinaryElementWiseRhsPerChannelBatchN<<>>( + _BinaryElementWiseRhsPerChannelBatchN<<>>( lhs_data, rhs_data, fdm_H, @@ 
-259,7 +275,7 @@ void BinaryElementWiseImpl( N); } else { if (lhs_padded_strides && rhs_padded_strides && lhs_padded_strides->Size() && rhs_padded_strides->Size()) - _BinaryElementWise<<>>( + _BinaryElementWise<<>>( output_rank_or_simple_broadcast, *lhs_padded_strides, lhs_data, @@ -270,7 +286,7 @@ void BinaryElementWiseImpl( func, N); else if (lhs_padded_strides && lhs_padded_strides->Size()) - _BinaryElementWise<<>>( + _BinaryElementWise<<>>( output_rank_or_simple_broadcast, *lhs_padded_strides, lhs_data, @@ -281,7 +297,7 @@ void BinaryElementWiseImpl( func, N); else if (rhs_padded_strides && rhs_padded_strides->Size()) - _BinaryElementWise<<>>( + _BinaryElementWise<<>>( output_rank_or_simple_broadcast, TArray(), // lhs is not computed, so no need to deference lhs_padded_strides lhs_data, diff --git a/onnxruntime/core/providers/cuda/cu_inc/common.cuh b/onnxruntime/core/providers/cuda/cu_inc/common.cuh index ea9ddf0450908..ad08884208bbb 100644 --- a/onnxruntime/core/providers/cuda/cu_inc/common.cuh +++ b/onnxruntime/core/providers/cuda/cu_inc/common.cuh @@ -351,16 +351,6 @@ __device__ __inline__ BFloat16 _Log(BFloat16 a) { return logf(static_cast template <> __device__ __inline__ BFloat16 _Tanh(BFloat16 a) { return tanhf(static_cast(a)); } -#if CUDA_VERSION >= 11000 && (__CUDA_ARCH__ >= 800 || !defined(__CUDA_ARCH__)) -template <> -__device__ __inline__ nv_bfloat162 _Tanh(nv_bfloat162 a) { - float2 tmp = (__bfloat1622float2(a)); - tmp.x = tanhf(tmp.x); - tmp.y = tanhf(tmp.y); - return __float22bfloat162_rn(tmp); -} -#endif - template <> __device__ __inline__ BFloat16 _Normcdf(BFloat16 a) { return normcdff(static_cast(a)); } diff --git a/onnxruntime/core/providers/cuda/cudnn_common.cc b/onnxruntime/core/providers/cuda/cudnn_common.cc index ed1d792a5259e..dbc45f13e1615 100644 --- a/onnxruntime/core/providers/cuda/cudnn_common.cc +++ b/onnxruntime/core/providers/cuda/cudnn_common.cc @@ -136,6 +136,18 @@ cudnnDataType_t CudnnTensor::GetDataType() { return CUDNN_DATA_HALF; } +template <> + +cudnnDataType_t CudnnTensor::GetDataType() { +#if CUDNN_VERSION >= 8100 + return CUDNN_DATA_BFLOAT16; +#else + ORT_THROW("cuDNN version is too low to support BFloat16."); + // Not reachable but GCC complains + return CUDNN_DATA_FLOAT; +#endif +} + template <> cudnnDataType_t CudnnTensor::GetDataType() { return CUDNN_DATA_INT8; diff --git a/onnxruntime/core/providers/cuda/math/softmax.cc b/onnxruntime/core/providers/cuda/math/softmax.cc index 9116178ddad51..b2a7d4dc6a51b 100644 --- a/onnxruntime/core/providers/cuda/math/softmax.cc +++ b/onnxruntime/core/providers/cuda/math/softmax.cc @@ -43,24 +43,7 @@ Status SoftMaxComputeHelper( SPECIALIZED_SOFTMAX_HELPER_IMPL(float) SPECIALIZED_SOFTMAX_HELPER_IMPL(double) SPECIALIZED_SOFTMAX_HELPER_IMPL(MLFloat16) - -// cudnnSoftmaxForward/Backward doesn't support BFloat16. 
-#define SPECIALIZED_SOFTMAX_HELPER_IMPL_BFloat16(is_log_softmax) \ - template <> \ - Status SoftMaxComputeHelper(cudaStream_t stream, const BFloat16* X, \ - const TensorShape& input_shape, BFloat16* Y, int64_t axis) { \ - typedef typename ToCudaType::MappedType CudaT; \ - int64_t N = input_shape.SizeToDimension(axis); \ - int64_t D = input_shape.SizeFromDimension(axis); \ - auto Y_data = reinterpret_cast(Y); \ - auto X_data = reinterpret_cast(X); \ - dispatch_warpwise_softmax_forward, is_log_softmax>( \ - stream, Y_data, X_data, gsl::narrow_cast(D), gsl::narrow_cast(D), gsl::narrow_cast(N)); \ - return Status::OK(); \ - } - -SPECIALIZED_SOFTMAX_HELPER_IMPL_BFloat16(true) -SPECIALIZED_SOFTMAX_HELPER_IMPL_BFloat16(false) +SPECIALIZED_SOFTMAX_HELPER_IMPL(BFloat16) #define REGISTER_KERNEL_TYPED(T) \ ONNX_OPERATOR_VERSIONED_TYPED_KERNEL_EX( \ @@ -112,8 +95,8 @@ SPECIALIZED_SOFTMAX_HELPER_IMPL_BFloat16(false) (*KernelDefBuilder::Create()).TypeConstraint("T", DataTypeImpl::GetTensorType()), \ Softmax); - template - Status Softmax::ComputeInternal(OpKernelContext* ctx) const { +template +Status Softmax::ComputeInternal(OpKernelContext* ctx) const { const Tensor* X = ctx->Input(0); const TensorShape& input_shape{X->Shape()}; size_t rank = input_shape.NumDimensions(); diff --git a/onnxruntime/core/providers/cuda/math/unary_elementwise_ops_impl.cu b/onnxruntime/core/providers/cuda/math/unary_elementwise_ops_impl.cu index 9bb5a2d855651..bb17158e29473 100644 --- a/onnxruntime/core/providers/cuda/math/unary_elementwise_ops_impl.cu +++ b/onnxruntime/core/providers/cuda/math/unary_elementwise_ops_impl.cu @@ -55,6 +55,10 @@ UNARY_OPS() SPECIALIZED_UNARY_ELEMENTWISE_IMPL(name, float) \ SPECIALIZED_UNARY_ELEMENTWISE_IMPL(name, double) +#define SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFDB(name) \ + SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(name) \ + SPECIALIZED_UNARY_ELEMENTWISE_IMPL(name, BFloat16) + #define SPECIALIZED_UNARY_ELEMENTWISE_IMPL_CSILHFD(name) \ SPECIALIZED_UNARY_ELEMENTWISE_IMPL(name, int8_t) \ SPECIALIZED_UNARY_ELEMENTWISE_IMPL(name, int16_t) \ @@ -75,8 +79,8 @@ SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Floor) SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Ceil) SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Reciprocal) SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Sqrt) -SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Log) -SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Exp) +SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFDB(Log) +SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFDB(Exp) SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Erf) SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Round) SPECIALIZED_UNARY_ELEMENTWISE_IMPL_HFD(Sin) diff --git a/onnxruntime/core/profile/context.h b/onnxruntime/core/providers/cuda/nvtx_profile_context.h similarity index 100% rename from onnxruntime/core/profile/context.h rename to onnxruntime/core/providers/cuda/nvtx_profile_context.h diff --git a/onnxruntime/core/providers/cuda/reduction/reduction_ops.cc b/onnxruntime/core/providers/cuda/reduction/reduction_ops.cc index 5b055487289e1..da8be441eaaa2 100644 --- a/onnxruntime/core/providers/cuda/reduction/reduction_ops.cc +++ b/onnxruntime/core/providers/cuda/reduction/reduction_ops.cc @@ -513,7 +513,9 @@ Status ReduceComputeCore(CUDAExecutionProvider& cuda_ep, const Tensor& input, Pr IAllocatorUniquePtr temp_X; cudnnDataType_t cudnn_type_X = CudnnTensor::GetDataType(); - if (ReduceTensorIndices == CUDNN_REDUCE_TENSOR_FLATTENED_INDICES && std::is_same::value) { + // Reducesum with BFP16 is not supported by cudnn, so convert input to fp32 then call cudnn + if ((ReduceTensorIndices == 
CUDNN_REDUCE_TENSOR_FLATTENED_INDICES && std::is_same::value) || + (ReduceTensorIndices == CUDNN_REDUCE_TENSOR_NO_INDICES && std::is_same::value)) { // ArgMax/ArgMin with FP16 are not supported by cudnn, so convert input to fp32 then call cudnn temp_X = cuda_ep.GetScratchBuffer(input_count); cudnn_type_X = CUDNN_DATA_FLOAT; @@ -521,7 +523,7 @@ Status ReduceComputeCore(CUDAExecutionProvider& cuda_ep, const Tensor& input, Pr } CudnnReduceDescriptor reduce_desc; - if (std::is_same::value) { + ORT_IF_CONSTEXPR (std::is_same::value || std::is_same::value) { ORT_RETURN_IF_ERROR(reduce_desc.Set(cudnn_reduce_op, CudnnTensor::GetDataType(), ReduceTensorIndices)); } else { ORT_RETURN_IF_ERROR(reduce_desc.Set(cudnn_reduce_op, cudnn_type_X, ReduceTensorIndices)); @@ -652,11 +654,22 @@ Status ReduceComputeCore(CUDAExecutionProvider& cuda_ep, const Tensor& input, Pr CUDA_RETURN_IF_ERROR(cudaMemcpyAsync(output.template MutableData(), input.template Data(), input_count * sizeof(T), cudaMemcpyDeviceToDevice, stream)); } } else { - CUDNN_RETURN_IF_ERROR(cudnnReduceTensor( - cuda_ep.PerThreadCudnnHandle(), reduce_desc, indices_cuda.get(), indices_bytes, - workspace_cuda.get(), workspace_bytes, - &one, input_tensor, reinterpret_cast(input.template Data()), - &zero, output_tensor, reinterpret_cast(output.template MutableData()))); + if (temp_X) { + auto temp_output = cuda_ep.GetScratchBuffer(output_count); + CUDNN_RETURN_IF_ERROR(cudnnReduceTensor( + cuda_ep.PerThreadCudnnHandle(), reduce_desc, indices_cuda.get(), indices_bytes, + workspace_cuda.get(), workspace_bytes, + &one, input_tensor, temp_X.get(), + &zero, output_tensor, temp_output.get())); + + Impl_Cast(stream, temp_output.get(), reinterpret_cast(output.template MutableData()), output_count); + } else { + CUDNN_RETURN_IF_ERROR(cudnnReduceTensor( + cuda_ep.PerThreadCudnnHandle(), reduce_desc, indices_cuda.get(), indices_bytes, + workspace_cuda.get(), workspace_bytes, + &one, input_tensor, reinterpret_cast(input.template Data()), + &zero, output_tensor, reinterpret_cast(output.template MutableData()))); + } } } } else { @@ -835,111 +848,6 @@ SPECIALIZED_REDUCEKERNEL_COMPUTEIMPL(int64_t) SPECIALIZED_REDUCEKERNEL_COMPUTEIMPL(int8_t) SPECIALIZED_REDUCEKERNEL_COMPUTEIMPL(uint8_t) -template <> -template <> -Status ReduceKernel::ComputeImpl( - OpKernelContext* ctx, cudnnReduceTensorOp_t cudnn_reduce_op) const { - typedef typename ToCudaType::MappedType CudaT; - const Tensor* X = ctx->Input(0); - TensorShapeVector axes; - size_t num_inputs = ctx->InputCount(); - if (num_inputs == 2) { - const Tensor* axes_tensor = ctx->Input(1); - ORT_ENFORCE(axes_tensor != nullptr, "Axes input is null"); - ORT_ENFORCE(axes_tensor->Shape().NumDimensions() == 1, "An axes tensor must be a vector tensor."); - auto nDims = static_cast(axes_tensor->Shape()[0]); - const auto* data = axes_tensor->template Data(); - axes.assign(data, data + nDims); - } else { - axes.assign(axes_.begin(), axes_.end()); - } - - if (axes.empty() && noop_with_empty_axes_) { - auto* Y = ctx->Output(0, X->Shape()); - CUDA_RETURN_IF_ERROR(cudaMemcpyAsync(Y->template MutableData(), X->template Data(), - X->SizeInBytes(), cudaMemcpyDeviceToDevice, Stream())); - return Status::OK(); - } - - PrepareReduceMetadata prepare_reduce_metadata; - ORT_RETURN_IF_ERROR(PrepareForReduce(X, keepdims_, axes, prepare_reduce_metadata)); - - Tensor* Y = ctx->Output(0, prepare_reduce_metadata.squeezed_output_dims); - - int64_t input_count = prepare_reduce_metadata.input_count; - int64_t output_count = 
prepare_reduce_metadata.output_count; - auto& input_dims_cudnn = prepare_reduce_metadata.input_dims_cudnn; - auto& output_dims_cudnn = prepare_reduce_metadata.output_dims_cudnn; - - if (input_count == 0) { - assert(Y->Shape().Size() == 0); - return Status::OK(); - } - - if (input_count == output_count) { - if (Y->template MutableData() != X->template Data()) { - CUDA_RETURN_IF_ERROR(cudaMemcpyAsync(Y->template MutableData(), X->template Data(), - input_count * sizeof(BFloat16), cudaMemcpyDeviceToDevice, Stream())); - } - return Status::OK(); - } - - if (fast_reduction_ && !ctx->GetUseDeterministicCompute()) { - int m{}, n{}; - const auto applicable_matrix_reduction = - get_applicable_matrix_reduction(cudnn_reduce_op, X->Shape().GetDims(), axes, m, n); - switch (applicable_matrix_reduction) { - case ApplicableMatrixReduction::Rows: { - return reduce_matrix_rows(Stream(), reinterpret_cast(X->template Data()), - reinterpret_cast(Y->template MutableData()), m, n); - } - case ApplicableMatrixReduction::Columns: { - const auto buffer_size_bytes = compute_reduce_matrix_columns_buffer_size(m, n); - auto buffer = cuda_ep_->GetScratchBuffer(buffer_size_bytes); - return reduce_matrix_columns(Stream(), reinterpret_cast(X->template Data()), - reinterpret_cast(Y->template MutableData()), m, n, buffer.get(), - buffer_size_bytes); - } - default: - break; - } - } - - CUDA_RETURN_IF_ERROR(cudaMemsetAsync(Y->MutableDataRaw(), 0, Y->SizeInBytes(), Stream())); - - size_t indices_bytes = 0; - size_t workspace_bytes = 0; - CudnnTensor input_tensor; - CudnnTensor output_tensor; - CudnnReduceDescriptor reduce_desc; - - cudnnDataType_t cudnn_type_X = CUDNN_DATA_FLOAT; - IAllocatorUniquePtr temp_X = GetScratchBuffer(input_count); - Impl_Cast(Stream(), reinterpret_cast(X->template Data()), temp_X.get(), - X->Shape().Size()); - - ORT_RETURN_IF_ERROR(reduce_desc.Set(cudnn_reduce_op, cudnn_type_X, CUDNN_REDUCE_TENSOR_NO_INDICES)); - ORT_RETURN_IF_ERROR(input_tensor.Set(input_dims_cudnn, cudnn_type_X)); - ORT_RETURN_IF_ERROR(output_tensor.Set(output_dims_cudnn, cudnn_type_X)); - CUDNN_RETURN_IF_ERROR( - cudnnGetReductionIndicesSize(CudnnHandle(), reduce_desc, input_tensor, output_tensor, &indices_bytes)); - CUDNN_RETURN_IF_ERROR( - cudnnGetReductionWorkspaceSize(CudnnHandle(), reduce_desc, input_tensor, output_tensor, &workspace_bytes)); - IAllocatorUniquePtr indices_cuda = GetScratchBuffer(indices_bytes); - IAllocatorUniquePtr workspace_cuda = GetScratchBuffer(workspace_bytes); - - const auto one = Consts::One; - const auto zero = Consts::Zero; - auto temp_Y = GetScratchBuffer(output_count); - CUDNN_RETURN_IF_ERROR(cudnnReduceTensor(CudnnHandle(), reduce_desc, indices_cuda.get(), indices_bytes, - workspace_cuda.get(), workspace_bytes, &one, input_tensor, temp_X.get(), - &zero, output_tensor, temp_Y.get())); - - Impl_Cast(Stream(), temp_Y.get(), reinterpret_cast(Y->template MutableData()), output_count); - - return Status::OK(); -} - namespace ReductionOps { template diff --git a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.cc b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.cc index 339f1e1f65273..7ae031d45f7c8 100644 --- a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.cc +++ b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.cc @@ -48,24 +48,43 @@ std::string GetErrorCause(int error_code) { } } -QLinearOpType GetQLinearOpType(const onnxruntime::Node& node) { - const auto& op_type = node.OpType(); - if (op_type == "DequantizeLinear") - return 
QLinearOpType::DequantizeLinear; - else if (op_type == "QuantizeLinear") - return QLinearOpType::QuantizeLinear; - else if (op_type == "QLinearConv") - return QLinearOpType::QLinearConv; - else if (op_type == "QLinearMatMul") - return QLinearOpType::QLinearMatMul; - else if (op_type == "QLinearAdd") - return QLinearOpType::QLinearAdd; - else if (op_type == "QLinearSigmoid") - return QLinearOpType::QLinearSigmoid; - else if (op_type == "QLinearAveragePool") - return QLinearOpType::QLinearAveragePool; - - return QLinearOpType::Unknown; +QuantizedOpType GetQuantizedOpType(const NodeUnit& node_unit) { + const auto& op_type = node_unit.OpType(); + if (node_unit.UnitType() == NodeUnit::Type::SingleNode) { + if (op_type == "DequantizeLinear") + return QuantizedOpType::DequantizeLinear; + else if (op_type == "QuantizeLinear") + return QuantizedOpType::QuantizeLinear; + else if (op_type == "QLinearConv") + return QuantizedOpType::QLinearConv; + else if (op_type == "QLinearMatMul") + return QuantizedOpType::QLinearMatMul; + else if (op_type == "QLinearAdd") + return QuantizedOpType::QLinearAdd; + else if (op_type == "QLinearMul") + return QuantizedOpType::QLinearMul; + else if (op_type == "QLinearSigmoid") + return QuantizedOpType::QLinearSigmoid; + else if (op_type == "QLinearAveragePool") + return QuantizedOpType::QLinearAveragePool; + } else if (node_unit.UnitType() == NodeUnit::Type::QDQGroup) { + if (op_type == "Conv") + return QuantizedOpType::QDQConv; + else if (op_type == "Resize") + return QuantizedOpType::QDQResize; + else if (op_type == "AveragePool") + return QuantizedOpType::QDQAveragePool; + else if (op_type == "Add") + return QuantizedOpType::QDQAdd; + else if (op_type == "Mul") + return QuantizedOpType::QDQMul; + else if (op_type == "Transpose") + return QuantizedOpType::QDQTranspose; + } else { + // throw? 
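+    // Unhandled unit types (neither SingleNode nor QDQGroup) fall through
+    // and are reported as QuantizedOpType::Unknown below.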
+ } + + return QuantizedOpType::Unknown; } ConvType GetConvType(const NodeUnit& node_unit, const InitializedTensorSet& initializers) { @@ -89,31 +108,29 @@ ConvType GetConvType(const NodeUnit& node_unit, const InitializedTensorSet& init return ConvType::Grouped; } -bool IsQLinearBinaryOp(QLinearOpType qlinear_op_type) { - return qlinear_op_type == QLinearOpType::QLinearConv || - qlinear_op_type == QLinearOpType::QLinearMatMul || - qlinear_op_type == QLinearOpType::QLinearAdd; +bool IsQuantizedConv(QuantizedOpType quant_op_type) { + return (quant_op_type == QuantizedOpType::QLinearConv) || + (quant_op_type == QuantizedOpType::QDQConv); } -bool HasValidUnaryOpQuantizedInputs(const NodeUnit& node_unit) { - int32_t input_type; - if (!GetType(node_unit.Inputs()[0].node_arg, input_type)) - return false; - - if (input_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8) { - LOGS_DEFAULT(VERBOSE) << "[" << node_unit.OpType() - << "] Input type: [" << input_type - << "] is not supported for now"; - return false; - } +bool IsQuantizedPool(QuantizedOpType quant_op_type) { + return (quant_op_type == QuantizedOpType::QLinearAveragePool) || + (quant_op_type == QuantizedOpType::QDQAveragePool); +} - return true; +bool IsQuantizedBinaryOp(QuantizedOpType quant_op_type) { + return quant_op_type == QuantizedOpType::QLinearMatMul || + quant_op_type == QuantizedOpType::QLinearAdd || + quant_op_type == QuantizedOpType::QLinearMul || + quant_op_type == QuantizedOpType::QDQAdd || + quant_op_type == QuantizedOpType::QDQMul || + IsQuantizedConv(quant_op_type); } -bool HasValidBinaryOpQuantizedInputs(const NodeUnit& node_unit) { - auto op_type = GetQLinearOpType(node_unit.GetNode()); +bool HasValidBinaryOpQuantizedInputTypes(const NodeUnit& node_unit) { + auto quant_op_type = GetQuantizedOpType(node_unit); int32_t a_input_type, b_input_type; - if (!IsQLinearBinaryOp(op_type)) { + if (!IsQuantizedBinaryOp(quant_op_type)) { LOGS_DEFAULT(VERBOSE) << "[" << node_unit.OpType() << "] is not a binary qlinear op"; return false; } @@ -124,16 +141,17 @@ bool HasValidBinaryOpQuantizedInputs(const NodeUnit& node_unit) { if (!GetType(inputs[1].node_arg, b_input_type)) return false; - // QlinearConv supports u8u8 or u8s8 - // QLinearMatMul/Add only support u8u8 - bool is_qlinear_conv = op_type == QLinearOpType::QLinearConv; + // QlinearConv/MatMul supports u8u8 or u8s8 + // QLinearAdd/QLinearMul only support u8u8 + bool is_quant_conv_or_matmul = IsQuantizedConv(quant_op_type) || (quant_op_type == QuantizedOpType::QLinearMatMul); + bool has_valid_qlinear_conv_weight = (b_input_type == ONNX_NAMESPACE::TensorProto_DataType_UINT8 || b_input_type == ONNX_NAMESPACE::TensorProto_DataType_INT8); if (a_input_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8 || - (!is_qlinear_conv && a_input_type != b_input_type) || - (is_qlinear_conv && !has_valid_qlinear_conv_weight)) { + (!is_quant_conv_or_matmul && a_input_type != b_input_type) || + (is_quant_conv_or_matmul && !has_valid_qlinear_conv_weight)) { LOGS_DEFAULT(VERBOSE) << "[" << node_unit.OpType() << "] A Input type: [" << a_input_type << "] B Input type: [" << b_input_type @@ -144,182 +162,6 @@ bool HasValidBinaryOpQuantizedInputs(const NodeUnit& node_unit) { return true; } -bool HasValidQuantizationScales(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const std::vector& indices, const OpSupportCheckParams& params, bool is_input) { - const auto& op_type = node_unit.OpType(); - auto qlinear_op_type = GetQLinearOpType(node_unit.GetNode()); - bool is_qlinear_conv = 
(qlinear_op_type == QLinearOpType::QLinearConv); - bool is_qlinear_matmul = (qlinear_op_type == QLinearOpType::QLinearMatMul); - const auto& io_defs = is_input ? node_unit.Inputs() : node_unit.Outputs(); - for (const auto idx : indices) { - if (idx >= io_defs.size()) { - LOGS_DEFAULT(VERBOSE) << (is_input ? "Input" : "Output") << " index, " << idx - << " >= size, " << io_defs.size() - << " of NodeUnit: " << node_unit.Name(); - return false; - } - - const auto& io_def = io_defs[idx]; - if (!io_def.quant_param.has_value()) { - LOGS_DEFAULT(VERBOSE) << "HasValidQuantizationZeroPoints, Input index, " << idx - << " has no quant_param"; - return false; - } - - const auto scale_name = io_def.quant_param->scale.Name(); - - if (!Contains(initializers, scale_name)) { - LOGS_DEFAULT(VERBOSE) << "The scale of " << op_type << " must be an initializer tensor"; - return false; - } - - // If this op is Qlinear[Conv/MatMul], we want to check u8s8 support for weight tensor (or B tensor for QlinearMatMul) - bool is_conv_matmul_weight = is_input && (is_qlinear_conv || is_qlinear_matmul) && idx == 1; - bool is_conv_matmul_u8s8_weight = false; - - if (is_conv_matmul_weight) { - const auto& weight_tensor = *initializers.at(io_def.node_arg.Name()); - is_conv_matmul_u8s8_weight = weight_tensor.data_type() == ONNX_NAMESPACE::TensorProto_DataType_INT8; - } - - const auto& scale_tensor = *initializers.at(scale_name); - int64_t scales_dim = scale_tensor.dims().empty() ? 1 : scale_tensor.dims()[0]; - if (!is_conv_matmul_u8s8_weight) { - if (scales_dim != 1) { - LOGS_DEFAULT(VERBOSE) << op_type << " does not support per-channel quantization, " - << " for now, only u8s8 QlinearConv supports per-channel quantization on API 29+"; - return false; - } - } else if (scales_dim != 1) { - // For u8s8 Qlinear[Conv/MatMul], we support - // 1. Per-tensor, the weight will be transformed to uint8 later - // 2. Per-channel, only from Android API level 29 - if (is_qlinear_matmul) { - LOGS_DEFAULT(VERBOSE) << "QLinearMatMul does not support per-channel quantization"; - return false; - } - - if (params.android_feature_level < ANEURALNETWORKS_FEATURE_LEVEL_3) { - LOGS_DEFAULT(VERBOSE) << op_type << " only supports per-channel quantization on Android API 29+, " - << "system NNAPI feature level: " << params.android_feature_level; - return false; - } - - const auto& weight_tensor = *initializers.at(io_def.node_arg.Name()); - if (weight_tensor.dims()[0] != scales_dim) { - LOGS_DEFAULT(VERBOSE) << op_type << " mismatch int8 per-channel quantization weight," - << " weight dimension[0] " << weight_tensor.dims()[0] - << " scale dimension " << scales_dim; - return false; - } - } - } - - return true; -} - -bool HasValidQuantizationZeroPoints(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const std::vector& indices, bool is_input) { - const auto& op_type = node_unit.OpType(); - auto qlinear_op_type = GetQLinearOpType(node_unit.GetNode()); - bool is_qlinear_conv = (qlinear_op_type == QLinearOpType::QLinearConv); - bool is_qlinear_matmul = (qlinear_op_type == QLinearOpType::QLinearMatMul); - - const auto& io_defs = is_input ? node_unit.Inputs() : node_unit.Outputs(); - for (const auto idx : indices) { - if (idx >= io_defs.size()) { - LOGS_DEFAULT(VERBOSE) << "HasValidQuantizationZeroPoints, " - << (is_input ? 
"Input" : "Output") << " index, " << idx - << " >= size, " << io_defs.size(); - return false; - } - - const auto& io_def = io_defs[idx]; - if (!io_def.quant_param.has_value()) { - LOGS_DEFAULT(VERBOSE) << "HasValidQuantizationZeroPoints, Input index, " << idx - << " has no quant_param"; - return false; - } - - // zero point is optional here - if (!io_def.quant_param->zero_point) - return true; - - const auto& zero_point_name = io_def.quant_param->zero_point->Name(); - if (!Contains(initializers, zero_point_name)) { - LOGS_DEFAULT(VERBOSE) << "The zero point of " << op_type << " must be an initializer tensor"; - return false; - } - - bool is_conv_matmul_weight = is_input && (is_qlinear_conv || is_qlinear_matmul) && idx == 1; - bool is_conv_matmul_u8s8_weight = false; - - if (is_conv_matmul_weight) { - const auto& weight_tensor = *initializers.at(io_def.node_arg.Name()); - is_conv_matmul_u8s8_weight = weight_tensor.data_type() == ONNX_NAMESPACE::TensorProto_DataType_INT8; - } - - const auto& zero_tensor = *initializers.at(zero_point_name); - int64_t zero_dim = zero_tensor.dims().empty() ? 1 : zero_tensor.dims()[0]; - - if (!is_conv_matmul_u8s8_weight) { - if (zero_dim != 1) { - LOGS_DEFAULT(VERBOSE) << op_type << " does not support per-channel quantization, " - << " for now, only u8s8 QlinearConv supports per-channel quantization on API 29+"; - return false; - } - } else { - // For u8s8 Qlinear[Conv/MatMul], we support - // 1. Per-tensor, the weight will be transformed to uint8 later - // 2. Per-channel, only from Android API level 29 - if (zero_tensor.data_type() != ONNX_NAMESPACE::TensorProto_DataType_INT8) { - LOGS_DEFAULT(VERBOSE) << "u8s8 Qlinear[Conv/MatMul] only supports int8 zero point for weight, " - << "actual zero point type: [" << zero_tensor.data_type() << "]"; - return false; - } - - if (zero_dim != 1) { - if (is_qlinear_matmul) { - LOGS_DEFAULT(VERBOSE) << "QLinearMatMul does not support per-channel quantization"; - return false; - } - } - - // For onnx, u8s8 QlinearConv, the weight zero point can be a scalar, - // or a tensor with same channel as weight, for NNAPI we only support it be - // 0 (scalar) or all 0 (tensor), NNAPI will assume the zero point for per-channel - // quantization is 0 there is no input for it - const auto& weight_tensor = *initializers.at(io_def.node_arg.Name()); - if (weight_tensor.dims()[0] != zero_dim && zero_dim != 1) { - LOGS_DEFAULT(VERBOSE) << op_type << " mismatch int8 per-channel quantization weight," - << " weight dimension[0] " << weight_tensor.dims()[0] - << " zero point dimension " << zero_dim; - return false; - } - - std::vector unpacked_tensor; - auto status = onnxruntime::utils::UnpackInitializerData(zero_tensor, node_unit.ModelPath(), unpacked_tensor); - if (!status.IsOK()) { - LOGS_DEFAULT(ERROR) << "Qlinear[Conv/MatMul] error when unpack zero tensor: " << zero_point_name - << ", error msg: " << status.ErrorMessage(); - return false; - } - - // Verify all onnx weight zero point(s) are 0(s) - const int8_t* zero_points = reinterpret_cast(unpacked_tensor.data()); - for (size_t i = 0; i < unpacked_tensor.size(); i++) { - if (zero_points[i] != 0) { - LOGS_DEFAULT(VERBOSE) << "u8s8 Qlinear[Conv/MatMul] only support 0 as zero point, " - << "zero_points[" << i << "] has value: " << zero_points[i]; - return false; - } - } - } - } - - return true; -} - common::Status GetQuantizationScaleAndZeroPoint( const InitializedTensorSet& initializers, const NodeUnitIODef& io_def, const Path& model_path, float& scale, int32_t& zero_point) { @@ -365,8 
+207,8 @@ common::Status GetQuantizationScaleAndZeroPoint( common::Status GetQuantizationScaleAndZeroPoint( const InitializedTensorSet& initializers, const NodeUnit& node_unit, const std::string& name, - float& scale, int32_t& zero_point, bool is_input) { - const auto& io_defs = is_input ? node_unit.Inputs() : node_unit.Outputs(); + float& scale, int32_t& zero_point, IOKind io_kind) { + const auto& io_defs = io_kind == IOKind::Input ? node_unit.Inputs() : node_unit.Outputs(); for (const auto& io_def : io_defs) { if (io_def.node_arg.Name() == name) return GetQuantizationScaleAndZeroPoint(initializers, io_def, node_unit.ModelPath(), @@ -505,27 +347,6 @@ bool IsNodeSupportedInGroup(const NodeUnit& node_unit, const GraphViewer& graph_ return true; } -bool IsInputSupported(const NodeArg& input, const std::string& parent_name) { - const auto& input_name = input.Name(); - const auto* shape_proto = input.Shape(); - // We do not support input with no shape - if (!shape_proto) { - LOGS_DEFAULT(VERBOSE) << "Input [" << input_name << "] of [" << parent_name - << "] has no shape"; - return false; - } - - for (const auto& dim : shape_proto->dim()) { - // For now we do not support dynamic shape - if (!dim.has_dim_value()) { - LOGS_DEFAULT(WARNING) << "Dynamic shape is not supported for now, for input:" << input_name; - return false; - } - } - - return true; -} - std::string Shape2String(const std::vector& shape) { std::ostringstream os; os << "[ "; diff --git a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.h b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.h index c3729fb1c8f10..859a92b0bf982 100644 --- a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.h +++ b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/helper.h @@ -73,8 +73,8 @@ struct OpSupportCheckParams; std::string GetErrorCause(int error_code); -enum class QLinearOpType : uint8_t { - Unknown, // Unknown or not a linear quantized op +enum class QuantizedOpType : uint8_t { + Unknown, // Unknown or not a quantized NodeUnit DequantizeLinear, QuantizeLinear, QLinearConv, @@ -82,9 +82,16 @@ enum class QLinearOpType : uint8_t { QLinearAdd, QLinearSigmoid, QLinearAveragePool, + QLinearMul, // Not yet supported - // QLinearMul, // QLinearReduceMean, + QDQConv, + QDQResize, + QDQAveragePool, + QDQAdd, + QDQMul, + QDQTranspose, + // TODO, add other QDQ NodeUnit types }; enum class ConvType : uint8_t { @@ -93,28 +100,29 @@ enum class ConvType : uint8_t { Grouped, }; -QLinearOpType GetQLinearOpType(const onnxruntime::Node& node); +enum class IOKind : uint8_t { + Input, + Output, +}; + +QuantizedOpType GetQuantizedOpType(const NodeUnit& node_unit); // Return the type of the conv ops, // This function assumes the input is a 2d conv node ConvType GetConvType(const NodeUnit& node_unit, const InitializedTensorSet& initializers); -// This qlinear op is an operator takes 2 inputs and produces 1 output -// Such as QLinearConv, QLinearMatMul, QLinearAdd, ... 
-bool IsQLinearBinaryOp(QLinearOpType qlinear_op_type); +// If this is a quantized Conv (QLinearConv or QDQConv) +bool IsQuantizedConv(QuantizedOpType quant_op_type); -// Check if a qlinear unary op has valid inputs, Qlinear[Sigmoid/AveragePool] -bool HasValidUnaryOpQuantizedInputs(const NodeUnit& node_unit); -// Check if a qlinear binary op has valid inputs, Qlinear[Conv/MatMul/Add] -bool HasValidBinaryOpQuantizedInputs(const NodeUnit& node_unit); +// If this is a quantized Pool (QLinearAveragePool or QDQAveragePool) +bool IsQuantizedPool(QuantizedOpType quant_op_type); -// Check if a qlinear op has valid scales for given indices -bool HasValidQuantizationScales(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const std::vector& indices, const OpSupportCheckParams& params, bool is_input); +// This quantized op is an operator or qdq node unit takes 2 inputs and produces 1 output +// Such as QLinearConv, QLinearMatMul, QLinearAdd, QDQConv,... +bool IsQuantizedBinaryOp(QuantizedOpType quant_op_type); -// Check if a qlinear op has valid zero points for given indices -bool HasValidQuantizationZeroPoints(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const std::vector& indices, bool is_input); +// Check if a qlinear binary op has valid inputs, Qlinear[Conv/MatMul/Add] +bool HasValidBinaryOpQuantizedInputTypes(const NodeUnit& node_unit); common::Status GetQuantizationScaleAndZeroPoint( const InitializedTensorSet& initializers, const NodeUnitIODef& io_def, const Path& model_path, @@ -122,7 +130,7 @@ common::Status GetQuantizationScaleAndZeroPoint( common::Status GetQuantizationScaleAndZeroPoint( const InitializedTensorSet& initializers, const NodeUnit& node_unit, const std::string& name, - float& scale, int32_t& zero_point, bool is_input = true); + float& scale, int32_t& zero_point, IOKind io_kind = IOKind::Input); // Get Shape/Type of a NodeArg // TODO, move to shared_utils @@ -141,9 +149,6 @@ bool IsNodeSupportedInGroup(const NodeUnit& node_unit, const GraphViewer& graph_ const OpSupportCheckParams& params, const std::unordered_set& node_outputs_in_group); -// If a graph input is supported by NNAPI -bool IsInputSupported(const NodeArg& input, const std::string& parent_name); - // If an NNAPI partition node group is valid bool IsValidSupportedNodeGroup(const std::vector& supported_node_group); diff --git a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/model_builder.cc b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/model_builder.cc index d599d573514e7..3f95048335358 100644 --- a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/model_builder.cc +++ b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/model_builder.cc @@ -158,14 +158,10 @@ void ModelBuilder::PreprocessNodeUnits() { // Help to get all quantized operators' input and the NodeUnit(s) using the input void ModelBuilder::GetAllQuantizedOpInputs() { for (const auto& node_unit : node_unit_holder_) { - // TODO, hookup getting quantized inputs with QDQ NodeUnits and remove the ORT_ENFORCE - ORT_ENFORCE(node_unit->UnitType() == NodeUnit::Type::SingleNode, "QDQ NodeUnit is not yet implemented"); + auto quant_op_type = GetQuantizedOpType(*node_unit); - auto qlinear_op_type = GetQLinearOpType(node_unit->GetNode()); - - // Not a qlinear op - // TODO, add handling for QDQ NodeUnit - if (qlinear_op_type == QLinearOpType::Unknown) + // Not a qlinear op or qdq node group + if (quant_op_type == QuantizedOpType::Unknown) continue; const auto add_quantized_input = @@ -174,12 
+170,12 @@ void ModelBuilder::GetAllQuantizedOpInputs() { all_quantized_op_inputs[input_name].push_back(&node_unit); }; - // All qlinear ops EXCEPT QuantizeLinear has quantized input - if (qlinear_op_type != QLinearOpType::QuantizeLinear) { + // All quantized ops EXCEPT QuantizeLinear have a quantized input + if (quant_op_type != QuantizedOpType::QuantizeLinear) { add_quantized_input(*node_unit, 0); } - if (IsQLinearBinaryOp(qlinear_op_type)) { + if (IsQuantizedBinaryOp(quant_op_type)) { add_quantized_input(*node_unit, 1); } @@ -214,7 +210,7 @@ static Status GetInputDataType( // TODO, verify the scale and zero point match if there are multiple op using same input const auto* node_unit = all_quantized_op_inputs.at(name)[0]; ORT_RETURN_IF_ERROR(GetQuantizationScaleAndZeroPoint( - initializers, *node_unit, name, scale, zero_point, true /* is_input */)); + initializers, *node_unit, name, scale, zero_point, IOKind::Input)); break; } // case ONNX_NAMESPACE::TensorProto_DataType_INT8: @@ -494,14 +490,29 @@ Status ModelBuilder::AddOperandFromPersistMemoryBuffer( Status ModelBuilder::AddOperations() { const auto& node_indices = graph_viewer_.GetNodesInTopologicalOrder(); - std::unordered_set processed_node_units; - processed_node_units.reserve(node_unit_holder_.size()); - for (size_t i = 0; i < node_indices.size(); i++) { - const auto* node(graph_viewer_.GetNode(node_indices[i])); + for (const auto node_idx : node_indices) { + LOGS_DEFAULT(VERBOSE) << "Adding node [" << node_idx << "]"; + const auto* node(graph_viewer_.GetNode(node_idx)); const NodeUnit& node_unit = GetNodeUnit(node); - // Since a NodeUnit may contain multiple nodes, avoid processing the same NodeUnit multiple times - if (Contains(processed_node_units, &node_unit)) + // Since a NodeUnit may contain multiple nodes, inserting a NodeUnit at the first occurrence of + // one of its nodes in topological order may put the NodeUnits themselves in an incorrect + // topological order, for example, + // Q1 + // | + // DQ1 DQ2 + // \ | + // CONV + // | + // Q2 + // In the above graph, we will have 2 NodeUnits, NU1 [Q1] and NU2 [DQ1, DQ2, CONV, Q2]. + // Q1 and DQ2 have the same topological order, so if we insert NU2 when we first visit DQ2 in + // topological order, the input produced by Q1 and required by NU2 has not been inserted yet, and + // finding the inputs for NU2 will fail + // + // So we only insert the NodeUnit once, when we hit its target node, to preserve the topological order + // of the NodeUnits + if (node != &node_unit.GetNode()) continue; if (const auto* op_builder = GetOpBuilder(node_unit)) { @@ -510,8 +521,6 @@ Status ModelBuilder::AddOperations() { return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT, "Node [", node_unit.Name(), "], type [", node_unit.OpType(), "] is not supported"); } - - processed_node_units.insert(&node_unit); } return Status::OK(); @@ -535,6 +544,8 @@ Status ModelBuilder::AddOperation(int op, const std::vector& input_ind "op = " + std::to_string(op)); num_nnapi_ops_++; + + LOGS_DEFAULT(VERBOSE) << "Added NNAPI Operation Type [" << op << "]"; return Status::OK(); } @@ -640,8 +651,9 @@ int32_t ModelBuilder::FindActivation(const NodeUnit& node_unit) { // TODO, add support of activation fusion for quantized node group (qdq or qlinear) // We do not support activation fusion for quantized operators for now - auto qlinear_op_type = GetQLinearOpType(node_unit.GetNode()); - if (qlinear_op_type != QLinearOpType::Unknown) + // (usually the activations are already fused during quantization) + auto
quant_op_type = GetQuantizedOpType(node_unit); + if (quant_op_type != QuantizedOpType::Unknown) return fuse_code; for (auto it = output_node.OutputEdgesBegin(), end = output_node.OutputEdgesEnd(); it != end; ++it) { diff --git a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_builder.cc b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_builder.cc index 84022ea774e57..cd467017c3ab4 100644 --- a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_builder.cc +++ b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_builder.cc @@ -452,7 +452,7 @@ static Status HandleAutoPad(const Shape& input_shape, } // Get scales and zero points for the qlinear binary ops (which has 2 input and 1 output) -// QLinearConv, QLinearMatmul, QLinearAdd +// QLinearConv, QLinearMatmul, QLinearAdd, QLinearMul // a, b are inputs, and y is output static Status GetBinaryOpQuantizationScaleAndZeroPoint( const InitializedTensorSet& initializers, const NodeUnit& node_unit, @@ -656,8 +656,11 @@ class BinaryOpBuilder : public BaseOpBuilder { }; /* static */ bool BinaryOpBuilder::IsQuantizedOp(const NodeUnit& node_unit) { - // TODO, add support for QDQ NodeUnit - return node_unit.OpType() == "QLinearAdd"; + const auto quant_type = GetQuantizedOpType(node_unit); + return quant_type == QuantizedOpType::QLinearAdd || + quant_type == QuantizedOpType::QLinearMul || + quant_type == QuantizedOpType::QDQAdd || + quant_type == QuantizedOpType::QDQMul; } void BinaryOpBuilder::AddInitializersToSkip(ModelBuilder& model_builder, const NodeUnit& node_unit) const { @@ -680,6 +683,7 @@ void BinaryOpBuilder::AddInitializersToSkip(ModelBuilder& model_builder, const N "Mul", "Div", "QLinearAdd", + "QLinearMul", "Pow", }); } @@ -690,12 +694,12 @@ Status BinaryOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const int32_t op_code; bool add_activation = true; - bool op_is_qlinear = op_type == "QLinearAdd"; - if (op_type == "Add" || op_is_qlinear) { + bool is_quant_op = IsQuantizedOp(node_unit); + if (op_type == "Add" || op_type == "QLinearAdd") { // Add/QLinearAdd/QDQAdd op_code = ANEURALNETWORKS_ADD; } else if (op_type == "Sub") { op_code = ANEURALNETWORKS_SUB; - } else if (op_type == "Mul") { + } else if (op_type == "Mul" || op_type == "QLinearMul") { // Mul/QLinearMul/QDQMul op_code = ANEURALNETWORKS_MUL; } else if (op_type == "Div") { op_code = ANEURALNETWORKS_DIV; @@ -721,7 +725,7 @@ Status BinaryOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const b_zero_point = 0, y_zero_point = 0; - if (op_is_qlinear) { + if (is_quant_op) { ORT_RETURN_IF_ERROR(GetBinaryOpQuantizationScaleAndZeroPoint( model_builder.GetInitializerTensors(), node_unit, a_scale, b_scale, y_scale, @@ -729,7 +733,7 @@ Status BinaryOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const } // Verify if the scale and zero point matchs from onnx input and nnapi input match - if (op_is_qlinear) { + if (is_quant_op) { ORT_RETURN_IF_ERROR(IsValidInputQuantizedType(model_builder, input1, a_scale, a_zero_point)); ORT_RETURN_IF_ERROR(IsValidInputQuantizedType(model_builder, input2, b_scale, b_zero_point)); } @@ -784,12 +788,29 @@ Status ReluOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N #pragma region op_transpose class TransposeOpBuilder : public BaseOpBuilder { + public: + void AddInitializersToSkip(ModelBuilder& model_builder, const NodeUnit& node_unit) const override; + private: Status AddToModelBuilderImpl(ModelBuilder& model_builder, const NodeUnit& node_unit) const override; + static 
bool IsQuantizedOp(const NodeUnit& node_unit) ORT_MUST_USE_RESULT; // TODO, see if we want to move this to BaseOpBuilder }; +void TransposeOpBuilder::AddInitializersToSkip(ModelBuilder& model_builder, const NodeUnit& node_unit) const { + if (!IsQuantizedOp(node_unit)) + return; + + AddQuantizationScaleAndZeroPointToSkip(model_builder, *node_unit.Inputs()[0].quant_param); // x_scale, x_zp + AddQuantizationScaleAndZeroPointToSkip(model_builder, *node_unit.Outputs()[0].quant_param); // y_scale, y_zp +} + +/* static */ bool TransposeOpBuilder::IsQuantizedOp(const NodeUnit& node_unit) { + return GetQuantizedOpType(node_unit) == QuantizedOpType::QDQTranspose; +} + Status TransposeOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const NodeUnit& node_unit) const { auto& shaper(model_builder.GetShaper()); + const auto& initializers(model_builder.GetInitializerTensors()); const auto& input = node_unit.Inputs()[0].node_arg.Name(); const auto& output = node_unit.Outputs()[0].node_arg.Name(); @@ -812,6 +833,15 @@ Status TransposeOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, co perm[i] = axis_nchw_to_nhwc[perm[i]]; } + // Check if the quantization scale and ZP are correct + if (IsQuantizedOp(node_unit)) { + float x_scale = 0.0f; + int32_t x_zero_point = 0; + ORT_RETURN_IF_ERROR(GetQuantizationScaleAndZeroPoint( + initializers, node_unit.Inputs()[0], node_unit.ModelPath(), x_scale, x_zero_point)); + ORT_RETURN_IF_ERROR(IsValidInputQuantizedType(model_builder, input, x_scale, x_zero_point)); + } + std::string perm_name = model_builder.GetUniqueName(node_unit.Name() + input + "perm"); // It is possible this onnx transpose operator can be nchw->nhwc, but so far I don't see @@ -996,7 +1026,7 @@ void BatchNormalizationOpBuilder::AddInitializersToSkip(ModelBuilder& model_buil model_builder.AddInitializerToSkip(node_unit.Inputs()[1].node_arg.Name()); // scale model_builder.AddInitializerToSkip(node_unit.Inputs()[2].node_arg.Name()); // B model_builder.AddInitializerToSkip(node_unit.Inputs()[3].node_arg.Name()); // mean - model_builder.AddInitializerToSkip(node_unit.Inputs()[4].node_arg.Name()); //var + model_builder.AddInitializerToSkip(node_unit.Inputs()[4].node_arg.Name()); // var } Status BatchNormalizationOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const NodeUnit& node_unit) const { @@ -1106,8 +1136,7 @@ class PoolOpBuilder : public BaseOpBuilder { }; /* static */ bool PoolOpBuilder::IsQuantizedOp(const NodeUnit& node_unit) { - // TODO, add support for QDQ NodeUnit - return node_unit.OpType() == "QLinearAveragePool"; + return IsQuantizedPool(GetQuantizedOpType(node_unit)); } void PoolOpBuilder::AddInitializersToSkip(ModelBuilder& model_builder, const NodeUnit& node_unit) const { @@ -1156,8 +1185,8 @@ Status PoolOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N const auto& op_type = node_unit.OpType(); int32_t op_code; - bool is_qlinear_average_pool = op_type == "QLinearAveragePool"; - bool is_average_pool = op_type == "AveragePool" || is_qlinear_average_pool; + bool is_quant_pool = IsQuantizedOp(node_unit); + bool is_average_pool = op_type == "AveragePool" || op_type == "QLinearAveragePool"; if (is_average_pool || op_type == "GlobalAveragePool") op_code = ANEURALNETWORKS_AVERAGE_POOL_2D; else // (op_type == "MaxPool" || op_type == "GlobalMaxPool") @@ -1200,7 +1229,7 @@ Status PoolOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N const OperandType& input_operand_type = operand_types.at(input); float y_scale = 
input_operand_type.operandType.scale; int32_t y_zero_point = input_operand_type.operandType.zeroPoint; - if (is_qlinear_average_pool) { + if (is_quant_pool) { const auto& initializers = model_builder.GetInitializerTensors(); float x_scale = 0.0f; int32_t x_zero_point = 0; @@ -1260,8 +1289,7 @@ class ConvOpBuilder : public BaseOpBuilder { }; /* static */ bool ConvOpBuilder::IsQuantizedOp(const NodeUnit& node_unit) { - // TODO, add support for QDQ NodeUnit - return node_unit.OpType() == "QLinearConv"; + return IsQuantizedConv(GetQuantizedOpType(node_unit)); } /* static */ void @@ -1296,7 +1324,7 @@ Status ConvOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N const auto& initializers(model_builder.GetInitializerTensors()); NodeAttrHelper helper(node_unit); const auto inputs = node_unit.Inputs(); - bool is_qlinear_conv = IsQuantizedOp(node_unit); + bool is_quant_conv = IsQuantizedOp(node_unit); // onnx strides are in the order height, width // while nnapi strides are in the order width, height @@ -1341,7 +1369,7 @@ Status ConvOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N // this is for per-channel quantization weights optional> w_scales; bool is_per_tensor_u8s8 = false; - if (is_qlinear_conv) { + if (is_quant_conv) { ORT_RETURN_IF_ERROR(GetConvMatMulOpQuantizationScaleAndZeroPoint(model_builder, node_unit, x_scale, w_scale, y_scale, x_zero_point, w_zero_point, y_zero_point, @@ -1379,7 +1407,7 @@ Status ConvOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N // Get weight operand type // Per-channel quantized weight is handled differently OperandType onnx_weight_operand_type = - (is_qlinear_conv && w_scales.has_value()) + (is_quant_conv && w_scales.has_value()) ? OperandType{onnx_weight_type, onnx_weight_shape, SymmPerChannelQuantParams{w_scales.value(), depthwise_conv_2d ? 
3u : 0u}} // channelDim is 3 for depthwise-conv @@ -1392,7 +1420,7 @@ Status ConvOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N ORT_RETURN_IF_ERROR(AddInitializerInNewLayout(model_builder, weight, onnx_weight_operand_type, L_1230, is_per_tensor_u8s8)); } - if (is_qlinear_conv) { + if (is_quant_conv) { // Verify if the scale and zero point matchs from onnx input/weight and nnapi input/weight ORT_RETURN_IF_ERROR(IsValidInputQuantizedType(model_builder, input, x_scale, x_zero_point)); ORT_RETURN_IF_ERROR(IsValidConvWeightQuantizedType(model_builder, weight, w_scale, w_zero_point, w_scales)); @@ -1420,7 +1448,7 @@ Status ConvOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const N } else { return ORT_MAKE_STATUS(ONNXRUNTIME, INVALID_ARGUMENT, "Unknown weight type ", TypeToStr(weight_type)); } - } else if (is_qlinear_conv) { + } else if (is_quant_conv) { // QLinearConv's bias type need special handling to add scale for quantization input const auto& bias_tensor = *model_builder.GetInitializerTensors().at(bias); ORT_RETURN_IF_NOT(bias_tensor.data_type() == ONNX_NAMESPACE::TensorProto_DataType_INT32, @@ -2259,10 +2287,20 @@ class ResizeOpBuilder : public BaseOpBuilder { private: Status AddToModelBuilderImpl(ModelBuilder& model_builder, const NodeUnit& node_unit) const override; + static bool IsQuantizedOp(const NodeUnit& node_unit) ORT_MUST_USE_RESULT; // TODO, see if we want to move this to BaseOpBuilder }; +/* static */ bool ResizeOpBuilder::IsQuantizedOp(const NodeUnit& node_unit) { + return GetQuantizedOpType(node_unit) == QuantizedOpType::QDQResize; +} + void ResizeOpBuilder::AddInitializersToSkip(ModelBuilder& model_builder, const NodeUnit& node_unit) const { const auto& inputs = node_unit.Inputs(); + if (IsQuantizedOp(node_unit)) { + AddQuantizationScaleAndZeroPointToSkip(model_builder, *inputs[0].quant_param); // x_scale, x_zp + AddQuantizationScaleAndZeroPointToSkip(model_builder, *node_unit.Outputs()[0].quant_param); // y_scale, y_zp + } + // We don't really use ROI here, so add them to skipped list model_builder.AddInitializerToSkip(inputs[1].node_arg.Name()); // ROI @@ -2297,6 +2335,15 @@ Status ResizeOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const } } + // Check if the quantization scale and ZP is correct + if (IsQuantizedOp(node_unit)) { + float x_scale = 0.0f; + int32_t x_zero_point = 0; + ORT_RETURN_IF_ERROR(GetQuantizationScaleAndZeroPoint( + initializers, node_unit.Inputs()[0], node_unit.ModelPath(), x_scale, x_zero_point)); + ORT_RETURN_IF_ERROR(IsValidInputQuantizedType(model_builder, input, x_scale, x_zero_point)); + } + bool is_linear_resize = helper.Get("mode", "nearest") == "linear"; int32_t operationCode = is_linear_resize ? 
ANEURALNETWORKS_RESIZE_BILINEAR @@ -2606,7 +2653,7 @@ Status SliceOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const return Status::OK(); }; - ORT_RETURN_IF_ERROR(AddOperand("starts", param_dimen, compute_metadata.starts_)); //nnapi_begin + ORT_RETURN_IF_ERROR(AddOperand("starts", param_dimen, compute_metadata.starts_)); // nnapi_begin // NNAPI has 2 slice operations // - ANEURALNETWORKS_SLICE @@ -2621,7 +2668,7 @@ Status SliceOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const model_builder.GetNNAPIFeatureLevel() > ANEURALNETWORKS_FEATURE_LEVEL_2) { op_code = ANEURALNETWORKS_SLICE; // the nnapi size of the slice in this case is the output shape - ORT_RETURN_IF_ERROR(AddOperand("sizes", param_dimen, compute_metadata.output_dims_)); //nnapi_sizes + ORT_RETURN_IF_ERROR(AddOperand("sizes", param_dimen, compute_metadata.output_dims_)); // nnapi_sizes } else { // ** The special treatment of ends ** // The nnapi_end need some special handling, based on the current undocumented design of @@ -2644,8 +2691,8 @@ Status SliceOpBuilder::AddToModelBuilderImpl(ModelBuilder& model_builder, const ends[i] = -static_cast(input_shape[i] + 1); } } - ORT_RETURN_IF_ERROR(AddOperand("ends", param_dimen, ends)); //nnapi_end - ORT_RETURN_IF_ERROR(AddOperand("steps", param_dimen, compute_metadata.steps_)); //nnapi_strides + ORT_RETURN_IF_ERROR(AddOperand("ends", param_dimen, ends)); // nnapi_end + ORT_RETURN_IF_ERROR(AddOperand("steps", param_dimen, compute_metadata.steps_)); // nnapi_strides // We do not use the following inputs in ANEURALNETWORKS_STRIDED_SLICE, set them all to 0 ADD_SCALAR_OPERAND(model_builder, input_indices, 0); // begin_mask ADD_SCALAR_OPERAND(model_builder, input_indices, 0); // end_mask @@ -2700,6 +2747,7 @@ static OpBuilderRegistrations CreateOpBuilderRegistrations() { NNAPI_EP_ADD_SHARED_OP_BUILDER("Mul", BinaryOpBuilder); NNAPI_EP_ADD_SHARED_OP_BUILDER("Pow", BinaryOpBuilder); NNAPI_EP_ADD_SHARED_OP_BUILDER("QLinearAdd", BinaryOpBuilder); + NNAPI_EP_ADD_SHARED_OP_BUILDER("QLinearMul", BinaryOpBuilder); NNAPI_EP_ADD_SHARED_OP_BUILDER("Sub", BinaryOpBuilder); } diff --git a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_support_checker.cc b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_support_checker.cc index 5949e67d6b559..5f0b4d840a67a 100644 --- a/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_support_checker.cc +++ b/onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_support_checker.cc @@ -22,7 +22,21 @@ struct OpSupportCheckerRegistrations { std::unordered_map op_support_checker_map; }; -bool HasExternalInitializer(const InitializedTensorSet& initializers, const NodeUnit& node_unit) { +template +void CreateSharedOpSupportCheckerImpl(const std::string& op_type, + OpSupportCheckerRegistrations& op_registrations, + const std::vector& op_types) { + // The shared OpSupportChecker is already in the OpSupportCheckerRegistrations + if (op_registrations.op_support_checker_map.find(op_type) != op_registrations.op_support_checker_map.cend()) + return; + + op_registrations.support_checkers.push_back(std::make_unique()); + for (const auto& op : op_types) { + op_registrations.op_support_checker_map.emplace(op, op_registrations.support_checkers.back().get()); + } +} + +static bool HasExternalInitializer(const InitializedTensorSet& initializers, const NodeUnit& node_unit) { const auto is_ext_initializer = [&](const NodeArg& node_arg) { const auto& input_name(node_arg.Name()); @@ -58,18 +72,200 @@ bool 
HasExternalInitializer(const InitializedTensorSet& initializers, const Node return false; } -template -void CreateSharedOpSupportCheckerImpl(const std::string& op_type, - OpSupportCheckerRegistrations& op_registrations, - const std::vector& op_types) { - // The shared OpSupportChecker is already in the OpSupportCheckerRegistrations - if (op_registrations.op_support_checker_map.find(op_type) != op_registrations.op_support_checker_map.cend()) - return; +static bool IsQuantizationScaleSupported(const InitializedTensorSet& initializers, + const NodeUnitIODef& io_def, + const OpSupportCheckParams& params, + const std::string& op_type, + bool is_quant_matmul, + bool is_conv_matmul_u8s8_weight) { + const auto scale_name = io_def.quant_param->scale.Name(); + auto it = initializers.find(scale_name); + if (it == initializers.cend()) { + LOGS_DEFAULT(VERBOSE) << "The scale of " << op_type << " must be an initializer tensor"; + return false; + } + + const auto& scale_tensor = *it->second; + int64_t scales_dim = scale_tensor.dims().empty() ? 1 : scale_tensor.dims()[0]; + if (!is_conv_matmul_u8s8_weight) { + if (scales_dim != 1) { + LOGS_DEFAULT(VERBOSE) << op_type << " does not support per-channel quantization, " + << " for now, only u8s8 QlinearConv supports per-channel quantization on API 29+"; + return false; + } + } else if (scales_dim != 1) { + // For u8s8 Qlinear[Conv/MatMul], we support + // 1. Per-tensor, the weight will be transformed to uint8 later + // 2. Per-channel, only from Android API level 29 + if (is_quant_matmul) { + LOGS_DEFAULT(VERBOSE) << "QLinearMatMul does not support per-channel quantization"; + return false; + } - op_registrations.support_checkers.push_back(std::make_unique()); - for (const auto& op : op_types) { - op_registrations.op_support_checker_map.emplace(op, op_registrations.support_checkers.back().get()); + if (params.android_feature_level < ANEURALNETWORKS_FEATURE_LEVEL_3) { + LOGS_DEFAULT(VERBOSE) << op_type << " only supports per-channel quantization on Android API 29+, " + << "system NNAPI feature level: " << params.android_feature_level; + return false; + } + + Shape weight_shape; + if (!GetShape(io_def.node_arg, weight_shape)) + return false; + + if (weight_shape[0] != scales_dim) { + LOGS_DEFAULT(VERBOSE) << op_type << " mismatch int8 per-channel quantization weight," + << " weight dimension[0] " << weight_shape[0] + << " scale dimension " << scales_dim; + return false; + } + } + + return true; +} + +static bool IsQuantizationZeroPointSupported(const InitializedTensorSet& initializers, + const NodeUnitIODef& io_def, + const std::string& op_type, + const Path& model_path, + bool is_quant_matmul, + bool is_conv_matmul_u8s8_weight) { + // zero point is optional here + if (!io_def.quant_param->zero_point) + return true; + + const auto& zero_point_name = io_def.quant_param->zero_point->Name(); + if (!Contains(initializers, zero_point_name)) { + LOGS_DEFAULT(VERBOSE) << "The zero point of " << op_type << " must be an initializer tensor"; + return false; + } + + const auto& zero_tensor = *initializers.at(zero_point_name); + int64_t zero_dim = zero_tensor.dims().empty() ? 1 : zero_tensor.dims()[0]; + + if (!is_conv_matmul_u8s8_weight) { + if (zero_dim != 1) { + LOGS_DEFAULT(VERBOSE) << op_type << " does not support per-channel quantization, " + << " for now, only u8s8 QlinearConv supports per-channel quantization on API 29+"; + return false; + } + } else { + // For u8s8 Qlinear[Conv/MatMul], we support + // 1. 
Per-tensor, the weight will be transformed to uint8 later + // 2. Per-channel, only from Android API level 29 + if (zero_tensor.data_type() != ONNX_NAMESPACE::TensorProto_DataType_INT8) { + LOGS_DEFAULT(VERBOSE) << "u8s8 Qlinear[Conv/MatMul] only supports int8 zero point for weight, " + << "actual zero point type: [" << zero_tensor.data_type() << "]"; + return false; + } + + if (zero_dim != 1) { + if (is_quant_matmul) { + LOGS_DEFAULT(VERBOSE) << "QLinearMatMul does not support per-channel quantization"; + return false; + } + } + + // For onnx, u8s8 QlinearConv, the weight zero point can be a scalar, + // or a tensor with same channel as weight, for NNAPI we only support it be + // 0 (scalar) or all 0 (tensor), NNAPI will assume the zero point for per-channel + // quantization is 0 there is no input for it + Shape weight_shape; + if (!GetShape(io_def.node_arg, weight_shape)) + return false; + + if (weight_shape[0] != zero_dim && zero_dim != 1) { + LOGS_DEFAULT(VERBOSE) << op_type << " mismatch int8 per-channel quantization weight," + << " weight dimension[0] " << weight_shape[0] + << " zero point dimension " << zero_dim; + return false; + } + + std::vector unpacked_tensor; + auto status = onnxruntime::utils::UnpackInitializerData(zero_tensor, model_path, unpacked_tensor); + if (!status.IsOK()) { + LOGS_DEFAULT(ERROR) << "Qlinear[Conv/MatMul] error when unpack zero tensor: " << zero_point_name + << ", error msg: " << status.ErrorMessage(); + return false; + } + + // Verify all onnx weight zero point(s) are 0(s) + const int8_t* zero_points = reinterpret_cast(unpacked_tensor.data()); + for (size_t i = 0; i < unpacked_tensor.size(); i++) { + if (zero_points[i] != 0) { + LOGS_DEFAULT(VERBOSE) << "u8s8 Qlinear[Conv/MatMul] only support 0 as zero point, " + << "zero_points[" << i << "] has value: " << zero_points[i]; + return false; + } + } + } + + return true; +} + +// Check if the given quantized input(s) or output(s) is supported +static bool IsQuantizedIOSupported(const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const std::vector& indices, const OpSupportCheckParams& params, IOKind io_kind) { + const auto& op_type = node_unit.OpType(); + auto quant_op_type = GetQuantizedOpType(node_unit); + + ORT_ENFORCE(quant_op_type != QuantizedOpType::Unknown, "[", op_type, "] is not a quantized op"); + + bool is_input = io_kind == IOKind::Input; + bool is_quant_conv = IsQuantizedConv(quant_op_type); + bool is_quant_matmul = (quant_op_type == QuantizedOpType::QLinearMatMul); + const auto& io_defs = is_input ? node_unit.Inputs() : node_unit.Outputs(); + + for (const auto idx : indices) { + if (idx >= io_defs.size()) { + LOGS_DEFAULT(VERBOSE) << (is_input ? 
"Input" : "Output") << " index, " << idx + << " >= size, " << io_defs.size() + << " of NodeUnit: " << node_unit.Name(); + return false; + } + + const auto& io_def = io_defs[idx]; + ORT_ENFORCE(io_def.quant_param.has_value(), "Input index, ", idx, " has no quant_param"); + + // If this op is Qlinear[Conv/MatMul], we want to check u8s8 support for weight tensor (or B tensor for QlinearMatMul) + bool is_conv_matmul_weight = is_input && (is_quant_conv || is_quant_matmul) && idx == 1; + bool is_conv_matmul_u8s8_weight = false; + + if (is_conv_matmul_weight) { + int32_t weight_type; + if (!GetType(io_def.node_arg, weight_type)) + return false; + is_conv_matmul_u8s8_weight = weight_type == ONNX_NAMESPACE::TensorProto_DataType_INT8; + } + + int32_t input_type; + if (!GetType(io_def.node_arg, input_type)) + return false; + + // We only support u8 for most of the inputs and all outputs, with the exception for Quantized MatMul and Conv, + // which allows s8 weight (u8s8) + // TODO, add support of s8s8 + if (input_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8 && + !(input_type == ONNX_NAMESPACE::TensorProto_DataType_INT8 && is_conv_matmul_u8s8_weight)) { + LOGS_DEFAULT(VERBOSE) << op_type << "NodeUnit [" << node_unit.Name() + << "], type [" << op_type << "]'s " + << (is_input ? "Input" : "Output") << " index [" << idx + << "] has unsupported type [" << input_type << "]"; + return false; + } + + // Check scale and zero point + if (!IsQuantizationScaleSupported(initializers, io_def, params, op_type, + is_quant_matmul, is_conv_matmul_u8s8_weight)) { + return false; + } + + if (!IsQuantizationZeroPointSupported(initializers, io_def, op_type, node_unit.ModelPath(), + is_quant_matmul, is_conv_matmul_u8s8_weight)) { + return false; + } } + + return true; } #pragma endregion helpers @@ -100,7 +296,9 @@ class BaseOpSupportChecker : public IOpSupportChecker { return ANEURALNETWORKS_FEATURE_LEVEL_1; } - virtual bool HasSupportedInputsImpl(const NodeUnit& node_unit) const; + virtual bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const; virtual int GetMinSupportedOpSet(const NodeUnit& /* node_unit */) const { return 1; } virtual int GetMaxSupportedOpSet(const NodeUnit& /* node_unit */) const { return 15; } @@ -112,7 +310,8 @@ class BaseOpSupportChecker : public IOpSupportChecker { private: bool HasSupportedOpSet(const NodeUnit& node_unit) const; - bool HasSupportedInputs(const NodeUnit& node_unit) const; + bool HasSupportedInputOutputs(const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const; }; /* static */ void BaseOpSupportChecker::CreateSharedOpSupportChecker( @@ -138,7 +337,7 @@ bool BaseOpSupportChecker::IsOpSupported(const InitializedTensorSet& initializer if (!IsNodeUnitTypeSupported(node_unit)) return false; - if (!HasSupportedInputs(node_unit)) + if (!HasSupportedInputOutputs(initializers, node_unit, params)) return false; // We do not support external initializers for now @@ -151,35 +350,47 @@ bool BaseOpSupportChecker::IsOpSupported(const InitializedTensorSet& initializer return IsOpSupportedImpl(initializers, node_unit, params); } -bool BaseOpSupportChecker::HasSupportedInputs(const NodeUnit& node_unit) const { +bool BaseOpSupportChecker::HasSupportedInputOutputs(const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { // We do not support unknown(null) input shape - auto has_shape = 
[](const NodeArg& node_arg, const std::string& name, const std::string op_type) { - if (!node_arg.Shape()) { + auto has_supported_shape = [](const NodeArg& node_arg, const std::string& name, const std::string op_type) { + const auto* shape_proto = node_arg.Shape(); + if (!shape_proto) { LOGS_DEFAULT(VERBOSE) << "Node [" << name << "] type [" << op_type << "] Input [" << node_arg.Name() << "] has no shape"; return false; } + + // We do not support dynamic shape input for now + for (const auto& dim : shape_proto->dim()) { + if (!dim.has_dim_value()) { + LOGS_DEFAULT(VERBOSE) << "Dynamic shape is not supported for now, for input:" << node_arg.Name(); + return false; + } + } return true; }; for (const auto& input : node_unit.Inputs()) { - if (!has_shape(input.node_arg, node_unit.Name(), node_unit.OpType())) + if (!has_supported_shape(input.node_arg, node_unit.Name(), node_unit.OpType())) return false; if (input.quant_param.has_value()) { - if (!has_shape(input.quant_param->scale, node_unit.Name(), node_unit.OpType())) + if (!has_supported_shape(input.quant_param->scale, node_unit.Name(), node_unit.OpType())) return false; // zero point is optional if (input.quant_param->zero_point && - !has_shape(*input.quant_param->zero_point, node_unit.Name(), node_unit.OpType())) + !has_supported_shape(*input.quant_param->zero_point, node_unit.Name(), node_unit.OpType())) return false; } } - return HasSupportedInputsImpl(node_unit); + return HasSupportedInputOutputsImpl(initializers, node_unit, params); } -bool BaseOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { +bool BaseOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const { // We only check the type of input 0 by default // specific op builder can override this const auto& input = node_unit.Inputs()[0].node_arg; @@ -236,8 +447,13 @@ class BinaryOpSupportChecker : public BaseOpSupportChecker { const OpSupportCheckParams& params) const override; bool IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, const OpSupportCheckParams& params) const override; - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const override; int GetMinSupportedOpSet(const NodeUnit& node_unit) const override; + + bool IsNodeUnitTypeSupported(const NodeUnit& node_unit) const override; + static bool IsQuantizedOp(const NodeUnit& node_unit); }; /* static */ void BinaryOpSupportChecker::CreateSharedOpSupportChecker( @@ -250,10 +466,29 @@ class BinaryOpSupportChecker : public BaseOpSupportChecker { "Mul", "Div", "QLinearAdd", + "QLinearMul", "Pow", }); } +bool BinaryOpSupportChecker::IsNodeUnitTypeSupported(const NodeUnit& node_unit) const { + if (node_unit.UnitType() == NodeUnit::Type::QDQGroup) { + const auto quant_type = GetQuantizedOpType(node_unit); + return quant_type == QuantizedOpType::QDQAdd || + quant_type == QuantizedOpType::QDQMul; + } + + return true; +} + +/* static */ bool BinaryOpSupportChecker::IsQuantizedOp(const NodeUnit& node_unit) { + const auto quant_type = GetQuantizedOpType(node_unit); + return quant_type == QuantizedOpType::QLinearAdd || + quant_type == QuantizedOpType::QLinearMul || + quant_type == QuantizedOpType::QDQAdd || + quant_type == QuantizedOpType::QDQMul; +} + int32_t 
BinaryOpSupportChecker::GetMinSupportedNNAPIFeatureLevel( const NodeUnit& node_unit, const OpSupportCheckParams& /* params */) const { const auto& op(node_unit.OpType()); @@ -272,21 +507,29 @@ int BinaryOpSupportChecker::GetMinSupportedOpSet(const NodeUnit& node_unit) cons const auto& op(node_unit.OpType()); // Add/Sub/Mul/Div/Pow opset 6- has broadcast attributes we do not support now - if (op != "QLinearAdd") + if (op != "QLinearAdd" && op != "QLinearMul") return 7; return 1; } -bool BinaryOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { - bool is_qlinear_add = node_unit.OpType() == "QLinearAdd"; +bool BinaryOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { + bool is_quantized_op = IsQuantizedOp(node_unit); bool is_pow = node_unit.OpType() == "Pow"; - if (!is_qlinear_add && !is_pow) - return BaseOpSupportChecker::HasSupportedInputsImpl(node_unit); + if (!is_quantized_op && !is_pow) + return BaseOpSupportChecker::HasSupportedInputOutputsImpl(initializers, node_unit, params); + + if (is_quantized_op) { + // QLinearAdd/QDQAdd/QLinearMul/QDQMul + if (!HasValidBinaryOpQuantizedInputTypes(node_unit)) + return false; + + if (!IsQuantizedIOSupported(initializers, node_unit, {0, 1}, params, IOKind::Input)) + return false; - if (is_qlinear_add) { - // QLinearAdd - if (!HasValidBinaryOpQuantizedInputs(node_unit)) + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output)) return false; } @@ -311,11 +554,10 @@ bool BinaryOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) c return true; } -bool BinaryOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const OpSupportCheckParams& params) const { +bool BinaryOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const { const auto& op_type(node_unit.OpType()); const auto& inputs = node_unit.Inputs(); - bool op_is_qlinear = op_type == "QLinearAdd"; Shape input1_shape, input2_shape; if (!GetShape(inputs[0].node_arg, input1_shape) || !GetShape(inputs[1].node_arg, input2_shape)) @@ -330,32 +572,6 @@ bool BinaryOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initi return false; } - if (op_is_qlinear) { - // For QLinearAdd, we only support uint8 output now - int32_t output_type; - if (!GetType(node_unit.Outputs()[0].node_arg, output_type)) - return false; - - if (output_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8) { - LOGS_DEFAULT(VERBOSE) << "[" << op_type - << "] output type: [" << output_type - << "] is not supported for now"; - return false; - } - - // Check input scales and ZPs - if (!HasValidQuantizationScales(initializers, node_unit, {0, 1}, params, true /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0, 1}, true /* is_input */)) - return false; - - // Check output scale and ZP - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, false /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, false /* is_input */)) - return false; - } - return true; } @@ -373,9 +589,17 @@ class TransposeOpSupportChecker : public BaseOpSupportChecker { return ANEURALNETWORKS_FEATURE_LEVEL_2; } - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + 
const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const override; + bool IsNodeUnitTypeSupported(const NodeUnit& /* node_unit */) const override { return true; } + static bool IsQuantizedOp(const NodeUnit& node_unit) ORT_MUST_USE_RESULT; // TODO, see if we want to move this to BaseOpBuilder }; +/* static */ bool TransposeOpSupportChecker::IsQuantizedOp(const NodeUnit& node_unit) { + return GetQuantizedOpType(node_unit) == QuantizedOpType::QDQTranspose; +} + bool TransposeOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, const OpSupportCheckParams& /* params */) const { Shape input_shape; @@ -392,7 +616,9 @@ bool TransposeOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& /* return true; } -bool TransposeOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { +bool TransposeOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { int32_t input_type; if (!GetType(node_unit.Inputs()[0].node_arg, input_type)) return false; @@ -405,6 +631,14 @@ bool TransposeOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit return false; } + if (IsQuantizedOp(node_unit)) { + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Input)) + return false; + + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output)) + return false; + } + return true; } @@ -552,7 +786,11 @@ class PoolOpSupportChecker : public BaseOpSupportChecker { return params.use_nchw ? ANEURALNETWORKS_FEATURE_LEVEL_3 : ANEURALNETWORKS_FEATURE_LEVEL_2; } - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const override; + bool IsNodeUnitTypeSupported(const NodeUnit& /* node_unit */) const override; + static bool IsQuantizedOp(const NodeUnit& node_unit); }; /* static */ void PoolOpSupportChecker::CreateSharedOpSupportChecker( @@ -568,8 +806,21 @@ class PoolOpSupportChecker : public BaseOpSupportChecker { }); } +bool PoolOpSupportChecker::IsNodeUnitTypeSupported(const NodeUnit& node_unit) const { + if (node_unit.UnitType() == NodeUnit::Type::QDQGroup) { + const auto quant_type = GetQuantizedOpType(node_unit); + return quant_type == QuantizedOpType::QDQAveragePool; + } + + return true; +} + +/* static */ bool PoolOpSupportChecker::IsQuantizedOp(const NodeUnit& node_unit) { + return IsQuantizedPool(GetQuantizedOpType(node_unit)); +} + bool PoolOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const OpSupportCheckParams& params) const { + const OpSupportCheckParams& /* params */) const { const auto& op_name = node_unit.Name(); const auto& op_type = node_unit.OpType(); const auto& inputs = node_unit.Inputs(); @@ -585,8 +836,9 @@ bool PoolOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initial return false; } - bool is_qlinear_average_pool = op_type == "QLinearAveragePool"; - if (op_type == "AveragePool" || op_type == "MaxPool" || is_qlinear_average_pool) { + bool is_quant_pool = IsQuantizedOp(node_unit); + bool is_average_pool = op_type == "AveragePool" || op_type == "QLinearAveragePool"; + if (is_average_pool || op_type == "MaxPool") { NodeAttrHelper helper(node_unit); const auto count_include_pad = 
helper.Get("count_include_pad", 0); @@ -627,20 +879,7 @@ bool PoolOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initial } // We need to check if we have valid scales and zero points for QLinearAveragePool - if (is_qlinear_average_pool) { - // Check input scales and ZPs - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, true /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, true /* is_input */)) - return false; - - // Check output scale and ZP - - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, false /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, false /* is_input */)) - return false; - + if (is_average_pool && is_quant_pool) { // NNAPI requires Quantized Average Pool has same scale and zero point for both input and output float input_scale = 0.0f; int32_t input_zp = 0; @@ -682,14 +921,23 @@ bool PoolOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initial return true; } -bool PoolOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { - bool is_max_pool = node_unit.OpType() == "MaxPool"; - bool is_qlinear_average_pool = node_unit.OpType() == "QLinearAveragePool"; - if (!is_max_pool && !is_qlinear_average_pool) - return BaseOpSupportChecker::HasSupportedInputsImpl(node_unit); +bool PoolOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { + const auto& op_type = node_unit.OpType(); + bool is_quant_pool = IsQuantizedOp(node_unit); + bool is_max_pool = op_type == "MaxPool"; + bool is_average_pool = op_type == "AveragePool" || op_type == "QLinearAveragePool"; + bool is_quant_average_pool = is_quant_pool && is_average_pool; + if (!is_max_pool && !is_quant_average_pool) + return BaseOpSupportChecker::HasSupportedInputOutputsImpl(initializers, node_unit, params); + + if (is_quant_average_pool) { + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Input)) + return false; - if (is_qlinear_average_pool) { - return HasValidUnaryOpQuantizedInputs(node_unit); + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output)) + return false; } // is_max_pool @@ -727,7 +975,11 @@ class ConvOpSupportChecker : public BaseOpSupportChecker { return params.use_nchw ? 
ANEURALNETWORKS_FEATURE_LEVEL_3 : ANEURALNETWORKS_FEATURE_LEVEL_2; } - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const override; + bool IsNodeUnitTypeSupported(const NodeUnit& /* node_unit */) const override { return true; } + static bool IsQuantizedOp(const NodeUnit& node_unit); }; /* static */ void ConvOpSupportChecker::CreateSharedOpSupportChecker( @@ -740,12 +992,24 @@ class ConvOpSupportChecker : public BaseOpSupportChecker { }); } -bool ConvOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { - if (node_unit.OpType() != "QLinearConv") - return BaseOpSupportChecker::HasSupportedInputsImpl(node_unit); +/* static */ bool ConvOpSupportChecker::IsQuantizedOp(const NodeUnit& node_unit) { + return IsQuantizedConv(GetQuantizedOpType(node_unit)); +} + +bool ConvOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { + if (!IsQuantizedOp(node_unit)) + return BaseOpSupportChecker::HasSupportedInputOutputsImpl(initializers, node_unit, params); // QLinearConv only supports input of uint8 for now - if (!HasValidBinaryOpQuantizedInputs(node_unit)) + if (!HasValidBinaryOpQuantizedInputTypes(node_unit)) + return false; + + if (!IsQuantizedIOSupported(initializers, node_unit, {0, 1}, params, IOKind::Input)) + return false; + + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output)) return false; return true; @@ -754,10 +1018,10 @@ bool ConvOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) con bool ConvOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, const OpSupportCheckParams& params) const { const auto& op_type = node_unit.OpType(); - const bool is_qlinear_conv = (op_type == "QLinearConv"); + bool is_quant_conv = IsQuantizedOp(node_unit); // We don't support nhwc com.microsoft.QLinearConv for now - if (is_qlinear_conv && node_unit.Domain() == kMSDomain) { + if (is_quant_conv && node_unit.Domain() == kMSDomain) { LOGS_DEFAULT(VERBOSE) << "com.microsoft.QLinearConv is not supported"; return false; } @@ -791,35 +1055,11 @@ bool ConvOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initial return false; } - if (is_qlinear_conv) { - // For QLinearConv, we only support uint8 output now - int32_t output_type; - if (!GetType(node_unit.Outputs()[0].node_arg, output_type)) - return false; - - if (output_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8) { - LOGS_DEFAULT(VERBOSE) << "[" << op_type - << "] output type: [" << output_type - << "] is not supported for now"; - return false; - } - + if (is_quant_conv) { if (inputs.size() > 2 && !Contains(initializers, inputs[2].node_arg.Name())) { LOGS_DEFAULT(VERBOSE) << "Bias of QLinearConv must be known"; return false; } - - // Check input scales and ZPs - if (!HasValidQuantizationScales(initializers, node_unit, {0, 1}, params, true /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0, 1}, true /* is_input */)) - return false; - - // Check output scale and ZP - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, false /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, false /* is_input */)) - return false; } return true; @@ 
-910,16 +1150,26 @@ class GemmOpSupportChecker : public BaseOpSupportChecker { private: bool IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, const OpSupportCheckParams& params) const override; - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const override; int GetMinSupportedOpSet(const NodeUnit& node_unit) const override; }; -bool GemmOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { +bool GemmOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { if (node_unit.OpType() != "QLinearMatMul") - return BaseOpSupportChecker::HasSupportedInputsImpl(node_unit); + return BaseOpSupportChecker::HasSupportedInputOutputsImpl(initializers, node_unit, params); // QLinearMatMul - if (!HasValidBinaryOpQuantizedInputs(node_unit)) + if (!HasValidBinaryOpQuantizedInputTypes(node_unit)) + return false; + + if (!IsQuantizedIOSupported(initializers, node_unit, {0, 1}, params, IOKind::Input)) + return false; + + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output)) return false; return true; @@ -1056,33 +1306,6 @@ bool GemmOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initial LOGS_DEFAULT(VERBOSE) << "B of MatMul must be known"; return false; } - - if (is_qlinear_matmul) { - // For QLinearMatMul, we only support uint8 output now - int32_t output_type; - if (!GetType(node_unit.Outputs()[0].node_arg, output_type)) - return false; - - if (output_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8) { - LOGS_DEFAULT(VERBOSE) << "[" << op_type - << "] output type: [" << output_type - << "] is not supported for now"; - return false; - } - - // All scale/zero points are initializer scalars - // Check input scales and ZPs - if (!HasValidQuantizationScales(initializers, node_unit, {0, 1}, params, true /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0, 1}, true /* is_input */)) - return false; - - // Check output scale and ZP - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, false /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, false /* is_input */)) - return false; - } } else { LOGS_DEFAULT(VERBOSE) << "GemmOpSupportChecker, unknown op: " << op_type; } @@ -1106,7 +1329,9 @@ class UnaryOpSupportChecker : public BaseOpSupportChecker { int32_t GetMinSupportedNNAPIFeatureLevel(const NodeUnit& /* node_unit */, const OpSupportCheckParams& params) const override; - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const override; int GetMinSupportedOpSet(const NodeUnit& node_unit) const override; @@ -1155,12 +1380,20 @@ int32_t UnaryOpSupportChecker::GetMinSupportedNNAPIFeatureLevel(const NodeUnit& return ANEURALNETWORKS_FEATURE_LEVEL_1; } -bool UnaryOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { +bool UnaryOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { // We only need to override input 
check for QLinearSigmoid if (node_unit.OpType() != "QLinearSigmoid") - return BaseOpSupportChecker::HasSupportedInputsImpl(node_unit); + return BaseOpSupportChecker::HasSupportedInputOutputsImpl(initializers, node_unit, params); + + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Input)) + return false; - return HasValidUnaryOpQuantizedInputs(node_unit); + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output)) + return false; + + return true; } // All ops except "Sin" opset 5- uses consumed_inputs attribute which is not supported for now @@ -1174,26 +1407,11 @@ int UnaryOpSupportChecker::GetMinSupportedOpSet(const NodeUnit& node_unit) const } /* static */ bool UnaryOpSupportChecker::IsQuantizedOpSupported( - const InitializedTensorSet& initializers, const NodeUnit& node_unit, const OpSupportCheckParams& params) { + const InitializedTensorSet& initializers, const NodeUnit& node_unit, const OpSupportCheckParams& /* params */) { const auto& op_type = node_unit.OpType(); ORT_ENFORCE(op_type == "QLinearSigmoid"); - const auto& op_name = node_unit.Name(); - // Check input scales and ZPs - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, true /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, true /* is_input */)) - return false; - - // Check output scale and ZP - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, false /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, false /* is_input */)) - return false; - - return false; - // NNAPI requires the scale be 1.f/256 and zero point to be 0 // See https://android.googlesource.com/platform/frameworks/ml/+/refs/heads/android10-c2f2-release/nn/common/operations/Activation.cpp#180 float output_scale = 0.0f; @@ -1230,7 +1448,9 @@ class ConcatOpSupportChecker : public BaseOpSupportChecker { bool IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, const OpSupportCheckParams& params) const override; - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const override; }; bool ConcatOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, @@ -1249,7 +1469,9 @@ bool ConcatOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& /* in return true; } -bool ConcatOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { +bool ConcatOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const { int32_t input_type; if (!GetType(node_unit.Inputs()[0].node_arg, input_type)) return false; @@ -1312,37 +1534,17 @@ bool SqueezeOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& init class QuantizeLinearOpSupportChecker : public BaseOpSupportChecker { private: - bool IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const OpSupportCheckParams& params) const override; - int32_t GetMinSupportedNNAPIFeatureLevel(const NodeUnit& /* node_unit */, const OpSupportCheckParams& /* params */) const override { return ANEURALNETWORKS_FEATURE_LEVEL_3; } -}; -bool QuantizeLinearOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& 
initializers, const NodeUnit& node_unit, - const OpSupportCheckParams& params) const { - int32_t output_type; - if (!GetType(node_unit.Outputs()[0].node_arg, output_type)) - return false; - - if (output_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8) { - LOGS_DEFAULT(VERBOSE) << "[" << node_unit.OpType() - << "] output type: [" << output_type - << "] is not supported for now"; - return false; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const override { + return IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output); } - - // For QuantizeLinear only output is quantized - // Check output scale and ZP - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, false /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, false /* is_input */)) - return false; - - return true; -} +}; #pragma endregion @@ -1350,42 +1552,17 @@ bool QuantizeLinearOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSe class DequantizeLinearOpSupportChecker : public BaseOpSupportChecker { private: - bool IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const OpSupportCheckParams& params) const override; - int32_t GetMinSupportedNNAPIFeatureLevel(const NodeUnit& /* node_unit */, const OpSupportCheckParams& /* params */) const override { return ANEURALNETWORKS_FEATURE_LEVEL_1; } - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; -}; - -bool DequantizeLinearOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, - const OpSupportCheckParams& params) const { - // For DequantizeLinear only input is quantized - // Check input scale and ZP - if (!HasValidQuantizationScales(initializers, node_unit, {0}, params, true /* is_input */)) - return false; - if (!HasValidQuantizationZeroPoints(initializers, node_unit, {0}, true /* is_input */)) - return false; - return true; -} - -bool DequantizeLinearOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { - int32_t input_type; - if (!GetType(node_unit.Inputs()[0].node_arg, input_type)) - return false; - - if (input_type != ONNX_NAMESPACE::TensorProto_DataType_UINT8) { - LOGS_DEFAULT(VERBOSE) << "[" << node_unit.OpType() - << "] Input type: [" << input_type - << "] is not supported for now"; - return false; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const override { + return IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Input); } - - return true; -} +}; #pragma endregion @@ -1461,9 +1638,17 @@ class ResizeOpSupportChecker : public BaseOpSupportChecker { // We only support Resize opset 11+ here int GetMinSupportedOpSet(const NodeUnit& /* node_unit */) const override { return 11; } - bool HasSupportedInputsImpl(const NodeUnit& node_unit) const override; + bool HasSupportedInputOutputsImpl( + const InitializedTensorSet& /* initializers */, const NodeUnit& node_unit, + const OpSupportCheckParams& /* params */) const override; + bool IsNodeUnitTypeSupported(const NodeUnit& /* node_unit */) const override { return true; } + static bool IsQuantizedOp(const NodeUnit& node_unit) ORT_MUST_USE_RESULT; // TODO, see if we want to move this to BaseOpBuilder }; +/* static */ bool ResizeOpSupportChecker::IsQuantizedOp(const NodeUnit& node_unit) { + return 
GetQuantizedOpType(node_unit) == QuantizedOpType::QDQResize; +} + bool ResizeOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initializers, const NodeUnit& node_unit, const OpSupportCheckParams& params) const { Shape input_shape; @@ -1583,6 +1768,7 @@ bool ResizeOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& initi } } } + return true; } @@ -1600,7 +1786,9 @@ int32_t ResizeOpSupportChecker::GetMinSupportedNNAPIFeatureLevel(const NodeUnit& return ANEURALNETWORKS_FEATURE_LEVEL_2; } -bool ResizeOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) const { +bool ResizeOpSupportChecker::HasSupportedInputOutputsImpl( + const InitializedTensorSet& initializers, const NodeUnit& node_unit, + const OpSupportCheckParams& params) const { int32_t input_type; if (!GetType(node_unit.Inputs()[0].node_arg, input_type)) return false; @@ -1613,6 +1801,14 @@ bool ResizeOpSupportChecker::HasSupportedInputsImpl(const NodeUnit& node_unit) c return false; } + if (IsQuantizedOp(node_unit)) { + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Input)) + return false; + + if (!IsQuantizedIOSupported(initializers, node_unit, {0}, params, IOKind::Output)) + return false; + } + return true; } @@ -1643,7 +1839,7 @@ bool FlattenOpSupportChecker::IsOpSupportedImpl(const InitializedTensorSet& /* i GetFlattenOutputShape(node_unit, input_shape, dim_1, dim_2); if (dim_1 == 0 && dim_2 == 0) { - LOGS_DEFAULT(VERBOSE) << "The dynamical input shape " << Shape2String(input_shape) + LOGS_DEFAULT(VERBOSE) << "The dynamic input shape " << Shape2String(input_shape) << " is not supported"; return false; } @@ -1817,6 +2013,7 @@ static OpSupportCheckerRegistrations CreateOpSupportCheckerRegistrations() { NNAPI_EP_ADD_SHARED_OP_SUPPORT_CHECKER("Mul", BinaryOpSupportChecker); NNAPI_EP_ADD_SHARED_OP_SUPPORT_CHECKER("Pow", BinaryOpSupportChecker); NNAPI_EP_ADD_SHARED_OP_SUPPORT_CHECKER("QLinearAdd", BinaryOpSupportChecker); + NNAPI_EP_ADD_SHARED_OP_SUPPORT_CHECKER("QLinearMul", BinaryOpSupportChecker); NNAPI_EP_ADD_SHARED_OP_SUPPORT_CHECKER("Sub", BinaryOpSupportChecker); } diff --git a/onnxruntime/core/providers/nnapi/nnapi_builtin/nnapi_execution_provider.cc b/onnxruntime/core/providers/nnapi/nnapi_builtin/nnapi_execution_provider.cc index 32fffec7395b3..4ff0b41a51773 100644 --- a/onnxruntime/core/providers/nnapi/nnapi_builtin/nnapi_execution_provider.cc +++ b/onnxruntime/core/providers/nnapi/nnapi_builtin/nnapi_execution_provider.cc @@ -110,13 +110,6 @@ NnapiExecutionProvider::GetCapability(const onnxruntime::GraphViewer& graph_view return result; } - // Disable NNAPI if the graph has any unsupported inputs - for (const auto* input : graph_viewer.GetInputs()) { - if (!nnapi::IsInputSupported(*input, "graph")) { - return result; - } - } - // Get all the NodeUnits in the graph_viewer std::vector> node_unit_holder; std::unordered_map node_unit_map; diff --git a/onnxruntime/core/providers/rocm/math/softmax.cc b/onnxruntime/core/providers/rocm/math/softmax.cc index e1d9aa83f6d08..59e0e54049b7c 100644 --- a/onnxruntime/core/providers/rocm/math/softmax.cc +++ b/onnxruntime/core/providers/rocm/math/softmax.cc @@ -44,6 +44,7 @@ SPECIALIZED_SOFTMAX_HELPER_IMPL(float) // MIOpen double data type not supported // SPECIALIZED_SOFTMAX_HELPER_IMPL(double) SPECIALIZED_SOFTMAX_HELPER_IMPL(MLFloat16) +SPECIALIZED_SOFTMAX_HELPER_IMPL(BFloat16) #define REGISTER_KERNEL_TYPED(T) \ ONNX_OPERATOR_VERSIONED_TYPED_KERNEL_EX( \ @@ -203,6 +204,7 @@ SPECIALIZED_COMPUTE(float) // MIOpen double data 
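The QuantizeLinear and DequantizeLinear support checkers above now share the `HasSupportedInputOutputsImpl` override, delegating to `IsQuantizedIOSupported` on the output side (QuantizeLinear) or the input side (DequantizeLinear), and `ResizeOpSupportChecker` gains a `QDQResize` path that runs the same check on both sides. The removed code spells out what that helper has to cover: the quantized tensor must be uint8, and its scale and zero point must be constant initializers. The sketch below only restates that combined check against an ONNX graph; the function name and lookup strategy are illustrative, not the NNAPI EP's actual API.

```python
import onnx

def is_quantized_io_supported(model: onnx.ModelProto, node: onnx.NodeProto, is_input: bool) -> bool:
    """Rough restatement of the checks folded into IsQuantizedIOSupported (hypothetical helper):
    the quantized tensor must be uint8, and its scale / zero point must be constant
    initializers so NNAPI can read them at compile time."""
    initializers = {init.name for init in model.graph.initializer}
    elem_types = {vi.name: vi.type.tensor_type.elem_type
                  for vi in list(model.graph.value_info) + list(model.graph.input) + list(model.graph.output)}

    # DequantizeLinear: the quantized tensor is input 0; QuantizeLinear: it is output 0.
    tensor_name = node.input[0] if is_input else node.output[0]
    if elem_types.get(tensor_name) != onnx.TensorProto.UINT8:
        return False

    # Scale (input 1) and the optional zero point (input 2) must be initializers.
    return all(name in initializers for name in node.input[1:3] if name)
```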
type not supported // SPECIALIZED_COMPUTE(double) SPECIALIZED_COMPUTE(MLFloat16) +SPECIALIZED_COMPUTE(BFloat16) } // namespace rocm } // namespace onnxruntime diff --git a/onnxruntime/core/providers/rocm/math/softmax_impl.cu b/onnxruntime/core/providers/rocm/math/softmax_impl.cu index d892b9fb86371..68f04070f3fb0 100644 --- a/onnxruntime/core/providers/rocm/math/softmax_impl.cu +++ b/onnxruntime/core/providers/rocm/math/softmax_impl.cu @@ -97,6 +97,7 @@ template void dispatch_warpwise_softmax_forward( SPECIALIZED_SOFTMAX_IMPL(float, float, float) SPECIALIZED_SOFTMAX_IMPL(half, half, float) SPECIALIZED_SOFTMAX_IMPL(double, double, double) +SPECIALIZED_SOFTMAX_IMPL(BFloat16, BFloat16, float) template void dispatch_blockwise_softmax_forward(hipStream_t stream, output_t* output, const input_t* input, int softmax_elements, int softmax_elements_stride, int batch_count) { @@ -119,6 +120,7 @@ template void dispatch_blockwise_softmax_forward SPECIALIZED_BLOCKWISE_SOFTMAX_IMPL(float, float, float) SPECIALIZED_BLOCKWISE_SOFTMAX_IMPL(half, half, float) SPECIALIZED_BLOCKWISE_SOFTMAX_IMPL(double, double, double) +SPECIALIZED_BLOCKWISE_SOFTMAX_IMPL(BFloat16, BFloat16, float) } diff --git a/onnxruntime/core/providers/rocm/miopen_common.cc b/onnxruntime/core/providers/rocm/miopen_common.cc index 3de6c408cbb0b..24f23853f15b3 100644 --- a/onnxruntime/core/providers/rocm/miopen_common.cc +++ b/onnxruntime/core/providers/rocm/miopen_common.cc @@ -91,6 +91,11 @@ miopenDataType_t MiopenTensor::GetDataType() { return miopenHalf; } +template <> +miopenDataType_t MiopenTensor::GetDataType() { + return miopenBFloat16; +} + template <> miopenDataType_t MiopenTensor::GetDataType() { return miopenInt32; @@ -117,10 +122,18 @@ const float Consts::Zero = 0; const float Consts::One = 1; +const float Consts::Zero = 0; + +const float Consts::One = 1; + #if ROCM_VERSION >= 40300 const float ReduceConsts::One = 1; const float ReduceConsts::Zero = 0; + +const float ReduceConsts::One = 1; + +const float ReduceConsts::Zero = 0; #else // Up until ROCm 4.2, miopenReduceTensor() required alpha/beta to be the same data // type as the input type. 
This differs from cudnnReduceTensor() and other @@ -130,6 +143,12 @@ const half ReduceConsts::One = 1.f; template <> const half ReduceConsts::Zero = 0.f; + +template <> +const BFloat16 ReduceConsts::One = 1.f; + +template <> +const BFloat16 ReduceConsts::Zero = 0.f; #endif template <> diff --git a/onnxruntime/core/providers/rocm/miopen_common.h b/onnxruntime/core/providers/rocm/miopen_common.h index 8140cad54bf7d..b2da1ae9902ed 100644 --- a/onnxruntime/core/providers/rocm/miopen_common.h +++ b/onnxruntime/core/providers/rocm/miopen_common.h @@ -64,6 +64,12 @@ struct Consts { static const float One; }; +template <> +struct Consts { + static const float Zero; + static const float One; +}; + template struct ReduceConsts { static const ElemType Zero; @@ -79,6 +85,12 @@ struct ReduceConsts { static const float Zero; static const float One; }; + +template <> +struct ReduceConsts { + static const float Zero; + static const float One; +}; #endif inline double ClampMiopenBatchNormEpsilon(double epsilon) { diff --git a/onnxruntime/core/providers/rocm/reduction/reduction_ops.cc b/onnxruntime/core/providers/rocm/reduction/reduction_ops.cc index 9258beb423ad8..dbc4bfd6aad49 100644 --- a/onnxruntime/core/providers/rocm/reduction/reduction_ops.cc +++ b/onnxruntime/core/providers/rocm/reduction/reduction_ops.cc @@ -379,7 +379,6 @@ Status PrepareForReduce(const Tensor* X, const auto input_dims = input_shape.GetDims(); InlinedShapeVector reduced(rank, false); - prepare_reduce_metadata.output_dims.reserve(input_dims.size()); if (axes.size() > 0) { prepare_reduce_metadata.output_dims = input_shape.AsShapeVector(); for (auto axis : axes) { @@ -393,6 +392,7 @@ Status PrepareForReduce(const Tensor* X, } } else { // no axes provided (i.e.) default axes => reduce on all dims + prepare_reduce_metadata.output_dims.reserve(input_dims.size()); for (auto dim : input_dims) { ORT_ENFORCE(keepdims || dim != 0, "Can't reduce on dim with value of 0 if 'keepdims' is false. 
" @@ -511,7 +511,10 @@ Status ReduceComputeCore(ROCMExecutionProvider& rocm_ep, const Tensor& input, Pr IAllocatorUniquePtr temp_X; miopenDataType_t miopen_type_X = MiopenTensor::GetDataType(); - if (ReduceTensorIndices == MIOPEN_REDUCE_TENSOR_FLATTENED_INDICES && std::is_same::value) { + // unlike bfp16 not supported in cudnn, miopen call for bfp16 succeeded below, however, UT shows data error + // so for now, follow the same logic in cudnn and convert input to fp32 then call miopen + if ((ReduceTensorIndices == MIOPEN_REDUCE_TENSOR_FLATTENED_INDICES && std::is_same::value) || + (ReduceTensorIndices == MIOPEN_REDUCE_TENSOR_NO_INDICES && std::is_same::value)) { // ArgMax/ArgMin with FP16 are not supported by miopen, so convert input to fp32 then call miopen temp_X = rocm_ep.GetScratchBuffer(input_count); miopen_type_X = miopenFloat; @@ -519,7 +522,7 @@ Status ReduceComputeCore(ROCMExecutionProvider& rocm_ep, const Tensor& input, Pr } MiopenReduceDescriptor reduce_desc; - if (std::is_same::value) { + ORT_IF_CONSTEXPR (std::is_same::value || std::is_same::value) { ORT_RETURN_IF_ERROR(reduce_desc.Set(miopen_reduce_op, MiopenTensor::GetDataType(), ReduceTensorIndices)); } else { ORT_RETURN_IF_ERROR(reduce_desc.Set(miopen_reduce_op, miopen_type_X, ReduceTensorIndices)); @@ -651,11 +654,22 @@ Status ReduceComputeCore(ROCMExecutionProvider& rocm_ep, const Tensor& input, Pr HIP_RETURN_IF_ERROR(hipMemcpyAsync(output.template MutableData(), input.template Data(), input_count * sizeof(T), hipMemcpyDeviceToDevice, stream)); } } else { - MIOPEN_RETURN_IF_ERROR(miopenReduceTensor( - rocm_ep.PerThreadMiopenHandle(), reduce_desc, indices_rocm.get(), indices_bytes, - workspace_rocm.get(), workspace_bytes, - &one, input_tensor, reinterpret_cast(input.template Data()), - &zero, output_tensor, reinterpret_cast(output.template MutableData()))); + if (temp_X) { + auto temp_output = rocm_ep.GetScratchBuffer(output_count); + MIOPEN_RETURN_IF_ERROR(miopenReduceTensor( + rocm_ep.PerThreadMiopenHandle(), reduce_desc, indices_rocm.get(), indices_bytes, + workspace_rocm.get(), workspace_bytes, + &one, input_tensor, temp_X.get(), + &zero, output_tensor, temp_output.get())); + + Impl_Cast(stream, temp_output.get(), reinterpret_cast(output.template MutableData()), output_count); + } else { + MIOPEN_RETURN_IF_ERROR(miopenReduceTensor( + rocm_ep.PerThreadMiopenHandle(), reduce_desc, indices_rocm.get(), indices_bytes, + workspace_rocm.get(), workspace_bytes, + &one, input_tensor, reinterpret_cast(input.template Data()), + &zero, output_tensor, reinterpret_cast(output.template MutableData()))); + } } } } else { @@ -880,7 +894,8 @@ template std::unique_ptr ReduceCompute, BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, BuildKernelCreateInfo, - // BuildKernelCreateInfo, + BuildKernelCreateInfo, // OpSet 14 BuildKernelCreateInfo, @@ -2031,11 +2031,11 @@ static Status RegisterRocmKernels(KernelRegistry& kernel_registry) { BuildKernelCreateInfo, BuildKernelCreateInfo, BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // 
BuildKernelCreateInfo, - // BuildKernelCreateInfo, - // BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, // OpSet 15 BuildKernelCreateInfo, diff --git a/onnxruntime/core/providers/shared/node_unit/node_unit.cc b/onnxruntime/core/providers/shared/node_unit/node_unit.cc index a98336c6fe331..f78e2a550c8fc 100644 --- a/onnxruntime/core/providers/shared/node_unit/node_unit.cc +++ b/onnxruntime/core/providers/shared/node_unit/node_unit.cc @@ -10,9 +10,6 @@ namespace onnxruntime { namespace { -// The QLinearOpType GetQLinearOpType, is very similar to the one in NNAPI -// However, the NNAPI ones are only the subset of the ones here, -// TODO, make these shared enum class QLinearOpType : uint8_t { Unknown, // Unknown or not a linear quantized op DequantizeLinear, @@ -81,13 +78,15 @@ bool IsVariadicQLinearOp(QLinearOpType type) { return type == QLinearOpType::QLinearConcat; } -const std::vector GetQDQOutputNodes(const GraphViewer& graph_viewer, const QDQ::NodeGroup& node_group) { - std::vector output_nodes; - output_nodes.reserve(node_group.q_nodes.size()); - for (const auto& node_idx : node_group.q_nodes) { - output_nodes.push_back(graph_viewer.GetNode(node_idx)); +const std::vector GetQDQIONodes(const GraphViewer& graph_viewer, + const QDQ::NodeGroup& node_group, bool is_input) { + std::vector io_nodes; + const auto& src_nodes = is_input ? node_group.dq_nodes : node_group.q_nodes; + io_nodes.reserve(src_nodes.size()); + for (const auto& node_idx : src_nodes) { + io_nodes.push_back(graph_viewer.GetNode(node_idx)); } - return output_nodes; + return io_nodes; } // Get the input or output NodeUnitIODef(s) for the given QDQ NodeGroup @@ -154,7 +153,7 @@ NodeUnit::NodeUnit(const Node& node) } NodeUnit::NodeUnit(const GraphViewer& graph_viewer, const QDQ::NodeGroup& node_group) - : output_nodes_{GetQDQOutputNodes(graph_viewer, node_group)}, + : output_nodes_{GetQDQIONodes(graph_viewer, node_group, false /* is_input */)}, target_node_(*graph_viewer.GetNode(node_group.target_node)), type_(Type::QDQGroup), inputs_{GetQDQIODefs(target_node_, node_group, true /* is_input */)}, diff --git a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_info.cc b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_info.cc index cfc43350a210e..7386ce6c88322 100644 --- a/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_info.cc +++ b/onnxruntime/core/providers/tensorrt/tensorrt_execution_provider_info.cc @@ -27,6 +27,7 @@ constexpr const char* kCachePath = "trt_engine_cache_path"; constexpr const char* kDecryptionEnable = "trt_engine_decryption_enable"; constexpr const char* kDecryptionLibPath = "trt_engine_decryption_lib_path"; constexpr const char* kForceSequentialEngineBuild = "trt_force_sequential_engine_build"; +// add new provider option name here. } // namespace provider_option_names } // namespace tensorrt @@ -63,7 +64,7 @@ TensorrtExecutionProviderInfo TensorrtExecutionProviderInfo::FromProviderOptions .AddAssignmentToReference(tensorrt::provider_option_names::kDecryptionEnable, info.engine_decryption_enable) .AddAssignmentToReference(tensorrt::provider_option_names::kDecryptionLibPath, info.engine_decryption_lib_path) .AddAssignmentToReference(tensorrt::provider_option_names::kForceSequentialEngineBuild, info.force_sequential_engine_build) - .Parse(options)); + .Parse(options)); // add new provider option here. 
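The option-name table just above (together with the `FromProviderOptions` / `ToProviderOptions` round trip) is what backs string-keyed TensorRT provider options. From the Python API these same `trt_*` keys can typically be passed as a provider-options dict when the TensorRT EP is requested; exact availability depends on the installed onnxruntime-gpu build, so treat this as a sketch, and the model path is a placeholder.

```python
import onnxruntime as ort

# Keys mirror tensorrt::provider_option_names parsed by FromProviderOptions.
trt_options = {
    "trt_fp16_enable": "1",
    "trt_max_workspace_size": "4294967296",
    "trt_engine_cache_enable": "1",
    "trt_engine_cache_path": "./trt_cache",
    "trt_force_sequential_engine_build": "0",
}

sess = ort.InferenceSession(
    "model.onnx",  # placeholder path
    providers=[
        ("TensorrtExecutionProvider", trt_options),
        "CUDAExecutionProvider",
        "CPUExecutionProvider",
    ],
)
print(sess.get_providers())
```

The TensorRT perf scripts later in this diff drive the same behaviour through `ORT_TENSORRT_*` environment variables (`ORT_TENSORRT_FP16_ENABLE`, `ORT_TENSORRT_ENGINE_CACHE_ENABLE`, `ORT_TENSORRT_MAX_WORKSPACE_SIZE`) rather than provider options.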
return info; } @@ -87,6 +88,7 @@ ProviderOptions TensorrtExecutionProviderInfo::ToProviderOptions(const TensorrtE {tensorrt::provider_option_names::kDecryptionEnable, MakeStringWithClassicLocale(info.engine_decryption_enable)}, {tensorrt::provider_option_names::kDecryptionLibPath, MakeStringWithClassicLocale(info.engine_decryption_lib_path)}, {tensorrt::provider_option_names::kForceSequentialEngineBuild, MakeStringWithClassicLocale(info.force_sequential_engine_build)}, + // add new provider option here. }; return options; } diff --git a/onnxruntime/core/providers/tensorrt/tensorrt_provider_factory.cc b/onnxruntime/core/providers/tensorrt/tensorrt_provider_factory.cc index d65c91d88f60d..0929b193f3dba 100644 --- a/onnxruntime/core/providers/tensorrt/tensorrt_provider_factory.cc +++ b/onnxruntime/core/providers/tensorrt/tensorrt_provider_factory.cc @@ -6,6 +6,7 @@ #include #include "tensorrt_execution_provider.h" #include "core/framework/provider_options.h" +#include "core/providers/tensorrt/tensorrt_provider_options.h" #include using namespace onnxruntime; @@ -48,7 +49,7 @@ struct Tensorrt_Provider : Provider { } std::shared_ptr CreateExecutionProviderFactory(const void* provider_options) override { - auto& options = *reinterpret_cast(provider_options); + auto& options = *reinterpret_cast(provider_options); TensorrtExecutionProviderInfo info; info.device_id = options.device_id; info.has_user_compute_stream = options.has_user_compute_stream != 0; @@ -74,7 +75,7 @@ struct Tensorrt_Provider : Provider { void UpdateProviderOptions(void* provider_options, const ProviderOptions& options) override { auto internal_options = onnxruntime::TensorrtExecutionProviderInfo::FromProviderOptions(options); - auto& trt_options = *reinterpret_cast(provider_options); + auto& trt_options = *reinterpret_cast(provider_options); trt_options.device_id = internal_options.device_id; trt_options.trt_max_partition_iterations = internal_options.max_partition_iterations; trt_options.trt_min_subgraph_size = internal_options.min_subgraph_size; diff --git a/onnxruntime/core/session/environment.cc b/onnxruntime/core/session/environment.cc index ba479a1b80f19..daa47db1fcffd 100644 --- a/onnxruntime/core/session/environment.cc +++ b/onnxruntime/core/session/environment.cc @@ -10,6 +10,8 @@ #if !defined(ORT_MINIMAL_BUILD) #include "onnx/defs/operator_sets.h" #include "onnx/defs/operator_sets_ml.h" +#include "core/graph/contrib_ops/ms_opset.h" +#include "core/graph/contrib_ops/onnx_deprecated_opset.h" #if defined(ENABLE_TRAINING) || defined(ENABLE_TRAINING_OPS) #include "onnx/defs/operator_sets_training.h" #endif @@ -24,6 +26,7 @@ #include "core/platform/env.h" #include "core/util/thread_utils.h" + #ifdef ONNXRUNTIME_ENABLE_INSTRUMENT #include "core/platform/tracing.h" #endif @@ -225,6 +228,10 @@ Status Environment::Initialize(std::unique_ptr logging_ // Register contributed schemas. // The corresponding kernels are registered inside the appropriate execution provider. 
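Back in node_unit.cc above, `GetQDQOutputNodes` is generalized into `GetQDQIONodes`: for a QDQ node group the input-side nodes are its DequantizeLinear nodes and the output-side nodes are its QuantizeLinear nodes, selected from the same `NodeGroup` record. A minimal Python restatement of that selection (field names follow the C++ `QDQ::NodeGroup` struct; the `GraphViewer` node lookup is elided):

```python
from dataclasses import dataclass, field
from typing import List

@dataclass
class NodeGroup:
    # Mirrors QDQ::NodeGroup: node indices of the DQ inputs, Q outputs and the target node.
    dq_nodes: List[int] = field(default_factory=list)
    q_nodes: List[int] = field(default_factory=list)
    target_node: int = -1

def get_qdq_io_nodes(node_group: NodeGroup, is_input: bool) -> List[int]:
    """DQ nodes feed the group's inputs, Q nodes produce its outputs
    (indices only; the real code resolves Node* via the GraphViewer)."""
    return list(node_group.dq_nodes if is_input else node_group.q_nodes)

group = NodeGroup(dq_nodes=[3, 4], q_nodes=[7], target_node=5)
assert get_qdq_io_nodes(group, is_input=True) == [3, 4]
assert get_qdq_io_nodes(group, is_input=False) == [7]
```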
#ifndef DISABLE_CONTRIB_OPS +#ifndef ORT_MINIMAL_BUILD + RegisterOpSetSchema(); + RegisterOpSetSchema(); +#endif contrib::RegisterContribSchemas(); #endif #ifdef USE_DML diff --git a/onnxruntime/core/session/inference_session.cc b/onnxruntime/core/session/inference_session.cc index e36808dba2778..71aa9bc499fbb 100644 --- a/onnxruntime/core/session/inference_session.cc +++ b/onnxruntime/core/session/inference_session.cc @@ -120,11 +120,10 @@ Status VerifyEachNodeIsAssignedToAnEpImpl(const Graph& graph, bool is_verbose, #endif // !defined(ORT_MINIMAL_BUILD) // recurse into subgraphs - const auto subgraphs = node.GetSubgraphs(); - for (const auto& subgraph : subgraphs) { - const auto status = VerifyEachNodeIsAssignedToAnEpImpl(*subgraph, is_verbose, node_placements); - if (!status.IsOK()) { - return status; + if (node.ContainsSubgraph()) { + const auto subgraphs = node.GetSubgraphs(); + for (const auto& subgraph : subgraphs) { + ORT_RETURN_IF_ERROR(VerifyEachNodeIsAssignedToAnEpImpl(*subgraph, is_verbose, node_placements)); } } } @@ -1149,7 +1148,7 @@ Status PartitionOrtFormatModel(onnxruntime::Graph& graph, return Status::OK(); } -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) Status ReplaySavedRuntimeOptimizations( onnxruntime::Graph& graph, const logging::Logger& logger, const SessionOptions& session_options) { bool modified = false; @@ -1167,7 +1166,7 @@ Status ReplaySavedRuntimeOptimizations( return Status::OK(); } -#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) #endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_EXTENDED_MINIMAL_BUILD) Status AssignNodesToEpsFromHashesImpl(Graph& graph, const fbs::SessionState& fbs_session_state, @@ -1207,12 +1206,12 @@ Status AssignNodesToEpsFromHashesImpl(Graph& graph, const fbs::SessionState& fbs ORT_RETURN_IF_ERROR(set_node_ep(node_kernel_info.node_index, node_kernel_info.kernel_def_hash)); } -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) for (const auto& [node_index, kernel_def_hash] : graph.RuntimeOptimizationReplayCtx().produced_node_index_to_kernel_def_hash) { ORT_RETURN_IF_ERROR(set_node_ep(node_index, kernel_def_hash)); } -#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) return Status::OK(); } @@ -1398,9 +1397,9 @@ common::Status InferenceSession::Initialize() { *session_state_)); } -#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#if !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) ORT_RETURN_IF_ERROR_SESSIONID_(ReplaySavedRuntimeOptimizations(graph, *session_logger_, session_options_)); -#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD) +#endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD) #endif // !defined(ORT_MINIMAL_BUILD) || defined(ORT_EXTENDED_MINIMAL_BUILD) ORT_RETURN_IF_ERROR(AssignNodesToEpsFromHashes(graph, *serialized_session_state, 
kernel_registry_manager_, diff --git a/onnxruntime/core/session/provider_bridge_ort.cc b/onnxruntime/core/session/provider_bridge_ort.cc index 950c8ba657ba5..e94a58cb20dfd 100644 --- a/onnxruntime/core/session/provider_bridge_ort.cc +++ b/onnxruntime/core/session/provider_bridge_ort.cc @@ -1148,7 +1148,43 @@ std::shared_ptr CreateExecutionProviderFactory_MIGrap return nullptr; } +// Adapter to convert the legacy OrtTensorRTProviderOptions to the latest OrtTensorRTProviderOptionsV2 +OrtTensorRTProviderOptionsV2 OrtTensorRTProviderOptionsToOrtTensorRTProviderOptionsV2(const OrtTensorRTProviderOptions* legacy_trt_options) { + OrtTensorRTProviderOptionsV2 trt_options_converted; + + trt_options_converted.device_id = legacy_trt_options->device_id; + trt_options_converted.has_user_compute_stream = legacy_trt_options->has_user_compute_stream; + trt_options_converted.user_compute_stream = legacy_trt_options->user_compute_stream; + trt_options_converted.trt_max_partition_iterations = legacy_trt_options->trt_max_partition_iterations; + trt_options_converted.trt_min_subgraph_size = legacy_trt_options->trt_min_subgraph_size; + trt_options_converted.trt_max_workspace_size = legacy_trt_options->trt_max_workspace_size; + trt_options_converted.trt_fp16_enable = legacy_trt_options->trt_fp16_enable; + trt_options_converted.trt_int8_enable = legacy_trt_options->trt_int8_enable; + trt_options_converted.trt_int8_calibration_table_name = legacy_trt_options->trt_int8_calibration_table_name; + trt_options_converted.trt_int8_use_native_calibration_table = legacy_trt_options->trt_int8_use_native_calibration_table; + trt_options_converted.trt_dla_enable = legacy_trt_options->trt_dla_enable; + trt_options_converted.trt_dla_core = legacy_trt_options->trt_dla_core; + trt_options_converted.trt_dump_subgraphs = legacy_trt_options->trt_dump_subgraphs; + trt_options_converted.trt_engine_cache_enable = legacy_trt_options->trt_engine_cache_enable; + trt_options_converted.trt_engine_cache_path = legacy_trt_options->trt_engine_cache_path; + trt_options_converted.trt_engine_decryption_enable = legacy_trt_options->trt_engine_decryption_enable; + trt_options_converted.trt_engine_decryption_lib_path = legacy_trt_options->trt_engine_decryption_lib_path; + trt_options_converted.trt_force_sequential_engine_build = legacy_trt_options->trt_force_sequential_engine_build; + // Add new provider option below + // Use default value as this field is not available in OrtTensorRTProviderOptionsV + + return trt_options_converted; +} + std::shared_ptr CreateExecutionProviderFactory_Tensorrt(const OrtTensorRTProviderOptions* provider_options) { + OrtTensorRTProviderOptionsV2 trt_options_converted = onnxruntime::OrtTensorRTProviderOptionsToOrtTensorRTProviderOptionsV2(provider_options); + if (auto* provider = s_library_tensorrt.Get()) + return provider->CreateExecutionProviderFactory(&trt_options_converted); + + return nullptr; +} + +std::shared_ptr CreateExecutionProviderFactory_Tensorrt(const OrtTensorRTProviderOptionsV2* provider_options) { if (auto* provider = s_library_tensorrt.Get()) return provider->CreateExecutionProviderFactory(provider_options); @@ -1420,7 +1456,15 @@ ORT_API_STATUS_IMPL(OrtApis::SessionOptionsAppendExecutionProvider_ROCM, _In_ Or } ORT_API_STATUS_IMPL(OrtApis::SessionOptionsAppendExecutionProvider_TensorRT_V2, _In_ OrtSessionOptions* options, _In_ const OrtTensorRTProviderOptionsV2* tensorrt_options) { - return OrtApis::SessionOptionsAppendExecutionProvider_TensorRT(options, reinterpret_cast(tensorrt_options)); + 
API_IMPL_BEGIN + auto factory = onnxruntime::CreateExecutionProviderFactory_Tensorrt(tensorrt_options); + if (!factory) { + return OrtApis::CreateStatus(ORT_FAIL, "OrtSessionOptionsAppendExecutionProvider_TensorRT: Failed to load shared library"); + } + + options->provider_factories.push_back(factory); + return nullptr; + API_IMPL_END } ORT_API_STATUS_IMPL(OrtApis::CreateTensorRTProviderOptions, _Outptr_ OrtTensorRTProviderOptionsV2** out) { diff --git a/onnxruntime/core/util/distance.h b/onnxruntime/core/util/distance.h index 02d6147df5024..1a40d2142cb81 100644 --- a/onnxruntime/core/util/distance.h +++ b/onnxruntime/core/util/distance.h @@ -3,7 +3,7 @@ #pragma once #include -#include "math_cpuonly.h" +#include "core/util/math_cpuonly.h" namespace onnxruntime { diff --git a/onnxruntime/core/util/math.h b/onnxruntime/core/util/math.h index 393340ffc0938..97d2e7bd4bb4d 100644 --- a/onnxruntime/core/util/math.h +++ b/onnxruntime/core/util/math.h @@ -16,9 +16,10 @@ #pragma once +#include + #ifndef SHARED_PROVIDER #include "core/common/common.h" -#include "core/framework/tensor.h" #endif #ifndef CBLAS_ENUM_DEFINED_H @@ -89,8 +90,7 @@ void RowwiseSum(int N, int D, const T* x, T* y, // Sum of vector x, and writes the result to a single value y. template -void Sum(int N, const T* x, T* y, Provider* provider, - Tensor* scratch_ptr = nullptr); +void Sum(int N, const T* x, T* y, Provider* provider); template void Scale(int N, float alpha, const T* x, T* y, Provider* provider); diff --git a/onnxruntime/core/util/math_cpu.cc b/onnxruntime/core/util/math_cpu.cc index c09d885a23d81..164e88573c4cb 100644 --- a/onnxruntime/core/util/math_cpu.cc +++ b/onnxruntime/core/util/math_cpu.cc @@ -15,9 +15,11 @@ */ // Modifications Copyright (c) Microsoft. -#include -#include "core/util/math.h" #include "core/util/math_cpuonly.h" +#include "core/util/math.h" + +#include +#include #include "core/mlas/inc/mlas.h" #if defined(__GNUC__) #pragma GCC diagnostic push @@ -859,10 +861,10 @@ SPECIALIZED_ROWWISESUM(int64_t) SPECIALIZED_ROWWISESUM(double) #undef SPECIALIZED_ROWWISESUM -#define SPECIALIZED_SUM(T) \ - template <> \ - void Sum(int N, const T* x, T* y, CPUMathUtil* /* unused */, Tensor* /* unused */) { \ - *y = ConstEigenVectorMap(x, N).sum(); \ +#define SPECIALIZED_SUM(T) \ + template <> \ + void Sum(int N, const T* x, T* y, CPUMathUtil* /* unused */) { \ + *y = ConstEigenVectorMap(x, N).sum(); \ } SPECIALIZED_SUM(float); diff --git a/onnxruntime/core/util/math_cpuonly.h b/onnxruntime/core/util/math_cpuonly.h index d9214b16c0b12..7e70bfc99be7d 100644 --- a/onnxruntime/core/util/math_cpuonly.h +++ b/onnxruntime/core/util/math_cpuonly.h @@ -62,9 +62,6 @@ #pragma warning(pop) #endif -#ifndef SHARED_PROVIDER -#include "core/framework/tensor.h" -#endif namespace onnxruntime { // common Eigen types that we will often use @@ -109,15 +106,6 @@ template using ConstEigenMatrixMapRowMajorOuterStride = Eigen::Map, 0, Eigen::OuterStride<>>; -template -auto EigenMap(Tensor& t) -> EigenVectorMap { - return EigenVectorMap(t.template MutableData(), gsl::narrow(t.Shape().Size())); -} -template -auto EigenMap(const Tensor& t) -> ConstEigenVectorMap { - return ConstEigenVectorMap(t.template Data(), gsl::narrow(t.Shape().Size())); -} - class CPUMathUtil { public: /*CPUMathUtil contains some help method like generate a diff --git a/onnxruntime/core/util/thread_utils.cc b/onnxruntime/core/util/thread_utils.cc index f47791baac4b3..94218ee7c9e0f 100644 --- a/onnxruntime/core/util/thread_utils.cc +++ 
b/onnxruntime/core/util/thread_utils.cc @@ -1,4 +1,8 @@ -#include "thread_utils.h" +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#include "core/util/thread_utils.h" + #include #ifdef _WIN32 diff --git a/onnxruntime/python/onnxruntime_pybind_state.cc b/onnxruntime/python/onnxruntime_pybind_state.cc index 6834c4acb2d42..56c59406bcd56 100644 --- a/onnxruntime/python/onnxruntime_pybind_state.cc +++ b/onnxruntime/python/onnxruntime_pybind_state.cc @@ -27,6 +27,7 @@ #include "core/session/abi_session_options_impl.h" #include "core/session/onnxruntime_session_options_config_keys.h" #include "core/session/provider_bridge_ort.h" +#include "core/providers/tensorrt/tensorrt_provider_options.h" // Explicitly provide a definition for the static const var 'GPU' in the OrtDevice struct, // GCC 4.x doesn't seem to define this and it breaks the pipelines based on CentOS as it uses @@ -374,7 +375,7 @@ std::unique_ptr CreateExecutionProviderInstance( std::string calibration_table, cache_path, lib_path; auto it = provider_options_map.find(type); if (it != provider_options_map.end()) { - OrtTensorRTProviderOptions params{ + OrtTensorRTProviderOptionsV2 params{ 0, 0, nullptr, diff --git a/onnxruntime/python/onnxruntime_pybind_state_common.h b/onnxruntime/python/onnxruntime_pybind_state_common.h index 773db017e5adc..5477f3c91ad43 100644 --- a/onnxruntime/python/onnxruntime_pybind_state_common.h +++ b/onnxruntime/python/onnxruntime_pybind_state_common.h @@ -28,6 +28,7 @@ struct OrtStatus { #include "core/providers/providers.h" #include "core/providers/cpu/cpu_execution_provider.h" #include "core/providers/cpu/cpu_provider_factory_creator.h" +#include "core/providers/tensorrt/tensorrt_provider_options.h" #if defined(USE_CUDA) || defined(USE_ROCM) #define BACKEND_PROC "GPU" @@ -474,6 +475,7 @@ OrtValue FromDlpack(PyObject* dlpack_tensor, const bool is_bool_tensor); } // namespace python std::shared_ptr CreateExecutionProviderFactory_Tensorrt(const OrtTensorRTProviderOptions* params); +std::shared_ptr CreateExecutionProviderFactory_Tensorrt(const OrtTensorRTProviderOptionsV2* params); std::shared_ptr CreateExecutionProviderFactory_Tensorrt(int device_id); std::shared_ptr CreateExecutionProviderFactory_MIGraphX(const OrtMIGraphXProviderOptions* params); std::shared_ptr CreateExecutionProviderFactory_MIGraphX(int device_id); diff --git a/onnxruntime/python/tools/quantization/onnx_quantizer.py b/onnxruntime/python/tools/quantization/onnx_quantizer.py index ed99ee1560752..6349bd8cd53eb 100644 --- a/onnxruntime/python/tools/quantization/onnx_quantizer.py +++ b/onnxruntime/python/tools/quantization/onnx_quantizer.py @@ -553,7 +553,7 @@ def find_quantized_value(self, input_name): return self.parent.find_quantized_value(input_name) return None - def quantize_bias_static(self, bias_name, input_name, weight_name): + def quantize_bias_static(self, bias_name, input_name, weight_name, beta = 1.0): ''' Quantized the bias. 
Zero Point == 0 and Scale == Input_Scale * Weight_Scale ''' @@ -584,7 +584,7 @@ def quantize_bias_static(self, bias_name, input_name, weight_name): input_scale = self.tensor_proto_to_array(inputscale_initializer) # calcuate scale for bias - bias_scale = input_scale * weight_scale + bias_scale = input_scale * weight_scale * beta # quantize bias quantized_data = (np.asarray(bias_data) / bias_scale).round().astype(np.int32) diff --git a/onnxruntime/python/tools/quantization/operators/gemm.py b/onnxruntime/python/tools/quantization/operators/gemm.py new file mode 100644 index 0000000000000..f297bfb428a19 --- /dev/null +++ b/onnxruntime/python/tools/quantization/operators/gemm.py @@ -0,0 +1,117 @@ +import onnx +import numpy as np +import logging +from .base_operator import QuantOperatorBase +from .qdq_base_operator import QDQOperatorBase +from ..quant_utils import find_by_name, get_mul_node, QuantizedValue, QuantizedValueType, attribute_to_kwarg, ms_domain +from onnx import onnx_pb as onnx_proto + + +def is_B_transposed(gemm_node): + transB_attribute = [attr for attr in gemm_node.attribute if attr.name == 'transB'] + if len(transB_attribute): + return 0 < onnx.helper.get_attribute_value(transB_attribute[0]) + + return False + +def get_beta(gemm_node): + beta_attribute = [attr for attr in gemm_node.attribute if attr.name == 'beta'] + if len(beta_attribute): + return onnx.helper.get_attribute_value(beta_attribute[0]) + + return 1.0 + +def set_default_beta(gemm_node): + beta_attribute = [attr for attr in gemm_node.attribute if attr.name == 'beta'] + if len(beta_attribute): + beta_attribute[0].f = 1.0 + + return 1.0 + +class QLinearGemm(QuantOperatorBase): + def __init__(self, onnx_quantizer, onnx_node): + super().__init__(onnx_quantizer, onnx_node) + + def quantize(self): + node = self.node + assert (node.op_type == "Gemm") + + data_found, output_scale_name, output_zp_name, _, _ = \ + self.quantizer._get_quantization_params(node.output[0]) + + if self.quantizer.is_input_a_weight(node.input[1]) and self.quantizer.is_per_channel(): + (quantized_input_names, zero_point_names, scale_names, nodes) = \ + self.quantizer.quantize_inputs(node, [0], reduce_range=self.quantizer.reduce_range) + quant_weight_tuple = self.quantizer.quantize_weight_per_channel(node.input[1], onnx_proto.TensorProto.INT8, + 0 if is_B_transposed(node) else 1) + quantized_input_names.append(quant_weight_tuple[0]) + zero_point_names.append(quant_weight_tuple[1]) + scale_names.append(quant_weight_tuple[2]) + else: + (quantized_input_names, zero_point_names, scale_names, nodes) = \ + self.quantizer.quantize_inputs(node, [0, 1], reduce_range=self.quantizer.reduce_range) + + if not data_found or quantized_input_names is None: + return super().quantize() + + quantized_bias_name = "" + if len(node.input) == 3: + if not self.quantizer.is_input_a_weight(node.input[2]): + return super().quantize() + + quantized_bias_name = self.quantizer.quantize_bias_static(node.input[2], node.input[0], node.input[1], get_beta(self.node)) + + qgemm_output = node.output[0] + "_quantized" + qgemm_name = qgemm_name = node.name + "_quant" if node.name != "" else "" + + kwargs = {} + for attribute in node.attribute: + if attribute.name != "beta": + kwargs.update(attribute_to_kwarg(attribute)) + kwargs["domain"] = ms_domain + + # generate input + qgemm_inputs = [] + for i in range(2): + qgemm_inputs.extend([quantized_input_names[i], scale_names[i], zero_point_names[i]]) + + qgemm_inputs.extend([quantized_bias_name, output_scale_name, output_zp_name]) + + 
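`quantize_bias_static` above now folds Gemm's `beta` into the bias scale: scale = input_scale * weight_scale * beta, with a zero point of 0. QLinearGemm and QDQGemm pass `get_beta(node)` through, and the QDQ path then resets the attribute to the default 1.0 (and QLinearGemm drops it from the QGemm attributes) so beta is not applied a second time at runtime. A small numpy restatement of that arithmetic with made-up values:

```python
import numpy as np

def quantize_bias_static(bias_data: np.ndarray, input_scale: float, weight_scale: float, beta: float = 1.0):
    """Same arithmetic as ONNXQuantizer.quantize_bias_static with the new beta factor."""
    bias_scale = input_scale * weight_scale * beta
    quantized = (bias_data / bias_scale).round().astype(np.int32)
    return quantized, bias_scale

# Example values (made up): fp32 Gemm bias quantized with beta = 0.5.
bias = np.array([0.25, -1.5, 3.0], dtype=np.float32)
q_bias, scale = quantize_bias_static(bias, input_scale=0.02, weight_scale=0.01, beta=0.5)
print(q_bias, scale)
```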
qgemm_node = onnx.helper.make_node("QGemm", qgemm_inputs, [qgemm_output], + qgemm_name, **kwargs) + nodes.append(qgemm_node) + + # Create an entry for this quantized value + q_output = QuantizedValue(node.output[0], qgemm_output, output_scale_name, output_zp_name, + QuantizedValueType.Input) + self.quantizer.quantized_value_map[node.output[0]] = q_output + + self.quantizer.new_nodes += nodes + + +class QDQGemm(QDQOperatorBase): + def __init__(self, onnx_quantizer, onnx_node): + super().__init__(onnx_quantizer, onnx_node) + + def quantize(self): + node = self.node + assert (node.op_type == "Gemm") + + self.quantizer.quantize_tensor(node.input[0]) + if not self.disable_qdq_for_node_output: + self.quantizer.quantize_tensor(node.output[0]) + + if self.quantizer.is_per_channel(): + self.quantizer.quantize_tensor_per_channel(node.input[1], 0 if is_B_transposed(node) else 1) + else: + self.quantizer.quantize_tensor(node.input[1]) + + if len(node.input) == 3: + if self.quantizer.is_input_a_weight(node.input[2]): + self.quantizer.quantize_bias_tensor(node.input[2], node.input[0], node.input[1], get_beta(self.node)) + set_default_beta(self.node) + else: + logging.warning( + "Bias of Gemm node '{}' is not constant. Please exclude this node for better performance." + .format(self.node.name)) + diff --git a/onnxruntime/python/tools/quantization/qdq_quantizer.py b/onnxruntime/python/tools/quantization/qdq_quantizer.py index f5797282dda06..09a45799f4737 100644 --- a/onnxruntime/python/tools/quantization/qdq_quantizer.py +++ b/onnxruntime/python/tools/quantization/qdq_quantizer.py @@ -83,11 +83,11 @@ def quantize_tensor_per_channel(self, tensor_name, axis): tensor_name)) self.quantize_tensor(tensor_name) - def quantize_bias_tensor(self, bias_name, input_name, weight_name): + def quantize_bias_tensor(self, bias_name, input_name, weight_name, beta = 1.0): weight = find_by_name(bias_name, self.model.initializer()) if weight is not None: if weight.data_type == onnx_proto.TensorProto.FLOAT: - self.bias_to_quantize.append((bias_name, input_name, weight_name)) + self.bias_to_quantize.append((bias_name, input_name, weight_name, beta)) else: logging.warning("Expected {} to be a weight".format(bias_name)) @@ -222,11 +222,11 @@ def quantize_tensors(self): self.quantized_value_map[tensor_name] = quantized_value def quantize_bias_tensors(self): - for bias_name, input_name, weight_name in self.bias_to_quantize: + for bias_name, input_name, weight_name, beta in self.bias_to_quantize: if bias_name in self.quantized_value_map.keys(): continue # Quantize the input - self.quantize_bias_static(bias_name, input_name, weight_name) + self.quantize_bias_static(bias_name, input_name, weight_name, beta) self.model.remove_initializer(find_by_name(bias_name, self.model.initializer())) quant_value = self.quantized_value_map[bias_name] inputs = [quant_value.q_name, quant_value.scale_name, quant_value.zp_name] diff --git a/onnxruntime/python/tools/quantization/quantize.py b/onnxruntime/python/tools/quantization/quantize.py index a0a0b935226cd..826d53884facd 100644 --- a/onnxruntime/python/tools/quantization/quantize.py +++ b/onnxruntime/python/tools/quantization/quantize.py @@ -42,15 +42,16 @@ def optimize_model(model_path: Path): return optimized_model -def load_model(model_path: Path, optimize=True): - if optimize: - #optimize the original model - onnx_model = ONNXModel(optimize_model(Path(model_path))) - # to support GEMM +def load_model(model_path: Path, optimize=True, handle_gemm_with_matmul=True): + + model = 
optimize_model(Path(model_path)) if optimize else onnx.load(Path(model_path)) + + if handle_gemm_with_matmul: + onnx_model = ONNXModel(model) onnx_model.replace_gemm_with_matmul() return onnx_model.model - return onnx.load(Path(model_path)) + return model def quantize(model, @@ -211,7 +212,7 @@ def quantize_static(model_input, if not op_types_to_quantize or len(op_types_to_quantize) == 0: op_types_to_quantize = list(QLinearOpsRegistry.keys()) - model = load_model(Path(model_input), optimize_model) + model = load_model(Path(model_input), optimize_model, False) calibrator = create_calibrator(model, op_types_to_quantize, calibrate_method=calibrate_method) calibrator.collect_data(calibration_data_reader) diff --git a/onnxruntime/python/tools/quantization/registry.py b/onnxruntime/python/tools/quantization/registry.py index e63e1761f7143..d046cbc6dfa86 100644 --- a/onnxruntime/python/tools/quantization/registry.py +++ b/onnxruntime/python/tools/quantization/registry.py @@ -18,6 +18,7 @@ from .operators.resize import QResize, QDQResize from .operators.pooling import QLinearPool from .operators.concat import QLinearConcat, QDQConcat +from .operators.gemm import QLinearGemm, QDQGemm CommonOpsRegistry = { "Gather": GatherQuant, @@ -36,6 +37,7 @@ QLinearOpsRegistry = { "ArgMax": QArgMax, "Conv": QLinearConv, + "Gemm": QLinearGemm, "MatMul": QLinearMatMul, "Add": QLinearBinaryOp, "Mul": QLinearBinaryOp, @@ -58,6 +60,7 @@ QDQRegistry = { "Conv": QDQConv, + "Gemm": QDQGemm, "Clip": QDQRemovableActivation, "Relu": QDQRemovableActivation, "Reshape": QDQDirect8BitOp, diff --git a/onnxruntime/python/tools/tensorrt/perf/benchmark.py b/onnxruntime/python/tools/tensorrt/perf/benchmark.py index 9510125b9f6f0..4ce586e6fbdfb 100644 --- a/onnxruntime/python/tools/tensorrt/perf/benchmark.py +++ b/onnxruntime/python/tools/tensorrt/perf/benchmark.py @@ -64,9 +64,14 @@ def run_trt_standalone(trtexec, model_name, model_path, ort_inputs, all_inputs_s # load inputs input_shape = [] loaded_inputs = [] + + output = get_output(["find", "-L", os.getcwd(), "-name", "test_data*", "-type", "d"]) + test_data_dir = split_and_sort_output(output)[0] + for i in range(len(ort_inputs)): name = ort_inputs[i].name - loaded_input = name + ':' + str(i) + '.bin' + loaded_input = name + ':' + test_data_dir + '/' + str(i) + '.bin' + logger.info(loaded_input) shape = [] for j in all_inputs_shape[i]: shape.append(str(j)) @@ -79,7 +84,7 @@ def run_trt_standalone(trtexec, model_name, model_path, ort_inputs, all_inputs_s inputs_arg = '--loadInputs=' + ','.join(loaded_inputs) result = {} command = [trtexec, onnx_model_path, "--duration=50", "--percentile=90", "--workspace=4096"] - #command.extend([inputs_arg]) TODO: rebind IO inputs in TRT 8.2 + command.extend([inputs_arg]) # add benchmarking flags model = onnx.load(model_path) @@ -998,11 +1003,17 @@ def run_onnxruntime(args, models): model_path = model_info["model_path"] test_data_dir = model_info["test_data_path"] - fp16 = False - os.environ["ORT_TENSORRT_FP16_ENABLE"] = "1" if "Fp16" in ep else "0" logger.info("[Initialize] model = {}, ep = {} ...".format(name, ep)) + + # Set environment variables for ort-trt benchmarking + if "ORT-TRT" in ep: + os.environ["ORT_TENSORRT_FP16_ENABLE"] = "1" if "Fp16" in ep else "0" + os.environ["ORT_TENSORRT_ENGINE_CACHE_ENABLE"] = "1" + os.environ["ORT_TENSORRT_MAX_WORKSPACE_SIZE"] = "4294967296" - # use float16.py for cuda fp16 only + fp16 = False + + # use float16.py for cuda fp16 only if cuda_fp16 == ep: # handle model @@ -1041,7 +1052,7 @@ def 
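With QLinearGemm/QDQGemm registered above and `load_model` no longer rewriting Gemm into MatMul on the `quantize_static` path (`handle_gemm_with_matmul=False`), Gemm nodes are now quantized directly. A hedged usage sketch of the tool follows; the model path, input name, and data reader are placeholders, and the exact `quantize_static` keyword set varies between releases.

```python
import numpy as np
from onnxruntime.quantization import quantize_static
from onnxruntime.quantization.calibrate import CalibrationDataReader

class RandomDataReader(CalibrationDataReader):
    """Placeholder calibration reader: yields a handful of random batches keyed by input name."""
    def __init__(self, input_name, shape, count=8):
        self._data = iter([{input_name: np.random.rand(*shape).astype(np.float32)} for _ in range(count)])

    def get_next(self):
        return next(self._data, None)

reader = RandomDataReader("input", (1, 64))  # placeholder input name and shape

# Gemm is now in QLinearOpsRegistry, so it is quantized (to com.microsoft QGemm) by default;
# listing it in op_types_to_quantize just makes the intent explicit.
quantize_static("model_fp32.onnx", "model_int8.onnx", reader,
                op_types_to_quantize=["Gemm", "MatMul", "Conv"])
```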
run_onnxruntime(args, models): if is_standalone(ep): providers = ep_to_provider_list[trt] else: - providers = ep_to_provider_list[ep] + providers = ep_to_provider_list[ep] options = onnxruntime.SessionOptions() options.graph_optimization_level = onnxruntime.GraphOptimizationLevel.ORT_ENABLE_ALL diff --git a/onnxruntime/python/tools/tensorrt/perf/benchmark_wrapper.py b/onnxruntime/python/tools/tensorrt/perf/benchmark_wrapper.py index 29d5a0d42e472..86186a8037b6b 100644 --- a/onnxruntime/python/tools/tensorrt/perf/benchmark_wrapper.py +++ b/onnxruntime/python/tools/tensorrt/perf/benchmark_wrapper.py @@ -47,13 +47,13 @@ def main(): model_to_fail_ep = {} - benchmark_fail_csv = 'fail.csv' - benchmark_metrics_csv = 'metrics.csv' - benchmark_success_csv = 'success.csv' - benchmark_latency_csv = 'latency.csv' - benchmark_status_csv = 'status.csv' - benchmark_session_csv = 'session.csv' - specs_csv = 'specs.csv' + benchmark_fail_csv = fail_name + csv_ending + benchmark_metrics_csv = metrics_name + csv_ending + benchmark_success_csv = success_name + csv_ending + benchmark_latency_csv = latency_name + csv_ending + benchmark_status_csv = status_name + csv_ending + benchmark_session_csv = session_name + csv_ending + specs_csv = specs_name + csv_ending for model, model_info in models.items(): logger.info("\n" + "="*40 + "="*len(model)) diff --git a/onnxruntime/python/tools/tensorrt/perf/perf.sh b/onnxruntime/python/tools/tensorrt/perf/perf.sh index a30028bfdb558..6dcb6c1c51e1a 100755 --- a/onnxruntime/python/tools/tensorrt/perf/perf.sh +++ b/onnxruntime/python/tools/tensorrt/perf/perf.sh @@ -29,6 +29,7 @@ fi FAIL_MODEL_FILE=".fail_model_map" LATENCY_FILE=".latency_map" METRICS_FILE=".metrics_map" +SESSION_FILE=".session_map" PROFILE="*onnxruntime_profile*" # files to download info @@ -39,6 +40,7 @@ cleanup_files() { rm -f $FAIL_MODEL_FILE rm -f $LATENCY_FILE rm -f $METRICS_FILE + rm -f $SESSION_FILE rm -f $FLOAT_16 rm -rf result/$OPTION find -name $PROFILE -delete diff --git a/onnxruntime/python/tools/tensorrt/perf/perf_utils.py b/onnxruntime/python/tools/tensorrt/perf/perf_utils.py index 5182147fa3b19..fb142b54cdb87 100644 --- a/onnxruntime/python/tools/tensorrt/perf/perf_utils.py +++ b/onnxruntime/python/tools/tensorrt/perf/perf_utils.py @@ -25,11 +25,25 @@ standalone_trt_fp16 = "TRTFp16" acl = "ORT-ACLFp32" +# table names +metrics_name = 'metrics' +success_name = 'success' +fail_name = 'fail' +memory_name = 'memory' +latency_name = 'latency' +status_name = 'status' +latency_over_time_name = 'latency_over_time' +specs_name = 'specs' +session_name = 'session' + +time_string_format = '%Y-%m-%d %H:%M:%S' + # column names model_title = 'Model' group_title = 'Group' # endings +csv_ending = '.csv' avg_ending = ' \nmean (ms)' percentile_ending = ' \n90th percentile (ms)' memory_ending = ' \npeak memory usage (MiB)' diff --git a/onnxruntime/python/tools/tensorrt/perf/post.py b/onnxruntime/python/tools/tensorrt/perf/post.py index 037be08571114..7a0ae034b2053 100644 --- a/onnxruntime/python/tools/tensorrt/perf/post.py +++ b/onnxruntime/python/tools/tensorrt/perf/post.py @@ -18,17 +18,6 @@ cluster_ingest = "https://ingest-onnxruntimedashboarddb.southcentralus.kusto.windows.net" database = "ep_perf_dashboard" -# table names -fail = 'fail' -memory = 'memory' -latency = 'latency' -status = 'status' -latency_over_time = 'latency_over_time' -specs = 'specs' -session = 'session' - -time_string_format = '%Y-%m-%d %H:%M:%S' - def parse_arguments(): parser = argparse.ArgumentParser() parser.add_argument( @@ -142,7 +131,7 
@@ def main(): folders = os.listdir(result_file) os.chdir(result_file) - tables = [fail, memory, latency, status, latency_over_time, specs, session] + tables = [fail_name, memory_name, latency_name, status_name, latency_over_time_name, specs_name, session_name] table_results = {} for table_name in tables: table_results[table_name] = pd.DataFrame() @@ -152,18 +141,18 @@ def main(): csv_filenames = os.listdir() for csv in csv_filenames: table = parse_csv(csv) - if session in csv: - table_results[session] = table_results[session].append(get_session(table, model_group), ignore_index=True) - if specs in csv: - table_results[specs] = table_results[specs].append(get_specs(table, args.branch, args.commit_hash, date_time), ignore_index=True) - if fail in csv: - table_results[fail] = table_results[fail].append(get_failures(table, model_group), ignore_index=True) - if latency in csv: - table_results[memory] = table_results[memory].append(get_memory(table, model_group), ignore_index=True) - table_results[latency] = table_results[latency].append(get_latency(table, model_group), ignore_index=True) - table_results[latency_over_time] = table_results[latency_over_time].append(get_latency_over_time(args.commit_hash, args.report_url, args.branch, table_results[latency]), ignore_index=True) - if status in csv: - table_results[status] = table_results[status].append(get_status(table, model_group), ignore_index=True) + if session_name in csv: + table_results[session_name] = table_results[session_name].append(get_session(table, model_group), ignore_index=True) + if specs_name in csv: + table_results[specs_name] = table_results[specs_name].append(get_specs(table, args.branch, args.commit_hash, date_time), ignore_index=True) + if fail_name in csv: + table_results[fail_name] = table_results[fail_name].append(get_failures(table, model_group), ignore_index=True) + if latency_name in csv: + table_results[memory_name] = table_results[memory_name].append(get_memory(table, model_group), ignore_index=True) + table_results[latency_name] = table_results[latency_name].append(get_latency(table, model_group), ignore_index=True) + table_results[latency_over_time_name] = table_results[latency_over_time_name].append(get_latency_over_time(args.commit_hash, args.report_url, args.branch, table_results[latency_name]), ignore_index=True) + if status_name in csv: + table_results[status_name] = table_results[status_name].append(get_status(table, model_group), ignore_index=True) os.chdir(result_file) for table in tables: print('writing ' + table + ' to database') diff --git a/onnxruntime/python/tools/transformers/benchmark.py b/onnxruntime/python/tools/transformers/benchmark.py index 6e5d5b98ef651..c6e042d4947a0 100644 --- a/onnxruntime/python/tools/transformers/benchmark.py +++ b/onnxruntime/python/tools/transformers/benchmark.py @@ -48,9 +48,9 @@ import psutil import onnx from enum import Enum -from benchmark_helper import (create_onnxruntime_session, Precision, setup_logger, get_latency_result, output_details, - output_summary, output_fusion_statistics, inference_ort, inference_ort_with_io_binding, - allocateOutputBuffers) +from benchmark_helper import (OptimizerInfo, create_onnxruntime_session, Precision, setup_logger, get_latency_result, + output_details, output_summary, output_fusion_statistics, inference_ort, + inference_ort_with_io_binding, allocateOutputBuffers) from quantize_helper import QuantizeHelper from onnx_exporter import create_onnxruntime_input, load_pretrained_model, export_onnx_model_from_pt, export_onnx_model_from_tf 
@@ -68,18 +68,29 @@ from transformers import (AutoConfig, AutoTokenizer, AutoModel, GPT2Model, LxmertConfig) -def run_onnxruntime(use_gpu, model_names, model_class, precision, num_threads, batch_sizes, sequence_lengths, - repeat_times, input_counts, optimize_onnx, validate_onnx, cache_dir, onnx_dir, verbose, overwrite, +def run_onnxruntime(use_gpu, provider, model_names, model_class, precision, num_threads, batch_sizes, sequence_lengths, + repeat_times, input_counts, optimizer_info, validate_onnx, cache_dir, onnx_dir, verbose, overwrite, disable_ort_io_binding, use_raw_attention_mask, model_fusion_statistics, model_source): import onnxruntime results = [] - if use_gpu and ('CUDAExecutionProvider' not in onnxruntime.get_available_providers()): + if (use_gpu and ('CUDAExecutionProvider' not in onnxruntime.get_available_providers()) + and ('ROCMExecutionProvider' not in onnxruntime.get_available_providers())): logger.error( "Please install onnxruntime-gpu package instead of onnxruntime, and use a machine with GPU for testing gpu performance." ) return results + warm_up_repeat = 0 + if provider == 'tensorrt': + optimizer_info = OptimizerInfo.NOOPT + warm_up_repeat = 5 + if 'TensorrtExecutionProvider' not in onnxruntime.get_available_providers(): + logger.error( + "Please install onnxruntime-gpu-tensorrt package, and use a machine with GPU for testing gpu performance." + ) + return results + for model_name in model_names: all_input_names = MODELS[model_name][0] for num_inputs in input_counts: @@ -92,12 +103,12 @@ def run_onnxruntime(use_gpu, model_names, model_class, precision, num_threads, b with torch.no_grad(): onnx_model_file, is_valid_onnx_model, vocab_size, max_sequence_length = export_onnx_model_from_pt( model_name, MODELS[model_name][1], MODELS[model_name][2], MODELS[model_name][3], model_class, - cache_dir, onnx_dir, input_names, use_gpu, precision, optimize_onnx, validate_onnx, + cache_dir, onnx_dir, input_names, use_gpu, precision, optimizer_info, validate_onnx, use_raw_attention_mask, overwrite, model_fusion_statistics) if 'tf' in model_source: onnx_model_file, is_valid_onnx_model, vocab_size, max_sequence_length = export_onnx_model_from_tf( model_name, MODELS[model_name][1], MODELS[model_name][2], MODELS[model_name][3], model_class, - cache_dir, onnx_dir, input_names, use_gpu, precision, optimize_onnx, validate_onnx, + cache_dir, onnx_dir, input_names, use_gpu, precision, optimizer_info, validate_onnx, use_raw_attention_mask, overwrite, model_fusion_statistics) if not is_valid_onnx_model: @@ -105,6 +116,7 @@ def run_onnxruntime(use_gpu, model_names, model_class, precision, num_threads, b ort_session = create_onnxruntime_session(onnx_model_file, use_gpu, + provider, enable_all_optimization=True, num_threads=num_threads, verbose=verbose) @@ -132,8 +144,9 @@ def run_onnxruntime(use_gpu, model_names, model_class, precision, num_threads, b result_template = { "engine": "onnxruntime", "version": onnxruntime.__version__, + "providers": provider, "device": device, - "optimizer": optimize_onnx, + "optimizer": optimizer_info, "precision": precision, "io_binding": not disable_ort_io_binding, "model_name": model_name, @@ -148,7 +161,8 @@ def run_onnxruntime(use_gpu, model_names, model_class, precision, num_threads, b [batch_size, sequence_length])) if disable_ort_io_binding: - result = inference_ort(ort_session, ort_inputs, result_template, repeat_times, batch_size) + result = inference_ort(ort_session, ort_inputs, result_template, repeat_times, batch_size, + warm_up_repeat) else: # Get output 
sizes from a dummy ort run ort_outputs = ort_session.run(ort_output_names, ort_inputs) @@ -163,7 +177,8 @@ def run_onnxruntime(use_gpu, model_names, model_class, precision, num_threads, b data_type = numpy.longlong if 'pt' in model_source else numpy.intc result = inference_ort_with_io_binding(ort_session, ort_inputs, result_template, repeat_times, ort_output_names, ort_outputs, output_buffers, - output_buffer_max_sizes, batch_size, device, data_type) + output_buffer_max_sizes, batch_size, device, data_type, + warm_up_repeat) logger.info(result) results.append(result) @@ -425,7 +440,9 @@ def parse_arguments(): default=os.path.join('.', 'onnx_models'), help="Directory to store onnx models") - parser.add_argument("-g", "--use_gpu", required=False, action="store_true", help="Run on cuda device") + parser.add_argument("-g", "--use_gpu", required=False, action="store_true", help="Run on gpu device") + + parser.add_argument("--provider", required=False, type=str, default=None, help="Execution provider to use") parser.add_argument( "-p", @@ -439,11 +456,14 @@ def parse_arguments(): parser.add_argument("--overwrite", required=False, action="store_true", help="Overwrite existing models") - parser.add_argument("-o", - "--optimize_onnx", - required=False, - action="store_true", - help="Use optimizer.py to optimize onnx model") + parser.add_argument( + "-o", + "--optimizer_info", + type=OptimizerInfo, + default=OptimizerInfo.BYSCRIPT, + choices=list(OptimizerInfo), + help="Optimizer info: Use optimizer.py to optimize onnx model as default. Can also choose from by_ort and no_opt" + ) parser.add_argument("-v", "--validate_onnx", required=False, action="store_true", help="Validate ONNX model") @@ -545,10 +565,10 @@ def main(): if enable_onnxruntime: try: use_raw_attention_mask = True - results += run_onnxruntime(args.use_gpu, args.models, args.model_class, args.precision, num_threads, - args.batch_sizes, args.sequence_lengths, args.test_times, args.input_counts, - args.optimize_onnx, args.validate_onnx, args.cache_dir, args.onnx_dir, - args.verbose, args.overwrite, args.disable_ort_io_binding, + results += run_onnxruntime(args.use_gpu, args.provider, args.models, args.model_class, args.precision, + num_threads, args.batch_sizes, args.sequence_lengths, args.test_times, + args.input_counts, args.optimizer_info, args.validate_onnx, args.cache_dir, + args.onnx_dir, args.verbose, args.overwrite, args.disable_ort_io_binding, use_raw_attention_mask, model_fusion_statistics, args.model_source) except: logger.error(f"Exception", exc_info=True) diff --git a/onnxruntime/python/tools/transformers/benchmark_helper.py b/onnxruntime/python/tools/transformers/benchmark_helper.py index 8c9afbe6561e0..03296eb451394 100644 --- a/onnxruntime/python/tools/transformers/benchmark_helper.py +++ b/onnxruntime/python/tools/transformers/benchmark_helper.py @@ -31,6 +31,17 @@ def __str__(self): return self.value +class OptimizerInfo(Enum): + # no_opt means using the raw ONNX model, but OnnxRuntime might still apply optimization as long as + # graph optimization level is not 0 (disable all). + NOOPT = 'no_opt' + BYORT = 'by_ort' + BYSCRIPT = 'by_script' + + def __str__(self): + return self.value + + IO_BINDING_DATA_TYPE_MAP = { "float32": numpy.float32, # TODO: Add more. 
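benchmark.py's `-o` flag now takes an `OptimizerInfo` value rather than a boolean, and benchmark_helper.py defines that enum above. Because `__str__` returns the value, argparse can use `type=OptimizerInfo` with `choices=list(OptimizerInfo)` and still print readable choices. A quick illustration (the class body mirrors the one added in benchmark_helper.py):

```python
from enum import Enum

class OptimizerInfo(Enum):
    # Same three states as benchmark_helper.OptimizerInfo.
    NOOPT = 'no_opt'        # raw ONNX model (ORT may still optimize unless the graph level is 0)
    BYORT = 'by_ort'        # let onnxruntime optimize at session creation
    BYSCRIPT = 'by_script'  # pre-optimize with optimizer.py (the new default for -o)

    def __str__(self):
        return self.value

# What argparse does with type=OptimizerInfo / choices=list(OptimizerInfo):
assert OptimizerInfo('by_script') is OptimizerInfo.BYSCRIPT  # value lookup on the CLI string
assert str(OptimizerInfo.NOOPT) == 'no_opt'                  # readable in --help and CSV output
```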
@@ -39,11 +50,11 @@ def __str__(self): def create_onnxruntime_session(onnx_model_path, use_gpu, + provider=None, enable_all_optimization=True, num_threads=-1, enable_profiling=False, - verbose=False, - use_dml=False): + verbose=False): session = None try: from onnxruntime import SessionOptions, InferenceSession, GraphOptimizationLevel, __version__ as onnxruntime_version @@ -68,8 +79,16 @@ def create_onnxruntime_session(onnx_model_path, logger.debug(f"Create session for onnx model: {onnx_model_path}") if use_gpu: - if use_dml: + if provider == 'dml': execution_providers = ['DmlExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'rocm': + execution_providers = ['ROCMExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'migraphx': + execution_providers = ['MIGraphXExecutionProvider', 'ROCMExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'cuda': + execution_providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'tensorrt': + execution_providers = ['TensorrtExecutionProvider', 'CUDAExecutionProvider', 'CPUExecutionProvider'] else: execution_providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] else: @@ -89,7 +108,7 @@ def setup_logger(verbose=True): logging.getLogger("transformers").setLevel(logging.WARNING) -def prepare_environment(cache_dir, output_dir, use_gpu, use_dml=False): +def prepare_environment(cache_dir, output_dir, use_gpu, provider=None): if cache_dir and not os.path.exists(cache_dir): os.makedirs(cache_dir) @@ -98,7 +117,7 @@ def prepare_environment(cache_dir, output_dir, use_gpu, use_dml=False): import onnxruntime if use_gpu: - if use_dml: + if provider == 'dml': assert 'DmlExecutionProvider' in onnxruntime.get_available_providers( ), "Please install onnxruntime-directml package to test GPU inference." @@ -106,7 +125,6 @@ def prepare_environment(cache_dir, output_dir, use_gpu, use_dml=False): assert 'CUDAExecutionProvider' in onnxruntime.get_available_providers( ), "Please install onnxruntime-gpu package to test GPU inference." 
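The `provider` argument threaded through `create_onnxruntime_session` above selects the execution-provider list ('dml', 'rocm', 'migraphx', 'cuda', 'tensorrt', or None for the previous CUDA default). A hedged usage sketch, assuming the onnxruntime/python/tools/transformers directory is on sys.path and a GPU-enabled onnxruntime build; the model path is a placeholder.

```python
from benchmark_helper import create_onnxruntime_session

# provider='tensorrt' maps to ['TensorrtExecutionProvider', 'CUDAExecutionProvider', 'CPUExecutionProvider'];
# provider='rocm' maps to ['ROCMExecutionProvider', 'CPUExecutionProvider'], and so on.
session = create_onnxruntime_session(
    "bert.onnx",              # placeholder model path
    use_gpu=True,
    provider="tensorrt",
    enable_all_optimization=True,
    num_threads=-1,
    verbose=False,
)
if session is not None:
    print(session.get_providers())
```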
- import transformers logger.info(f'PyTorch Version:{torch.__version__}') logger.info(f'Transformers Version:{transformers.__version__}') @@ -138,9 +156,9 @@ def get_latency_result(runtimes, batch_size): def output_details(results, csv_filename): with open(csv_filename, mode="a", newline='') as csv_file: column_names = [ - "engine", "version", "device", "precision", "optimizer", "io_binding", "model_name", "inputs", "threads", - "batch_size", "sequence_length", "datetime", "test_times", "QPS", "average_latency_ms", "latency_variance", - "latency_90_percentile", "latency_95_percentile", "latency_99_percentile" + "engine", "version", "providers", "device", "precision", "optimizer", "io_binding", "model_name", "inputs", + "threads", "batch_size", "sequence_length", "datetime", "test_times", "QPS", "average_latency_ms", + "latency_variance", "latency_90_percentile", "latency_95_percentile", "latency_99_percentile" ] csv_writer = csv.DictWriter(csv_file, fieldnames=column_names) @@ -154,7 +172,8 @@ def output_details(results, csv_filename): def output_summary(results, csv_filename, args): with open(csv_filename, mode="a", newline='') as csv_file: header_names = [ - "model_name", "inputs", "engine", "version", "device", "precision", "optimizer", "io_binding", "threads" + "model_name", "inputs", "engine", "version", "providers", "device", "precision", "optimizer", "io_binding", + "threads" ] data_names = [] for batch_size in args.batch_sizes: @@ -205,8 +224,9 @@ def output_fusion_statistics(model_fusion_statistics, csv_filename): logger.info(f"Fusion statistics is saved to csv file: {csv_filename}") -def inference_ort(ort_session, ort_inputs, result_template, repeat_times, batch_size): +def inference_ort(ort_session, ort_inputs, result_template, repeat_times, batch_size, warm_up_repeat=0): result = {} + timeit.repeat(lambda: ort_session.run(None, ort_inputs), number=1, repeat=warm_up_repeat) # Dry run runtimes = timeit.repeat(lambda: ort_session.run(None, ort_inputs), number=1, repeat=repeat_times) result.update(result_template) result.update({"io_binding": False}) @@ -224,7 +244,8 @@ def inference_ort_with_io_binding(ort_session, output_buffer_max_sizes, batch_size, device, - data_type=numpy.longlong): + data_type=numpy.longlong, + warm_up_repeat=0): result = {} # Bind inputs and outputs to onnxruntime session @@ -242,6 +263,7 @@ def inference_ort_with_io_binding(ort_session, for i in range(len(ort_output_names)): io_binding.bind_output(ort_output_names[i], output_buffers[i].device.type, 0, numpy.float32, ort_outputs[i].shape, output_buffers[i].data_ptr()) + timeit.repeat(lambda: ort_session.run_with_iobinding(io_binding), number=1, repeat=warm_up_repeat) # Dry run runtimes = timeit.repeat(lambda: ort_session.run_with_iobinding(io_binding), number=1, repeat=repeat_times) result.update(result_template) result.update({"io_binding": True}) diff --git a/onnxruntime/python/tools/transformers/bert_perf_test.py b/onnxruntime/python/tools/transformers/bert_perf_test.py index 45682eb18ab8a..6b621492b2ec2 100644 --- a/onnxruntime/python/tools/transformers/bert_perf_test.py +++ b/onnxruntime/python/tools/transformers/bert_perf_test.py @@ -36,6 +36,7 @@ class TestSetting: test_cases: int test_times: int use_gpu: bool + provider: str intra_op_num_threads: int seed: int verbose: bool @@ -50,7 +51,7 @@ class ModelSetting: opt_level: int -def create_session(model_path, use_gpu, intra_op_num_threads, graph_optimization_level=None): +def create_session(model_path, use_gpu, provider, intra_op_num_threads, 
graph_optimization_level=None): import onnxruntime if use_gpu and ('CUDAExecutionProvider' not in onnxruntime.get_available_providers()): @@ -61,8 +62,21 @@ def create_session(model_path, use_gpu, intra_op_num_threads, graph_optimization if intra_op_num_threads is None and graph_optimization_level is None: session = onnxruntime.InferenceSession(model_path) else: - execution_providers = ['CPUExecutionProvider' - ] if not use_gpu else ['CUDAExecutionProvider', 'CPUExecutionProvider'] + if use_gpu: + if provider == 'dml': + execution_providers = ['DmlExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'rocm': + execution_providers = ['ROCMExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'migraphx': + execution_providers = ['MIGraphXExecutionProvider', 'ROCMExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'cuda': + execution_providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] + elif provider == 'tensorrt': + execution_providers = ['TensorrtExecutionProvider', 'CUDAExecutionProvider', 'CPUExecutionProvider'] + else: + execution_providers = ['CUDAExecutionProvider', 'CPUExecutionProvider'] + else: + execution_providers = ['CPUExecutionProvider'] sess_options = onnxruntime.SessionOptions() sess_options.execution_mode = onnxruntime.ExecutionMode.ORT_SEQUENTIAL @@ -86,7 +100,23 @@ def create_session(model_path, use_gpu, intra_op_num_threads, graph_optimization session = onnxruntime.InferenceSession(model_path, sess_options, providers=execution_providers) if use_gpu: - assert 'CUDAExecutionProvider' in session.get_providers() + if provider == 'dml': + assert 'DmlExecutionProvider' in session.get_providers() + elif provider == 'rocm': + assert 'ROCMExecutionProvider' in session.get_providers() + elif provider == 'migraphx': + assert 'MIGraphXExecutionProvider' in session.get_providers() + assert 'ROCMExecutionProvider' in session.get_providers() + elif provider == 'cuda': + assert 'CUDAExecutionProvider' in session.get_providers() + elif provider == 'tensorrt': + assert 'TensorrtExecutionProvider' in session.get_providers() + assert 'CUDAExecutionProvider' in session.get_providers() + else: + assert 'CUDAExecutionProvider' in session.get_providers() + else: + assert 'CPUExecutionProvider' in session.get_providers() + return session @@ -117,7 +147,7 @@ def to_string(model_path, session, test_setting): def run_one_test(model_setting, test_setting, perf_results, all_inputs, intra_op_num_threads): - session = create_session(model_setting.model_path, test_setting.use_gpu, intra_op_num_threads, + session = create_session(model_setting.model_path, test_setting.use_gpu, test_setting.provider, intra_op_num_threads, model_setting.opt_level) output_names = [output.name for output in session.get_outputs()] @@ -239,6 +269,12 @@ def parse_arguments(): parser.add_argument('--use_gpu', required=False, action='store_true', help="use GPU") parser.set_defaults(use_gpu=False) + parser.add_argument("--provider", + required=False, + type=str, + default=None, + help="Execution provider to use") + parser.add_argument('-n', '--intra_op_num_threads', required=False, @@ -276,7 +312,7 @@ def main(): for batch_size in batch_size_set: test_setting = TestSetting(batch_size, args.sequence_length, args.samples, args.test_times, args.use_gpu, - args.intra_op_num_threads, args.seed, args.verbose) + args.provider, args.intra_op_num_threads, args.seed, args.verbose) print("test setting", test_setting) run_performance(model_setting, test_setting, perf_results) diff --git 
a/onnxruntime/python/tools/transformers/convert_beam_search.py b/onnxruntime/python/tools/transformers/convert_beam_search.py index 3f251fb9ad507..8bb25fd609b15 100644 --- a/onnxruntime/python/tools/transformers/convert_beam_search.py +++ b/onnxruntime/python/tools/transformers/convert_beam_search.py @@ -128,6 +128,18 @@ def parse_arguments(argv=None): default=1, help='Positive. >1 to penalize and <1 to encorage.') + beam_search_group.add_argument('--vocab_size', + type=int, + required=False, + default=-1, + help="Vocab_size of the underlying model") + + beam_search_group.add_argument('--prefix_vocab_mask', + required=False, + action='store_true', + help="This vocab mask applies only to first iteration, enable if last word in query might need auto complete") + beam_search_group.set_defaults(prefix_vocab_mask=False) + mixed_precision_option_group = parser.add_argument_group( "mixed precision conversion parameters that works when \"--precision fp16\" is specified") @@ -230,12 +242,18 @@ def convert_model(args): pad_token_id = config.eos_token_id vocab_size = config.vocab_size + # if vocab_size is given in parameters use that. + if args.vocab_size != -1: + vocab_size = args.vocab_size + model = onnx.load(args.gpt2_onnx) model.graph.name = "gpt2 subgraph" inputs = [ "input_ids", "max_length", "min_length", "num_beams", "num_return_sequences", "temperature", "length_penalty", "repetition_penalty", "vocab_mask" ] + if args.prefix_vocab_mask: + inputs.append("prefix_vocab_mask") outputs = ["sequences"] if args.output_sequences_scores: @@ -273,6 +291,10 @@ def convert_model(args): repetition_penalty, vocab_mask ] + if args.prefix_vocab_mask: + prefix_vocab_mask = helper.make_tensor_value_info('prefix_vocab_mask', TensorProto.INT32, ['batch_size', vocab_size]) + graph_inputs.append(prefix_vocab_mask) + # graph outputs sequences = helper.make_tensor_value_info('sequences', TensorProto.INT32, ['batch_size', 'num_return_sequences', 'max_length']) @@ -301,6 +323,11 @@ def convert_model(args): def test_model(args, use_vocab_mask: bool = False, sentences: List[str] = None): + + if args.prefix_vocab_mask: + print("Skipping parity test as prefix vocab mask is not implemented by Hugging Face") + return + from transformers import GPT2Tokenizer, GPT2LMHeadModel tokenizer = GPT2Tokenizer.from_pretrained(args.model_name_or_path, cache_dir=args.cache_dir) diff --git a/onnxruntime/python/tools/transformers/dev_benchmark.cmd b/onnxruntime/python/tools/transformers/dev_benchmark.cmd index 3f0b397a14eb2..7a9b3254a1708 100644 --- a/onnxruntime/python/tools/transformers/dev_benchmark.cmd +++ b/onnxruntime/python/tools/transformers/dev_benchmark.cmd @@ -86,8 +86,11 @@ set onnx_export_options=-i %input_counts% -v -b 0 -f fusion.csv --overwrite set benchmark_options=-b %batch_sizes% -s %sequence_length% -t %average_over% -f fusion.csv -r result.csv -d detail.csv if %use_optimizer% == true ( - set onnx_export_options=%onnx_export_options% -o - set benchmark_options=%benchmark_options% -o + set onnx_export_options=%onnx_export_options% -o by_script + set benchmark_options=%benchmark_options% -o by_script +) else ( + set onnx_export_options=%onnx_export_options% -o by_ort + set benchmark_options=%benchmark_options% -o by_ort ) if %run_gpu_fp32% == true ( diff --git a/onnxruntime/python/tools/transformers/huggingface_models.py b/onnxruntime/python/tools/transformers/huggingface_models.py index 051480ebb0ade..642669156cbb8 100644 --- a/onnxruntime/python/tools/transformers/huggingface_models.py +++ 
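The new prefix_vocab_mask graph input is an INT32 tensor of shape [batch_size, vocab_size] that is only consulted on the first decoding step. Assuming it follows the same convention as the existing vocab_mask input (1 = token allowed, 0 = token blocked), which this diff does not restate, a caller might build it like the sketch below; the sizes and token ids are placeholders:

    import numpy as np

    batch_size, vocab_size = 2, 50257  # vocab_size must match --vocab_size / config.vocab_size
    prefix_vocab_mask = np.ones((batch_size, vocab_size), dtype=np.int32)

    # First sample: the query ends in a partially typed word, so restrict the first
    # generated token to its possible completions; second sample keeps the full vocabulary.
    allowed_first_step_ids = [318, 373, 481]  # hypothetical token ids
    prefix_vocab_mask[0, :] = 0
    prefix_vocab_mask[0, allowed_first_step_ids] = 1

The tensor is then fed as the extra "prefix_vocab_mask" input of the exported beam-search model, alongside input_ids and the other scalar inputs listed above.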
b/onnxruntime/python/tools/transformers/huggingface_models.py @@ -6,7 +6,8 @@ # Maps model class name to a tuple of model class MODEL_CLASSES = [ - 'AutoModel', 'AutoModelWithLMHead', 'AutoModelForSequenceClassification', 'AutoModelForQuestionAnswering' + 'AutoModel', 'AutoModelWithLMHead', 'AutoModelForSequenceClassification', 'AutoModelForQuestionAnswering', + 'AutoModelForCausalLM', ] # List of pretrained models: https://huggingface.co/transformers/pretrained_models.html diff --git a/onnxruntime/python/tools/transformers/onnx_exporter.py b/onnxruntime/python/tools/transformers/onnx_exporter.py index d12c1d13070ae..3029bb8f416c8 100644 --- a/onnxruntime/python/tools/transformers/onnx_exporter.py +++ b/onnxruntime/python/tools/transformers/onnx_exporter.py @@ -11,7 +11,7 @@ from pathlib import Path from transformers import AutoConfig, AutoTokenizer, LxmertConfig, TransfoXLConfig from affinity_helper import AffinitySetting -from benchmark_helper import create_onnxruntime_session, Precision +from benchmark_helper import create_onnxruntime_session, Precision, OptimizerInfo from gpt2_helper import GPT2ModelNoPastState, PRETRAINED_GPT2_MODELS, TFGPT2ModelNoPastState from quantize_helper import QuantizeHelper from huggingface_models import MODEL_CLASSES @@ -255,6 +255,7 @@ def load_pretrained_model(model_name, config, cache_dir, custom_model_class, is_ model_class_name = 'TF' + model_class_name transformers_module = __import__("transformers", fromlist=[model_class_name]) + logger.info(f"Model class name: {model_class_name}") model_class = getattr(transformers_module, model_class_name) return model_class.from_pretrained(model_name, config=config, cache_dir=cache_dir) @@ -304,7 +305,7 @@ def validate_and_optimize_onnx(model_name, input_names, use_gpu, precision, - optimize_onnx, + optimize_info, validate_onnx, use_raw_attention_mask, overwrite, @@ -318,8 +319,10 @@ def validate_and_optimize_onnx(model_name, if validate_onnx: is_valid_onnx_model = validate_onnx_model(onnx_model_path, example_inputs, example_outputs_flatten, use_gpu, False, output_names) + if optimize_info == OptimizerInfo.NOOPT: + return onnx_model_path, is_valid_onnx_model, config.vocab_size - if optimize_onnx or precision == Precision.FLOAT16 or precision == Precision.INT8: # Use script (optimizer.py) to optimize + if optimize_info == OptimizerInfo.BYSCRIPT or precision == Precision.FLOAT16 or precision == Precision.INT8: # Use script (optimizer.py) to optimize optimized_model_path = get_onnx_file_path(onnx_dir, model_name, len(input_names), True, use_gpu, precision, False, use_external_data_format) optimize_onnx_model(model_name, onnx_model_path, optimized_model_path, model_type, config.num_attention_heads, @@ -336,7 +339,7 @@ def validate_and_optimize_onnx(model_name, QuantizeHelper.quantize_onnx_model(onnx_model_path, onnx_model_path, use_external_data_format) logger.info(f"Finished quantizing model: {onnx_model_path}") - else: # Use OnnxRuntime to optimize + if optimize_info == OptimizerInfo.BYORT: # Use OnnxRuntime to optimize if is_valid_onnx_model: ort_model_path = add_filename_suffix(onnx_model_path, '_ort') optimize_onnx_model_by_ort(onnx_model_path, ort_model_path, use_gpu, overwrite, model_fusion_statistics) @@ -345,7 +348,7 @@ def validate_and_optimize_onnx(model_name, def export_onnx_model_from_pt(model_name, opset_version, use_external_data_format, model_type, model_class, cache_dir, - onnx_dir, input_names, use_gpu, precision, optimize_onnx, validate_onnx, + onnx_dir, input_names, use_gpu, precision, 
optimizer_info, validate_onnx, use_raw_attention_mask, overwrite, model_fusion_statistics): config, model = load_pt_model(model_name, model_class, cache_dir) @@ -393,15 +396,15 @@ def export_onnx_model_from_pt(model_name, opset_version, use_external_data_forma logger.info(f"Skip export since model existed: {onnx_model_path}") onnx_model_file, is_valid_onnx_model, vocab_size = validate_and_optimize_onnx( - model_name, use_external_data_format, model_type, onnx_dir, input_names, use_gpu, precision, optimize_onnx, + model_name, use_external_data_format, model_type, onnx_dir, input_names, use_gpu, precision, optimizer_info, validate_onnx, use_raw_attention_mask, overwrite, config, model_fusion_statistics, onnx_model_path, - example_inputs, example_outputs_flatten) + example_inputs, example_outputs_flatten, None) return onnx_model_file, is_valid_onnx_model, vocab_size, max_input_size def export_onnx_model_from_tf(model_name, opset_version, use_external_data_format, model_type, model_class, cache_dir, - onnx_dir, input_names, use_gpu, precision, optimize_onnx, validate_onnx, + onnx_dir, input_names, use_gpu, precision, optimizer_info, validate_onnx, use_raw_attention_mask, overwrite, model_fusion_statistics): # Use CPU to export import tensorflow as tf @@ -486,9 +489,9 @@ def export_onnx_model_from_tf(model_name, opset_version, use_external_data_forma logger.info(f"Skip export since model existed: {onnx_model_path}") model_type = model_type + '_tf' - onnx_model_file, is_valid_onnx_model, vocab_size = validate_and_optimize_onnx( - model_name, use_external_data_format, model_type, onnx_dir, input_names, use_gpu, precision, optimize_onnx, + opt_onnx_model_file, onnx_model_file, is_valid_onnx_model, vocab_size = validate_and_optimize_onnx( + model_name, use_external_data_format, model_type, onnx_dir, input_names, use_gpu, precision, optimizer_info, validate_onnx, use_raw_attention_mask, overwrite, config, model_fusion_statistics, onnx_model_path, example_inputs, example_outputs_flatten, output_names) - return onnx_model_file, is_valid_onnx_model, vocab_size, max_input_size + return opt_onnx_model_file, onnx_model_file, is_valid_onnx_model, vocab_size, max_input_size diff --git a/onnxruntime/python/tools/transformers/onnx_model_tnlr.py b/onnxruntime/python/tools/transformers/onnx_model_tnlr.py new file mode 100644 index 0000000000000..c99817c410c3e --- /dev/null +++ b/onnxruntime/python/tools/transformers/onnx_model_tnlr.py @@ -0,0 +1,180 @@ +#------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +#-------------------------------------------------------------------------- +import logging +from fusion_attention import FusionAttention, AttentionMask +from fusion_utils import NumpyHelper +from onnx import helper, numpy_helper, TensorProto, NodeProto +from onnx_model import OnnxModel +from onnx_model_bert import BertOnnxModel +from typing import Union + +logger = logging.getLogger(__name__) + + +class FusionTnlrAttention(FusionAttention): + """ + Fuse TNLR Attention subgraph into one Attention node. + TNLR Attention has extra addtion after qk nodes and adopts [S, B, NH] as I/O shape. 
+ """ + def __init__(self, model: OnnxModel, hidden_size: int, num_heads: int, attention_mask: AttentionMask): + super().__init__(model, hidden_size, num_heads, attention_mask) + + def create_attention_node(self, mask_index: str, matmul: NodeProto, add: NodeProto, num_heads: int, + hidden_size: int, input: str, output: str, add_qk_str: str) -> Union[NodeProto, None]: + + assert num_heads > 0 + if hidden_size > 0 and (hidden_size % num_heads) != 0: + logger.debug(f"input hidden size {hidden_size} is not a multiple of num of heads {num_heads}") + return None + + weight = self.model.get_initializer(matmul.input[1]) + bias = self.model.get_initializer(add.input[1]) or self.model.get_initializer(add.input[0]) + + if weight is None or bias is None: + return None + + qkv_weight = NumpyHelper.to_array(weight) + qkv_bias = NumpyHelper.to_array(bias) + + attention_node_name = self.model.create_node_name('Attention') + + weight = helper.make_tensor(name=attention_node_name + '_qkv_weight', + data_type=TensorProto.FLOAT, + dims=[hidden_size, 3 * hidden_size], + vals=qkv_weight.flatten().tolist()) + + # Sometimes weights and bias are stored in fp16 + if weight.data_type == 10: + weight.CopyFrom(numpy_helper.from_array(NumpyHelper.to_array(weight).astype(np.float16), weight.name)) + self.model.add_initializer(weight, self.this_graph_name) + + bias = helper.make_tensor(name=attention_node_name + '_qkv_bias', + data_type=TensorProto.FLOAT, + dims=[3 * hidden_size], + vals=qkv_bias.flatten().tolist()) + if bias.data_type == 10: + bias.CopyFrom(numpy_helper.from_array(NumpyHelper.to_array(bias).astype(np.float16), bias.name)) + self.model.add_initializer(bias, self.this_graph_name) + + attention_inputs = [input, attention_node_name + '_qkv_weight', attention_node_name + '_qkv_bias'] + if mask_index is not None: + attention_inputs.append(mask_index) + else: + attention_inputs.append("") + + if add_qk_str is not None: + attention_inputs.append("") + attention_inputs.append(add_qk_str) + + attention_node = helper.make_node('Attention', + inputs=attention_inputs, + outputs=[output], + name=attention_node_name) + attention_node.domain = "com.microsoft" + attention_node.attribute.extend([helper.make_attribute("num_heads", num_heads)]) + + return attention_node + + def fuse(self, normalize_node, input_name_to_nodes, output_name_to_node): + # Sometimes we can not fuse skiplayernormalization since the add before layernorm has an output that used by nodes outside skiplayernorm + # Conceptually we treat add before layernorm as skiplayernorm node since they share the same pattern + start_node = normalize_node + if normalize_node.op_type != 'SkipLayerNormalization': + return + + # SkipLayerNormalization has two inputs, and one of them is the root input for attention. 
+ qkv_nodes = self.model.match_parent_path(start_node, + ['Where', 'Add', 'MatMul', 'Reshape', 'Transpose', 'MatMul'], + [1, 1, 1, 0, 0, 0]) + if qkv_nodes is not None: + (_, _, matmul_below, reshape_qkv, transpose_qkv, matmul_qkv) = qkv_nodes + else: + return + + other_inputs = [] + for i, input in enumerate(start_node.input): + if input not in output_name_to_node: + continue + + if input == qkv_nodes[0].output[0]: + continue + other_inputs.append(input) + if len(other_inputs) != 1: + return + + root_input = other_inputs[0] + + v_nodes = self.model.match_parent_path(matmul_qkv, ['Transpose', 'Reshape', 'Slice', 'Add', 'MatMul'], + [1, 0, 0, 0, 1]) + if v_nodes is None: + return + (_, _, _, add, matmul) = v_nodes + + upper_nodes = self.model.match_parent_path(matmul, ['Transpose'], [0]) + transpose = upper_nodes[0] + + qk_nodes = self.model.match_parent_path(matmul_qkv, ['Softmax', 'Add', 'MatMul'], [0, 0, 0]) + if qk_nodes is None: + return + (_, add_qk, matmul_qk) = qk_nodes + + q_nodes = self.model.match_parent_path(matmul_qk, ['Mul', 'Transpose', 'Reshape', 'Slice', 'Add', 'MatMul'], + [0, 0, 0, 0, 0, 1]) + if q_nodes is None: + return + add = q_nodes[-2] + matmul = q_nodes[-1] + + k_nodes = self.model.match_parent_path(matmul_qk, ['Transpose', 'Reshape', 'Slice', 'Add', 'MatMul'], + [1, 0, 0, 0, 1]) + if k_nodes is None: + return + add = k_nodes[-2] + matmul = k_nodes[-1] + + extra_add_qk_nodes = self.model.match_parent_path(add_qk, ['Reshape', 'Where'], [1, 0]) + if extra_add_qk_nodes is None: + return + + if matmul.input[0] == root_input: + mask_index = None + attention_last_node = reshape_qkv + # number of heads are same for all the paths, hence to create attention node, we pass the q_num_heads + # the input_hidden_size represents the input hidden size, this is used as needed but hidden sizes for Q, K are extracted appropriately + new_node = self.create_attention_node(mask_index, matmul, add, self.num_heads, self.hidden_size, root_input, + attention_last_node.output[0], extra_add_qk_nodes[0].input[0]) + if new_node is None: + return + + self.nodes_to_add.append(new_node) + self.node_name_to_graph_name[new_node.name] = self.this_graph_name + + # Add a transpose node after the attention node + back_transpose = helper.make_node("Transpose", ["back_transpose_in_" + new_node.name], [new_node.output[0]], + "back_transpose_" + new_node.name, + perm=[1, 0, 2]) + self.model.add_node(back_transpose, self.this_graph_name) + new_node.input[0] = transpose.input[0] + new_node.output[0] = "back_transpose_in_" + new_node.name + + self.nodes_to_remove.extend([attention_last_node, transpose_qkv, matmul_qkv]) + self.nodes_to_remove.extend(qk_nodes) + self.nodes_to_remove.extend(q_nodes) + self.nodes_to_remove.extend(k_nodes) + self.nodes_to_remove.extend(v_nodes) + + # Use prune graph to remove mask nodes since they are shared by all attention nodes. 
+ #self.nodes_to_remove.extend(mask_nodes) + self.prune_graph = True + + +class TnlrOnnxModel(BertOnnxModel): + def __init__(self, model, num_heads, hidden_size): + super().__init__(model, num_heads, hidden_size) + self.attention_mask = AttentionMask(self) + self.attention_fusion = FusionTnlrAttention(self, self.hidden_size, self.num_heads, self.attention_mask) + + def fuse_attention(self): + self.attention_fusion.apply() diff --git a/onnxruntime/python/tools/transformers/optimizer.py b/onnxruntime/python/tools/transformers/optimizer.py index d7a25c829d916..55699ae7b0544 100644 --- a/onnxruntime/python/tools/transformers/optimizer.py +++ b/onnxruntime/python/tools/transformers/optimizer.py @@ -28,6 +28,7 @@ from onnx_model_bert_tf import BertOnnxModelTF from onnx_model_bert_keras import BertOnnxModelKeras from onnx_model_gpt2 import Gpt2OnnxModel +from onnx_model_tnlr import TnlrOnnxModel from fusion_options import FusionOptions logger = logging.getLogger(__name__) @@ -39,7 +40,8 @@ "bert_tf": (BertOnnxModelTF, "tf2onnx", 0), "bert_keras": (BertOnnxModelKeras, "keras2onnx", 0), "gpt2": (Gpt2OnnxModel, "pytorch", 1), - "gpt2_tf": (Gpt2OnnxModel, 'tf2onnx', 0) # might add a class for GPT2OnnxModel for TF later. + "gpt2_tf": (Gpt2OnnxModel, 'tf2onnx', 0), # might add a class for GPT2OnnxModel for TF later. + "tnlr": (TnlrOnnxModel, "pytorch", 1), } @@ -115,9 +117,9 @@ def optimize_by_fusion(model: ModelProto, model (ModelProto): model object model_type (str, optional): model type - like bert, bert_tf, bert_keras or gpt2. Defaults to 'bert'. num_heads (int, optional): number of attention heads. Defaults to 0. - 0 allows detect the parameter from graph automatically (for model_type "bert" only). + 0 allows detect the parameter from graph automatically (for model_type "bert" only). hidden_size (int, optional): hidden size. Defaults to 0. - 0 allows detect the parameter from graph automatically (for model_type "bert" only). + 0 allows detect the parameter from graph automatically (for model_type "bert" only). optimization_options (FusionOptions, optional): optimization options that turn on/off some fusions. Defaults to None. Returns: @@ -159,7 +161,7 @@ def optimize_model(input: str, only_onnxruntime: bool = False): """ Optimize Model by OnnxRuntime and/or python fusion logic. - ONNX Runtime has graph optimizations (https://onnxruntime.ai/docs/resources/graph-optimizations.html). + ONNX Runtime has graph optimizations (https://onnxruntime.ai/docs/resources/graph-optimizations.html). However, the coverage is limited. We also have graph fusions that implemented in Python to improve the coverage. They can combined: ONNX Runtime will run first when opt_level > 0, then graph fusions in Python will be applied. @@ -170,8 +172,8 @@ def optimize_model(input: str, When opt_level is 0 and only_onnxruntime is False, only python fusion logic is used and onnxruntime is disabled. - When opt_level > 1, use_gpu shall set properly since the optimized graph might contain operators for GPU or CPU only. - If your model is intended for GPU inference only (especially float16 or mixed precision model), it is recommended to + When opt_level > 1, use_gpu shall set properly since the optimized graph might contain operators for GPU or CPU only. + If your model is intended for GPU inference only (especially float16 or mixed precision model), it is recommended to set use_gpu to be True, otherwise the model is not optimized for GPU inference. For BERT model, num_heads and hidden_size are optional. 
For other model types, you need specify these parameters. @@ -180,9 +182,9 @@ def optimize_model(input: str, input (str): input model path. model_type (str, optional): model type - like bert, bert_tf, bert_keras or gpt2. Defaults to 'bert'. num_heads (int, optional): number of attention heads. Defaults to 0. - 0 allows detect the parameter from graph automatically (for model_type "bert" only). + 0 allows detect the parameter from graph automatically (for model_type "bert" only). hidden_size (int, optional): hidden size. Defaults to 0. - 0 allows detect the parameter from graph automatically (for model_type "bert" only). + 0 allows detect the parameter from graph automatically (for model_type "bert" only). optimization_options (FusionOptions, optional): optimization options that turn on/off some fusions. Defaults to None. opt_level (int, optional): onnxruntime graph optimization level (0, 1, 2 or 99) or None. Defaults to None. When the value is None, default value (1 for bert and gpt2, 0 for other model types) will be used. diff --git a/onnxruntime/python/tools/transformers/profiler.py b/onnxruntime/python/tools/transformers/profiler.py index 753b976758af1..ddd00e94ebb7a 100644 --- a/onnxruntime/python/tools/transformers/profiler.py +++ b/onnxruntime/python/tools/transformers/profiler.py @@ -86,8 +86,11 @@ def parse_arguments(argv=None): parser.add_argument('-g', '--use_gpu', required=False, action='store_true', help="use GPU") parser.set_defaults(use_gpu=False) - parser.add_argument('-d', '--use_dml', required=False, action='store_true', help="use DML") - parser.set_defaults(use_dml=False) + parser.add_argument('--provider', + required=False, + type=str, + default='cuda', + help="Execution provider to use") parser.add_argument( '--basic_optimization', @@ -108,15 +111,15 @@ def parse_arguments(argv=None): return parser.parse_args(argv) -def run_profile(onnx_model_path, use_gpu, basic_optimization, thread_num, all_inputs, use_dml): +def run_profile(onnx_model_path, use_gpu, provider, basic_optimization, thread_num, all_inputs): from benchmark_helper import create_onnxruntime_session session = create_onnxruntime_session(onnx_model_path, use_gpu, + provider, enable_all_optimization=not basic_optimization, num_threads=thread_num, - enable_profiling=True, - use_dml=use_dml) + enable_profiling=True) for inputs in all_inputs: _ = session.run(None, inputs) @@ -604,7 +607,7 @@ def run(args): else: # default all_inputs = create_dummy_inputs(onnx_model, args.batch_size, args.sequence_length, args.samples) - profile_file = run_profile(args.model, args.use_gpu, args.basic_optimization, args.thread_num, all_inputs, args.use_dml) + profile_file = run_profile(args.model, args.use_gpu, args.provider, args.basic_optimization, args.thread_num, all_inputs) return profile_file diff --git a/onnxruntime/python/tools/transformers/run_benchmark.sh b/onnxruntime/python/tools/transformers/run_benchmark.sh index 1fe18be104b37..5962a4df27e49 100644 --- a/onnxruntime/python/tools/transformers/run_benchmark.sh +++ b/onnxruntime/python/tools/transformers/run_benchmark.sh @@ -16,7 +16,9 @@ use_package=true run_install=true # Engines to test. 
+# To run ort_trt, you need to build and install the onnxruntime-gpu-tensorrt package on your own run_ort=true +run_ort_trt=false run_torch=false run_torchscript=true run_tensorflow=false @@ -107,8 +109,11 @@ if [ "$export_onnx_from_tf" = true ] ; then fi if [ "$use_optimizer" = true ] ; then - onnx_export_options="$onnx_export_options -o" - benchmark_options="$benchmark_options -o" + onnx_export_options="$onnx_export_options -o by_script" + benchmark_options="$benchmark_options -o by_script" +else + onnx_export_options="$onnx_export_options -o by_ort" + benchmark_options="$benchmark_options -o by_ort" fi # ------------------------------------------- @@ -122,6 +127,16 @@ run_one_test() { fi fi + if [ "$run_ort_trt" = true ] ; then + trt_options="--provider tensorrt --disable_ort_io_binding" + echo python $benchmark_script -m $1 $onnx_export_options $trt_options $2 $3 $4 >> benchmark.log + echo python $benchmark_script -m $1 $benchmark_options $trt_options $2 $3 $4 -i $input_counts >> benchmark.log + if [ "$run_tests" = true ] ; then + python $benchmark_script -m $1 $onnx_export_options $trt_options $2 $3 $4 + python $benchmark_script -m $1 $benchmark_options $trt_options $2 $3 $4 -i $input_counts + fi + fi + if [ "$run_torch" = true ] ; then echo python $benchmark_script -e torch -m $1 $benchmark_options $2 $3 $4 >> benchmark.log if [ "$run_tests" = true ] ; then @@ -146,6 +161,9 @@ run_one_test() { # ------------------------------------------- if [ "$run_gpu_fp32" = true ] ; then + if [ "$run_ort_trt" = true ] ; then + export ORT_TENSORRT_FP16_ENABLE=0 + fi for m in $models_to_test do echo Run GPU FP32 Benchmark on model ${m} @@ -154,6 +172,9 @@ if [ "$run_gpu_fp32" = true ] ; then fi if [ "$run_gpu_fp16" = true ] ; then + if [ "$run_ort_trt" = true ] ; then + export ORT_TENSORRT_FP16_ENABLE=1 + fi for m in $models_to_test do echo Run GPU FP16 Benchmark on model ${m} diff --git a/onnxruntime/test/common/tensor_op_test_utils.h b/onnxruntime/test/common/tensor_op_test_utils.h index 7a7c9b512b3c0..85371f21dc10b 100644 --- a/onnxruntime/test/common/tensor_op_test_utils.h +++ b/onnxruntime/test/common/tensor_op_test_utils.h @@ -6,11 +6,14 @@ #include #include +#include + #include "gtest/gtest.h" #include "core/common/common.h" #include "core/common/optional.h" #include "core/common/type_utils.h" +#include "core/framework/tensor.h" #include "core/util/math.h" namespace onnxruntime { diff --git a/onnxruntime/test/contrib_ops/element_wise_ops_test.cc b/onnxruntime/test/contrib_ops/element_wise_ops_test.cc index 03a67b539ba28..af9244873d9dc 100644 --- a/onnxruntime/test/contrib_ops/element_wise_ops_test.cc +++ b/onnxruntime/test/contrib_ops/element_wise_ops_test.cc @@ -112,6 +112,77 @@ TEST(BiasGeluTest, Two_One_Dim) { RunBiasGeluTest(input_a_data, input_b_data, {2, 4}, {4}); } +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(BiasGeluTest, Two_One_Dim_fp16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + return; + } +#endif + OpTester tester("BiasGelu", 1, onnxruntime::kMSDomain); + + std::vector A = { + 0.8f, -0.5f, 0.0f, 1.f, + 0.5f, 0.2f, 0.3f, -0.6f}; + + std::vector B = { + -0.5f, 0.6f, 1.2f, 2.1f}; + + std::vector Y = ComputeGeluWithErf(Add_Simple(A, B)); + + std::vector f_A(8); + std::vector f_B(4); + std::vector f_Y(8); + ConvertFloatToMLFloat16(A.data(), f_A.data(), 8); + ConvertFloatToMLFloat16(B.data(), f_B.data(), 4); + ConvertFloatToMLFloat16(Y.data(), f_Y.data(), 8); + 
+ tester.AddInput("A", {2, 4}, f_A); + tester.AddInput("B", {4}, f_B); + tester.AddOutput("Y", {2, 4}, f_Y); + tester.Run(OpTester::ExpectResult::kExpectSuccess, "", {kTensorrtExecutionProvider}); //TensorRT: fp16 is not supported +} +#endif + +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(BiasGeluTest, Two_One_Dim_bfloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester tester("BiasGelu", 1, onnxruntime::kMSDomain); + + std::vector A = { + 0.8f, -0.5f, 0.0f, 1.f, + 0.5f, 0.2f, 0.3f, -0.6f}; + + std::vector B = { + -0.5f, 0.6f, 1.2f, 2.1f}; + + std::vector Y = ComputeGeluWithErf(Add_Simple(A, B)); + + std::vector f_A = FloatsToBFloat16s(A); + std::vector f_B = FloatsToBFloat16s(B); + std::vector f_Y = FloatsToBFloat16s(Y); + + tester.AddInput("A", {2, 4}, f_A); + tester.AddInput("B", {4}, f_B); + tester.AddOutput("Y", {2, 4}, f_Y); + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + tester.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} +#endif + TEST(MathOpTest, ComplexMul) { if (DefaultCudaExecutionProvider() == nullptr) return; diff --git a/onnxruntime/test/contrib_ops/fastgelu_op_test.cc b/onnxruntime/test/contrib_ops/fastgelu_op_test.cc index 5e0c513397fc7..89ae3772630a9 100644 --- a/onnxruntime/test/contrib_ops/fastgelu_op_test.cc +++ b/onnxruntime/test/contrib_ops/fastgelu_op_test.cc @@ -110,6 +110,19 @@ static void RunFastGeluTest( RunFastGeluTest(input_data, bias_data, output_data, input_dims, bias_dims, output_dims, has_bias); } +TEST(FastGeluTest, FastGeluWithNullInput) { + int batch_size = 1; + int sequence_length = 0; + int hidden_size = 4; + + std::vector input_data = {}; + + std::vector bias_data = { + -0.5f, 0.6f, 1.2f, 2.1f}; + + RunFastGeluTest(input_data, bias_data, batch_size, sequence_length, hidden_size); +} + TEST(FastGeluTest, FastGeluWithBiasFloat32) { int batch_size = 1; int sequence_length = 2; @@ -184,5 +197,50 @@ TEST(FastGeluTest, FastGeluWithoutBiasFloat16) { RunFastGeluTest(input_data, bias_data, output_data, input_dims, bias_dims, output_dims, false, true); } +// CUDA only, ROCM has not been supported yet +#ifdef USE_CUDA +TEST(FastGeluTest, FastGeluWithBias_BFloat16) { + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } + OpTester tester("FastGelu", 1, onnxruntime::kMSDomain); + + int batch_size = 1; + int sequence_length = 2; + int hidden_size = 4; + + std::vector X = { + 0.8f, -0.5f, 0.0f, 1.f, + 0.5f, 0.2f, 0.3f, -0.6f}; + + std::vector B = { + -0.5f, 0.6f, 1.2f, 2.1f}; + + std::vector Y = { + 0.1851806640625f, 0.054046630859375f, 1.0615234375f, 3.095703125f, + 0, 0.63037109375f, 1.3984375f, 1.3984375f}; + + std::vector input_dims = {batch_size, sequence_length, hidden_size}; + std::vector bias_dims = {hidden_size}; + std::vector output_dims = input_dims; + + std::vector f_X = FloatsToBFloat16s(X); + std::vector f_B = FloatsToBFloat16s(B); + std::vector f_Y = FloatsToBFloat16s(Y); + + tester.AddInput("X", input_dims, f_X); + tester.AddInput("bias", bias_dims, f_B); + tester.AddOutput("Y", output_dims, f_Y); + + std::vector> execution_providers; + execution_providers.push_back(DefaultCudaExecutionProvider()); 
+ tester.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} +#endif + + + } // namespace test } // namespace onnxruntime diff --git a/onnxruntime/test/contrib_ops/fused_matmul_op_test.cc b/onnxruntime/test/contrib_ops/fused_matmul_op_test.cc index c1c7a75a186a3..75857c61b8376 100644 --- a/onnxruntime/test/contrib_ops/fused_matmul_op_test.cc +++ b/onnxruntime/test/contrib_ops/fused_matmul_op_test.cc @@ -3,6 +3,7 @@ #include "gtest/gtest.h" #include "test/providers/provider_test_utils.h" +#include "test/common/cuda_op_test_utils.h" namespace onnxruntime { namespace test { @@ -269,6 +270,100 @@ TEST(FusedMatMulOpTest, FloatTypeTransposeBatch) { RunFusedMatMulTest("FusedMatMul", 1, true, true, true, true); } +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(FusedMatMulOpTest, Float16_NoTranspose) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + return; + } +#endif + std::vector common_input_vals{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}; + for (auto t : GenerateSimpleTestCases()) { + + OpTester test("FusedMatMul", 1, onnxruntime::kMSDomain); + + std::vector input0_dims(t.input0_dims); + std::vector input0_vals; + ProcessInputs(t.input0_dims, common_input_vals, false, false, input0_dims, input0_vals); + + std::vector input1_dims(t.input1_dims); + std::vector input1_vals; + ProcessInputs(t.input1_dims, common_input_vals, false, false, input1_dims, input1_vals); + + std::vector f_A(input0_vals.size()); + std::vector f_B(input1_vals.size()); + std::vector f_Y(t.expected_vals.size()); + ConvertFloatToMLFloat16(input0_vals.data(), f_A.data(), (int)input0_vals.size()); + ConvertFloatToMLFloat16(input1_vals.data(), f_B.data(), (int)input1_vals.size()); + ConvertFloatToMLFloat16(t.expected_vals.data(), f_Y.data(), (int)t.expected_vals.size()); + + test.AddInput("A", input0_dims, f_A); + test.AddInput("B", input1_dims, f_B, false); + + test.AddAttribute("transA", (int64_t)0); + test.AddAttribute("transB", (int64_t)0); + test.AddAttribute("transBatchA", (int64_t)0); + test.AddAttribute("transBatchB", (int64_t)0); + test.AddAttribute("alpha", 1.0f); + + test.AddOutput("Y", t.expected_dims, f_Y); + + // Disable TensorRT because of unsupported data type + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {kTensorrtExecutionProvider}); + } +} +#endif + +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(FusedMatMulOpTest, BFloat16_NoTranspose) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + return; + } +#endif + std::vector common_input_vals{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}; + for (auto t : GenerateSimpleTestCases()) { + + OpTester test("FusedMatMul", 1, onnxruntime::kMSDomain); + + std::vector input0_dims(t.input0_dims); + std::vector input0_vals; + ProcessInputs(t.input0_dims, common_input_vals, false, false, input0_dims, input0_vals); + + std::vector input1_dims(t.input1_dims); + std::vector input1_vals; + ProcessInputs(t.input1_dims, common_input_vals, false, false, input1_dims, input1_vals); + + std::vector f_A = FloatsToBFloat16s(input0_vals); + std::vector f_B = FloatsToBFloat16s(input1_vals); + std::vector f_Y = FloatsToBFloat16s(t.expected_vals); + + test.AddInput("A", input0_dims, f_A); + test.AddInput("B", input1_dims, f_B, false); + + test.AddAttribute("transA", (int64_t)0); + 
test.AddAttribute("transB", (int64_t)0); + test.AddAttribute("transBatchA", (int64_t)0); + test.AddAttribute("transBatchB", (int64_t)0); + test.AddAttribute("alpha", 1.0f); + + test.AddOutput("Y", t.expected_dims, f_Y); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); + } +} +#endif + } // namespace transpose_matmul } // namespace test } // namespace onnxruntime diff --git a/onnxruntime/test/contrib_ops/layer_norm_test.cc b/onnxruntime/test/contrib_ops/layer_norm_test.cc index 1e2203d5204db..9c41f218a2f2f 100644 --- a/onnxruntime/test/contrib_ops/layer_norm_test.cc +++ b/onnxruntime/test/contrib_ops/layer_norm_test.cc @@ -80,6 +80,11 @@ static void TestLayerNorm(const std::vector& x_dims, #endif } +TEST(CudaKernelTest, LayerNorm_NullInput) { + const std::vector X_dims{0, 20, 128}; + TestLayerNorm(X_dims, LAYER_NORM_OP, k_epsilon_default); +} + TEST(CudaKernelTest, LayerNorm_SmallSizeTensor) { const std::vector X_dims{4, 20, 128}; TestLayerNorm(X_dims, LAYER_NORM_OP, k_epsilon_default); diff --git a/onnxruntime/test/contrib_ops/qlinear_binary_op_test.cc b/onnxruntime/test/contrib_ops/qlinear_binary_op_test.cc index 8dbc58c12ce74..c63d79806e777 100644 --- a/onnxruntime/test/contrib_ops/qlinear_binary_op_test.cc +++ b/onnxruntime/test/contrib_ops/qlinear_binary_op_test.cc @@ -43,61 +43,87 @@ void RunQLinearMathTestFromFloat( const quantization::Params& a_params, const std::vector& b, const std::vector& b_shape_origin, const quantization::Params& b_params, - const quantization::Params& c_params, - bool input_b_is_initializer = false, - bool all_initializer_scale_zero_point = false) { - size_t number_dims = std::max(a_shape_origin.size(), b_shape_origin.size()); - std::vector a_shape = PrefixingDims(a_shape_origin, number_dims); - std::vector b_shape = PrefixingDims(b_shape_origin, number_dims); - // calc broadcasting shaped - std::vector c_shape(number_dims, 1); - for (size_t axis = 0; axis < number_dims; ++axis) { - if (a_shape[axis] != b_shape[axis] && (a_shape[axis] != 1 && b_shape[axis] != 1)) { - ORT_THROW("Shapes can not be broadcasted"); + const quantization::Params& c_params) { + const auto run_test = [&](bool input_b_is_initializer, + bool all_initializer_scale_zero_point) { + size_t number_dims = std::max(a_shape_origin.size(), b_shape_origin.size()); + std::vector a_shape = PrefixingDims(a_shape_origin, number_dims); + std::vector b_shape = PrefixingDims(b_shape_origin, number_dims); + // calc broadcasting shaped + std::vector c_shape(number_dims, 1); + for (size_t axis = 0; axis < number_dims; ++axis) { + if (a_shape[axis] != b_shape[axis] && (a_shape[axis] != 1 && b_shape[axis] != 1)) { + ORT_THROW("Shapes can not be broadcasted"); + } + c_shape[axis] = std::max(a_shape[axis], b_shape[axis]); } - c_shape[axis] = std::max(a_shape[axis], b_shape[axis]); - } - std::vector a_strides, b_strides, c_strides; - auto c_size = CalcStrides(c_shape, c_strides, false); - auto a_size = CalcStrides(a_shape, a_strides, true); - auto b_size = CalcStrides(b_shape, b_strides, true); - if (a_size != static_cast(a.size()) || b_size != static_cast(b.size())) { - ORT_THROW("Input size not match input shape!"); - } - constexpr int qmax = std::numeric_limits::max(); - constexpr int qmin = std::numeric_limits::min(); - - OpTester test(op_name, 1, 
onnxruntime::kMSDomain); - std::vector a_quantized = QuantizeTestVector(a, a_params); - test.template AddInput("A", a_shape_origin, a_quantized); - test.AddInput("A_scale", {}, {a_params.scale}, all_initializer_scale_zero_point); - test.template AddInput("A_zero_point", {}, {a_params.zero_point}, all_initializer_scale_zero_point); - - std::vector b_quantized = QuantizeTestVector(b, b_params); - test.template AddInput("B", b_shape_origin, b_quantized, input_b_is_initializer); - test.AddInput("B_scale", {}, {b_params.scale}, all_initializer_scale_zero_point); - test.template AddInput("B_zero_point", {}, {b_params.zero_point}, all_initializer_scale_zero_point); - - test.AddInput("C_scale", {}, {c_params.scale}, all_initializer_scale_zero_point); - test.template AddInput("C_zero_point", {}, {c_params.zero_point}, all_initializer_scale_zero_point); - std::vector c(c_size); - for (int64_t offset = 0; offset < c_size; ++offset) { - int64_t remain = offset, a_offset = 0, b_offset = 0; - for (size_t axis = 0; axis < number_dims; ++axis) { - int64_t index = remain / c_strides[axis]; - remain = remain % c_strides[axis]; - a_offset += index * a_strides[axis]; - b_offset += index * b_strides[axis]; + std::vector a_strides, b_strides, c_strides; + auto c_size = CalcStrides(c_shape, c_strides, false); + auto a_size = CalcStrides(a_shape, a_strides, true); + auto b_size = CalcStrides(b_shape, b_strides, true); + if (a_size != static_cast(a.size()) || b_size != static_cast(b.size())) { + ORT_THROW("Input size not match input shape!"); + } + constexpr int qmax = std::numeric_limits::max(); + constexpr int qmin = std::numeric_limits::min(); + + OpTester test(op_name, 1, onnxruntime::kMSDomain); + std::vector a_quantized = QuantizeTestVector(a, a_params); + test.template AddInput("A", a_shape_origin, a_quantized); + test.AddInput("A_scale", {}, {a_params.scale}, all_initializer_scale_zero_point); + test.template AddInput("A_zero_point", {}, {a_params.zero_point}, all_initializer_scale_zero_point); + + std::vector b_quantized = QuantizeTestVector(b, b_params); + test.template AddInput("B", b_shape_origin, b_quantized, input_b_is_initializer); + test.AddInput("B_scale", {}, {b_params.scale}, all_initializer_scale_zero_point); + test.template AddInput("B_zero_point", {}, {b_params.zero_point}, all_initializer_scale_zero_point); + + test.AddInput("C_scale", {}, {c_params.scale}, all_initializer_scale_zero_point); + test.template AddInput("C_zero_point", {}, {c_params.zero_point}, all_initializer_scale_zero_point); + std::vector c(c_size); + for (int64_t offset = 0; offset < c_size; ++offset) { + int64_t remain = offset, a_offset = 0, b_offset = 0; + for (size_t axis = 0; axis < number_dims; ++axis) { + int64_t index = remain / c_strides[axis]; + remain = remain % c_strides[axis]; + a_offset += index * a_strides[axis]; + b_offset += index * b_strides[axis]; + } + + float a_dequantized = quantization::Dequantize(a_quantized[a_offset], a_params); + float b_dequantized = quantization::Dequantize(b_quantized[b_offset], b_params); + c[offset] = clampi(static_cast(std::nearbyintf(calc(a_dequantized, b_dequantized) / c_params.scale)) + c_params.zero_point, qmin, qmax); } - float a_dequantized = quantization::Dequantize(a_quantized[a_offset], a_params); - float b_dequantized = quantization::Dequantize(b_quantized[b_offset], b_params); - c[offset] = clampi(static_cast(std::nearbyintf(calc(a_dequantized, b_dequantized) / c_params.scale)) + c_params.zero_point, qmin, qmax); - } - test.template AddOutput("C", c_shape, c); + 
float abs_error = 0.0f; + + // For quantized models, NNAPI's rounding is different than CPU provider + // Sometimes the result is within +/-1 of result of CPU provider + // For ONNX, we use rounding to nearest ties to even. + // For NNAPI, it is using std::round which is HALF_AWAY_FROM_ZERO, see + // https://android.googlesource.com/platform/frameworks/ml/+/refs/heads/master/nn/common/operations/Quantize.cpp + // Use 1 as abs_error which is the smallest possbile for uint8_t + // + // NOTE, for now the tolerance will only apply if the NNAPI is actually used, + // if for any reason the execution falls back to CPU, we still expect an exact match + // See, 'void Check(...' in onnxruntime/test/providers/provider_test_utils.cc +#ifdef USE_NNAPI + abs_error = 1.0f; +#endif + + test.template AddOutput("C", c_shape, c, false /* sort_output */, 0.0f /* rel_error */, abs_error); + + test.Run(); + }; + + run_test(false /* input_b_is_initializer */, false /* all_initializer_scale_zero_point */); - test.Run(); + // NNAPI will require all the scales and zero points be initializers + run_test(false /* input_b_is_initializer */, true /* all_initializer_scale_zero_point */); + + // We also want to test the case input B is an initializer + run_test(true /* input_b_is_initializer */, true /* all_initializer_scale_zero_point */); } // total 32 + 31 elements to cover all path @@ -145,22 +171,6 @@ TEST(QLinearBinaryOpTest, AddU8VectorVectorFull) { A, {63}, A_params, B, {63}, B_params, C_params); - - // NNAPI will require all the scales and zero points be initializers - // We also want to test the case input B is an initializer - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {63}, A_params, - B, {63}, B_params, - C_params, - false /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); - - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {63}, A_params, - B, {63}, B_params, - C_params, - true /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); } TEST(QLinearBinaryOpTest, AddU8VectorVectorBroadcast) { @@ -180,22 +190,6 @@ TEST(QLinearBinaryOpTest, AddU8VectorVectorBroadcast) { A, {3, 3, 7}, A_params, B, {3, 1, 7}, B_params, C_params); - - // NNAPI will require all the scales and zero points be initializers - // We also want to test the case input B is an initializer - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {3, 3, 7}, A_params, - B, {3, 1, 7}, B_params, - C_params, - false /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); - - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {3, 3, 7}, A_params, - B, {3, 1, 7}, B_params, - C_params, - true /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); } TEST(QLinearBinaryOpTest, AddU8ScalarVectorFull) { @@ -212,22 +206,6 @@ TEST(QLinearBinaryOpTest, AddU8ScalarVectorFull) { B, {1}, B_params, A, {63}, A_params, C_params); - - // NNAPI will require all the scales and zero points be initializers - // We also want to test the case input B is an initializer - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - B, {1}, B_params, - A, {63}, A_params, - C_params, - false /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); - - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - B, {1}, B_params, - A, {63}, A_params, - C_params, - true /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); } TEST(QLinearBinaryOpTest, AddU8ScalarVectorBroadcast) { @@ -244,22 +222,6 @@ 
TEST(QLinearBinaryOpTest, AddU8ScalarVectorBroadcast) { B, {3, 1, 1}, B_params, A, {3, 7, 3}, A_params, C_params); - - // NNAPI will require all the scales and zero points be initializers - // We also want to test the case input B is an initializer - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - B, {3, 1, 1}, B_params, - A, {3, 7, 3}, A_params, - C_params, - false /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); - - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - B, {3, 1, 1}, B_params, - A, {3, 7, 3}, A_params, - C_params, - true /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); } TEST(QLinearBinaryOpTest, AddU8VectorScalarFull) { @@ -276,22 +238,6 @@ TEST(QLinearBinaryOpTest, AddU8VectorScalarFull) { A, {63}, A_params, B, {1}, B_params, C_params); - - // NNAPI will require all the scales and zero points be initializers - // We also want to test the case input B is an initializer - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {63}, A_params, - B, {1}, B_params, - C_params, - false /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); - - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {63}, A_params, - B, {1}, B_params, - C_params, - true /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); } TEST(QLinearBinaryOpTest, AddU8VectorScalarBroadcast) { @@ -308,22 +254,6 @@ TEST(QLinearBinaryOpTest, AddU8VectorScalarBroadcast) { A, {3, 7, 3}, A_params, B, {1, 1, 3}, B_params, C_params); - - // NNAPI will require all the scales and zero points be initializers - // We also want to test the case input B is an initializer - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {3, 7, 3}, A_params, - B, {1, 1, 3}, B_params, - C_params, - false /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); - - RunQLinearMathTestFromFloat("QLinearAdd", add_function, - A, {3, 7, 3}, A_params, - B, {1, 1, 3}, B_params, - C_params, - true /* input_b_is_initializer */, - true /* all_initializer_scale_zero_point */); } TEST(QLinearBinaryOpTest, AddS8VectorVectorFull) { diff --git a/onnxruntime/test/contrib_ops/quant_gemm_test.cc b/onnxruntime/test/contrib_ops/quant_gemm_test.cc index 897320b30eef7..3afcd6651aadd 100644 --- a/onnxruntime/test/contrib_ops/quant_gemm_test.cc +++ b/onnxruntime/test/contrib_ops/quant_gemm_test.cc @@ -15,11 +15,12 @@ #include #include +#include namespace onnxruntime { namespace test { -template +template void RunQuantGemmU8X8Test(const int M, const int N, const int K, @@ -29,36 +30,42 @@ void RunQuantGemmU8X8Test(const int M, bool B_is_initializer, bool per_column = false) { static std::default_random_engine e(123); - static std::uniform_int_distribution n_unsigned(0, 127); - static std::uniform_int_distribution n_xint8(std::numeric_limits::min(), std::numeric_limits::max()); + static std::uniform_int_distribution random_A(std::numeric_limits::min(), + std::numeric_limits::max()); + + constexpr int overflow_adjust = std::is_signed_v ? 
2 : 1; + constexpr int random_B_min = std::numeric_limits::min() / overflow_adjust; + constexpr int random_B_max = std::numeric_limits::max() / overflow_adjust; + static std::uniform_int_distribution random_B(random_B_min, + random_B_max); static std::uniform_real_distribution n_apha(1.0f, 2.0f); static std::uniform_real_distribution n_scale(0.003f, 0.004f); Eigen::MatrixXi matrix_a = Eigen::MatrixXi::Random(K, M) - .unaryExpr([](int) { return n_unsigned(e); }); - std::vector matrix_a_data; + .unaryExpr([](int) { return random_A(e); }); + std::vector matrix_a_data; if (is_A_trans) { Eigen::MatrixXi matrix_a_trans = matrix_a.transpose().eval(); - matrix_a_data = ToVector(matrix_a_trans.data(), M * K); + matrix_a_data = ToVector(matrix_a_trans.data(), M * K); } else { - matrix_a_data = ToVector(matrix_a.data(), M * K); + matrix_a_data = ToVector(matrix_a.data(), M * K); } - uint8_t a_zero_point = GetMiddle(matrix_a_data); + ActType a_zero_point = GetMiddle(matrix_a_data); Eigen::MatrixXi matrix_a_offset = matrix_a - a_zero_point * Eigen::MatrixXi::Ones(K, M); float a_scale = n_scale(e); Eigen::MatrixXi matrix_b = Eigen::MatrixXi::Random(N, K) - .unaryExpr([](int) { return n_xint8(e); }); - std::vector matrix_b_data; + .unaryExpr([](int) { return random_B(e); }); + std::vector matrix_b_data; if (is_B_trans) { Eigen::MatrixXi matrix_b_trans = matrix_b.transpose().eval(); - matrix_b_data = ToVector(matrix_b_trans.data(), N * K); + matrix_b_data = ToVector(matrix_b_trans.data(), N * K); } else { - matrix_b_data = ToVector(matrix_b.data(), N * K); + matrix_b_data = ToVector(matrix_b.data(), N * K); } - ScalarB b_zero_point = GetMiddle(matrix_b_data); + WeightType b_zero_point = GetMiddle(matrix_b_data); std::vector b_scale({n_scale(e)}); - std::vector b_zp_per_column({b_zero_point}); + std::vector b_zp_per_column({b_zero_point}); Eigen::MatrixXi b_zp_matrix = b_zero_point * Eigen::MatrixXi::Ones(N, K); Eigen::MatrixXf b_scale_matrix = b_scale[0] * Eigen::MatrixXf::Ones(N, M); if (per_column) { @@ -74,7 +81,7 @@ void RunQuantGemmU8X8Test(const int M, float alpha = n_apha(e); Eigen::MatrixXi matrix_c = Eigen::MatrixXi::Random(N, M) - .unaryExpr([](int) { return n_xint8(e); }); + .unaryExpr([](int) { return random_A(e); }); Eigen::MatrixXi matrix_int32 = (matrix_b - b_zp_matrix) * matrix_a_offset; if (has_C) { @@ -86,12 +93,12 @@ void RunQuantGemmU8X8Test(const int M, test.AddAttribute("transA", is_A_trans ? 1 : 0); test.AddAttribute("transB", is_B_trans ? 1 : 0); test.AddAttribute("alpha", alpha); - test.AddInput("A", is_A_trans ? std::vector({K, M}) : std::vector({M, K}), std::move(matrix_a_data)); + test.AddInput("A", is_A_trans ? std::vector({K, M}) : std::vector({M, K}), std::move(matrix_a_data)); test.AddInput("a_scale", {}, {a_scale}); - test.AddInput("a_zero_point", {}, {a_zero_point}); - test.AddInput("B", is_B_trans ? std::vector({N, K}) : std::vector({K, N}), std::move(matrix_b_data), B_is_initializer); + test.AddInput("a_zero_point", {}, {a_zero_point}); + test.AddInput("B", is_B_trans ? 
std::vector({N, K}) : std::vector({K, N}), std::move(matrix_b_data), B_is_initializer); test.AddInput("b_scale", {SafeInt(b_scale.size())}, b_scale); - test.AddInput("b_zero_point", {SafeInt(b_zp_per_column.size())}, b_zp_per_column); + test.AddInput("b_zero_point", {SafeInt(b_zp_per_column.size())}, b_zp_per_column); if (has_C) { test.AddInput("C", {M, N}, ToVector(matrix_c.data(), M * N)); @@ -101,14 +108,14 @@ void RunQuantGemmU8X8Test(const int M, if constexpr (std::is_same_v) { test.AddOptionalInputEdge(); - test.AddOptionalInputEdge(); + test.AddOptionalInputEdge(); test.AddOutput("Y", {M, N}, std::vector(matrix_output.data(), matrix_output.data() + M * N)); } else { - std::vector quant_output(M * N); - quantization::Params quant_param = quantization::QuantizeLinear(matrix_output.data(), quant_output.data(), M * N); + std::vector quant_output(M * N); + quantization::Params quant_param = quantization::QuantizeLinear(matrix_output.data(), quant_output.data(), M * N); test.AddInput("y_scale", {}, {quant_param.scale}); - test.AddInput("y_zero_point", {}, {quant_param.zero_point}); - test.AddOutput("Y", {M, N}, quant_output); + test.AddInput("y_zero_point", {}, {quant_param.zero_point}); + test.AddOutput("Y", {M, N}, quant_output); } test.Run(); @@ -122,10 +129,13 @@ void RunQuantGemmTest(const int M, bool has_C, bool B_is_initializer, bool per_column = false) { - RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); - RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); - RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); - RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); + RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); + RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); + RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); + RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); + + RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); + RunQuantGemmU8X8Test(M, N, K, is_A_trans, is_B_trans, has_C, B_is_initializer, per_column); } void RunQuantGemmTestBatch(const int M, const int N, const int K) { diff --git a/onnxruntime/test/contrib_ops/skiplayernorm_op_test.cc b/onnxruntime/test/contrib_ops/skiplayernorm_op_test.cc index 5a9a4dce6e806..009201f13072c 100644 --- a/onnxruntime/test/contrib_ops/skiplayernorm_op_test.cc +++ b/onnxruntime/test/contrib_ops/skiplayernorm_op_test.cc @@ -83,6 +83,35 @@ static void RunTest( } } +TEST(SkipLayerNormTest, SkipLayerNormNullInput) { + int batch_size = 1; + int sequence_length = 0; + int hidden_size = 4; + + std::vector input_data = {}; + + std::vector skip_data = {}; + + std::vector gamma_data = { + 0.3f, 0.2f, 4.0f, 2.2f}; + + std::vector beta_data = { + 0.2f, 0.1f, 0.4f, 1.6f}; + + std::vector output_data = {}; + + RunTest(input_data, + skip_data, + gamma_data, + beta_data, + std::vector(), + output_data, + epsilon_, + batch_size, + sequence_length, + hidden_size); +} + TEST(SkipLayerNormTest, SkipLayerNormBatch1) { int batch_size = 1; int sequence_length = 2; diff --git a/onnxruntime/test/optimizer/graph_transform_test_builder.cc b/onnxruntime/test/optimizer/graph_transform_test_builder.cc index 573166895736d..6312547d8a6fe 100644 --- 
a/onnxruntime/test/optimizer/graph_transform_test_builder.cc +++ b/onnxruntime/test/optimizer/graph_transform_test_builder.cc @@ -1,6 +1,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. +#include "test/optimizer/graph_transform_test_builder.h" + #include #include #include @@ -9,10 +11,9 @@ #include "core/session/inference_session.h" #include "test/compare_ortvalue.h" #include "test/test_environment.h" +#include "test/util/include/asserts.h" #include "test/util/include/inference_session_wrapper.h" -#include "graph_transform_test_builder.h" - namespace onnxruntime { namespace test { @@ -34,39 +35,33 @@ void TransformerTester(const std::function& buil ModelTestBuilder helper(graph); build_test_case(helper); helper.SetGraphOutputs(); - ASSERT_TRUE(model.MainGraph().Resolve().IsOK()); + ASSERT_STATUS_OK(model.MainGraph().Resolve()); // Serialize the model to a string. std::string model_data; model.ToProto().SerializeToString(&model_data); - auto run_model = [&](TransformerLevel level, std::vector& fetches, std::unique_ptr transformer = nullptr) { + auto run_model = [&](TransformerLevel level, std::vector& fetches, + std::unique_ptr transformer = nullptr) { SessionOptions session_options; session_options.graph_optimization_level = transformer ? baseline_level : level; #if 0 // enable to dump model for debugging - session_options.optimized_model_filepath = L"model" + std::to_wstring(static_cast(level)) + L".onnx"; + session_options.optimized_model_filepath = + ToPathString("model" + std::to_string(static_cast(level)) + ".onnx"); #endif InferenceSessionWrapper session{session_options, GetEnvironment()}; - ASSERT_TRUE(session.Load(model_data.data(), static_cast(model_data.size())).IsOK()); + ASSERT_STATUS_OK(session.Load(model_data.data(), static_cast(model_data.size()))); if (transformer) { - ASSERT_TRUE(session.RegisterGraphTransformer(std::move(transformer), level).IsOK()); + ASSERT_STATUS_OK(session.RegisterGraphTransformer(std::move(transformer), level)); } - auto status = session.Initialize(); - if (!status.IsOK()) { - std::cout << "Model initialized failed with status message: " << status.ErrorMessage() << std::endl; - } - ASSERT_TRUE(status.IsOK()); + ASSERT_STATUS_OK(session.Initialize()); RunOptions run_options; - status = session.Run(run_options, - helper.feeds_, - helper.output_names_, - &fetches); - if (!status.IsOK()) { - std::cout << "Run failed with status message: " << status.ErrorMessage() << std::endl; - } - ASSERT_TRUE(status.IsOK()); + ASSERT_STATUS_OK(session.Run(run_options, + helper.feeds_, + helper.output_names_, + &fetches)); if (level == target_level) { check_transformed_graph(session); @@ -74,13 +69,13 @@ void TransformerTester(const std::function& buil }; std::vector baseline_fetches; - run_model(baseline_level, baseline_fetches); + ASSERT_NO_FATAL_FAILURE(run_model(baseline_level, baseline_fetches)); std::vector target_fetches; - run_model(target_level, target_fetches, std::move(transformer)); + ASSERT_NO_FATAL_FAILURE(run_model(target_level, target_fetches, std::move(transformer))); size_t num_outputs = baseline_fetches.size(); - ASSERT_TRUE(num_outputs == target_fetches.size()); + ASSERT_EQ(num_outputs, target_fetches.size()); for (size_t i = 0; i < num_outputs; i++) { std::pair ret = diff --git a/onnxruntime/test/optimizer/graph_transform_test_builder.h b/onnxruntime/test/optimizer/graph_transform_test_builder.h index 5050f344e938d..13936b7f2ee5b 100644 --- 
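Note on the ASSERT_NO_FATAL_FAILURE wrappers added around the run_model calls above: googletest's ASSERT_* macros only return from the function (or lambda) in which they fire, so a fatal failure inside a helper does not stop the calling test unless the caller checks for it. A standalone sketch of that behavior follows; the names are illustrative only and not taken from this patch.

#include <vector>
#include <gtest/gtest.h>

// ASSERT_* aborts only this helper, not the test that called it.
static void FillFetches(std::vector<int>& fetches) {
  ASSERT_FALSE(fetches.empty());  // on failure, returns from FillFetches only
  fetches.push_back(42);          // skipped when the assertion above fails
}

TEST(AssertNoFatalFailureSketch, CallerChecksHelperFailure) {
  std::vector<int> fetches;
  // Without this wrapper the test body would keep running past the helper's
  // fatal failure and use state the helper never produced.
  ASSERT_NO_FATAL_FAILURE(FillFetches(fetches));
  EXPECT_EQ(fetches.back(), 42);  // reached only if the helper succeeded
}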
a/onnxruntime/test/optimizer/graph_transform_test_builder.h +++ b/onnxruntime/test/optimizer/graph_transform_test_builder.h @@ -172,11 +172,9 @@ class ModelTestBuilder { return AddNode("DequantizeLinear", input_args, {output_arg}); } - template - typename std::enable_if::value, Node&>::type - AddDequantizeLinearNode(NodeArg* input_arg, - float input_scale, - NodeArg* output_arg) { + Node& AddDequantizeLinearNode(NodeArg* input_arg, + float input_scale, + NodeArg* output_arg) { std::vector input_args; input_args.push_back(input_arg); input_args.push_back(MakeScalarInitializer(input_scale)); diff --git a/onnxruntime/test/optimizer/qdq_test_utils.cc b/onnxruntime/test/optimizer/qdq_test_utils.cc new file mode 100644 index 0000000000000..08c1c751991b4 --- /dev/null +++ b/onnxruntime/test/optimizer/qdq_test_utils.cc @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +#include "qdq_test_utils.h" + +namespace onnxruntime { +namespace test { + +GetQDQTestCaseFn BuildQDQResizeTestCase( + const std::vector& input_shape, + const std::vector& sizes_data, + const std::string& mode, + const std::string& coordinate_transformation_mode) { + return [input_shape, sizes_data, mode, coordinate_transformation_mode](ModelTestBuilder& builder) { + auto* input1_arg = builder.MakeInput(input_shape, + std::numeric_limits::min(), + std::numeric_limits::max()); + auto* roi = builder.MakeInitializer({0}, {}); + auto* scales = builder.MakeInitializer({0}, {}); + auto* sizes = builder.Make1DInitializer(sizes_data); + auto* output_arg = builder.MakeOutput(); + + // add DQ + auto* dq_output = builder.MakeIntermediate(); + builder.AddDequantizeLinearNode(input1_arg, .003f, 1, dq_output); + + // add Resize + auto* resize_output = builder.MakeIntermediate(); + Node& resize_node = builder.AddNode("Resize", {dq_output, roi, scales, sizes}, {resize_output}); + + resize_node.AddAttribute("mode", mode); + resize_node.AddAttribute("coordinate_transformation_mode", coordinate_transformation_mode); + + // add Q + builder.AddQuantizeLinearNode(resize_output, .003f, 1, output_arg); + }; +} + +} // namespace test +} // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/test/optimizer/qdq_test_utils.h b/onnxruntime/test/optimizer/qdq_test_utils.h index 2327ba74850b7..1a0c0f044cb34 100644 --- a/onnxruntime/test/optimizer/qdq_test_utils.h +++ b/onnxruntime/test/optimizer/qdq_test_utils.h @@ -1,6 +1,8 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. +#pragma once + #include "graph_transform_test_builder.h" #include "core/optimizer/qdq_transformer/selectors_actions/qdq_selector_action_transformer.h" @@ -12,7 +14,7 @@ namespace onnxruntime { namespace test { -using GetQDQConvTestCaseFn = std::function; +using GetQDQTestCaseFn = std::function; template typename std::enable_if::value, NodeArg*>::type @@ -24,10 +26,8 @@ AddQDQNodePair(ModelTestBuilder& builder, NodeArg* q_input, float scale, T zp = return dq_output; } -// TODO: for now it just builds a conv qdq graph. 
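The shared builders collected in qdq_test_utils.h all lean on the same idiom: quantize a tensor and immediately dequantize it with one (scale, zero_point) pair, then feed the DequantizeLinear output to the operator under test. A minimal sketch of that idiom is below; it assumes the ModelTestBuilder helpers visible elsewhere in this patch, and the real AddQDQNodePair additionally constrains T via enable_if.

// Sketch only (not the patch's exact helper): wrap q_input in a
// QuantizeLinear -> DequantizeLinear pair sharing scale/zero_point and return
// the DQ output for the op under test to consume.
template <typename T>
NodeArg* AddQDQNodePairSketch(ModelTestBuilder& builder, NodeArg* q_input,
                              float scale, T zero_point) {
  auto* q_output = builder.MakeIntermediate();
  auto* dq_output = builder.MakeIntermediate();
  builder.AddQuantizeLinearNode(q_input, scale, zero_point, q_output);
  builder.AddDequantizeLinearNode(q_output, scale, zero_point, dq_output);
  return dq_output;
}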
-// can be modified and made it shared among different qdq test graphs associated with other operators template -GetQDQConvTestCaseFn BuildQDQConvTestCase(const std::vector& input_shape, const std::vector& weights_shape) { +GetQDQTestCaseFn BuildQDQConvTestCase(const std::vector& input_shape, const std::vector& weights_shape) { return [input_shape, weights_shape](ModelTestBuilder& builder) { auto* input_arg = builder.MakeInput(input_shape, -1.f, 1.f); auto* output_arg = builder.MakeOutput(); @@ -78,5 +78,139 @@ GetQDQConvTestCaseFn BuildQDQConvTestCase(const std::vector& input_shap }; } +template +GetQDQTestCaseFn BuildQDQAveragePoolTestCase(const std::vector& input_shape) { + return [input_shape](ModelTestBuilder& builder) { + +#ifdef USE_NNAPI // NNAPI require consistent scales/ZPs for DQ -> Pool -> Q + float dq_scale = 0.0038f; + float pool_output_scale = 0.0038f; + float q_scale = 0.0038f; + InputType dq_zp = std::numeric_limits::max() / 2; + InputType pool_output_zp = std::numeric_limits::max() / 2; + InputType q_zp = std::numeric_limits::max() / 2; +#else + float dq_scale = 0.0035f; + float pool_output_scale = 0.0038f; + float q_scale = 0.0039f; + InputType dq_zp = 7; + InputType pool_output_zp = std::numeric_limits::max() / 2; + InputType q_zp = std::numeric_limits::max() / 2; +#endif + + auto* input_arg = builder.MakeInput(input_shape, -1.f, 1.f); + auto* output_arg = builder.MakeOutput(); + // add QDQ + AveragePool + auto* dq_output = AddQDQNodePair(builder, input_arg, dq_scale, dq_zp); + auto* averagepool_output = builder.MakeIntermediate(); + Node& pool_node = builder.AddNode("AveragePool", {dq_output}, {averagepool_output}); + std::vector pads((input_shape.size() - 2) * 2, 1); + pool_node.AddAttribute("pads", pads); + std::vector kernel_shape(input_shape.size() - 2, 3); + pool_node.AddAttribute("kernel_shape", kernel_shape); + + // add QDQ output + auto* q_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(averagepool_output, + pool_output_scale, + pool_output_zp, + q_output); + builder.AddDequantizeLinearNode(q_output, + q_scale, + q_zp, + output_arg); + }; +} + +GetQDQTestCaseFn BuildQDQResizeTestCase(const std::vector& input_shape, + const std::vector& sizes_data, + const std::string& mode = "nearest", + const std::string& coordinate_transformation_mode = "half_pixel"); + +template +GetQDQTestCaseFn BuildBinaryOpTestCase(const std::vector& input_shape, + const std::string& op_type) { + return [input_shape, op_type](ModelTestBuilder& builder) { + auto* input1_arg = builder.MakeInput(input_shape, -1.f, 1.f); + auto* input2_arg = builder.MakeInput(input_shape, -1.f, 1.f); + auto* output_arg = builder.MakeOutput(); + +#ifdef USE_NNAPI // NNAPI require consistent scales for DQ -> bin_op_input and bin_op_output-> Q + float q_scale = 0.008f; + float op_input_scale = 0.008f; + float op_output_scale = 0.0076f; + float dq_scale = 0.0076f; +#else + float q_scale = 0.008f; + float op_input_scale = 0.0079f; + float op_output_scale = 0.0076f; + float dq_scale = 0.0078f; +#endif + + // add QDQ 1 + auto* q1_output = builder.MakeIntermediate(); + auto* dq1_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(input1_arg, + q_scale, + std::numeric_limits::max() / 2, + q1_output); + builder.AddDequantizeLinearNode(q1_output, + op_input_scale, + std::numeric_limits::max() / 2, + dq1_output); + + // add QDQ 2 + auto* q2_output = builder.MakeIntermediate(); + auto* dq2_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(input2_arg, + q_scale, + 
std::numeric_limits::max() / 2, + q2_output); + builder.AddDequantizeLinearNode(q2_output, + op_input_scale, + std::numeric_limits::max() / 2, + dq2_output); + + // add binary operator + auto* binary_op_output = builder.MakeIntermediate(); + builder.AddNode(op_type, {dq1_output, dq2_output}, {binary_op_output}); + + // add QDQ output + auto* q3_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(binary_op_output, + op_output_scale, + std::numeric_limits::max() / 2, + q3_output); + builder.AddDequantizeLinearNode(q3_output, + dq_scale, + std::numeric_limits::max() / 2, + output_arg); + }; +} + +template +GetQDQTestCaseFn BuildQDQTransposeTestCase( + const std::vector& input_shape, + const std::vector& perms) { + return [input_shape, perms](ModelTestBuilder& builder) { + auto* input_arg = builder.MakeInput(input_shape, -128, 127); + auto* output_arg = builder.MakeOutput(); + + InputType dq_zp = std::numeric_limits::max() / 2; + OutputType q_zp = std::numeric_limits::max() / 2; + + // add DQ + auto* dq_output = builder.MakeIntermediate(); + builder.AddDequantizeLinearNode(input_arg, .003f, dq_zp, dq_output); + + // add Transpose + auto* transpose_output = builder.MakeIntermediate(); + Node& transpose_node = builder.AddNode("Transpose", {dq_output}, {transpose_output}); + transpose_node.AddAttribute("perm", perms); + + // add Q + builder.AddQuantizeLinearNode(transpose_output, .003f, q_zp, output_arg); + }; +} } // namespace test } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/test/optimizer/qdq_transformer_test.cc b/onnxruntime/test/optimizer/qdq_transformer_test.cc index 10d3bec7df1dc..ca59772275897 100644 --- a/onnxruntime/test/optimizer/qdq_transformer_test.cc +++ b/onnxruntime/test/optimizer/qdq_transformer_test.cc @@ -34,12 +34,22 @@ namespace onnxruntime { namespace test { -#ifndef DISABLE_CONTRIB_OPS +static std::vector GetNodeOpTypesInTopologicalOrder(const Graph& graph) { + std::vector op_types{}; + GraphViewer graph_viewer{graph}; + const auto& ordering = graph_viewer.GetNodesInTopologicalOrder(); + for (const auto node_idx : ordering) { + op_types.push_back(graph.GetNode(node_idx)->OpType()); + } + return op_types; +} + +#if !defined(DISABLE_CONTRIB_OPS) template void QDQTransformerConvTests() { auto test_case = [&](const std::vector& input_shape, const std::vector& weights_shape) { - auto check_conv_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); if constexpr (std::is_same::value && std::is_same::value && @@ -57,7 +67,7 @@ void QDQTransformerConvTests() { }; TransformerTester(BuildQDQConvTestCase(input_shape, weights_shape), - check_conv_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -136,7 +146,7 @@ TEST(QDQTransformerTests, ConvMaxPoolReshape_UInt8) { builder.AddQuantizeLinearNode(reshape_output, .0039f, 135, output_arg); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearConv"], 1); EXPECT_EQ(op_to_count["MaxPool"], 1); @@ -146,7 +156,7 @@ TEST(QDQTransformerTests, ConvMaxPoolReshape_UInt8) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, opset_version); @@ -197,16 +207,17 @@ TEST(QDQTransformerTests, 
ConvMaxPoolReshape_Int8) { } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { - auto op_to_count = CountOpsInGraph(session.GetGraph()); - EXPECT_EQ(op_to_count["QLinearConv"], 1); - EXPECT_EQ(op_to_count["MaxPool"], 1); - EXPECT_EQ(op_to_count["Reshape"], 1); - EXPECT_EQ(op_to_count["QuantizeLinear"], 1); - EXPECT_EQ(op_to_count["DequantizeLinear"], 0); + auto check_graph = [&](InferenceSessionWrapper& session) { + const std::vector expected_op_types_in_order{ + "QuantizeLinear", + "QLinearConv", + "MaxPool", + "Reshape"}; + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); }; - TransformerTester(build_test_case, check_mp_reshape_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; test_case({1, 12, 37}, {32, 12, 5}); @@ -217,31 +228,7 @@ TEST(QDQTransformerTests, ConvMaxPoolReshape_Int8) { template void QDQTransformerAveragePoolTests() { auto test_case = [&](const std::vector& input_shape) { - auto build_test_case = [&](ModelTestBuilder& builder) { - auto* input_arg = builder.MakeInput(input_shape, -1.f, 1.f); - auto* output_arg = builder.MakeOutput(); - // add QDQ + AveragePool - auto* dq_output = AddQDQNodePair(builder, input_arg, .0035f, 7); - auto* averagepool_output = builder.MakeIntermediate(); - Node& pool_node = builder.AddNode("AveragePool", {dq_output}, {averagepool_output}); - std::vector pads((input_shape.size() - 2) * 2, 1); - pool_node.AddAttribute("pads", pads); - std::vector kernel_shape(input_shape.size() - 2, 3); - pool_node.AddAttribute("kernel_shape", kernel_shape); - - // add QDQ output - auto* q_output = builder.MakeIntermediate(); - builder.AddQuantizeLinearNode(averagepool_output, - .0038f, - std::numeric_limits::max() / 2, - q_output); - builder.AddDequantizeLinearNode(q_output, - .0039f, - std::numeric_limits::max() / 2, - output_arg); - }; - - auto check_binary_op_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); if constexpr (std::is_same::value) { EXPECT_EQ(op_to_count["com.microsoft.QLinearAveragePool"], 1); @@ -256,8 +243,8 @@ void QDQTransformerAveragePoolTests() { } }; - TransformerTester(build_test_case, - check_binary_op_graph, + TransformerTester(BuildQDQAveragePoolTestCase(input_shape), + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -290,52 +277,7 @@ TEST(QDQTransformerTests, AveragePool_U8S8) { template void QDQTransformerBinaryOpTests(const std::string& op_type) { auto test_case = [&](const std::vector& input_shape) { - auto build_test_case = [&](ModelTestBuilder& builder) { - auto* input1_arg = builder.MakeInput(input_shape, -1.f, 1.f); - auto* input2_arg = builder.MakeInput(input_shape, -1.f, 1.f); - auto* output_arg = builder.MakeOutput(); - - // add QDQ 1 - auto* q1_output = builder.MakeIntermediate(); - auto* dq1_output = builder.MakeIntermediate(); - builder.AddQuantizeLinearNode(input1_arg, - .004f, - std::numeric_limits::max() / 2, - q1_output); - builder.AddDequantizeLinearNode(q1_output, - .0039f, - std::numeric_limits::max() / 2, - dq1_output); - - // add QDQ 2 - auto* q2_output = builder.MakeIntermediate(); - auto* dq2_output = builder.MakeIntermediate(); - builder.AddQuantizeLinearNode(input2_arg, - .004f, - std::numeric_limits::max() / 2, - 
q2_output); - builder.AddDequantizeLinearNode(q2_output, - .0039f, - std::numeric_limits::max() / 2, - dq2_output); - - // add binary operator - auto* binary_op_output = builder.MakeIntermediate(); - builder.AddNode(op_type, {dq1_output, dq2_output}, {binary_op_output}); - - // add QDQ output - auto* q3_output = builder.MakeIntermediate(); - builder.AddQuantizeLinearNode(binary_op_output, - .0038f, - std::numeric_limits::max() / 2, - q3_output); - builder.AddDequantizeLinearNode(q3_output, - .0039f, - std::numeric_limits::max() / 2, - output_arg); - }; - - auto check_binary_op_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); if (std::is_same::value && std::is_same::value) { @@ -351,8 +293,8 @@ void QDQTransformerBinaryOpTests(const std::string& op_type) { } }; - TransformerTester(build_test_case, - check_binary_op_graph, + TransformerTester(BuildBinaryOpTestCase(input_shape, op_type), + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -450,7 +392,7 @@ void QDQTransformerMatMulTests(bool has_output_q) { } }; - auto check_binary_op_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); if (has_output_q) { if constexpr (std::is_same::value && @@ -483,7 +425,7 @@ void QDQTransformerMatMulTests(bool has_output_q) { }; TransformerTester(build_test_case, - check_binary_op_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -537,6 +479,172 @@ TEST(QDQTransformerTests, MatMul_S8S8U8) { QDQTransformerMatMulTests(true); } +template +void QDQTransformerGemmTests(bool has_output_q, bool has_bias, bool beta_not_one = false) { + auto test_case = [&](const std::vector& input1_shape, const std::vector& input2_shape) { + auto build_test_case = [&](ModelTestBuilder& builder) { + auto* input1_arg = builder.MakeInput(input1_shape, -1.f, 1.f); + auto* input2_arg = builder.MakeInput(input2_shape, -1.f, 1.f); + auto* output_arg = builder.MakeOutput(); + + typedef std::numeric_limits Input1Limits; + typedef std::numeric_limits Input2Limits; + typedef std::numeric_limits OutputTypeLimits; + + std::vector input_args; + + // add QDQ A + auto* q1_output = builder.MakeIntermediate(); + auto* dq1_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(input1_arg, + .039f, + (Input1Limits::max() + Input1Limits::min()) / 2 + 1, + q1_output); + builder.AddDequantizeLinearNode(q1_output, + .039f, + (Input2Limits::max() + Input1Limits::min()) / 2 + 1, + dq1_output); + + input_args.push_back(dq1_output); + + // add QDQ B + auto* q2_output = builder.MakeIntermediate(); + auto* dq2_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(input2_arg, + .04f, + (Input2Limits::max() + Input2Limits::min()) / 2 + 1, + q2_output); + builder.AddDequantizeLinearNode(q2_output, + .04f, + (Input2Limits::max() + Input2Limits::min()) / 2 + 1, + dq2_output); + input_args.push_back(dq2_output); + + if (has_bias) { + auto* dq_bias_output = builder.MakeIntermediate(); + auto* bias = builder.MakeInitializer({input2_shape[1]}, static_cast(0), static_cast(127)); + builder.AddDequantizeLinearNode(bias, 0.00156f, + 0, + dq_bias_output); + input_args.push_back(dq_bias_output); + } + + Node* gemm_node = nullptr; + + if (has_output_q) { + auto* gemm_op_output = builder.MakeIntermediate(); + gemm_node = 
&builder.AddNode("Gemm", input_args, {gemm_op_output}); + + // add QDQ output + auto* q3_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(gemm_op_output, + .039f, + (OutputTypeLimits::max() + OutputTypeLimits::min()) / 2 + 1, + q3_output); + builder.AddDequantizeLinearNode(q3_output, + .039f, + (OutputTypeLimits::max() + OutputTypeLimits::min()) / 2 + 1, + output_arg); + } else { + gemm_node = &builder.AddNode("Gemm", input_args, {output_arg}); + } + + if (beta_not_one) { + gemm_node->AddAttribute("beta", 2.0f); + } + }; + + auto check_binary_op_graph = [&](InferenceSessionWrapper& session) { + auto op_to_count = CountOpsInGraph(session.GetGraph()); + if ((!has_output_q || std::is_same_v)&&(!has_bias || (std::is_same_v && !beta_not_one)) && + (std::is_same_v || std::is_same_v)) { + EXPECT_EQ(op_to_count["com.microsoft.QGemm"], 1); + EXPECT_EQ(op_to_count["Gemm"], 0); + EXPECT_EQ(op_to_count["QuantizeLinear"], 2); + EXPECT_EQ(op_to_count["DequantizeLinear"], has_output_q ? 1 : 0); + } else { + int q_count = 2; // Q for A and B + int dq_count = 2; // DQ for A and B + if (has_bias) { + dq_count++; + } + if (has_output_q) { + q_count++; + dq_count++; + } + EXPECT_EQ(op_to_count["com.microsoft.QGemm"], 0); + EXPECT_EQ(op_to_count["Gemm"], 1); + EXPECT_EQ(op_to_count["QuantizeLinear"], q_count); + EXPECT_EQ(op_to_count["DequantizeLinear"], dq_count); + } + }; + + TransformerTester(build_test_case, + check_binary_op_graph, + TransformerLevel::Level1, + TransformerLevel::Level2, + 12 /*opset_version*/, + 0.01 /*per_sample_tolerance*/, + 0.01 /*relative_per_sample_tolerance*/, + std::make_unique()); + }; + + test_case({2, 2}, {2, 4}); + test_case({13, 15}, {15, 15}); +} + +template +void QDQTransformerGemmTests() { + QDQTransformerGemmTests(false, false); + QDQTransformerGemmTests(false, true); + QDQTransformerGemmTests(true, false); + QDQTransformerGemmTests(true, true); + QDQTransformerGemmTests(false, false, true); + QDQTransformerGemmTests(false, true, true); + QDQTransformerGemmTests(true, false, true); + QDQTransformerGemmTests(true, true, true); +} + +TEST(QDQTransformerTests, Gemm_U8U8U8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + +TEST(QDQTransformerTests, Gemm_U8S8S8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + +TEST(QDQTransformerTests, Gemm_U8U8S8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + +TEST(QDQTransformerTests, Gemm_U8S8U8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + +TEST(QDQTransformerTests, Gemm_S8S8S8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + +TEST(QDQTransformerTests, Gemm_S8U8U8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + +TEST(QDQTransformerTests, Gemm_S8U8S8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + +TEST(QDQTransformerTests, Gemm_S8S8U8) { + QDQTransformerGemmTests(); + QDQTransformerGemmTests(); +} + TEST(QDQTransformerTests, Gather) { auto test_case = [&](const std::vector& input1_shape, const std::vector& weights_shape) { auto build_test_case = [&](ModelTestBuilder& builder) { @@ -554,46 +662,32 @@ TEST(QDQTransformerTests, Gather) { builder.AddQuantizeLinearNode(gather_output, .003f, 1, output_arg); }; - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["Gather"], 1); EXPECT_EQ(op_to_count["QuantizeLinear"], 0); 
EXPECT_EQ(op_to_count["DequantizeLinear"], 0); }; - TransformerTester(build_test_case, check_matmul_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; test_case({12, 37}, {24, 12}); } TEST(QDQTransformerTests, Transpose) { - auto test_case = [&](const std::vector& input1_shape, const std::vector& perms) { - auto build_test_case = [&](ModelTestBuilder& builder) { - auto* input1_arg = builder.MakeInput(input1_shape, -128, 127); - auto* output_arg = builder.MakeOutput(); - - // add DQ - auto* dq_output = builder.MakeIntermediate(); - builder.AddDequantizeLinearNode(input1_arg, .003f, 1, dq_output); - - // add Transpose - auto* transpose_output = builder.MakeIntermediate(); - Node& transpose_node = builder.AddNode("Transpose", {dq_output}, {transpose_output}); - transpose_node.AddAttribute("perm", perms); - - // add Q - builder.AddQuantizeLinearNode(transpose_output, .003f, 1, output_arg); - }; - - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto test_case = [&](const std::vector& input_shape, const std::vector& perms) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["Transpose"], 1); EXPECT_EQ(op_to_count["QuantizeLinear"], 0); EXPECT_EQ(op_to_count["DequantizeLinear"], 0); }; - TransformerTester(build_test_case, check_matmul_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(BuildQDQTransposeTestCase(input_shape, perms), + check_graph, + TransformerLevel::Level1, + TransformerLevel::Level2); }; test_case({2, 13, 12, 37}, {0, 3, 1, 2}); @@ -618,13 +712,13 @@ TEST(QDQTransformerTests, Transpose_No_Fusion) { builder.AddQuantizeLinearNode(transpose_output, .003f, 1, output_arg); }; - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QuantizeLinear"], 1); EXPECT_EQ(op_to_count["DequantizeLinear"], 1); }; - TransformerTester(build_test_case, check_matmul_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; test_case({2, 13, 12, 37}, {0, 3, 1, 2}); @@ -633,40 +727,21 @@ TEST(QDQTransformerTests, Transpose_No_Fusion) { TEST(QDQTransformerTests, Resize) { auto test_case = [&](const std::vector& input1_shape, const std::vector& sizes_shape) { - auto build_test_case = [&](ModelTestBuilder& builder) { - auto* input1_arg = builder.MakeInput(input1_shape, - std::numeric_limits::min(), - std::numeric_limits::max()); - auto* roi = builder.MakeInitializer({0}, {}); - auto* scales = builder.MakeInitializer({0}, {}); - auto* sizes = builder.MakeInitializer(sizes_shape, 1, 16); - auto* output_arg = builder.MakeOutput(); - - // add DQ - auto* dq_output = builder.MakeIntermediate(); - builder.AddDequantizeLinearNode(input1_arg, .003f, 1, dq_output); - - // add Resize - auto* resize_output = builder.MakeIntermediate(); - builder.AddNode("Resize", {dq_output, roi, scales, sizes}, {resize_output}); - - // add Q - builder.AddQuantizeLinearNode(resize_output, .003f, 1, output_arg); - }; - - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); 
EXPECT_EQ(op_to_count["Resize"], 1); EXPECT_EQ(op_to_count["QuantizeLinear"], 0); EXPECT_EQ(op_to_count["DequantizeLinear"], 0); }; - TransformerTester(build_test_case, check_matmul_graph, + TransformerTester(BuildQDQResizeTestCase(input1_shape, sizes_shape), + check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; - test_case({2, 13, 12, 37}, {4}); + RandomValueGenerator rand_gen{optional{2345}}; + test_case({2, 13, 12, 37}, rand_gen.Uniform(std::vector{4}, 1, 16)); } TEST(QDQTransformerTests, Resize_No_Fusion) { @@ -705,7 +780,7 @@ TEST(QDQTransformerTests, Resize_No_Fusion) { builder.AddQuantizeLinearNode(resize_output, .003f, 1, output_arg); }; - auto check_qdq_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["Resize"], 1); EXPECT_EQ(op_to_count["Concat"], 1); @@ -713,7 +788,7 @@ TEST(QDQTransformerTests, Resize_No_Fusion) { EXPECT_EQ(op_to_count["DequantizeLinear"], 1); }; - TransformerTester(build_test_case, check_qdq_graph, + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; @@ -744,7 +819,7 @@ TEST(QDQTransformerTests, ResizeReshape) { builder.AddNode("Reshape", {qdq_resize_output, reshape_shape}, {output_arg}); }; - auto check_qdq_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["Resize"], 1); EXPECT_EQ(op_to_count["Reshape"], 1); @@ -752,7 +827,7 @@ TEST(QDQTransformerTests, ResizeReshape) { EXPECT_EQ(op_to_count["DequantizeLinear"], 1); }; - TransformerTester(build_test_case, check_qdq_graph, + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; @@ -782,13 +857,13 @@ TEST(QDQTransformerTests, ArgMax) { argmax_node.AddAttribute("select_last_index", static_cast(select_last_index)); }; - auto check_argmax_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["ArgMax"], 1); EXPECT_EQ(op_to_count["DequantizeLinear"], 0); }; - TransformerTester(build_test_case, check_argmax_graph, + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2, /* opset_version */ 13); @@ -816,14 +891,14 @@ TEST(QDQTransformerTests, QLinearMatMul) { builder.AddQuantizeLinearNode(matmul_output, .0039f, 135, output_arg); }; - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearMatMul"], 1); EXPECT_EQ(op_to_count["QuantizeLinear"], 2); EXPECT_EQ(op_to_count["DequantizeLinear"], 0); }; - TransformerTester(build_test_case, check_matmul_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; test_case({12, 37}, {37, 12}); @@ -847,7 +922,7 @@ TEST(QDQTransformerTests, MatMul_No_Fusion) { builder.AddQuantizeLinearNode(matmul_output, .0039f, 135, output_arg); }; - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["MatMul"], 1); 
EXPECT_EQ(op_to_count["QLinearMatMul"], 0); @@ -855,7 +930,7 @@ TEST(QDQTransformerTests, MatMul_No_Fusion) { EXPECT_EQ(op_to_count["DequantizeLinear"], 1); }; - TransformerTester(build_test_case, check_matmul_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; test_case({12, 37}, {37, 12}); @@ -883,7 +958,7 @@ TEST(QDQTransformerTests, MatMul_1st_Input_Int8) { builder.AddQuantizeLinearNode(matmul_output, .0039f, 135, output_arg); }; - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["MatMul"], 1); EXPECT_EQ(op_to_count["QLinearMatMul"], 0); @@ -891,7 +966,7 @@ TEST(QDQTransformerTests, MatMul_1st_Input_Int8) { EXPECT_EQ(op_to_count["DequantizeLinear"], 2); }; - TransformerTester(build_test_case, check_matmul_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; test_case({12, 37}, {37, 12}); @@ -920,7 +995,7 @@ TEST(QDQTransformerTests, MatMulIntegerToFloat) { builder.AddNode("MatMul", {dq_output_1, dq_output_2}, {output_arg}); }; - auto check_matmul_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["com.microsoft.MatMulIntegerToFloat"], 1); EXPECT_EQ(op_to_count["QuantizeLinear"], 0); @@ -928,7 +1003,7 @@ TEST(QDQTransformerTests, MatMulIntegerToFloat) { }; TransformerTester(build_test_case, - check_matmul_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -963,7 +1038,7 @@ TEST(QDQTransformerTests, ConvRelu) { builder.AddQuantizeLinearNode(relu_output, .0039f, is_zp_zero ? 
0 : 1, output_arg); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); if (is_zp_zero) { EXPECT_EQ(op_to_count["QLinearConv"], 1); @@ -981,7 +1056,7 @@ TEST(QDQTransformerTests, ConvRelu) { } }; - TransformerTester(build_test_case, check_mp_reshape_graph, TransformerLevel::Level1, TransformerLevel::Level2); + TransformerTester(build_test_case, check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; test_case({1, 12, 37}, {32, 12, 5}, true); @@ -1027,7 +1102,7 @@ TEST(QDQTransformerTests, ConvAveragePoolReshape_UInt8) { builder.AddDequantizeLinearNode(q_output, .0035f, 135, output_arg); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearConv"], 1); EXPECT_EQ(op_to_count["com.microsoft.QLinearAveragePool"], 1); @@ -1037,7 +1112,7 @@ TEST(QDQTransformerTests, ConvAveragePoolReshape_UInt8) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -1090,7 +1165,7 @@ TEST(QDQTransformerTests, ConvAveragePoolReshape_Int8) { } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearConv"], 1); EXPECT_EQ(op_to_count["com.microsoft.QLinearAveragePool"], 1); @@ -1100,7 +1175,7 @@ TEST(QDQTransformerTests, ConvAveragePoolReshape_Int8) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -1154,7 +1229,7 @@ TEST(QDQTransformerTests, ConvAveragePoolReshape_Int8_Fail) { } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["Conv"], 1); EXPECT_EQ(op_to_count["QLinearConv"], 0); @@ -1165,7 +1240,7 @@ TEST(QDQTransformerTests, ConvAveragePoolReshape_Int8_Fail) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -1202,7 +1277,7 @@ void QDQTransformerLeakyReluTests() { output_arg); }; - auto check_binary_op_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); if constexpr (std::is_same::value) { EXPECT_EQ(op_to_count["com.microsoft.QLinearLeakyRelu"], 1); @@ -1218,7 +1293,7 @@ void QDQTransformerLeakyReluTests() { }; TransformerTester(build_test_case, - check_binary_op_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -1278,7 +1353,7 @@ TEST(QDQTransformerTests, ConvTranspose_QBackward) { } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearConv"], 1); EXPECT_EQ(op_to_count["Transpose"], 1); @@ -1287,7 +1362,7 @@ TEST(QDQTransformerTests, ConvTranspose_QBackward) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + 
check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; @@ -1338,7 +1413,7 @@ TEST(QDQTransformerTests, QBackward_MutilpleSteps) { } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearConv"], 1); EXPECT_EQ(op_to_count["MaxPool"], 1); @@ -1349,7 +1424,7 @@ TEST(QDQTransformerTests, QBackward_MutilpleSteps) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; @@ -1389,7 +1464,7 @@ TEST(QDQTransformerTests, ConvTranspose_DQForward) { } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearConv"], 1); EXPECT_EQ(op_to_count["Transpose"], 1); @@ -1398,7 +1473,7 @@ TEST(QDQTransformerTests, ConvTranspose_DQForward) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; @@ -1449,7 +1524,7 @@ TEST(QDQTransformerTests, DQForward_MutilpleSteps) { } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { + auto check_graph = [&](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); EXPECT_EQ(op_to_count["QLinearConv"], 1); EXPECT_EQ(op_to_count["MaxPool"], 1); @@ -1460,7 +1535,7 @@ TEST(QDQTransformerTests, DQForward_MutilpleSteps) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2); }; @@ -1468,6 +1543,81 @@ TEST(QDQTransformerTests, DQForward_MutilpleSteps) { test_case({1, 13, 13, 23}, {30, 23, 3, 3}, {0, 3, 1, 2}); } +TEST(QDQTransformerTests, Clip) { + constexpr float epsilon = std::numeric_limits::epsilon(); + + auto test_case = [&](float scale, auto zero_point, int clip_count, int opset_version = 12) { + auto build_test_case = [&](ModelTestBuilder& builder) { + auto* input_arg = builder.MakeInput({1, 32, 112, 112}, + std::numeric_limits::min(), + std::numeric_limits::max()); + auto* output_arg = builder.MakeOutput(); + + // add DQ + auto* dq_output = builder.MakeIntermediate(); + builder.AddDequantizeLinearNode(input_arg, .0035f, 7, dq_output); + + // add Clip + auto* clip_output = builder.MakeIntermediate(); + constexpr float min = .0f; + constexpr float max = 6.0f; + if (opset_version >= 11) { + auto* min_initializer = builder.MakeScalarInitializer(min); + auto* max_initializer = builder.MakeScalarInitializer(max); + builder.AddNode("Clip", {dq_output, min_initializer, max_initializer}, {clip_output}); + } else { + Node& argmax_node = builder.AddNode("Clip", {dq_output}, {clip_output}); + argmax_node.AddAttribute("min", min); + argmax_node.AddAttribute("max", max); + } + + // add Q + DQ + auto* q_output = builder.MakeIntermediate(); + builder.AddQuantizeLinearNode(clip_output, scale, zero_point, q_output); + builder.AddDequantizeLinearNode(q_output, scale, zero_point, output_arg); + }; + + auto check_clip_graph = [&](InferenceSessionWrapper& session) { + auto op_to_count = CountOpsInGraph(session.GetGraph()); + EXPECT_EQ(op_to_count["QuantizeLinear"], 1); + EXPECT_EQ(op_to_count["Clip"], clip_count); + EXPECT_EQ(op_to_count["DequantizeLinear"], 2); + }; + + TransformerTester(build_test_case, check_clip_graph, + TransformerLevel::Default, + 
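The Clip cases that follow are easier to read with the quantization range written out: for an 8-bit type with parameters (scale, zero_point), the values representable after the trailing Q/DQ pair span [scale * (qmin - zero_point), scale * (qmax - zero_point)]. The bracketed comments on each test_case call give that span, and the expected clip_count is 0 exactly when the span already sits inside the Clip bounds [0, 6] (within epsilon), i.e. the Clip is redundant. A standalone sketch of the arithmetic, reproducing two of the commented ranges (this is illustration, not code from the patch):

#include <cstdint>
#include <cstdio>
#include <limits>
#include <utility>

// Float interval representable by an 8-bit quantized tensor with (scale, zero_point):
// [scale * (qmin - zero_point), scale * (qmax - zero_point)].
template <typename QType>
std::pair<float, float> QuantizedRange(float scale, int zero_point) {
  return {scale * (std::numeric_limits<QType>::min() - zero_point),
          scale * (std::numeric_limits<QType>::max() - zero_point)};
}

int main() {
  // int8, scale=.0235294122248888f, zp=-128 -> [0, ~6.0]: Clip(0, 6) is redundant (clip_count 0).
  const auto r1 = QuantizedRange<int8_t>(.0235294122248888f, -128);
  // int8, scale=.03f, zp=-128 -> [0, 7.65]: Clip(0, 6) must be kept (clip_count 1).
  const auto r2 = QuantizedRange<int8_t>(.03f, -128);
  std::printf("[%g, %g]\n[%g, %g]\n", r1.first, r1.second, r2.first, r2.second);
  return 0;
}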
TransformerLevel::Level1, + opset_version, + epsilon, + epsilon); + }; + + test_case(.0235294122248888f, static_cast(-128), 0); // [0, 6] + test_case(.02f, static_cast(-128), 0); // [0, 5.1] + test_case(.03f, static_cast(-128), 1); // [0, 7.65] + test_case(.02f, static_cast(127), 1); // [-5.1 , 0] + test_case(.02f, static_cast(0), 1); // [-2.56, 2.54] + test_case(.04f, static_cast(-97), 1); // [-1.24, 8.96] + test_case(.02352941176f, static_cast(0), 0); // [0, 6] + test_case(.02f, static_cast(0), 0); // [0, 5.1] + test_case(.03f, static_cast(0), 1); // [0, 7.65] + test_case(.02f, static_cast(255), 1); // [-5.1, 0] + test_case(.02f, static_cast(128), 1); // [-2.56, 2.54] + test_case(.04f, static_cast(31), 1); // [-1.24, 8.96] + + // opset_version = 10 + test_case(.02f, static_cast(-128), 0, 10); // [0, 5.1] + test_case(.03f, static_cast(-128), 1, 10); // [0, 7.65] + test_case(.02f, static_cast(0), 0, 10); // [0, 5.1] + test_case(.03f, static_cast(0), 1, 10); // [0, 7.65] + + // difference between lower/upper and min/max are within epsilon + test_case(epsilon, static_cast(-127), 0); // [-epsilon, x] (x <= 6 + epsilon) + test_case((6 + epsilon) / 255, static_cast(-128), 0); // [0, 6 + epsilon] + test_case(epsilon, static_cast(1), 0); // [-epsilon, x] (x <= 6 + epsilon) + test_case((6 + epsilon) / 255, static_cast(0), 0); // [0, 6 + epsilon] +} + TEST(QDQTransformerTests, Concat) { auto test_case = [&](const std::vector>& input_shapes, int64_t axis, @@ -1506,7 +1656,7 @@ TEST(QDQTransformerTests, Concat) { } }; - auto check_mp_reshape_graph = [&input_shapes, &has_input_float, &has_input_int8, &has_output_int8](InferenceSessionWrapper& session) { + auto check_graph = [&input_shapes, &has_input_float, &has_input_int8, &has_output_int8](InferenceSessionWrapper& session) { auto op_to_count = CountOpsInGraph(session.GetGraph()); if (has_input_float || has_input_int8 || has_output_int8) { EXPECT_EQ(op_to_count["com.microsoft.QLinearConcat"], 0); @@ -1518,7 +1668,7 @@ TEST(QDQTransformerTests, Concat) { }; TransformerTester(build_test_case, - check_mp_reshape_graph, + check_graph, TransformerLevel::Level1, TransformerLevel::Level2, 12 /*opset_version*/, @@ -1534,27 +1684,115 @@ TEST(QDQTransformerTests, Concat) { test_case({{1, 6, 36}, {1, 6, 8}, {1, 6, 2}}, 2, false, false, true); } -TEST(QDQTransformerTests, QDQPropagation_QDQCancelOut) { - auto test_case = [&](const std::vector& input_shape, size_t maxpool_dim, const std::vector& perms) { +#endif // !defined(DISABLE_CONTRIB_OPS) + +TEST(QDQTransformerTests, QDQPropagation_QBackward) { + auto test_case = [&](const std::vector& input_shape, + size_t maxpool_dim, + const std::vector& perms, + bool add_op_boundary, + bool include_zp) { auto build_test_case = [&](ModelTestBuilder& builder) { auto* input_arg = builder.MakeInput(input_shape, -1.f, 1.f); auto* output_arg = builder.MakeOutput(); - // add QDQ - auto* qdq_output = AddQDQNodePair(builder, input_arg, .004f, 129); + auto* transpose_input = add_op_boundary ? 
builder.MakeIntermediate() : input_arg; + if (add_op_boundary) { + // add Sign as boundary for QDQ propagation + builder.AddNode("Sign", {input_arg}, {transpose_input}); + } // add Transpose auto* transpose_output = builder.MakeIntermediate(); - Node& transpose_node = builder.AddNode("Transpose", {qdq_output}, {transpose_output}); + Node& transpose_node = builder.AddNode("Transpose", {transpose_input}, {transpose_output}); transpose_node.AddAttribute("perm", perms); + // add MaxPool + auto* maxpool_output = builder.MakeIntermediate(); + Node& pool_node = builder.AddNode("MaxPool", {transpose_output}, {maxpool_output}); + std::vector pads((maxpool_dim - 2) * 2, 1); + pool_node.AddAttribute("pads", pads); + std::vector kernel_shape(maxpool_dim - 2, 3); + pool_node.AddAttribute("kernel_shape", kernel_shape); + + // Reshape + auto* reshape_output = builder.MakeIntermediate(); + auto* reshape_shape = builder.Make1DInitializer({-1, 0}); + builder.AddNode("Reshape", {maxpool_output, reshape_shape}, {reshape_output}); + // add Q - auto* q_output = builder.MakeIntermediate(); - builder.AddQuantizeLinearNode(transpose_output, .004f, 129, q_output); + constexpr float qdq_scale = 0.004f; + if (include_zp) { + constexpr uint8_t qdq_zero_point = 129; + builder.AddQuantizeLinearNode(reshape_output, qdq_scale, qdq_zero_point, output_arg); + } else { + builder.AddQuantizeLinearNode(reshape_output, qdq_scale, output_arg); + } + }; + + auto check_graph = [&](InferenceSessionWrapper& session) { + std::vector expected_op_types_in_order{}; + if (add_op_boundary) { + expected_op_types_in_order.push_back("Sign"); + } + expected_op_types_in_order.insert( + expected_op_types_in_order.end(), + {"QuantizeLinear", "DequantizeLinear", + "Transpose", + "QuantizeLinear", "DequantizeLinear", + "MaxPool", + "QuantizeLinear", "DequantizeLinear", + "Reshape", + "QuantizeLinear"}); + + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); + }; + + TransformerTester(build_test_case, + check_graph, + TransformerLevel::Default, + TransformerLevel::Level1); + }; + + // TODO re-enable tests after updating ONNX to get QuantizeLinear shape inference fix + // https://github.com/onnx/onnx/pull/3806 + //test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, false, false); + test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, false, true); + //test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, true, false); + test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, true, true); +} + +TEST(QDQTransformerTests, QDQPropagation_DQForward) { + auto test_case = [&](const std::vector& input_shape, + size_t maxpool_dim, + const std::vector& perms, + bool add_op_boundary, + bool include_zp) { + auto build_test_case = [&](ModelTestBuilder& builder) { + auto* input_arg = builder.MakeInput(input_shape, + std::numeric_limits::min(), + std::numeric_limits::max()); + auto* output_arg = builder.MakeOutput(); + + // add DQ + constexpr float qdq_scale = 0.004f; + auto* dq_output = builder.MakeIntermediate(); + if (include_zp) { + constexpr uint8_t qdq_zero_point = 129; + builder.AddDequantizeLinearNode(input_arg, qdq_scale, qdq_zero_point, dq_output); + } else { + builder.AddDequantizeLinearNode(input_arg, qdq_scale, dq_output); + } + + // add Transpose + auto* transpose_output = builder.MakeIntermediate(); + Node& transpose_node = builder.AddNode("Transpose", {dq_output}, {transpose_output}); + transpose_node.AddAttribute("perm", perms); // add MaxPool auto* maxpool_output = builder.MakeIntermediate(); 
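The expected op sequences in these propagation tests rest on one property of affine quantization: re-quantizing a dequantized value with the same scale and zero point reproduces the original integer exactly, so the propagation pass can insert copies of a Q/DQ pair with matching parameters around each data-movement op (Transpose, MaxPool, Reshape) it moves a Q or DQ across without changing results. A standalone sketch of that round trip using the standard 8-bit QuantizeLinear/DequantizeLinear formulas (illustration only, not code from this patch):

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <cstdio>

// Affine quantization for uint8: q = clamp(round(x / scale) + zero_point, 0, 255).
static uint8_t QuantizeU8(float x, float scale, int zero_point) {
  const int q = static_cast<int>(std::nearbyint(x / scale)) + zero_point;
  return static_cast<uint8_t>(std::clamp(q, 0, 255));
}

static float DequantizeU8(uint8_t q, float scale, int zero_point) {
  return scale * (static_cast<int>(q) - zero_point);
}

int main() {
  const float scale = 0.004f;  // same constants as the qdq_scale used above
  const int zero_point = 129;
  const float x = 0.1f;
  const uint8_t q = QuantizeU8(x, scale, zero_point);
  const float x_hat = DequantizeU8(q, scale, zero_point);
  // Quantizing x_hat with the same (scale, zero_point) yields q again, which is
  // why back-to-back Q/DQ pairs with matching parameters are safe to insert.
  std::printf("x=%.4f  q=%u  x_hat=%.4f  requant=%u\n",
              x, static_cast<unsigned>(q), x_hat,
              static_cast<unsigned>(QuantizeU8(x_hat, scale, zero_point)));
  return 0;
}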
- Node& pool_node = builder.AddNode("MaxPool", {q_output}, {maxpool_output}); + Node& pool_node = builder.AddNode("MaxPool", {transpose_output}, {maxpool_output}); std::vector pads((maxpool_dim - 2) * 2, 1); pool_node.AddAttribute("pads", pads); std::vector kernel_shape(maxpool_dim - 2, 3); @@ -1562,28 +1800,47 @@ TEST(QDQTransformerTests, QDQPropagation_QDQCancelOut) { // Reshape auto* reshape_shape = builder.Make1DInitializer({-1, 0}); - builder.AddNode("Reshape", {maxpool_output, reshape_shape}, {output_arg}); + auto* reshape_output = add_op_boundary ? builder.MakeIntermediate() : output_arg; + builder.AddNode("Reshape", {maxpool_output, reshape_shape}, {reshape_output}); + + if (add_op_boundary) { + // add Sign as boundary for QDQ propagation + builder.AddNode("Sign", {reshape_output}, {output_arg}); + } }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { - auto op_to_count = CountOpsInGraph(session.GetGraph()); - EXPECT_EQ(op_to_count["MaxPool"], 1); - EXPECT_EQ(op_to_count["Reshape"], 1); - EXPECT_EQ(op_to_count["Transpose"], 1); - EXPECT_EQ(op_to_count["QuantizeLinear"], 1); - EXPECT_EQ(op_to_count["DequantizeLinear"], 0); + auto check_graph = [&](InferenceSessionWrapper& session) { + std::vector expected_op_types_in_order{ + "DequantizeLinear", + "Transpose", + "QuantizeLinear", "DequantizeLinear", + "MaxPool", + "QuantizeLinear", "DequantizeLinear", + "Reshape", + "QuantizeLinear", "DequantizeLinear"}; + if (add_op_boundary) { + expected_op_types_in_order.push_back("Sign"); + } + + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); }; TransformerTester(build_test_case, - check_mp_reshape_graph, - TransformerLevel::Level1, - TransformerLevel::Level2); + check_graph, + TransformerLevel::Default, + TransformerLevel::Level1); }; - test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}); + // TODO re-enable tests after updating ONNX to get QuantizeLinear shape inference fix + // https://github.com/onnx/onnx/pull/3806 + //test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, false, false); + test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, false, true); + //test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, true, false); + test_case({1, 13, 13, 23}, 4, {0, 3, 1, 2}, true, true); } -TEST(QDQTransformerTests, QDQPropagation_QDQ_CancelOut_More) { +TEST(QDQTransformerTests, QDQPropagation_StopAtOtherQDQ) { auto test_case = [&](const std::vector& input_shape, bool same_scale, bool same_zp) { auto build_test_case = [&](ModelTestBuilder& builder) { auto* input_arg = builder.MakeInput(input_shape, -1.f, 1.f); @@ -1601,17 +1858,19 @@ TEST(QDQTransformerTests, QDQPropagation_QDQ_CancelOut_More) { builder.AddQuantizeLinearNode(reshape_output, same_scale ? .004f : .0039f, same_zp ? 129 : 128, output_arg); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { - auto op_to_count = CountOpsInGraph(session.GetGraph()); - EXPECT_EQ(op_to_count["Reshape"], 1); - EXPECT_EQ(op_to_count["QuantizeLinear"], same_scale && same_zp ? 1 : 2); - EXPECT_EQ(op_to_count["DequantizeLinear"], same_scale && same_zp ? 
0 : 1); + auto check_graph = [&](InferenceSessionWrapper& session) { + const std::vector expected_op_types_in_order{ + "QuantizeLinear", "DequantizeLinear", + "Reshape", + "QuantizeLinear"}; + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); }; TransformerTester(build_test_case, - check_mp_reshape_graph, - TransformerLevel::Level1, - TransformerLevel::Level2); + check_graph, + TransformerLevel::Default, + TransformerLevel::Level1); }; test_case({1, 13, 13, 23}, false, false); @@ -1635,17 +1894,19 @@ TEST(QDQTransformerTests, QDQPropagation_Q_No_Parent) { builder.AddQuantizeLinearNode(transpose_output, .0035f, 135, output_arg); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { - GraphViewer graph_viewer(session.GetGraph()); - const auto& node_topology_list = graph_viewer.GetNodesInTopologicalOrder(); - EXPECT_EQ(graph_viewer.GetNode(node_topology_list[0])->OpType(), "QuantizeLinear"); - EXPECT_EQ(graph_viewer.GetNode(node_topology_list[1])->OpType(), "Transpose"); + auto check_graph = [&](InferenceSessionWrapper& session) { + const std::vector expected_op_types_in_order{ + "QuantizeLinear", "DequantizeLinear", + "Transpose", + "QuantizeLinear"}; + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); }; TransformerTester(build_test_case, - check_mp_reshape_graph, - TransformerLevel::Level1, - TransformerLevel::Level2); + check_graph, + TransformerLevel::Default, + TransformerLevel::Level1); }; test_case({1, 13, 13, 23}, {0, 2, 3, 1}); @@ -1668,18 +1929,19 @@ TEST(QDQTransformerTests, QDQPropagation_DQ_No_Children) { transpose_node.AddAttribute("perm", perms); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { - auto op_to_count = CountOpsInGraph(session.GetGraph()); - GraphViewer graph_viewer(session.GetGraph()); - const auto& node_topology_list = graph_viewer.GetNodesInTopologicalOrder(); - EXPECT_EQ(graph_viewer.GetNode(node_topology_list[0])->OpType(), "Transpose"); - EXPECT_EQ(graph_viewer.GetNode(node_topology_list[1])->OpType(), "DequantizeLinear"); + auto check_graph = [&](InferenceSessionWrapper& session) { + const std::vector expected_op_types_in_order{ + "DequantizeLinear", + "Transpose", + "QuantizeLinear", "DequantizeLinear"}; + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); }; TransformerTester(build_test_case, - check_mp_reshape_graph, - TransformerLevel::Level1, - TransformerLevel::Level2); + check_graph, + TransformerLevel::Default, + TransformerLevel::Level1); }; test_case({1, 13, 13, 23}, {0, 2, 3, 1}); @@ -1704,18 +1966,18 @@ TEST(QDQTransformerTests, QDQPropagation_Per_Layer_No_Propagation) { transpose_node.AddAttribute("perm", perms); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { - auto op_to_count = CountOpsInGraph(session.GetGraph()); - GraphViewer graph_viewer(session.GetGraph()); - const auto& node_topology_list = graph_viewer.GetNodesInTopologicalOrder(); - EXPECT_EQ(graph_viewer.GetNode(node_topology_list[0])->OpType(), "DequantizeLinear"); - EXPECT_EQ(graph_viewer.GetNode(node_topology_list[1])->OpType(), "Transpose"); + auto check_graph = [&](InferenceSessionWrapper& session) { + const std::vector expected_op_types_in_order{ + "DequantizeLinear", + "Transpose"}; + const auto op_types_in_order = 
GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); }; TransformerTester(build_test_case, - check_mp_reshape_graph, - TransformerLevel::Level1, - TransformerLevel::Level2); + check_graph, + TransformerLevel::Default, + TransformerLevel::Level1); }; test_case({1, 13, 13, 23}, {0, 2, 3, 1}); @@ -1737,23 +1999,23 @@ TEST(QDQTransformerTests, QDQPropagation_DQ_Q) { builder.AddQuantizeLinearNode(dq_output, .0035f, 135, output_arg); }; - auto check_mp_reshape_graph = [&](InferenceSessionWrapper& session) { - auto op_to_count = CountOpsInGraph(session.GetGraph()); - EXPECT_EQ(op_to_count["QuantizeLinear"], 1); - EXPECT_EQ(op_to_count["DequantizeLinear"], 1); + auto check_graph = [&](InferenceSessionWrapper& session) { + const std::vector expected_op_types_in_order{ + "DequantizeLinear", + "QuantizeLinear"}; + const auto op_types_in_order = GetNodeOpTypesInTopologicalOrder(session.GetGraph()); + EXPECT_EQ(op_types_in_order, expected_op_types_in_order); }; TransformerTester(build_test_case, - check_mp_reshape_graph, - TransformerLevel::Level1, - TransformerLevel::Level2); + check_graph, + TransformerLevel::Default, + TransformerLevel::Level1); }; test_case({1, 13, 13, 23}); } -#endif // DISABLE_CONTRIB_OPS - TEST(QDQTransformerTests, QDQ_Selector_Test) { const ORTCHAR_T* model_file_name = ORT_TSTR("testdata/transform/qdq_conv.onnx"); diff --git a/onnxruntime/test/perftest/command_args_parser.cc b/onnxruntime/test/perftest/command_args_parser.cc index 2667060331eb6..2c5e0fa2e575d 100644 --- a/onnxruntime/test/perftest/command_args_parser.cc +++ b/onnxruntime/test/perftest/command_args_parser.cc @@ -33,8 +33,8 @@ namespace perftest { "\t-A: Disable memory arena\n" "\t-I: Generate tensor input binding (Free dimensions are treated as 1.)\n" "\t-c [parallel runs]: Specifies the (max) number of runs to invoke simultaneously. Default:1.\n" - "\t-e [cpu|cuda|dnnl|tensorrt|openvino|nuphar|dml|acl]: Specifies the provider 'cpu','cuda','dnnl','tensorrt', " - "'openvino', 'nuphar', 'dml', 'acl', 'nnapi' or 'coreml'. " + "\t-e [cpu|cuda|dnnl|tensorrt|openvino|nuphar|dml|acl|rocm|migraphx]: Specifies the provider 'cpu','cuda','dnnl','tensorrt', " + "'openvino', 'nuphar', 'dml', 'acl', 'nnapi', 'coreml', 'rocm' or 'migraphx'. " "Default:'cpu'.\n" "\t-b [tf|ort]: backend to use. 
Default:ort\n" "\t-r [repeated_times]: Specifies the repeated times if running in 'times' test mode.Default:1000.\n" @@ -178,6 +178,10 @@ static bool ParseDimensionOverride(std::basic_string& dim_identifier, test_config.machine_config.provider_type_name = onnxruntime::kAclExecutionProvider; } else if (!CompareCString(optarg, ORT_TSTR("armnn"))) { test_config.machine_config.provider_type_name = onnxruntime::kArmNNExecutionProvider; + } else if (!CompareCString(optarg, ORT_TSTR("rocm"))) { + test_config.machine_config.provider_type_name = onnxruntime::kRocmExecutionProvider; + } else if (!CompareCString(optarg, ORT_TSTR("migraphx"))) { + test_config.machine_config.provider_type_name = onnxruntime::kMIGraphXExecutionProvider; } else { return false; } diff --git a/onnxruntime/test/perftest/ort_test_session.cc b/onnxruntime/test/perftest/ort_test_session.cc index 24f283a4285af..2a288f8575db1 100644 --- a/onnxruntime/test/perftest/ort_test_session.cc +++ b/onnxruntime/test/perftest/ort_test_session.cc @@ -1,6 +1,7 @@ #include "ort_test_session.h" #include #include "core/session/onnxruntime_session_options_config_keys.h" +#include "core/providers/tensorrt/tensorrt_provider_options.h" #include #include "providers.h" #include "TestCase.h" @@ -59,7 +60,7 @@ OnnxRuntimeTestSession::OnnxRuntimeTestSession(Ort::Env& env, std::random_device } else if (provider_name == onnxruntime::kTensorrtExecutionProvider) { #ifdef USE_TENSORRT int device_id = 0; - int trt_max_partition_iterations = 1000; + int trt_max_partition_iterations = 1000; int trt_min_subgraph_size = 1; size_t trt_max_workspace_size = 1 << 30; bool trt_fp16_enable = false; @@ -75,11 +76,11 @@ OnnxRuntimeTestSession::OnnxRuntimeTestSession(Ort::Env& env, std::random_device std::string trt_engine_decryption_lib_path = ""; bool trt_force_sequential_engine_build = false; - #ifdef _MSC_VER +#ifdef _MSC_VER std::string ov_string = ToUTF8String(performance_test_config.run_config.ep_runtime_config_string); - #else +#else std::string ov_string = performance_test_config.run_config.ep_runtime_config_string; - #endif +#endif std::istringstream ss(ov_string); std::string token; while (ss >> token) { @@ -209,12 +210,12 @@ OnnxRuntimeTestSession::OnnxRuntimeTestSession(Ort::Env& env, std::random_device ORT_THROW("[ERROR] [TensorRT] wrong key type entered. Choose from the following runtime key options that are available for TensorRT. 
['device_id', 'trt_max_partition_iterations', 'trt_min_subgraph_size', 'trt_max_workspace_size', 'trt_fp16_enable', 'trt_int8_enable', 'trt_int8_calibration_table_name', 'trt_int8_use_native_calibration_table', 'trt_dla_enable', 'trt_dla_core', 'trt_dump_subgraphs', 'trt_engine_cache_enable', 'trt_engine_cache_path', 'trt_engine_decryption_enable', 'trt_engine_decryption_lib_path', 'trt_force_sequential_engine_build'] \n"); } } - OrtTensorRTProviderOptions tensorrt_options; + OrtTensorRTProviderOptionsV2 tensorrt_options; tensorrt_options.device_id = device_id; tensorrt_options.has_user_compute_stream = 0; tensorrt_options.user_compute_stream = nullptr; tensorrt_options.trt_max_partition_iterations = trt_max_partition_iterations; - tensorrt_options.trt_min_subgraph_size = trt_min_subgraph_size; + tensorrt_options.trt_min_subgraph_size = trt_min_subgraph_size; tensorrt_options.trt_max_workspace_size = trt_max_workspace_size; tensorrt_options.trt_fp16_enable = trt_fp16_enable; tensorrt_options.trt_int8_enable = trt_int8_enable; @@ -228,12 +229,12 @@ OnnxRuntimeTestSession::OnnxRuntimeTestSession(Ort::Env& env, std::random_device tensorrt_options.trt_engine_decryption_enable = trt_engine_decryption_enable; tensorrt_options.trt_engine_decryption_lib_path = trt_engine_decryption_lib_path.c_str(); tensorrt_options.trt_force_sequential_engine_build = trt_force_sequential_engine_build; - session_options.AppendExecutionProvider_TensorRT(tensorrt_options); + session_options.AppendExecutionProvider_TensorRT_V2(tensorrt_options); OrtCUDAProviderOptions cuda_options; - cuda_options.device_id=device_id; - cuda_options.cudnn_conv_algo_search=static_cast(performance_test_config.run_config.cudnn_conv_algo); - cuda_options.do_copy_in_default_stream=!performance_test_config.run_config.do_cuda_copy_in_separate_stream; + cuda_options.device_id = device_id; + cuda_options.cudnn_conv_algo_search = static_cast(performance_test_config.run_config.cudnn_conv_algo); + cuda_options.do_copy_in_default_stream = !performance_test_config.run_config.do_cuda_copy_in_separate_stream; // TODO: Support arena configuration for users of perf test session_options.AppendExecutionProvider_CUDA(cuda_options); #else @@ -324,7 +325,11 @@ OnnxRuntimeTestSession::OnnxRuntimeTestSession(Ort::Env& env, std::random_device } else if (provider_name == onnxruntime::kNnapiExecutionProvider) { #ifdef USE_NNAPI uint32_t nnapi_flags = 0; +#ifdef _MSC_VER + std::string ov_string = ToUTF8String(performance_test_config.run_config.ep_runtime_config_string); +#else std::string ov_string = performance_test_config.run_config.ep_runtime_config_string; +#endif std::istringstream ss(ov_string); std::string key; while (ss >> key) { diff --git a/onnxruntime/test/providers/cpu/activation/activation_op_test.cc b/onnxruntime/test/providers/cpu/activation/activation_op_test.cc index 75ead657aeba1..82e9690718871 100644 --- a/onnxruntime/test/providers/cpu/activation/activation_op_test.cc +++ b/onnxruntime/test/providers/cpu/activation/activation_op_test.cc @@ -3,6 +3,7 @@ #include "activation_op_test.h" #include "core/providers/cpu/activation/activations.h" +#include "test/common/cuda_op_test_utils.h" namespace onnxruntime { namespace test { @@ -120,6 +121,198 @@ TEST_F(ActivationOpTest, Relu) { /*opset_version= */ 14); } +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST_F(ActivationOpTest, Sigmoid_fp16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; 
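    // compute capability 5.3 (min_cuda_architecture = 530) is the first CUDA architecture with
    // native fp16 arithmetic, so the fp16 tests below are skipped on older GPUs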
+ return; + } +#endif + OpTester test("Sigmoid", 14); + + auto formula = [](float x) { + auto y = 1.f / (1.f + std::exp(-std::abs(x))); // safe sigmoid + y = x > 0 ? y : 1 - y; + return y; + }; + + std::vector X = input_values.front(); + std::vector Y; + for (unsigned i = 0; i < X.size(); i++) + Y.push_back(formula(X[i])); + std::vector dims{(int64_t)X.size()}; + + std::vector f_X(X.size()); + std::vector f_Y(Y.size()); + ConvertFloatToMLFloat16(X.data(), f_X.data(), static_cast(X.size())); + ConvertFloatToMLFloat16(Y.data(), f_Y.data(), static_cast(Y.size())); + + test.AddInput("X", dims, f_X); + test.AddOutput("Y", dims, f_Y); + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {kTensorrtExecutionProvider}); +} + +TEST_F(ActivationOpTest, Tanh_fp16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + return; + } +#endif + OpTester test("Tanh", 14); + + auto formula = [](float x) { return std::tanh(x); }; + + std::vector X = input_values.front(); + std::vector Y; + for (unsigned i = 0; i < X.size(); i++) + Y.push_back(formula(X[i])); + std::vector dims{(int64_t)X.size()}; + + std::vector f_X(X.size()); + std::vector f_Y(Y.size()); + ConvertFloatToMLFloat16(X.data(), f_X.data(), static_cast(X.size())); + ConvertFloatToMLFloat16(Y.data(), f_Y.data(), static_cast(Y.size())); + + test.AddInput("X", dims, f_X); + test.AddOutput("Y", dims, f_Y); + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {kTensorrtExecutionProvider}); +} + +TEST_F(ActivationOpTest, Relu_fp16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + return; + } +#endif + OpTester test("Relu", 14); + + auto formula = [](float x) { return std::max(x, 0.0f); }; + + std::vector X = input_values.front(); + std::vector Y; + for (unsigned i = 0; i < X.size(); i++) + Y.push_back(formula(X[i])); + std::vector dims{(int64_t)X.size()}; + + std::vector f_X(X.size()); + std::vector f_Y(Y.size()); + ConvertFloatToMLFloat16(X.data(), f_X.data(), static_cast(X.size())); + ConvertFloatToMLFloat16(Y.data(), f_Y.data(), static_cast(Y.size())); + + test.AddInput("X", dims, f_X); + test.AddOutput("Y", dims, f_Y); + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {kTensorrtExecutionProvider}); +} +#endif + +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST_F(ActivationOpTest, Sigmoid_bfloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester test("Sigmoid", 14); + + auto formula = [](float x) { + auto y = 1.f / (1.f + std::exp(-std::abs(x))); // safe sigmoid + y = x > 0 ? 
y : 1 - y; + return y; + }; + + std::vector X = input_values.front(); + std::vector Y; + for (unsigned i = 0; i < X.size(); i++) + Y.push_back(formula(X[i])); + std::vector dims{(int64_t)X.size()}; + + std::vector bf_X = FloatsToBFloat16s(X); + std::vector bf_Y = FloatsToBFloat16s(Y); + + test.AddInput("X", dims, bf_X); + test.AddOutput("Y", dims, bf_Y); + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST_F(ActivationOpTest, Tanh_bfloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester test("Tanh", 14); + + auto formula = [](float x) { return std::tanh(x); }; + + std::vector X = input_values.front(); + std::vector Y; + for (unsigned i = 0; i < X.size(); i++) + Y.push_back(formula(X[i])); + std::vector dims{(int64_t)X.size()}; + + std::vector bf_X = FloatsToBFloat16s(X); + std::vector bf_Y = FloatsToBFloat16s(Y); + + test.AddInput("X", dims, bf_X); + test.AddOutput("Y", dims, bf_Y); + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST_F(ActivationOpTest, Relu_bfloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester test("Relu", 14); + + auto formula = [](float x) { return std::max(x, 0.0f); }; + + std::vector X = input_values.front(); + std::vector Y; + for (unsigned i = 0; i < X.size(); i++) + Y.push_back(formula(X[i])); + std::vector dims{(int64_t)X.size()}; + + std::vector bf_X = FloatsToBFloat16s(X); + std::vector bf_Y = FloatsToBFloat16s(Y); + + test.AddInput("X", dims, bf_X); + test.AddOutput("Y", dims, bf_Y); + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} +#endif + TEST_F(ActivationOpTest, Elu) { float alpha = 0.1f; TestActivationOp("Elu", diff --git a/onnxruntime/test/providers/cpu/controlflow/loop_test.cc b/onnxruntime/test/providers/cpu/controlflow/loop_test.cc index 32f1a120e2f9b..7628c3454a7b9 100644 --- a/onnxruntime/test/providers/cpu/controlflow/loop_test.cc +++ b/onnxruntime/test/providers/cpu/controlflow/loop_test.cc @@ -596,7 +596,7 @@ TEST(Loop, SubgraphTypeOverride) { std::vector inputs; std::vector outputs; - /* + /* Inputs: iter_num, cond_in, fake_in, loop carried state variables. 
iter_num_in cond_in fake_in [outer_scope_0] @@ -671,7 +671,7 @@ TEST(Loop, SubgraphTypeOverride) { LoopOpTester test{{}, create_subgraph}; test.AddInput("M", {1}, {1}); - test.AddInput("cond", {1}, {true}); + test.AddOptionalInputEdge(); // 'cond' is optional in this test so don't provide it test.AddInput("fake", {1}, {0.f}); test.AddInput("outer_scope_0", {1}, {kOuterNodeAddValue}); @@ -799,8 +799,8 @@ TEST(Loop, Opset11WithNoVariadicInputsAndOutputs) { auto* constant_attribute_tensor_proto = attr_proto.mutable_t(); constant_attribute_tensor_proto->mutable_dims()->Clear(); // scalar - constant_attribute_tensor_proto->set_data_type(TensorProto_DataType_FLOAT); //float scalar - *constant_attribute_tensor_proto->mutable_float_data()->Add() = 1.0f; //float scalar with value 1.0f + constant_attribute_tensor_proto->set_data_type(TensorProto_DataType_FLOAT); // float scalar + *constant_attribute_tensor_proto->mutable_float_data()->Add() = 1.0f; // float scalar with value 1.0f constant_node.AddAttribute("value", attr_proto); } @@ -977,11 +977,11 @@ TEST(Loop, IterationCountAsOutput) { /* Inputs: iter_num, cond_in, loop carried state variables. - iter_num_in cond_in - | | - [Identity] [Identity] - | | - loop_var_0_out cond_out + iter_num_in cond_in + | | + [Identity] [Identity] + | | + loop_var_0_out cond_out */ // graph inputs types. @@ -1061,12 +1061,12 @@ TEST(Loop, SequenceAsLoopCarriedDependency) { Inputs: iter_num, cond_in, loop_var_0_in - loop_var_0_in inserted_tensor cond_in iter_num - | | | (unused) - [SequenceInsert]-----/ [Identity] + loop_var_0_in inserted_tensor cond_in iter_num + | | | (unused) + [SequenceInsert]-----/ [Identity] | | - | cond_out - loop_var_0_out + | cond_out + loop_var_0_out */ // graph inputs types. @@ -1184,12 +1184,12 @@ TEST(Loop, OptionalTypeAsLoopCarriedDependency) { Inputs: iter_num, cond_in, loop_var_0_in - loop_var_0_in cond_in iter_num - | | (unused) - [Identity] [Identity] + loop_var_0_in cond_in iter_num + | | (unused) + [Identity] [Identity] | | - | cond_out - loop_var_0_out + | cond_out + loop_var_0_out */ // graph inputs types. 
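The loop_test.cc change above drops the explicit `cond` tensor and calls `test.AddOptionalInputEdge()` instead, which tells OpTester that the optional `cond` input is deliberately not provided while the later inputs keep their positional slots. A minimal sketch of that pattern follows; the template arguments and the output name/value are reconstructed for illustration and may not match the actual test:

LoopOpTester test{{}, create_subgraph};
test.AddInput<int64_t>("M", {1}, {1});                  // max trip count
test.AddOptionalInputEdge<bool>();                      // optional 'cond' input left unconnected
test.AddInput<float>("fake", {1}, {0.f});               // loop-carried input
test.AddInput<float>("outer_scope_0", {1}, {kOuterNodeAddValue});
test.AddOutput<float>("loop_fake_final", {1}, {0.f});   // hypothetical output name and value
test.Run();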
diff --git a/onnxruntime/test/providers/cpu/math/element_wise_ops_test.cc b/onnxruntime/test/providers/cpu/math/element_wise_ops_test.cc index f47d80f9abf7e..43e9ead580377 100644 --- a/onnxruntime/test/providers/cpu/math/element_wise_ops_test.cc +++ b/onnxruntime/test/providers/cpu/math/element_wise_ops_test.cc @@ -18,7 +18,7 @@ std::vector MakeMLFloat16(const std::initializer_list& input) return output; } -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) void TestFloat16(const char* op_name, const std::vector& lhs_dim, const std::initializer_list& lhs_values, const std::vector& rhs_dim, const std::initializer_list& rhs_values, const std::vector& out_dim, @@ -29,7 +29,11 @@ void TestFloat16(const char* op_name, const std::vector& lhs_dim, tester.AddInput("B", rhs_dim, MakeMLFloat16(rhs_values)); tester.AddOutput("C", out_dim, MakeMLFloat16(out_values)); std::vector> execution_providers; +#ifdef USE_CUDA execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif tester.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); } @@ -39,7 +43,11 @@ void TestFloat16(const char* op_name, const std::vector& lhs_dim, tester.AddInput("B", rhs_dim, MakeBFloat16(rhs_values)); tester.AddOutput("C", out_dim, MakeBFloat16(out_values)); std::vector> execution_providers; +#ifdef USE_CUDA execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif tester.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); } } @@ -128,7 +136,7 @@ TEST(MathOpTest, Add_float) { test.Run(); #endif -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TestFloat16("Add", dims, lhs_values, dims, rhs_values, dims, out_values); #endif } @@ -163,7 +171,7 @@ TEST(MathOpTest, Add_Broadcast_Axis) { test.AddOutput("C", dims, out_values); test.Run(OpTester::ExpectResult::kExpectSuccess, ""); -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TestFloat16("Add", dims, lhs_values, {3, 1}, rhs_values, dims, out_values); #endif } @@ -186,7 +194,7 @@ TEST(MathOpTest, Add_Broadcast_MultidirectionalAB) { {kTensorrtExecutionProvider}); // TensorRT: got C with shape [3, 1] #endif -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TestFloat16("Add", {3, 1}, lhs_values, {3}, rhs_values, {3, 3}, out_values); #endif } @@ -208,7 +216,7 @@ TEST(MathOpTest, Add_Broadcast_MultidirectionalBA) { {kTensorrtExecutionProvider}); // TensorRT: got C with shape [3, 1] #endif -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TestFloat16("Add", {3}, lhs_values, {3, 1}, rhs_values, {3, 3}, out_values); #endif } @@ -404,7 +412,7 @@ TEST(MathOpTest, Sub) { test.Run(); #endif -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TestFloat16("Sub", dims, lhs_values, dims, rhs_values, dims, out_values); #endif } @@ -462,7 +470,7 @@ TEST(MathOpTest, Mul) { test.Run(); #endif -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TestFloat16("Mul", dims, lhs_values, dims, rhs_values, dims, out_values); #endif } @@ -501,7 +509,7 @@ TEST(MathOpTest, Div) { test.Run(); #endif -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TestFloat16("Div", dims, lhs_values, dims, rhs_values, dims, out_values); #endif } diff --git a/onnxruntime/test/providers/cpu/math/gemm_test.cc b/onnxruntime/test/providers/cpu/math/gemm_test.cc index 2411f2a14b42d..8d0018c6e8b7b 100644 --- 
a/onnxruntime/test/providers/cpu/math/gemm_test.cc +++ b/onnxruntime/test/providers/cpu/math/gemm_test.cc @@ -91,7 +91,7 @@ TEST(GemmOpTest, GemmNoTrans_bfloat16) { #ifdef USE_CUDA int min_cuda_architecture = 530; if (!HasCudaEnvironment(min_cuda_architecture)) { - LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; return; } #endif @@ -106,9 +106,9 @@ TEST(GemmOpTest, GemmNoTrans_bfloat16) { test.AddOutput("Y", {2, 3}, MakeBFloat16({11.0f, 11.0f, 11.0f, -9.0f, -9.0f, -9.0f})); std::vector> execution_providers; #ifdef USE_CUDA - execution_providers.push_back(DefaultCudaExecutionProvider()); + execution_providers.push_back(DefaultCudaExecutionProvider()); #elif USE_ROCM - execution_providers.push_back(DefaultRocmExecutionProvider()); + execution_providers.push_back(DefaultRocmExecutionProvider()); #endif test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); } diff --git a/onnxruntime/test/providers/cpu/math/matmul_integer_test.cc b/onnxruntime/test/providers/cpu/math/matmul_integer_test.cc index 7e41fe020a91d..489d6ff915692 100644 --- a/onnxruntime/test/providers/cpu/math/matmul_integer_test.cc +++ b/onnxruntime/test/providers/cpu/math/matmul_integer_test.cc @@ -302,12 +302,12 @@ TEST(MatmulIntegerOpTest, MatMulInteger_PerColumn_ND) { } // [M x N] = [M x K] x [K x N] = [batch_seq x input_dim] x [input_dim x embed_dim] -template +template void RunMatMulIntegerU8X8Test(const int M, const int N, const int K, bool non_zero_zp, bool B_is_initializer, bool per_column_zp = false) { OpTester test("MatMulInteger", 10); static std::default_random_engine e(123); static std::uniform_int_distribution n_unsigned(0, 127); - static std::uniform_int_distribution n_xint8(std::numeric_limits::min(), std::numeric_limits::max()); + static std::uniform_int_distribution n_xint8(std::numeric_limits::min(), std::numeric_limits::max()); Eigen::MatrixXi matrix_a = Eigen::MatrixXi::Random(K, M) .unaryExpr([](int) { return n_unsigned(e); }); @@ -317,9 +317,9 @@ void RunMatMulIntegerU8X8Test(const int M, const int N, const int K, bool non_ze Eigen::MatrixXi matrix_b = Eigen::MatrixXi::Random(N, K) .unaryExpr([](int) { return n_xint8(e); }); - std::vector matrix_b_data = ToVector(matrix_b.data(), N * K); - ScalarB b_zero_point = non_zero_zp ? GetMiddle(matrix_b_data) : 0; - std::vector b_zp_per_column(N, b_zero_point); + std::vector matrix_b_data = ToVector(matrix_b.data(), N * K); + WeightType b_zero_point = non_zero_zp ? 
GetMiddle(matrix_b_data) : 0; + std::vector b_zp_per_column(N, b_zero_point); Eigen::MatrixXi b_zp_matrix = b_zero_point * Eigen::MatrixXi::Ones(N, K); if (non_zero_zp && per_column_zp) { for (int i = 0; i < N; i++) { @@ -331,13 +331,13 @@ void RunMatMulIntegerU8X8Test(const int M, const int N, const int K, bool non_ze Eigen::MatrixXi matrix_c = ((matrix_b - b_zp_matrix) * matrix_a_offset).eval(); test.AddInput("T1", {M, K}, std::move(matrix_a_data)); - test.AddInput("T2", {K, N}, std::move(matrix_b_data), B_is_initializer); + test.AddInput("T2", {K, N}, std::move(matrix_b_data), B_is_initializer); if (non_zero_zp) { test.AddInput("a_zero_point", {}, {a_zero_point}); if (per_column_zp) { - test.AddInput("b_zero_point", {N}, b_zp_per_column); + test.AddInput("b_zero_point", {N}, b_zp_per_column); } else { - test.AddInput("b_zero_point", {}, {b_zero_point}); + test.AddInput("b_zero_point", {}, {b_zero_point}); } } diff --git a/onnxruntime/test/providers/cpu/math/matmul_test.cc b/onnxruntime/test/providers/cpu/math/matmul_test.cc index 862ef375b06cc..5633d12d756e7 100644 --- a/onnxruntime/test/providers/cpu/math/matmul_test.cc +++ b/onnxruntime/test/providers/cpu/math/matmul_test.cc @@ -199,7 +199,7 @@ TEST(MathOpTest, MatMul_BFloat16) { #ifdef USE_CUDA int min_cuda_architecture = 530; if (!HasCudaEnvironment(min_cuda_architecture)) { - LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; return; } #endif @@ -210,9 +210,9 @@ TEST(MathOpTest, MatMul_BFloat16) { test.AddOutput("Y", {2, 3}, MakeBFloat16({10.0f, 10.0f, 10.0f, -10.0f, -10.0f, -10.0f})); std::vector> execution_providers; #ifdef USE_CUDA - execution_providers.push_back(DefaultCudaExecutionProvider()); + execution_providers.push_back(DefaultCudaExecutionProvider()); #elif USE_ROCM - execution_providers.push_back(DefaultRocmExecutionProvider()); + execution_providers.push_back(DefaultRocmExecutionProvider()); #endif test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); } diff --git a/onnxruntime/test/providers/cpu/math/softmax_test.cc b/onnxruntime/test/providers/cpu/math/softmax_test.cc index eeb22f2271541..b821b16bdaeb3 100644 --- a/onnxruntime/test/providers/cpu/math/softmax_test.cc +++ b/onnxruntime/test/providers/cpu/math/softmax_test.cc @@ -4,6 +4,7 @@ #include "gmock/gmock.h" #include "gtest/gtest.h" #include "test/providers/provider_test_utils.h" +#include "test/common/cuda_op_test_utils.h" #include namespace onnxruntime { @@ -45,6 +46,61 @@ TEST(SoftmaxOperator, Simple) { RunTest(x_vals, expected_vals, dimensions); } +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(SoftmaxOperator, Simple_fp16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support FP16"; + return; + } +#endif + OpTester test("Softmax", 14); + + int64_t axis = 1; + test.AddAttribute("axis", axis); + + std::vector X = {-1.0f, 0.0f, 1.0f}; + std::vector Y = {0.09003058f, 0.24472848f, 0.66524094f}; + std::vector dimensions = {1, 3}; + + std::vector f_X(3); + std::vector f_Y(3); + ConvertFloatToMLFloat16(X.data(), f_X.data(), 3); + ConvertFloatToMLFloat16(Y.data(), f_Y.data(), 3); + + test.AddInput("X", dimensions, f_X); + test.AddOutput("Y", dimensions, f_Y); + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {kTensorrtExecutionProvider, kOpenVINOExecutionProvider}); +} +#endif + +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(SoftmaxOperator, Simple_bfloat16) { 
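  // expected outputs are softmax([-1, 0, 1]) evaluated in fp32:
  // exp(-1) ~= 0.3679, exp(0) = 1, exp(1) ~= 2.7183, sum ~= 4.0862,
  // giving ~{0.0900, 0.2447, 0.6652} before conversion to bfloat16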
+#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester test("Softmax", 14); + + int64_t axis = 1; + test.AddAttribute("axis", axis); + + test.AddInput("X", {1, 3}, MakeBFloat16({-1.0f, 0.0f, 1.0f})); + test.AddOutput("Y", {1, 3}, MakeBFloat16({0.09003058f, 0.24472848f, 0.66524094f})); + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} +#endif + TEST(SoftmaxOperator, LargeNumber) { // x = np.array([[0, 1, 2, 3], [10000, 10001, 10002, 10003]]).astype(np.float32) // expected output[[0.0320586, 0.08714432, 0.23688284, 0.64391428], diff --git a/onnxruntime/test/providers/cpu/model_tests.cc b/onnxruntime/test/providers/cpu/model_tests.cc index cba31055b7d54..ac7c6a69f948d 100644 --- a/onnxruntime/test/providers/cpu/model_tests.cc +++ b/onnxruntime/test/providers/cpu/model_tests.cc @@ -8,6 +8,7 @@ #include "core/session/onnxruntime_cxx_api.h" #include "core/session/inference_session.h" #include "core/session/ort_env.h" +#include "core/providers/tensorrt/tensorrt_provider_options.h" #include "asserts.h" #include #include "default_providers.h" @@ -591,7 +592,7 @@ TEST_P(ModelTest, Run) { ASSERT_STATUS_OK(session_object.RegisterExecutionProvider(DefaultNupharExecutionProvider())); } else if (provider_name == "tensorrt") { if (test_case_name.find(ORT_TSTR("FLOAT16")) != std::string::npos) { - OrtTensorRTProviderOptions params{ + OrtTensorRTProviderOptionsV2 params{ 0, 0, nullptr, diff --git a/onnxruntime/test/providers/cpu/reduction/reduction_ops_test.cc b/onnxruntime/test/providers/cpu/reduction/reduction_ops_test.cc index 2c36b80ecc561..683f93224c700 100644 --- a/onnxruntime/test/providers/cpu/reduction/reduction_ops_test.cc +++ b/onnxruntime/test/providers/cpu/reduction/reduction_ops_test.cc @@ -1450,6 +1450,31 @@ TEST(ReductionOpTest, ReduceSumHalfHalf) { test.Run(); } +TEST(ReductionOpTest, ReduceSumHalfHalf_2) { + OpTester test("ReduceSum"); + test.AddAttribute("keepdims", (int64_t)0); + test.AddAttribute("axes", std::vector{0, 2}); + + std::vector data = {1.0f, 2.0f, + 3.0f, 4.0f, + + 5.0f, 6.0f, + 7.0f, 8.0f, + + 9.0f, 10.0f, + 11.0f, 12.0f}; + std::vector data_half(12); + ConvertFloatToMLFloat16(data.data(), data_half.data(), 12); + + std::vector result = {33.0f, 45.0f}; + std::vector result_half(2); + ConvertFloatToMLFloat16(result.data(), result_half.data(), 2); + + test.AddInput("data", {3, 2, 2}, data_half); + test.AddOutput("reduced", {2}, result_half); + test.Run(); +} + void test_half_reduce_sum( int64_t m, int64_t n) { OpTester test("ReduceSum"); @@ -1491,7 +1516,7 @@ TEST(ReductionOpTest, ReduceSum_half_bert) { // Add more UTs for half as needed #endif -#ifdef USE_CUDA +#if defined(USE_CUDA) || defined(USE_ROCM) TEST(ReductionOpTest, ReduceSumBFloat16) { OpTester test("ReduceSum", 14); test.AddAttribute("keepdims", (int64_t)0); @@ -1500,7 +1525,32 @@ TEST(ReductionOpTest, ReduceSumBFloat16) { test.AddInput("axes", {2}, std::vector{0, 1}); test.AddOutput("reduced", {2}, MakeBFloat16({36.0f, 42.0f})); std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif 
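  // passing an explicit execution_providers list restricts this run to the single CUDA/ROCm EP
  // registered above rather than iterating over all registered providers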
+ test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} +#endif + +// on CUDA - this UT, with axes {0,2}, will go thru cudnn lib only if ATenOp is not initialized +// on ROCM - miopen call succeeded, but results in data error, thus follow the same logic done in cudnn for now +// TODO - try ROCm 4.5.2 and/or double check the source code on BFloat16 support +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(ReductionOpTest, ReduceSumBFloat16_2) { + OpTester test("ReduceSum", 14); + test.AddAttribute("keepdims", (int64_t)0); + test.AddInput("data", {3, 2, 2}, + MakeBFloat16({1.0f, 2.0f, 3.0f, 4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f, 12.0f})); + test.AddInput("axes", {2}, std::vector{0, 2}); + test.AddOutput("reduced", {2}, MakeBFloat16({33.0f, 45.0f})); + std::vector> execution_providers; +#ifdef USE_CUDA execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); } #endif @@ -1553,9 +1603,15 @@ void test_apex_reduce_sum( } TEST(ReductionOpTest, ReduceSum_apex_matrix_large) { +#ifdef USE_TENSORRT + // Reduction op takes much longer time for TRT 8.2, so we test smaller range of inputs. + int64_t threshold = 4096; +#else + int64_t threshold = 32768; +#endif for (int64_t m = 1; m < 2049; m *= 8) { for (int64_t n = 2; n < 2049; n *= 8) { - if (m * n > 32768) { + if (m * n > threshold) { continue; } test_apex_reduce_sum(m, n); @@ -1583,7 +1639,13 @@ TEST(ReductionOpTest, ReduceSum_batch_by_two) { } TEST(ReductionOpTest, ReduceSum_batch_by_seq_by_128) { - for (int i = 1; i < 16; i += 1) { +#ifdef USE_TENSORRT + // Reduction op takes much longer time for TRT 8.2, so we test smaller range of inputs. + int i_max = 8; +#else + int i_max = 16; +#endif + for (int i = 1; i < i_max; i += 1) { test_apex_reduce_sum(i * 128, 128); test_apex_reduce_sum(i * 512, 128); test_apex_reduce_sum(i * 128, 768); @@ -1612,8 +1674,16 @@ TEST(ReductionOpTest, ReduceSum_bert_selected_batch_size) { TEST(ReductionOpTest, ReduceSum_apex_more) { std::srand(0); - for (int64_t m = 1; m < 16; ++m) { - for (int64_t n = 1; n < 16; ++n) { +#ifdef USE_TENSORRT + // Reduction op takes much longer time for TRT 8.2, so we test smaller range of inputs. + int64_t m_max = 8; + int64_t n_max = 8; +#else + int64_t m_max = 16; + int64_t n_max = 16; +#endif + for (int64_t m = 1; m < m_max; ++m) { + for (int64_t n = 1; n < n_max; ++n) { const auto m_ = 2 * m; const auto n_ = 2 * n; test_apex_reduce_sum(m_, n_); diff --git a/onnxruntime/test/providers/nnapi/nnapi_basic_test.cc b/onnxruntime/test/providers/nnapi/nnapi_basic_test.cc index c5e2c4378096e..a136e0b22c11a 100644 --- a/onnxruntime/test/providers/nnapi/nnapi_basic_test.cc +++ b/onnxruntime/test/providers/nnapi/nnapi_basic_test.cc @@ -76,6 +76,38 @@ TEST(NnapiExecutionProviderTest, ReshapeFlattenTest) { #endif } +// Since NNAPI EP does not support dynamic shape input and we now switch from the approach of immediately rejecting +// the whole graph in NNAPI EP if it has a dynamic input to check at individual operator support check level, we have a +// separated test here. 
+// Please see BaseOpBuilder::HasSupportedInputs in /onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_support_checker.cc +TEST(NnapiExecutionProviderTest, DynamicGraphInputTest) { + const ORTCHAR_T* model_file_name = ORT_TSTR("testdata/ep_dynamic_graph_input_test.onnx"); + +#if defined(__ANDROID__) + std::vector dims_mul_x = {1, 1, 4, 4}; + std::vector values_mul_x = {1.0f, 2.0f, 3.0f, 4.0f, 1.0f, 2.0f, 3.0f, 4.0f, 1.0f, 2.0f, 3.0f, 4.0f, 1.0f, 2.0f, 3.0f, 4.0f}; + OrtValue ml_value_x; + CreateMLValue(TestNnapiExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, + &ml_value_x); + + NameMLValMap feeds; + feeds.insert(std::make_pair("X", ml_value_x)); + + RunAndVerifyOutputsWithEP(model_file_name, "NnapiExecutionProviderTest.DynamicGraphInputTest", + std::make_unique(0), + feeds); +#else + // test load only + SessionOptions so; + InferenceSessionWrapper session_object{so, GetEnvironment()}; + ASSERT_STATUS_OK(session_object.RegisterExecutionProvider(std::make_unique(0))); + ASSERT_STATUS_OK(session_object.Load(model_file_name)); + ASSERT_STATUS_OK(session_object.Initialize()); + ASSERT_EQ(CountAssignedNodes(session_object.GetGraph(), kNnapiExecutionProvider), 1) + << "Exactly one node (Add) should have been taken by the NNAPI EP"; +#endif +} + // This is to test the uint8 handling of operators without "QLinear" such as Concat and Transpose // NNAPI will require scale and zero point for inputs of all quantized operations // For these operators without "Qlinear", there is no information about the scale and zero point, we can @@ -239,29 +271,12 @@ TEST(NnapiExecutionProviderTest, TestNoShapeInputModel) { << "No node should be taken by the NNAPI EP"; } -// For now since we don't support QDQ in NNAPI, even the infrastructure is there -// Need to verify a model with QDQ groups only will not be supported by NNAPI at all -// This may need to be changed when we gradually add support for different ops for QDQ -TEST(NnapiExecutionProviderTest, TestQDQConvModel) { - const ORTCHAR_T* model_file_name = ORT_TSTR("testdata/transform/qdq_conv.onnx"); - // test load only - SessionOptions so; - InferenceSessionWrapper session_object{so, GetEnvironment()}; - ASSERT_STATUS_OK(session_object.RegisterExecutionProvider(std::make_unique(0))); - ASSERT_STATUS_OK(session_object.Load(model_file_name)); - ASSERT_STATUS_OK(session_object.Initialize()); - ASSERT_EQ(CountAssignedNodes(session_object.GetGraph(), kNnapiExecutionProvider), 0) - << "No nodes should have been taken by the NNAPI EP"; -} - -#if defined(__ANDROID__) -TEST(NnapiExecutionProviderTest, TestQDQModel) { - onnxruntime::Model model("nnapi_qdq_test_graph", false, DefaultLoggingManager().DefaultLogger()); +static void RunQDQModelTest(const GetQDQTestCaseFn& build_test_case, + const char* test_description, + const EPVerificationParams& params = EPVerificationParams()) { + onnxruntime::Model model(test_description, false, DefaultLoggingManager().DefaultLogger()); Graph& graph = model.MainGraph(); ModelTestBuilder helper(graph); - - auto build_test_case = BuildQDQConvTestCase({1, 1, 5, 5} /*input_shape*/, - {1, 1, 3, 3} /*weights_shape*/); build_test_case(helper); helper.SetGraphOutputs(); ASSERT_STATUS_OK(model.MainGraph().Resolve()); @@ -270,13 +285,90 @@ TEST(NnapiExecutionProviderTest, TestQDQModel) { std::string model_data; model.ToProto().SerializeToString(&model_data); +#if defined(__ANDROID__) RunAndVerifyOutputsWithEP(model_data, "NnapiExecutionProviderTest.TestQDQModel", std::make_unique(0), - helper.feeds_); 
+ helper.feeds_, params); +#else + // test load only + SessionOptions so; + InferenceSessionWrapper session_object{so, GetEnvironment()}; + ASSERT_STATUS_OK(session_object.RegisterExecutionProvider(std::make_unique(0))); + ASSERT_STATUS_OK(session_object.Load(model_data.data(), static_cast(model_data.size()))); + ASSERT_STATUS_OK(session_object.Initialize()); + ASSERT_GT(CountAssignedNodes(session_object.GetGraph(), kNnapiExecutionProvider), 0) + << "Some nodes should have been taken by the NNAPI EP"; +#endif +} + +TEST(NnapiExecutionProviderTest, TestQDQConv) { + RunQDQModelTest(BuildQDQConvTestCase( + {1, 1, 5, 5} /*input_shape*/, + {1, 1, 3, 3} /*weights_shape*/), + "nnapi_qdq_test_graph_conv", + {true /* verify_entire_graph_use_ep */}); +} + +TEST(NnapiExecutionProviderTest, TestQDQResize) { + // NNAPI EP does not support the default setting of Resize Op + // Use bi-linear and asymmetric for NNAPI EP only + RunQDQModelTest(BuildQDQResizeTestCase({1, 3, 64, 64} /* input_shape */, + {1, 3, 32, 32} /* sizes_data */, + "linear" /* mode */, + "asymmetric" /* coordinate_transformation_mode */), + "nnapi_qdq_test_graph_resize", + {true /* verify_entire_graph_use_ep */}); +} + +TEST(NnapiExecutionProviderTest, TestQDQAveragePool) { + // NNAPI use different rounding, which may cause ~1% difference in the result + RunQDQModelTest(BuildQDQAveragePoolTestCase( + {1, 3, 32, 32} /* input_shape */), + "nnapi_qdq_test_graph_averagepool", + { + true /* verify_entire_graph_use_ep */, + 1e-2f /* fp32_abs_err */, + }); +} + +TEST(NnapiExecutionProviderTest, TestQDQAdd) { + RunQDQModelTest(BuildBinaryOpTestCase( + {1, 23, 13, 13} /* input_shape */, + "Add" /* op_type */), + "nnapi_qdq_test_graph_add", + {true /* verify_entire_graph_use_ep */}); +} + +TEST(NnapiExecutionProviderTest, TestQDQMul) { + // NNAPI use different rounding, which may cause ~1% difference in the result + RunQDQModelTest(BuildBinaryOpTestCase( + {1, 23, 13, 13} /* input_shape */, + "Mul" /* op_type */), + "nnapi_qdq_test_graph_mul", + { + true /* verify_entire_graph_use_ep */, + 1e-2f /* fp32_abs_err */, + }); +} - // TODO: can add test load only verfication here later +TEST(NnapiExecutionProviderTest, TestQDQTranspose) { + RunQDQModelTest(BuildQDQTransposeTestCase( + {1, 3, 32, 32} /* input_shape */, + {0, 3, 1, 2} /* perms */), + "nnapi_qdq_test_graph_transpose", + { + true /* verify_entire_graph_use_ep */ + }); } -#endif // defined(__ANDROID__) #endif // !(ORT_MINIMAL_BUILD) diff --git a/onnxruntime/test/providers/provider_test_utils.cc b/onnxruntime/test/providers/provider_test_utils.cc index 0bc17e79eff5e..b937cdca2ff7a 100644 --- a/onnxruntime/test/providers/provider_test_utils.cc +++ b/onnxruntime/test/providers/provider_test_utils.cc @@ -330,7 +330,7 @@ struct TensorCheck { /// XXX: May need to adjust threshold as BFloat is coarse float threshold = 0.001f; #if defined(USE_TENSORRT) || defined(ENABLE_TRAINING) || defined(USE_CUDA) || defined(USE_ROCM) - threshold = 0.008f; + threshold = 0.05f; // expect at least 95% close #endif for (int i = 0; i < size; ++i) { if (std::isnan(f_expected[i])) { @@ -994,6 +994,12 @@ void OpTester::Run( std::vector output_names; FillFeedsAndOutputNames(feeds, output_names); // Run the model +#ifdef USE_TENSORRT + // only run trt ep to reduce test time + static const std::string all_provider_types[] = { + kTensorrtExecutionProvider, + }; +#else static const std::string all_provider_types[] = { kCpuExecutionProvider, kCudaExecutionProvider, @@ -1008,6 +1014,7 @@ void OpTester::Run( 
kRocmExecutionProvider, kCoreMLExecutionProvider, }; +#endif bool has_run = false; @@ -1168,8 +1175,14 @@ void OpTester::Run( cur_provider = "not set"; } +#ifdef USE_TENSORRT + // We are allowing tests to be run with only TensorRT EP, but TensorRT EP may not support all tests and may be in excluded providers list. + // So, no registered EPs were able to run the model is okay for this situation. + ORT_UNUSED_PARAMETER(has_run); +#else EXPECT_TRUE(has_run) << "No registered execution providers were able to run the model."; +#endif } } ORT_CATCH(const std::exception& ex) { diff --git a/onnxruntime/test/providers/provider_test_utils.h b/onnxruntime/test/providers/provider_test_utils.h index 6958d44a35f36..74bae67c39b1e 100644 --- a/onnxruntime/test/providers/provider_test_utils.h +++ b/onnxruntime/test/providers/provider_test_utils.h @@ -1198,6 +1198,12 @@ inline std::vector MakeBFloat16(const std::initializer_list& in return output; } +inline std::vector FloatsToBFloat16s(const std::vector& input) { + std::vector output; + std::transform(input.begin(), input.end(), std::back_inserter(output), [](float f) { return BFloat16(f); }); + return output; +} + inline CheckParams MakeCheckParams(const OpTester::Data& d) { return CheckParams{d.sort_output_, d.absolute_error_, d.relative_error_}; } diff --git a/onnxruntime/test/python/quantization/test_op_argmax.py b/onnxruntime/test/python/quantization/test_op_argmax.py index cb0a243c7e6e3..86bb187cfa54f 100644 --- a/onnxruntime/test/python/quantization/test_op_argmax.py +++ b/onnxruntime/test/python/quantization/test_op_argmax.py @@ -80,6 +80,7 @@ def quantize_argmax_test(self, activation_type, weight_type, extra_options = {}) weight_type_str = 'u8' if (weight_type == QuantType.QUInt8) else 's8' model_uint8_path = 'argmax_{}{}.onnx'.format(activation_type_str, weight_type_str) model_uint8_qdq_path = 'argmax_{}{}_qdq.onnx'.format(activation_type_str, weight_type_str) + model_uint8_qdq_trt_path = 'argmax_{}{}_qdq_trt.onnx'.format(activation_type_str, weight_type_str) # Verify QOperator mode data_reader = self.input_feeds(1, {'input': [1, 256, 128, 128]}) @@ -105,6 +106,17 @@ def quantize_argmax_test(self, activation_type, weight_type, extra_options = {}) data_reader.rewind() check_model_correctness(self, model_fp32_path, model_uint8_qdq_path, data_reader.get_next()) + # Verify QDQ mode for TensorRT + data_reader.rewind() + quantize_static(model_fp32_path, model_uint8_qdq_trt_path, data_reader, quant_format=QuantFormat.QDQ, + activation_type=activation_type, weight_type=weight_type, extra_options=extra_options, + op_types_to_quantize=['ArgMax']) + qdqnode_counts = {'QuantizeLinear': 1, 'DequantizeLinear': 1, 'ArgMax': 1} + check_op_type_count(self, model_uint8_qdq_trt_path, **qdqnode_counts) + qnode_io_qtypes = {'QuantizeLinear' : [['i', 2, activation_proto_qtype], ['o', 0, activation_proto_qtype]]} + check_qtype_by_node_type(self, model_uint8_qdq_trt_path, qnode_io_qtypes) + data_reader.rewind() + check_model_correctness(self, model_fp32_path, model_uint8_qdq_trt_path, data_reader.get_next()) def test_quantize_argmax(self): self.quantize_argmax_test(QuantType.QUInt8, QuantType.QUInt8) diff --git a/onnxruntime/test/python/quantization/test_op_gemm.py b/onnxruntime/test/python/quantization/test_op_gemm.py index cf61402fa5d84..11a2ba488b4d5 100644 --- a/onnxruntime/test/python/quantization/test_op_gemm.py +++ b/onnxruntime/test/python/quantization/test_op_gemm.py @@ -130,7 +130,7 @@ def static_quant_test(self, model_fp32_path, data_reader, activation_type, 
weigh data_reader.rewind() quantize_static(model_fp32_path, model_int8_path, data_reader, activation_type=activation_type, weight_type=weight_type, extra_options=extra_options) - quant_nodes = {'QLinearMatMul': 2, 'QLinearAdd': 2, 'QuantizeLinear': 1, 'DequantizeLinear': 1} + quant_nodes = {'QGemm': 2, 'QuantizeLinear': 1, 'DequantizeLinear': 1} check_op_type_count(self, model_int8_path, **quant_nodes) qnode_io_qtypes = {'QuantizeLinear': [['i', 2, activation_proto_qtype], ['o', 0, activation_proto_qtype]]} qnode_io_qtypes.update({'DequantizeLinear': [['i', 2, activation_proto_qtype]]}) @@ -147,7 +147,7 @@ def static_quant_test_qdq(self, model_fp32_path, data_reader, activation_type, w data_reader.rewind() quantize_static(model_fp32_path, model_int8_path, data_reader, quant_format=QuantFormat.QDQ, activation_type=activation_type, weight_type=weight_type, extra_options=extra_options) - quant_nodes = {'MatMul': 2, 'Add': 2, 'QuantizeLinear': 5, 'DequantizeLinear': 9} + quant_nodes = {'Gemm': 2, 'QuantizeLinear': 3, 'DequantizeLinear': 7} check_op_type_count(self, model_int8_path, **quant_nodes) qnode_io_qtypes = {'QuantizeLinear': [['i', 2, activation_proto_qtype], ['o', 0, activation_proto_qtype]]} check_qtype_by_node_type(self, model_int8_path, qnode_io_qtypes) diff --git a/onnxruntime/test/python/transformers/test_optimizer.py b/onnxruntime/test/python/transformers/test_optimizer.py index b149edbb145f4..b4d944caf998f 100644 --- a/onnxruntime/test/python/transformers/test_optimizer.py +++ b/onnxruntime/test/python/transformers/test_optimizer.py @@ -20,13 +20,13 @@ from onnx_model import OnnxModel from onnx_exporter import export_onnx_model_from_tf, export_onnx_model_from_pt from huggingface_models import MODELS - from benchmark_helper import Precision + from benchmark_helper import Precision, OptimizerInfo else: from onnxruntime.transformers.optimizer import optimize_model from onnxruntime.transformers.onnx_model import OnnxModel from onnxruntime.transformers.onnx_exporter import export_onnx_model_from_tf, export_onnx_model_from_pt from onnxruntime.transformers.huggingface_models import MODELS - from onnxruntime.transformers.benchmark_helper import Precision + from onnxruntime.transformers.benchmark_helper import Precision, OptimizerInfo BERT_TEST_MODELS = { "bert_keras_0": ('models', 'TFBertForSequenceClassification_1.onnx'), # bert_mrpc_tensorflow2.1_opset10 @@ -78,7 +78,7 @@ def _test_optimizer_on_huggingface_model(self, MODELS[model_name][2], MODELS[model_name][3], None, './cache_models', './onnx_models', input_names[:inputs_count], False, - Precision.FLOAT32, True, True, True, True, + Precision.FLOAT32, OptimizerInfo.BYSCRIPT, True, True, True, model_fusion_statistics) onnx_model = list(model_fusion_statistics.keys())[0] diff --git a/onnxruntime/test/python/transformers/test_shape_infer_helper.py b/onnxruntime/test/python/transformers/test_shape_infer_helper.py index c38f249a0e907..1a9f22d1477cd 100644 --- a/onnxruntime/test/python/transformers/test_shape_infer_helper.py +++ b/onnxruntime/test/python/transformers/test_shape_infer_helper.py @@ -5,12 +5,12 @@ if find_transformers_source(): from onnx_exporter import export_onnx_model_from_pt from huggingface_models import MODELS - from benchmark_helper import Precision + from benchmark_helper import Precision, OptimizerInfo from shape_infer_helper import SymbolicShapeInferenceHelper else: from onnxruntime.transformers.onnx_exporter import export_onnx_model_from_pt from onnxruntime.transformers.huggingface_models import MODELS - from 
onnxruntime.transformers.benchmark_helper import Precision + from onnxruntime.transformers.benchmark_helper import Precision, OptimizerInfo from onnxruntime.transformers.shape_infer_helper import SymbolicShapeInferenceHelper @@ -22,7 +22,7 @@ def _load_onnx(self, model_name): with torch.no_grad(): export_onnx_model_from_pt(model_name, MODELS[model_name][1], MODELS[model_name][2], MODELS[model_name][3], None, '../cache_models', base_path, input_names[:1], False, Precision.FLOAT32, - True, True, True, False, {}) + OptimizerInfo.BYSCRIPT, True, True, False, {}) model_path = base_path + model_name.replace('-', '_') + "_1.onnx" import onnx return onnx.load_model(model_path) diff --git a/onnxruntime/test/testdata/ep_dynamic_graph_input_test.onnx b/onnxruntime/test/testdata/ep_dynamic_graph_input_test.onnx new file mode 100644 index 0000000000000..67f0d39f526e4 Binary files /dev/null and b/onnxruntime/test/testdata/ep_dynamic_graph_input_test.onnx differ diff --git a/onnxruntime/test/testdata/ep_dynamic_graph_input_test.py b/onnxruntime/test/testdata/ep_dynamic_graph_input_test.py new file mode 100644 index 0000000000000..d04f8a8884d3d --- /dev/null +++ b/onnxruntime/test/testdata/ep_dynamic_graph_input_test.py @@ -0,0 +1,47 @@ +import onnx +from onnx import helper +from onnx import TensorProto + + +# Since NNAPI EP does not support dynamic shape input and we now switch from the approach of immediately rejecting +# the whole graph in NNAPI EP if it has a dynamic input to checking the dynamic shape at individual operator support check level, +# We have a separated test here using a graph with dynamic input that becomes fixed after a Resize +# Please see BaseOpBuilder::HasSupportedInputs in /onnxruntime/core/providers/nnapi/nnapi_builtin/builders/op_support_checker.cc +def GenerateModel(model_name): + nodes = [ + helper.make_node("Resize", ["X", "", "", "Resize_1_sizes"], [ + "Resize_1_output"], "resize_1", mode="cubic"), + helper.make_node( + "Add", ["Resize_1_output", "Add_2_input"], ["Y"], "add"), + ] + + initializers = [ + helper.make_tensor('Resize_1_sizes', TensorProto.INT64, [ + 4], [1, 1, 3, 3]), + helper.make_tensor('Add_2_input', TensorProto.FLOAT, [1, 1, 3, 3], [ + 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]) + ] + + inputs = [ + helper.make_tensor_value_info( + 'X', TensorProto.FLOAT, ["1", "1", "N", "N"]), # used dim_param here + ] + + outputs = [ + helper.make_tensor_value_info('Y', TensorProto.FLOAT, [1, 1, 3, 3]), + ] + + graph = helper.make_graph( + nodes, + "EP_Dynamic_Graph_Input_Test", + inputs, + outputs, + initializers + ) + + model = helper.make_model(graph) + onnx.save(model, model_name) + + +if __name__ == "__main__": + GenerateModel('ep_dynamic_graph_input_test.onnx') diff --git a/onnxruntime/test/testdata/kernel_def_hashes/contrib.cpu.json b/onnxruntime/test/testdata/kernel_def_hashes/contrib.cpu.json index 120e6de7c00f3..2a85dab3b5fd0 100644 --- a/onnxruntime/test/testdata/kernel_def_hashes/contrib.cpu.json +++ b/onnxruntime/test/testdata/kernel_def_hashes/contrib.cpu.json @@ -283,6 +283,10 @@ "QEmbedLayerNormalization com.microsoft CPUExecutionProvider", 9235385557940152248 ], + [ + "QGemm com.microsoft CPUExecutionProvider", + 13009794669709617232 + ], [ "QGemm com.microsoft CPUExecutionProvider", 13737193491843065240 diff --git a/onnxruntime/test/util/default_providers.cc b/onnxruntime/test/util/default_providers.cc index 209d4244229fc..5e0975d667fb9 100644 --- a/onnxruntime/test/util/default_providers.cc +++ b/onnxruntime/test/util/default_providers.cc @@ -54,6 
+54,16 @@ std::unique_ptr TensorrtExecutionProviderWithOptions(const O return nullptr; } +std::unique_ptr TensorrtExecutionProviderWithOptions(const OrtTensorRTProviderOptionsV2* params) { +#ifdef USE_TENSORRT + if (auto factory = CreateExecutionProviderFactory_Tensorrt(params)) + return factory->CreateProvider(); +#else + ORT_UNUSED_PARAMETER(params); +#endif + return nullptr; +} + std::unique_ptr DefaultMIGraphXExecutionProvider() { #ifdef USE_MIGRAPHX OrtMIGraphXProviderOptions params{ diff --git a/onnxruntime/test/util/include/default_providers.h b/onnxruntime/test/util/include/default_providers.h index 6fa50c61cdefa..980129e95c7c4 100644 --- a/onnxruntime/test/util/include/default_providers.h +++ b/onnxruntime/test/util/include/default_providers.h @@ -24,6 +24,7 @@ std::shared_ptr CreateExecutionProviderFactory_OpenVI std::shared_ptr CreateExecutionProviderFactory_Rknpu(); std::shared_ptr CreateExecutionProviderFactory_Rocm(const OrtROCMProviderOptions* provider_options); std::shared_ptr CreateExecutionProviderFactory_Tensorrt(const OrtTensorRTProviderOptions* params); +std::shared_ptr CreateExecutionProviderFactory_Tensorrt(const OrtTensorRTProviderOptionsV2* params); // EP for internal testing std::shared_ptr CreateExecutionProviderFactory_InternalTesting(const std::unordered_set& supported_ops); @@ -38,6 +39,7 @@ std::unique_ptr DefaultNupharExecutionProvider(bool allow_un //std::unique_ptr DefaultStvmExecutionProvider(); std::unique_ptr DefaultTensorrtExecutionProvider(); std::unique_ptr TensorrtExecutionProviderWithOptions(const OrtTensorRTProviderOptions* params); +std::unique_ptr TensorrtExecutionProviderWithOptions(const OrtTensorRTProviderOptionsV2* params); std::unique_ptr DefaultMIGraphXExecutionProvider(); std::unique_ptr MIGraphXExecutionProviderWithOptions(const OrtMIGraphXProviderOptions* params); std::unique_ptr DefaultOpenVINOExecutionProvider(); diff --git a/onnxruntime/test/util/include/test_utils.h b/onnxruntime/test/util/include/test_utils.h index 60d2a3e8caba2..50859d826fa2d 100644 --- a/onnxruntime/test/util/include/test_utils.h +++ b/onnxruntime/test/util/include/test_utils.h @@ -15,6 +15,18 @@ class Graph; namespace test { +// struct to hold some verification params for RunAndVerifyOutputsWithEP +struct EPVerificationParams { + // Verify the entire graph is taken by the EP + // if this is set to false, then will verify that at least one node is assigned to 'execution_provider' + bool verify_entire_graph_use_ep{false}; + + // Some EP may use different rounding than ORT CPU EP, which may cause a bigger abs error than + // the default of 1e-5f, especially for scenarios such as [Q -> Quantized op -> DQ] + // Set this only if this is necessary + float fp32_abs_err = 1e-5f; +}; + // return number of nodes in the Graph and any subgraphs that are assigned to the specified execution provider int CountAssignedNodes(const Graph& current_graph, const std::string& ep_type); @@ -23,13 +35,14 @@ int CountAssignedNodes(const Graph& current_graph, const std::string& ep_type); void RunAndVerifyOutputsWithEP(const ORTCHAR_T* model_path, const char* log_id, std::unique_ptr execution_provider, - const NameMLValMap& feeds); + const NameMLValMap& feeds, + const EPVerificationParams& params = EPVerificationParams()); // helper function that takes in model_data -// used in nnapi qdq model tests void RunAndVerifyOutputsWithEP(const std::string& model_data, const char* log_id, std::unique_ptr execution_provider, - const NameMLValMap& feeds); + const NameMLValMap& feeds, + const 
EPVerificationParams& params = EPVerificationParams()); } // namespace test } // namespace onnxruntime diff --git a/onnxruntime/test/util/test_utils.cc b/onnxruntime/test/util/test_utils.cc index 476fed4282cdc..b069b08810cb8 100644 --- a/onnxruntime/test/util/test_utils.cc +++ b/onnxruntime/test/util/test_utils.cc @@ -18,7 +18,8 @@ namespace onnxruntime { namespace test { static void VerifyOutputs(const std::vector& output_names, const std::vector& expected_fetches, - const std::vector& fetches) { + const std::vector& fetches, + const EPVerificationParams& params) { ASSERT_EQ(expected_fetches.size(), fetches.size()); for (size_t i = 0, end = expected_fetches.size(); i < end; ++i) { @@ -35,11 +36,13 @@ static void VerifyOutputs(const std::vector& output_names, EXPECT_THAT(ltensor.DataAsSpan(), ::testing::ContainerEq(rtensor.DataAsSpan())) << " mismatch for " << output_names[i]; break; + case ONNX_NAMESPACE::TensorProto_DataType_UINT8: + EXPECT_THAT(ltensor.DataAsSpan(), ::testing::ContainerEq(rtensor.DataAsSpan())) + << " mismatch for " << output_names[i]; + break; case ONNX_NAMESPACE::TensorProto_DataType_FLOAT: { - constexpr float abs_err = 1e-5f; - EXPECT_THAT(ltensor.DataAsSpan(), - ::testing::Pointwise(::testing::FloatNear(abs_err), rtensor.DataAsSpan())); + ::testing::Pointwise(::testing::FloatNear(params.fp32_abs_err), rtensor.DataAsSpan())); break; } default: @@ -68,16 +71,18 @@ int CountAssignedNodes(const Graph& current_graph, const std::string& ep_type) { void RunAndVerifyOutputsWithEP(const ORTCHAR_T* model_path, const char* log_id, std::unique_ptr execution_provider, - const NameMLValMap& feeds) { + const NameMLValMap& feeds, + const EPVerificationParams& params) { // read raw data from model provided by the model_path std::ifstream stream(model_path, std::ios::in | std::ios::binary); std::string model_data((std::istreambuf_iterator(stream)), std::istreambuf_iterator()); - RunAndVerifyOutputsWithEP(model_data, log_id, std::move(execution_provider), feeds); + RunAndVerifyOutputsWithEP(model_data, log_id, std::move(execution_provider), feeds, params); } void RunAndVerifyOutputsWithEP(const std::string& model_data, const char* log_id, std::unique_ptr execution_provider, - const NameMLValMap& feeds) { + const NameMLValMap& feeds, + const EPVerificationParams& params) { SessionOptions so; so.session_logid = log_id; RunOptions run_options; @@ -118,12 +123,17 @@ void RunAndVerifyOutputsWithEP(const std::string& model_data, const char* log_id // make sure that some nodes are assigned to the EP, otherwise this test is pointless... 
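  // (when params.verify_entire_graph_use_ep is set, additionally require that every node was taken by the EP)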
const auto& graph2 = session_object2.GetGraph(); auto ep_nodes = CountAssignedNodes(graph2, provider_type); - ASSERT_GT(ep_nodes, 0) << "No nodes were assigned to " << provider_type; + if (params.verify_entire_graph_use_ep) { + // Verify the entire graph is assigned to the EP + ASSERT_EQ(ep_nodes, graph2.NumberOfNodes()) << "Not all nodes were assigned to " << provider_type; + } else { + ASSERT_GT(ep_nodes, 0) << "No nodes were assigned to " << provider_type; + } // Run with EP and verify the result std::vector fetches; ASSERT_STATUS_OK(session_object2.Run(run_options, feeds, output_names, &fetches)); - VerifyOutputs(output_names, expected_fetches, fetches); + VerifyOutputs(output_names, expected_fetches, fetches, params); } #if !defined(DISABLE_SPARSE_TENSORS) diff --git a/orttraining/orttraining/core/graph/gradient_builder.cc b/orttraining/orttraining/core/graph/gradient_builder.cc index 55b521e13a3cc..f807fe6af4879 100755 --- a/orttraining/orttraining/core/graph/gradient_builder.cc +++ b/orttraining/orttraining/core/graph/gradient_builder.cc @@ -1838,5 +1838,24 @@ IMPLEMENT_GRADIENT_BUILDER(GetScatterNDGradient) { return result; } +IMPLEMENT_GRADIENT_BUILDER(GetScatterElementsGradient) { + auto attributes = SrcNodeAttributes(); + auto axis = utils::HasInt(attributes.at("axis")) ? attributes.at("axis").i() : 0; + std::vector result; + if (IsGradientRequiredForSrcNodeInput(0)) { + result.emplace_back(NodeDef("Shape", {I(2)}, {IA("Shape_updates")})); + result.emplace_back(NodeDef("ConstantOfShape", {IA("Shape_updates")}, {IA("Zero_Shape_updates")}, + {MakeAttribute("value", ScalarTensorProtoByElemType(0.0f, IElemType(0)))})); + result.emplace_back(NodeDef("ScatterElements", {GO(0), I(1), IA("Zero_Shape_updates")}, {GI(0)}, + {MakeAttribute("axis", axis)})); + } + + if (IsGradientRequiredForSrcNodeInput(2)) { + result.emplace_back(NodeDef("GatherElements", {GO(0), I(1)}, {GI(2)}, + {MakeAttribute("axis", axis)})); + } + return result; +} + } // namespace training } // namespace onnxruntime diff --git a/orttraining/orttraining/core/graph/gradient_builder.h b/orttraining/orttraining/core/graph/gradient_builder.h index 8947f40329968..9edccb02cbb5a 100755 --- a/orttraining/orttraining/core/graph/gradient_builder.h +++ b/orttraining/orttraining/core/graph/gradient_builder.h @@ -77,6 +77,7 @@ DECLARE_GRADIENT_BUILDER(GetPadGradient) DECLARE_GRADIENT_BUILDER(GetIdentityGradient) DECLARE_GRADIENT_BUILDER(GetPythonOpGradient) DECLARE_GRADIENT_BUILDER(GetScatterNDGradient) +DECLARE_GRADIENT_BUILDER(GetScatterElementsGradient) DECLARE_GRADIENT_BUILDER(GetTriluGradient) DECLARE_GRADIENT_BUILDER(GetExternalGradient) diff --git a/orttraining/orttraining/core/graph/gradient_builder_base.h b/orttraining/orttraining/core/graph/gradient_builder_base.h index bd615dc0484d1..b2156660b8c1e 100644 --- a/orttraining/orttraining/core/graph/gradient_builder_base.h +++ b/orttraining/orttraining/core/graph/gradient_builder_base.h @@ -5,8 +5,9 @@ #include #include -#include "core/util/math.h" +#include "core/framework/float16.h" #include "core/graph/graph.h" +#include "core/util/math.h" #include "orttraining/core/graph/graph_augmenter.h" #include "orttraining/core/graph/gradient_config.h" #include "orttraining/core/graph/recompute_graph_utils.h" diff --git a/orttraining/orttraining/core/graph/gradient_builder_registry.cc b/orttraining/orttraining/core/graph/gradient_builder_registry.cc index 6fc1fda6443a2..df728a715ec5e 100755 --- a/orttraining/orttraining/core/graph/gradient_builder_registry.cc +++ 
b/orttraining/orttraining/core/graph/gradient_builder_registry.cc @@ -108,6 +108,7 @@ void GradientBuilderRegistry::RegisterGradientBuilders() { REGISTER_GRADIENT_BUILDER("Identity", GetIdentityGradient); REGISTER_GRADIENT_BUILDER("PythonOp", GetPythonOpGradient); REGISTER_GRADIENT_BUILDER("ScatterND", GetScatterNDGradient); + REGISTER_GRADIENT_BUILDER("ScatterElements", GetScatterElementsGradient); REGISTER_GRADIENT_BUILDER("Trilu", GetTriluGradient); REGISTER_GRADIENT_BUILDER("ExternalGradient", GetExternalGradient); diff --git a/orttraining/orttraining/core/session/training_session.cc b/orttraining/orttraining/core/session/training_session.cc index a5b63d412165b..14c1afccc7aa8 100644 --- a/orttraining/orttraining/core/session/training_session.cc +++ b/orttraining/orttraining/core/session/training_session.cc @@ -36,7 +36,7 @@ #ifdef ENABLE_NVTX_PROFILE #include #include -#include "core/profile/context.h" +#include "core/providers/cuda/nvtx_profile_context.h" #endif namespace onnxruntime { diff --git a/orttraining/orttraining/models/runner/training_runner.cc b/orttraining/orttraining/models/runner/training_runner.cc index d4789d825729f..664b489c62348 100644 --- a/orttraining/orttraining/models/runner/training_runner.cc +++ b/orttraining/orttraining/models/runner/training_runner.cc @@ -13,7 +13,7 @@ #include "core/platform/env.h" #include "core/platform/path_lib.h" #ifdef ENABLE_NVTX_PROFILE -#include "core/profile/context.h" +#include "core/providers/cuda/nvtx_profile_context.h" #endif #include "core/session/environment.h" #include "orttraining/core/framework/checkpointing.h" diff --git a/orttraining/orttraining/python/training/utils/data/__init__.py b/orttraining/orttraining/python/training/utils/data/__init__.py new file mode 100644 index 0000000000000..91207012216d3 --- /dev/null +++ b/orttraining/orttraining/python/training/utils/data/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. +# __init__.py + +from .sampler import LoadBalancingDistributedSampler, LoadBalancingDistributedBatchSampler diff --git a/orttraining/orttraining/python/training/utils/data/sampler.py b/orttraining/orttraining/python/training/utils/data/sampler.py new file mode 100644 index 0000000000000..e0e42b2b3caab --- /dev/null +++ b/orttraining/orttraining/python/training/utils/data/sampler.py @@ -0,0 +1,371 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
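The GetScatterElementsGradient builder registered just above implements two standard identities: the gradient w.r.t. data is the incoming gradient with zeros scattered at the written positions, and the gradient w.r.t. updates is the incoming gradient gathered at those positions. A quick PyTorch check of the same formulas (shapes, indices and updates taken from the ScatterElementsGrad unit test added later in this change):

    import torch

    data = torch.zeros(3, 3, requires_grad=True)
    indices = torch.tensor([[1, 0, 2], [0, 2, 1]])
    updates = torch.tensor([[1.0, 1.1, 1.2], [2.0, 2.1, 2.2]], requires_grad=True)

    y = data.scatter(0, indices, updates)          # forward ScatterElements with axis=0
    upstream = torch.arange(1.0, 10.0).reshape(3, 3)
    y.backward(upstream)

    # d(data): upstream gradient with zeros scattered at the written positions,
    # i.e. ScatterElements(GO(0), I(1), zeros_like(updates)).
    print(data.grad)
    # d(updates): upstream gradient gathered at the written positions,
    # i.e. GatherElements(GO(0), I(1)).
    print(updates.grad)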
+# sampler.py + +import torch +import math +import torch.distributed as dist +from torch.utils.data.sampler import Sampler +from torch.utils.data.dataset import Dataset +from typing import Optional, Iterator, Callable +import numpy as np + + +def _shard_wrapped_indices_across_workers(dataset_index_list, num_shards, num_samples_per_shard): + """Yield successive num_shards-sized chunks from dataset_index_list.""" + num_samples = max(1, num_samples_per_shard) + num_elements = num_samples * num_shards + current_lst = [] + for i in range(num_elements): + current_lst.append(dataset_index_list[i % len(dataset_index_list)]) + if len(current_lst) == num_shards: + yield current_lst + current_lst = [] + + +def shard_wrapped_indices_for_worker(dataset_index_list, shard_id, num_shards): + """Shard wrapped around dataset_index_list across num_shards and return the indices for this shard_id""" + num_samples_per_worker = (len(dataset_index_list) + num_shards - 1) // num_shards + sharded_indices = list(_shard_wrapped_indices_across_workers(dataset_index_list, + num_shards, + num_samples_per_worker)) + return [sharded_indices[i][shard_id] for i in range(len(sharded_indices))] + + +# Implementation is adapted from bagua/load_balancing_data_loader.py +# https://github.com/BaguaSys/bagua/blob/01874a7c3f90904c37c5612a9db866b5d4b8b5ed/bagua/torch_api/contrib/load_balancing_data_loader.py#L12 +class LoadBalancingDistributedSampler: + r"""Sampler that balances the data load across workers based on the sample's complexity. + This sampler uses a :attr:`complexity_fn` to calculate each sample's computational + complexity and make each batch get similar computational complexity. + This is useful in scenarios like speech and NLP, where each batch has variable + length and distributed training suffers from straggler problem. In such scenarios, + the complexity function could be defined to return the length of the input sample sequence. + The usage is similar to `torch.utils.data.DistributedSampler`, where each process loads a + subset of the original dataset that is exclusive to it. + The sampler sorts the dataset in increasing order of complexity. If the :attr:`group_size` is + provided, the sorting happens within dataset groups of size :attr:`group_size` before the + group order is shuffled followed by sharding of data across workers. If :attr:`group_size` + is not provided, the data is distributed across workers before the data indices for each worker + is shuffled deterministically. + .. note:: + Dataset is assumed to be of constant size (map-style dataset). + Args: + dataset: Dataset (map-style) used for sampling. + complexity_fn(Callable): A function whose input is a sample and output is an integer as a + measure of the computational complexity of the sample. + world_size (int, optional): Number of processes participating in + distributed training. By default, :attr:`world_size` is retrieved from the + current distributed group. + rank (int, optional): Rank of the current process within :attr:`world_size`. + By default, :attr:`rank` is retrieved from the current distributed + group. + shuffle (bool, optional): If ``True`` (default), sampler will shuffle the + indices within the dataset if :attr:`group_size` is None, else will + shuffle the groups if :attr:`group_size` is not None. + group_size (int, optional): If provided, the dataset will be broken down into + :attr:`group_size` sized groups. Indices will only be sorted within the groups + and not across the entire dataset. 
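The two sharding helpers above are easier to follow with a concrete example. The standalone sketch below condenses the same wrap-around logic into plain Python (it omits the max(1, ...) guard) and shows how ten indices are split across three workers:

    def shard_for_worker(dataset_index_list, shard_id, num_shards):
        # Wrap indices around so every shard ends up with the same number of samples,
        # then hand shard_id the shard_id-th element of each num_shards-sized chunk.
        samples_per_shard = (len(dataset_index_list) + num_shards - 1) // num_shards
        wrapped = [dataset_index_list[i % len(dataset_index_list)]
                   for i in range(samples_per_shard * num_shards)]
        chunks = [wrapped[i:i + num_shards] for i in range(0, len(wrapped), num_shards)]
        return [chunk[shard_id] for chunk in chunks]

    indices = list(range(10))                     # 10 samples, 3 workers -> 4 samples each
    print(shard_for_worker(indices, 0, 3))        # [0, 3, 6, 9]
    print(shard_for_worker(indices, 1, 3))        # [1, 4, 7, 0]   index 0 reused as padding
    print(shard_for_worker(indices, 2, 3))        # [2, 5, 8, 1]   index 1 reused as padding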
If :attr:`shuffle` is ```True``` and + :attr:`group_size` is not ```None```, the position of each group in the dataset + will be shuffled. Default: ```None``` + seed (int, optional): random seed used to shuffle the sampler if + :attr:`shuffle=True`. This number should be identical across all + processes in the distributed group. Default: 0. + drop_last (bool, optional): if ``True``, then the sampler will drop the + tail of the data to make it evenly divisible across the number of + shards. If ``False``, the sampler will add extra indices to make + the data evenly divisible across the shards. Default: ``False``. + random_level (float, optional): A float varies from 0 and 1 that controls the extent + of load balance. 0 means the best load balance, while 1 means the opposite. + .. warning:: + In distributed mode, calling the :meth:`set_epoch` method at + the beginning of each epoch **before** creating the `torch.utils.data.DataLoader` iterator + is necessary to make shuffling work properly across multiple epochs. Otherwise, + the same ordering will be always used. + Example:: + Define your :attr:`complexity_fn`, which accepts a dataset sample as its input and produces an integer + as the sample's computational complexity: + >>> dataset = MyVariableSequenceLengthDataset(dataset_samples) + >>> complexity_fn = lambda x: len(x) + Below is the usage of :class:`LoadBalancingDistributedSampler` + and `torch.utils.data.DataLoader`: + >>> sampler = onnxruntime.training.utils.data.LoadBalancingDistributedSampler( + ... dataset, + ... complexity_fn=complexity_fn) + >>> loader = torch.utils.data.DataLoader(dataset, + ... sampler=sampler) + >>> + >>> for epoch in range(start_epoch, n_epochs): + ... if is_distributed: + ... sampler.set_epoch(epoch) + ... train(loader) + """ + + def __init__( + self, + dataset: Dataset, + complexity_fn: Callable[..., int], + world_size: Optional[int] = None, + rank: Optional[int] = None, + shuffle: bool = True, + group_size: Optional[int] = None, + seed: int = 0, + drop_last: bool = False, + random_level: float = 0, + ) -> None: + if world_size is None: + if not dist.is_available(): + raise RuntimeError("Requires distributed package to be available") + world_size = dist.get_world_size() + if rank is None: + if not dist.is_available(): + raise RuntimeError("Requires distributed package to be available") + rank = dist.get_rank() + if rank >= world_size or rank < 0: + raise ValueError( + "Invalid rank {}, rank should be in the interval" + " [0, {}]".format(rank, world_size - 1) + ) + self.dataset = dataset + self.world_size = world_size + self.rank = rank + self.epoch = 0 + self.drop_last = drop_last + self.group_size = group_size + + # If the dataset length is evenly divisible by number of shards, then there + # is no need to drop any data, since the dataset will be split equally. + dataset_len = len(self.dataset) + if self.drop_last and dataset_len % self.world_size != 0: + # Split to nearest available length that is evenly divisible. + # This is to ensure each rank receives the same amount of data when + # using this Sampler. 
+ self.num_samples = dataset_len // self.world_size + else: + self.num_samples = math.ceil(dataset_len / self.world_size) + self.total_size = self.num_samples * self.world_size + self.shuffle = shuffle + self.seed = seed + + self.complexity_fn = complexity_fn + self.sample_complexities = None + self.ordered_sample_complexities = None + + if random_level < 0.0 or random_level > 1.0: + raise ValueError( + "Invalid random level {}, should be in the range [0.0, 1.0]".format( + random_level + ) + ) + + self.random_level = random_level + self.random_number = None + + def _sort_shard_and_shuffle_dataset(self): + # This method returns a list of dataset sample indices after + # the dataset has been sorted, sharded and shuffled. + # The sorting of the dataset happens based on the group_size and complexities + # of each sample. + # Sharding happens across the number of workers. + # Shuffling is done either before sharding on the group indices (if group_size is provided) + # or on the dataset sample indices if the group_size is not provided. + + def sort_in_groups(sample_complexities, group_size): + """Sort the dataset sample indices inside each group of size group_size.""" + # If the group_size is None, the entire dataset is considered as a single group + if group_size is None: + group_size = len(sample_complexities) + # Sort the dataset samples inside each group of the dataset based on sample complexity. + for group_begin_index in range(0, len(sample_complexities), group_size): + group_end_index = min(group_begin_index + group_size, len(sample_complexities)) + sorted_indices = \ + group_begin_index + np.argsort(sample_complexities[group_begin_index:group_end_index, 1]) + sample_complexities[group_begin_index:group_end_index, :] = sample_complexities[sorted_indices] + return sample_complexities + + # Get the samples and their complexities from the complexity_fn + if not self.sample_complexities: + self.sample_complexities = np.empty((len(self.dataset), 2), dtype=np.int64) + for sample_index in range(len(self.dataset)): + self.sample_complexities[sample_index][0] = sample_index + self.sample_complexities[sample_index][1] = self.complexity_fn(self.dataset[sample_index]) + + if self.random_number is None: + max_complexity = max(self.sample_complexities, key=lambda t: t[1])[1] + min_complexity = min(self.sample_complexities, key=lambda t: t[1])[1] + self.random_number = int((max_complexity - min_complexity) * self.random_level + 1) + + sample_complexities = self.sample_complexities.copy() + + # Control the degree of load balancing by modifying the complexities of + # all samples using the random_number. + g = torch.Generator() + g = g.manual_seed(self.seed + self.epoch) + + if self.random_number > 1: + complexity_random_ints = torch.randint( + self.random_number, (len(sample_complexities),), generator=g + ).tolist() + + for index, random_int in enumerate(complexity_random_ints): + sample_complexities[index][1] += random_int + + # Sort the data based on the computed complexities and group sizes. + # Sort only once if random_number <= 1, else sort every time + if self.ordered_sample_complexities is None or self.random_number > 1: + self.ordered_sample_complexities = sort_in_groups(sample_complexities, self.group_size) + ordered_sample_complexities = self.ordered_sample_complexities + + # If group_size is not None, shuffle the index of each group instead + # of shuffling the data indices.
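A short numeric illustration of the random_level / random_number logic above (the complexity values are made up): random_number == 1 means no noise is added and the order stays strictly sorted, while larger values trade some load balance for extra shuffling.

    import torch

    complexities = [5, 12, 40, 85, 60]      # e.g. sequence lengths returned by complexity_fn
    random_level = 0.25

    # Mirrors: random_number = int((max_complexity - min_complexity) * random_level + 1)
    random_number = int((max(complexities) - min(complexities)) * random_level + 1)
    print(random_number)                    # 21, so noise is drawn uniformly from [0, 20]

    g = torch.Generator().manual_seed(0)    # the sampler uses seed + epoch here
    noise = torch.randint(random_number, (len(complexities),), generator=g).tolist()
    print([c + n for c, n in zip(complexities, noise)])   # these noisy values are what get sorted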
+ if self.shuffle and self.group_size is not None: + num_groups = (len(self.sample_complexities) + self.group_size - 1) // self.group_size + group_order = torch.randperm(num_groups, generator=g).tolist() + end = 0 + sample_complexities_copy = ordered_sample_complexities.copy() + for group_index in group_order: + original_list_begin_index = self.group_size*group_index + original_list_end_index = min(original_list_begin_index+self.group_size, len(sample_complexities)) + begin = end + end = begin + (original_list_end_index - original_list_begin_index) + sample_complexities_copy[begin:end, :] = \ + sample_complexities[original_list_begin_index:original_list_end_index, :] + ordered_sample_complexities = sample_complexities_copy + + # Shard the data across the different workers. + index_chunks = list( + _shard_wrapped_indices_across_workers( + [index_complexity_tuple[0] for index_complexity_tuple in ordered_sample_complexities], + self.world_size, + self.num_samples + ) + ) + + # Shuffle the sharded data indices deterministically based on epoch and seed. + chunk_indices = list(range(len(index_chunks))) + if self.shuffle and self.group_size is None: + chunk_indices = torch.randperm(len(index_chunks), generator=g).tolist() + + if not self.drop_last: + # Add extra samples to make it evenly divisible + padding_size = self.num_samples - len(chunk_indices) + if padding_size <= len(chunk_indices): + chunk_indices += chunk_indices[:padding_size] + else: + chunk_indices += ( + chunk_indices * math.ceil(padding_size / len(chunk_indices)) + )[:padding_size] + else: + # Remove tail of data to make it evenly divisible. + chunk_indices = chunk_indices[: self.num_samples] + + assert len(chunk_indices) == self.num_samples + return index_chunks, chunk_indices + + def __iter__(self) -> Iterator: + index_chunks, chunk_indices = self._sort_shard_and_shuffle_dataset() + # Extract indices based on current rank. + indices = [index_chunks[i][self.rank] for i in chunk_indices] + assert len(indices) == self.num_samples + + return iter(indices) + + def __len__(self) -> int: + return self.num_samples + + def set_epoch(self, epoch: int) -> None: + r"""Sets the epoch for this sampler. + When :attr:`shuffle=True`, this ensures all shards use a different + random ordering for each epoch. Otherwise, the next iteration of this + sampler will yield the same ordering. + Args: + epoch (int): Epoch number. + """ + self.epoch = epoch + + +class LoadBalancingDistributedBatchSampler(Sampler): + r"""Wraps another load balance sampler to yield variable sized mini-batches. + Args: + sampler (LoadBalancingDistributedSampler): Load balance sampler. + batch_fn (Callable): Callable to yield mini-batch indices. + drop_last (bool): If ``True``, the sampler will drop the last few batches exceeding + the least number of batches among replicas, otherwise, the number of batches + on each replica will be padded to the same. + :attr:`batch_fn` will have the signature of:: + def batch_fn(indices: List[int]) -> List[List[int]] + Example:: + >>> from onnxruntime.training.utils.data import LoadBalancingDistributedSampler, \ + ... LoadBalancingDistributedBatchSampler + >>> + >>> sampler = LoadBalancingDistributedSampler(dataset, complexity_fn=complexity_fn) + >>> batch_sampler = LoadBalancingDistributedBatchSampler(sampler, batch_fn=batch_fn) + >>> loader = torch.utils.data.DataLoader(dataset, batch_sampler=batch_sampler) + >>> + >>> for epoch in range(start_epoch, n_epochs): + ... batch_sampler.set_epoch(epoch) + ... 
train(loader) + """ + + def __init__( + self, + sampler: LoadBalancingDistributedSampler, + batch_fn, + drop_last: bool = False, + ) -> None: + if not isinstance(sampler, LoadBalancingDistributedSampler): + raise ValueError( + "sampler should be of LoadBalancingDistributedSampler type." + ) + + if sampler.drop_last: + raise ValueError("drop_last of sampler should be False") + + self.sampler = sampler + self.batch_fn = batch_fn + self.drop_last = drop_last + + self.world_size = self.sampler.world_size + self.rank = self.sampler.rank + + self.generate_batches() + + def generate_batches(self): + index_chunks, chunk_indices = self.sampler._sort_shard_and_shuffle_dataset() + + batches = [] + for rank in range(self.world_size): + sub_indices = [index_chunks[i][rank] for i in chunk_indices] + batches.append(self.batch_fn(sub_indices)) + + self.total_batch = ( + max([len(b) for b in batches]) + if not self.drop_last + else min([len(b) for b in batches]) + ) + + # here {len(batches[self.rank]) - self.total_batch} batches dropped for + # rank {self.rank} + if self.total_batch < len(batches[self.rank]): + pass + + self.padded_batches = [ + batch + batch[: self.total_batch - len(batch)] for batch in batches + ] + + def __iter__(self): + return iter(self.padded_batches[self.rank]) + + def __len__(self): + return self.total_batch + + def set_epoch(self, epoch: int) -> None: + r""" + Sets the epoch for this sampler. When :attr:`shuffle=True`, this ensures all replicas + use a different random ordering for each epoch. Otherwise, the next iteration of this + sampler will yield the same ordering. + Args: + epoch (int): Epoch number. + """ + self.sampler.set_epoch(epoch) + self.generate_batches() diff --git a/orttraining/orttraining/test/gradient/gradient_ops_test.cc b/orttraining/orttraining/test/gradient/gradient_ops_test.cc index 3b7818a25262b..05c27198df182 100644 --- a/orttraining/orttraining/test/gradient/gradient_ops_test.cc +++ b/orttraining/orttraining/test/gradient/gradient_ops_test.cc @@ -2760,6 +2760,60 @@ TEST(GradientCheckerTest, ScatterNDGrad) { } } +TEST(GradientCheckerTest, ScatterElementsGrad) { + float max_error; + GradientChecker gradient_checker; + OpDef op_def{"ScatterElements", kOnnxDomain, 13}; + + { // without axis + TensorInfo data_info({3, 3}, true); + TensorInfo indices_info({2, 3}, false, nullptr, DataTypeImpl::GetTensorType()); + TensorInfo updates_info({2, 3}, true); + std::vector> input_datas = {{ 0.0f, 0.0f, 0.0f, 0.0f, 0.0f, + 0.0f, 0.0f, 0.0f, 0.0f}, + {1, 0, 2, 0, 2, 1}, + {1.0f, 1.1f, 1.2f, 2.0f, 2.1f, 2.2f}}; + + TensorInfo output_info({3, 3}, true); + + ASSERT_STATUS_OK(gradient_checker.ComputeGradientError(op_def, {data_info, indices_info, updates_info}, + {output_info}, &max_error, input_datas)); + EXPECT_IS_TINY(max_error); + } + + { // with axis + TensorInfo data_info({1, 5}, true); + TensorInfo indices_info({1, 2}, false, nullptr, DataTypeImpl::GetTensorType()); + TensorInfo updates_info({1, 2}, true); + std::vector> input_datas = {{1.0f, 2.0f, 3.0f, 4.0f, 5.0f}, + {1, 3}, + {1.1f, 2.1f}}; + + TensorInfo output_info({1, 5}, true); + + ASSERT_STATUS_OK(gradient_checker.ComputeGradientError(op_def, {data_info, indices_info, updates_info}, + {output_info}, &max_error, input_datas, + {MakeAttribute("axis", static_cast(1))})); + EXPECT_IS_TINY(max_error); + } + + { // with -ve axis + TensorInfo data_info({1, 5}, true); + TensorInfo indices_info({1, 2}, false, nullptr, DataTypeImpl::GetTensorType()); + TensorInfo updates_info({1, 2}, true); + std::vector> input_datas = 
{{1.0f, 2.0f, 3.0f, 4.0f, 5.0f}, + {1, 3}, + {1.1f, 2.1f}}; + + TensorInfo output_info({1, 5}, true); + + ASSERT_STATUS_OK(gradient_checker.ComputeGradientError(op_def, {data_info, indices_info, updates_info}, + {output_info}, &max_error, input_datas, + {MakeAttribute("axis", static_cast(-1))})); + EXPECT_IS_TINY(max_error); + } +} + TEST(GradientCheckerTest, TriluGrad) { float max_error; GradientChecker gradient_checker; diff --git a/orttraining/orttraining/test/python/orttraining_ortmodule_tests.py b/orttraining/orttraining/test/python/orttraining_ortmodule_tests.py index 5d3fea91e883c..abef6bf44725a 100644 --- a/orttraining/orttraining/test/python/orttraining_ortmodule_tests.py +++ b/orttraining/orttraining/test/python/orttraining_ortmodule_tests.py @@ -121,6 +121,14 @@ def run_ortmodule_experimental_json_config_tests(cwd, log): run_subprocess(command, cwd=cwd, log=log).check_returncode() +def run_data_sampler_tests(cwd, log): + log.debug('Running: Data sampler tests') + + command = [sys.executable, '-m', 'pytest', '-sv', 'orttraining_test_sampler.py'] + + run_subprocess(command, cwd=cwd, log=log).check_returncode() + + def main(): args = parse_arguments() cwd = args.cwd @@ -149,7 +157,9 @@ def main(): run_ortmodule_fallback_tests(cwd, log, args.transformers_cache) - run_ortmodule_hierarchical_ortmodule_tests(cwd, log,) + run_ortmodule_hierarchical_ortmodule_tests(cwd, log) + + run_data_sampler_tests(cwd, log) return 0 diff --git a/orttraining/orttraining/test/python/orttraining_test_sampler.py b/orttraining/orttraining/test/python/orttraining_test_sampler.py new file mode 100644 index 0000000000000..32346c7b614b6 --- /dev/null +++ b/orttraining/orttraining/test/python/orttraining_test_sampler.py @@ -0,0 +1,179 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. 
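Before the unit tests, here is an end-to-end usage sketch combining the two new sampler classes with a torch DataLoader, in the spirit of the docstrings above (single process for brevity, so world_size and rank are passed explicitly; batch size and sequence lengths are arbitrary):

    import torch
    from onnxruntime.training.utils.data import (
        LoadBalancingDistributedSampler,
        LoadBalancingDistributedBatchSampler,
    )

    # 100 variable-length samples; complexity is simply the sequence length.
    dataset = [torch.randn(torch.randint(5, 50, (1,)).item()) for _ in range(100)]
    complexity_fn = lambda sample: len(sample)

    sampler = LoadBalancingDistributedSampler(
        dataset, complexity_fn=complexity_fn, world_size=1, rank=0, shuffle=True, group_size=8)

    def batch_fn(indices):                  # List[int] -> List[List[int]]; fixed batch size of 16 here
        return [indices[i:i + 16] for i in range(0, len(indices), 16)]

    batch_sampler = LoadBalancingDistributedBatchSampler(sampler, batch_fn)
    loader = torch.utils.data.DataLoader(dataset, batch_sampler=batch_sampler, collate_fn=list)

    for epoch in range(2):
        batch_sampler.set_epoch(epoch)      # needed for a fresh shuffle every epoch
        for batch in loader:                # batch is a list of tensors with similar lengths
            pass                            # train_step(batch)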
+# orttraining_test_sampler.py + +import torch +from onnxruntime.training.utils.data import sampler +import random + +class MyDataset(torch.utils.data.Dataset): + def __init__(self, samples): + self.samples = samples + + def __getitem__(self, index): + return self.samples[index] + + def __len__(self): + return len(self.samples) + + +def test_load_balancing_data_sampler_balances_load(): + samples_and_complexities = \ + [(torch.FloatTensor([val]), torch.randint(0, 100, (1,)).item()) for val in range(100)] + dataset = MyDataset(samples_and_complexities) + + def complexity_fn(sample): + return sample[1] + + data_sampler0 = sampler.LoadBalancingDistributedSampler( + dataset, + complexity_fn=complexity_fn, + world_size=2, + rank=0, + shuffle=False) + data_sampler1 = sampler.LoadBalancingDistributedSampler( + dataset, + complexity_fn=complexity_fn, + world_size=2, + rank=1, + shuffle=False) + + largest_complexity = -1 + for index in data_sampler0: + assert samples_and_complexities[index][1] >= largest_complexity + largest_complexity = samples_and_complexities[index][1] + + largest_complexity = -1 + for index in data_sampler1: + assert samples_and_complexities[index][1] >= largest_complexity + largest_complexity = samples_and_complexities[index][1] + +def test_load_balancing_data_sampler_shuffles_and_balances_load(): + complexities = [] + for i in range(50): + c = torch.randint(0, 100, (1,)).item() + complexities.append(c) + complexities.append(c) + random.shuffle(complexities) + + samples = \ + [torch.FloatTensor([val]) for val in range(100)] + samples_and_complexities = list(zip(samples, complexities)) + dataset = MyDataset(samples_and_complexities) + + def complexity_fn(sample): + return sample[1] + + data_sampler0 = sampler.LoadBalancingDistributedSampler( + dataset, + complexity_fn=complexity_fn, + world_size=2, + rank=0, + shuffle=True) + data_sampler1 = sampler.LoadBalancingDistributedSampler( + dataset, + complexity_fn=complexity_fn, + world_size=2, + rank=1, + shuffle=True) + + for index0, index1 in zip(data_sampler0, data_sampler1): + assert samples_and_complexities[index0][1] == \ + samples_and_complexities[index1][1] + +def test_load_balancing_data_sampler_sorts_in_groups(): + samples_and_complexities = \ + [(torch.FloatTensor([val]), torch.randint(0, 100, (1,)).item()) for val in range(100)] + dataset = MyDataset(samples_and_complexities) + + def complexity_fn(sample): + return sample[1] + + group_size = 8 + samples_and_complexities_sorted = samples_and_complexities.copy() + for begin_index in range(0, len(samples_and_complexities), group_size): + end_index = min(begin_index+group_size, len(samples_and_complexities)) + samples_and_complexities_sorted[begin_index:end_index] = sorted(samples_and_complexities_sorted[begin_index:end_index], key=lambda x: x[1]) + + data_sampler = sampler.LoadBalancingDistributedSampler( + dataset, + complexity_fn=complexity_fn, + world_size=1, + rank=0, + shuffle=False, + group_size=8) + + for index, sorted_sample in zip(data_sampler, samples_and_complexities_sorted): + assert samples_and_complexities[index][1] == sorted_sample[1] + +def test_load_balancing_data_sampler_sorts_and_shuffles_in_groups(): + samples_and_complexities = \ + [(torch.FloatTensor([val]), torch.randint(0, 100, (1,)).item()) for val in range(100)] + dataset = MyDataset(samples_and_complexities) + + def complexity_fn(sample): + return sample[1] + + group_size = 8 + samples_and_complexities_sorted = samples_and_complexities.copy() + for begin_index in range(0, 
len(samples_and_complexities), group_size): + end_index = min(begin_index+group_size, len(samples_and_complexities)) + samples_and_complexities_sorted[begin_index:end_index] = \ + sorted(samples_and_complexities_sorted[begin_index:end_index], key=lambda x: x[1]) + + samples_and_complexities_sorted_and_shuffled = samples_and_complexities_sorted.copy() + shuffled_group_order = torch.randperm(( + len(samples_and_complexities)+group_size-1) // group_size, + generator=torch.Generator().manual_seed(0)).tolist() + end = 0 + for group_index in shuffled_group_order: + original_begin = group_index*group_size + original_end = min(original_begin+group_size, len(samples_and_complexities)) + begin = end + end = begin + (original_end-original_begin) + samples_and_complexities_sorted_and_shuffled[begin:end] = \ + samples_and_complexities_sorted[original_begin:original_end] + + data_sampler = sampler.LoadBalancingDistributedSampler( + dataset, + complexity_fn=complexity_fn, + world_size=1, + rank=0, + shuffle=True, + group_size=8) + + for index, sorted_and_shuffled_sample in zip(data_sampler, samples_and_complexities_sorted_and_shuffled): + assert samples_and_complexities[index][1] == sorted_and_shuffled_sample[1] + +def test_load_balancing_batch_sampler_uses_data_sampler(): + samples_and_complexities = \ + [(torch.FloatTensor([val]), torch.randint(0, 100, (1,)).item()) for val in range(100)] + dataset = MyDataset(samples_and_complexities) + + def complexity_fn(sample): + return sample[1] + + data_sampler = sampler.LoadBalancingDistributedSampler( + dataset, + complexity_fn=complexity_fn, + world_size=1, + rank=0, + shuffle=False) + + batch_size = 12 + def batch_fn(indices): + nonlocal batch_size + batches = [] + for batch_index_begin in range(0, len(indices), batch_size): + batch_index_end = min(batch_index_begin+batch_size, len(indices)) + batches.append(indices[batch_index_begin:batch_index_end]) + return batches + + batch_sampler = sampler.LoadBalancingDistributedBatchSampler( + data_sampler, + batch_fn + ) + + for batch in batch_sampler: + assert len(batch) == batch_size or \ + len(batch) == len(samples_and_complexities) % batch_size diff --git a/orttraining/orttraining/test/training_ops/cpu/reduction/reduction_ops_test.cc b/orttraining/orttraining/test/training_ops/cpu/reduction/reduction_ops_test.cc index eff6cd567cfcd..46d775173cdc3 100644 --- a/orttraining/orttraining/test/training_ops/cpu/reduction/reduction_ops_test.cc +++ b/orttraining/orttraining/test/training_ops/cpu/reduction/reduction_ops_test.cc @@ -6,6 +6,7 @@ #include #include "gtest/gtest.h" #include "test/providers/provider_test_utils.h" +#include "test/common/cuda_op_test_utils.h" namespace onnxruntime { namespace test { @@ -163,6 +164,106 @@ TEST_P(ReductionOpTest, ReduceAllL2HalfFloat) { } #endif +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST_P(ReductionOpTest, ReduceAllL2_BFloat16_BFloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester test("ReduceAllL2", 1, onnxruntime::kMSDomain, true); + test.SetDeterminism(GetParam()); + + std::vector data0 = {1.0f, 2.0f, 3.0f}; + std::vector data0_bf16 = FloatsToBFloat16s(data0); + + std::vector data1 = {-1.0f, -2.0f}; + std::vector data1_bf16 = FloatsToBFloat16s(data1); + + std::vector result = {4.358898943540674f}; + std::vector result_bf16 = FloatsToBFloat16s(result); + + test.AddInput("data0", {3}, data0_bf16); + test.AddInput("data1", 
{2}, data1_bf16); + + test.AddOutput("reduced", {}, result_bf16); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST_P(ReductionOpTest, ReduceAllL2_BFloat16_Float) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester test("ReduceAllL2", 1, onnxruntime::kMSDomain, true); + test.SetDeterminism(GetParam()); + + std::vector data0 = {1.0f, 2.0f, 3.0f}; + std::vector data0_bf16 = FloatsToBFloat16s(data0); + + std::vector data1 = {-1.0f, -2.0f}; + std::vector data1_bf16 = FloatsToBFloat16s(data1); + + std::vector result = {4.358898943540674f}; + + test.AddInput("data0", {3}, data0_bf16); + test.AddInput("data1", {2}, data1_bf16); + + test.AddOutput("reduced", {}, result); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST_P(ReductionOpTest, ReduceAllL2_Float_BFloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + OpTester test("ReduceAllL2", 1, onnxruntime::kMSDomain, true); + test.SetDeterminism(GetParam()); + + std::vector data0 = {1.0f, 2.0f, 3.0f}; + std::vector data1 = {-1.0f, -2.0f}; + + std::vector result = {4.358898943540674f}; + std::vector result_bf16 = FloatsToBFloat16s(result); + + test.AddInput("data0", {3}, data0); + test.AddInput("data1", {2}, data1); + + test.AddOutput("reduced", {}, result_bf16); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} +#endif + void TestMultiTensorReduce( const int tensor_count, const int min_tensor_size, diff --git a/orttraining/orttraining/test/training_ops/cuda/mixed_precision_scale_test.cc b/orttraining/orttraining/test/training_ops/cuda/mixed_precision_scale_test.cc index f2b515a01741f..7e1f1558ed783 100644 --- a/orttraining/orttraining/test/training_ops/cuda/mixed_precision_scale_test.cc +++ b/orttraining/orttraining/test/training_ops/cuda/mixed_precision_scale_test.cc @@ -3,6 +3,7 @@ #include "test/common/tensor_op_test_utils.h" #include "test/providers/provider_test_utils.h" +#include "test/common/cuda_op_test_utils.h" namespace onnxruntime { namespace test { @@ -18,6 +19,16 @@ struct MixedPrecisionScaleInputOutput { output2_half.resize(output2.size()); ConvertFloatToMLFloat16(input2.data(), input2_half.data(), int(input2.size())); ConvertFloatToMLFloat16(output2.data(), output2_half.data(), int(output2.size())); + + input1_bf16.resize(input1.size()); + output1_bf16.resize(output1.size()); + input1_bf16 = FloatsToBFloat16s(input1); + output1_bf16 = FloatsToBFloat16s(output1); + + input2_bf16.resize(input2.size()); + output2_bf16.resize(output2.size()); + input2_bf16 = FloatsToBFloat16s(input2); + 
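For reference, the expected value 4.358898943540674 used by the ReduceAllL2 BFloat16 tests above is simply the L2 norm over both input tensors taken together; a quick check:

    import math

    data0 = [1.0, 2.0, 3.0]
    data1 = [-1.0, -2.0]
    print(math.sqrt(sum(x * x for x in data0 + data1)))   # 4.358898943540674, i.e. sqrt(19)
    # The *_bf16 variants in the tests are just this value rounded to bfloat16 via FloatsToBFloat16s.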
output2_bf16 = FloatsToBFloat16s(output2); } // Fp32 Inputs/Output @@ -32,6 +43,12 @@ struct MixedPrecisionScaleInputOutput { std::vector input2_half; std::vector output1_half; std::vector output2_half; + + // BF16 Inputs/Output + std::vector input1_bf16; + std::vector input2_bf16; + std::vector output1_bf16; + std::vector output2_bf16; }; TEST(CudaKernelTest, MixedPrecisionScaleF2F) { @@ -130,5 +147,127 @@ TEST(CudaKernelTest, MixedPrecisionScaleH2H) { test.Run(); } +#if defined(USE_CUDA) || defined(USE_ROCM) +TEST(CudaKernelTest, MixedPrecisionScale_bfloat16_bfloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + MixedPrecisionScaleInputOutput data; + OpTester test("MixedPrecisionScale", 1, onnxruntime::kMSDomain); + test.AddAttribute("to", int64_t(ONNX_TENSOR_ELEMENT_DATA_TYPE_BFLOAT16)); + test.AddInput("scale", {1}, data.scale); + test.AddInput("input1", {3}, data.input1_bf16); + test.AddOutput("output1", {3}, data.output1_bf16); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST(CudaKernelTest, MixedPrecisionScale_float_bfloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + MixedPrecisionScaleInputOutput data; + OpTester test("MixedPrecisionScale", 1, onnxruntime::kMSDomain); + test.AddAttribute("to", int64_t(ONNX_TENSOR_ELEMENT_DATA_TYPE_BFLOAT16)); + test.AddInput("scale", {1}, data.scale); + test.AddInput("input1", {3}, data.input1); + test.AddOutput("output1", {3}, data.output1_bf16); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST(CudaKernelTest, MixedPrecisionScale_bfloat16_float) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + MixedPrecisionScaleInputOutput data; + OpTester test("MixedPrecisionScale", 1, onnxruntime::kMSDomain); + test.AddAttribute("to", int64_t(ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT)); + test.AddInput("scale", {1}, data.scale); + test.AddInput("input1", {3}, data.input1_bf16); + test.AddOutput("output1", {3}, data.output1); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST(CudaKernelTest, MixedPrecisionScale_half_bfloat16) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + MixedPrecisionScaleInputOutput data; + OpTester test("MixedPrecisionScale", 1, onnxruntime::kMSDomain); + test.AddAttribute("to", 
int64_t(ONNX_TENSOR_ELEMENT_DATA_TYPE_BFLOAT16)); + test.AddInput("scale", {1}, data.scale); + test.AddInput("input1", {3}, data.input1_half); + test.AddOutput("output1", {3}, data.output1_bf16); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} + +TEST(CudaKernelTest, MixedPrecisionScale_bfloat16_half) { +#ifdef USE_CUDA + int min_cuda_architecture = 530; + if (!HasCudaEnvironment(min_cuda_architecture)) { + LOGS_DEFAULT(WARNING) << "Hardware NOT support BFP16"; + return; + } +#endif + MixedPrecisionScaleInputOutput data; + OpTester test("MixedPrecisionScale", 1, onnxruntime::kMSDomain); + test.AddAttribute("to", int64_t(ONNX_TENSOR_ELEMENT_DATA_TYPE_FLOAT16)); + test.AddInput("scale", {1}, data.scale); + test.AddInput("input1", {3}, data.input1_bf16); + test.AddOutput("output1", {3}, data.output1_half); + + std::vector> execution_providers; +#ifdef USE_CUDA + execution_providers.push_back(DefaultCudaExecutionProvider()); +#elif USE_ROCM + execution_providers.push_back(DefaultRocmExecutionProvider()); +#endif + test.Run(OpTester::ExpectResult::kExpectSuccess, "", {}, nullptr, &execution_providers); +} +#endif + } // namespace test } // namespace onnxruntime \ No newline at end of file diff --git a/orttraining/orttraining/training_ops/cpu/math/scale.cc b/orttraining/orttraining/training_ops/cpu/math/scale.cc index 42e3efe894b67..552fbc67540c3 100644 --- a/orttraining/orttraining/training_ops/cpu/math/scale.cc +++ b/orttraining/orttraining/training_ops/cpu/math/scale.cc @@ -2,8 +2,8 @@ // Licensed under the MIT License. #include "orttraining/training_ops/cpu/math/scale.h" +#include "core/framework/math.h" #include "core/providers/common.h" -#include "core/util/math_cpuonly.h" namespace onnxruntime { namespace contrib { diff --git a/orttraining/orttraining/training_ops/cpu/nn/dropout_7.cc b/orttraining/orttraining/training_ops/cpu/nn/dropout_7.cc index ed126eb2607dd..4aea989c9f003 100644 --- a/orttraining/orttraining/training_ops/cpu/nn/dropout_7.cc +++ b/orttraining/orttraining/training_ops/cpu/nn/dropout_7.cc @@ -2,7 +2,7 @@ // Licensed under the MIT License. 
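The new MixedPrecisionScale cases above all exercise the same elementwise computation with different input/output element types. A rough PyTorch sketch of that behaviour, under the assumption (consistent with the existing fp32/fp16 tests) that the op multiplies by a scalar scale and casts to the type named by the 'to' attribute:

    import torch

    def mixed_precision_scale(x, scale, to=torch.bfloat16):
        # Assumed semantics: out = cast(scale * x, to); scale is a single-element value.
        return (x.float() * scale).to(to)

    x = torch.tensor([1.0, 2.0, 3.0])
    print(mixed_precision_scale(x, 0.1))                          # bfloat16 output
    print(mixed_precision_scale(x.half(), 0.1, torch.float32))    # fp16 input, fp32 output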
#include "orttraining/training_ops/cpu/nn/dropout_7.h" -#include "core/util/math_cpuonly.h" +#include "core/framework/math.h" namespace onnxruntime { diff --git a/orttraining/orttraining/training_ops/cpu/op_gradients.cc b/orttraining/orttraining/training_ops/cpu/op_gradients.cc index a5ef415374907..e25acd6ea0e7d 100644 --- a/orttraining/orttraining/training_ops/cpu/op_gradients.cc +++ b/orttraining/orttraining/training_ops/cpu/op_gradients.cc @@ -143,7 +143,7 @@ Status SoftmaxGrad::Compute(OpKernelContext* context) const { math::Exp(nd, Ydata, eYdata, nullptr); for (size_t i = 0; i < N; ++i) { float sdY; - math::Sum(d, dYdata + i * d, &sdY, nullptr, nullptr); + math::Sum(d, dYdata + i * d, &sdY, nullptr); math::Axpy(d, -sdY, eYdata + i * d, dXdata + i * d, nullptr); } } else { diff --git a/orttraining/orttraining/training_ops/cuda/activation/bias_gelu_grad_impl.cu b/orttraining/orttraining/training_ops/cuda/activation/bias_gelu_grad_impl.cu index 67c95872a9b53..03824c344b1aa 100644 --- a/orttraining/orttraining/training_ops/cuda/activation/bias_gelu_grad_impl.cu +++ b/orttraining/orttraining/training_ops/cuda/activation/bias_gelu_grad_impl.cu @@ -62,9 +62,16 @@ void LaunchBiasGeluGradDxKernel( // given a 2D grid of blocks: // each grid row handles bias_size elements // there are input_size / bias_size rows - constexpr int num_elements_per_thread = GridDim::maxElementsPerThread; - const int num_threads_per_block = - std::min(static_cast(CeilDiv(bias_size, num_elements_per_thread)), static_cast(GridDim::maxThreadsPerBlock)); + + const int num_elements_per_thread = GridDim::maxElementsPerThread; + int max_threads_per_block = GridDim::maxThreadsPerBlock; + #ifdef USE_ROCM + // Optimization for ROCm MI100 + max_threads_per_block = 512; + #endif + + int num_threads_per_block = + std::min(static_cast(CeilDiv(bias_size, num_elements_per_thread)), static_cast(max_threads_per_block)); const auto grid_width = CeilDiv(bias_size, num_elements_per_thread * num_threads_per_block); const auto grid_height = input_size / bias_size; diff --git a/orttraining/orttraining/training_ops/cuda/communication/nccl_service.cc b/orttraining/orttraining/training_ops/cuda/communication/nccl_service.cc index 2628e47c5ec3a..fb9c0e49a3b6a 100644 --- a/orttraining/orttraining/training_ops/cuda/communication/nccl_service.cc +++ b/orttraining/orttraining/training_ops/cuda/communication/nccl_service.cc @@ -5,7 +5,7 @@ #include "orttraining/training_ops/cuda/communication/nccl_service.h" #include "core/common/common.h" -#include "core/profile/context.h" +#include "core/providers/cuda/nvtx_profile_context.h" #include "core/providers/cuda/cuda_check_memory.h" #include "core/providers/cuda/cuda_common.h" #include "orttraining/core/framework/communication/mpi/mpi_context.h" diff --git a/orttraining/orttraining/training_ops/cuda/communication/recv.cc b/orttraining/orttraining/training_ops/cuda/communication/recv.cc index 4f230e23154f8..60ab05d9d2e83 100644 --- a/orttraining/orttraining/training_ops/cuda/communication/recv.cc +++ b/orttraining/orttraining/training_ops/cuda/communication/recv.cc @@ -7,7 +7,7 @@ #include "orttraining/training_ops/communication_common.h" #include "orttraining/training_ops/cuda/communication/nccl_service.h" #include "core/providers/cuda/nvtx_profile.h" -#include "core/profile/context.h" +#include "core/providers/cuda/nvtx_profile_context.h" #include "core/providers/cuda/cuda_check_memory.h" #include "core/providers/cuda/cuda_common.h" #include diff --git 
a/orttraining/orttraining/training_ops/cuda/communication/send.cc b/orttraining/orttraining/training_ops/cuda/communication/send.cc index 832472013fd29..d36ef09419905 100644 --- a/orttraining/orttraining/training_ops/cuda/communication/send.cc +++ b/orttraining/orttraining/training_ops/cuda/communication/send.cc @@ -6,8 +6,8 @@ #include "orttraining/training_ops/cuda/communication/send.h" #include "orttraining/training_ops/communication_common.h" #include "orttraining/training_ops/cuda/communication/nccl_service.h" -#include "core/providers/cuda/nvtx_profile.h" -#include "core/profile/context.h" +#include "core/providers/cuda/nvtx_profile.h" +#include "core/providers/cuda/nvtx_profile_context.h" #include "core/providers/cuda/cuda_check_memory.h" #include "core/providers/cuda/cuda_common.h" #include diff --git a/orttraining/orttraining/training_ops/cuda/controlflow/record.cc b/orttraining/orttraining/training_ops/cuda/controlflow/record.cc index b491128111a45..f29dcd13f50cb 100644 --- a/orttraining/orttraining/training_ops/cuda/controlflow/record.cc +++ b/orttraining/orttraining/training_ops/cuda/controlflow/record.cc @@ -6,8 +6,8 @@ // Include event mechanism shared by CPU and GPU implementations. #include "orttraining/training_ops/cpu/controlflow/event_pool.h" #include "orttraining/training_ops/cpu/controlflow/record.h" -#include "core/providers/cuda/nvtx_profile.h" -#include "core/profile/context.h" +#include "core/providers/cuda/nvtx_profile.h" +#include "core/providers/cuda/nvtx_profile_context.h" namespace onnxruntime { namespace cuda { diff --git a/orttraining/orttraining/training_ops/cuda/controlflow/wait.cc b/orttraining/orttraining/training_ops/cuda/controlflow/wait.cc index 75f44ffba7e85..1e466dd8d111a 100644 --- a/orttraining/orttraining/training_ops/cuda/controlflow/wait.cc +++ b/orttraining/orttraining/training_ops/cuda/controlflow/wait.cc @@ -6,8 +6,8 @@ // Include event mechanism shared by CPU and GPU implementations. #include "orttraining/training_ops/cpu/controlflow/event_pool.h" #include "orttraining/training_ops/cpu/controlflow/wait.h" -#include "core/providers/cuda/nvtx_profile.h" -#include "core/profile/context.h" +#include "core/providers/cuda/nvtx_profile.h" +#include "core/providers/cuda/nvtx_profile_context.h" namespace onnxruntime { namespace cuda { diff --git a/orttraining/orttraining/training_ops/cuda/math/softmax_grad.cc b/orttraining/orttraining/training_ops/cuda/math/softmax_grad.cc index 4d4c4bdc76b2e..1a8af5045b50a 100644 --- a/orttraining/orttraining/training_ops/cuda/math/softmax_grad.cc +++ b/orttraining/orttraining/training_ops/cuda/math/softmax_grad.cc @@ -62,28 +62,6 @@ Status SoftMaxGradComputeHelper( return Status::OK(); } -// cudnnSoftmaxForward/Backward doesn't support BFloat16. 
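Going back to the LaunchBiasGeluGradDxKernel change a few hunks above, the effect of capping max_threads_per_block at 512 on ROCm is easiest to see by recomputing the launch shape. GridDim::maxElementsPerThread is a build-time constant that is not shown in this diff, so the value 4 below is only an illustrative assumption:

    def ceil_div(a, b):
        return (a + b - 1) // b

    def launch_shape(input_size, bias_size, max_threads_per_block, elements_per_thread=4):
        threads = min(ceil_div(bias_size, elements_per_thread), max_threads_per_block)
        grid_width = ceil_div(bias_size, elements_per_thread * threads)
        grid_height = input_size // bias_size
        return threads, grid_width, grid_height

    # A hypothetical 4096-wide bias over 8192 rows:
    print(launch_shape(8192 * 4096, 4096, max_threads_per_block=1024))  # (1024, 1, 8192) default cap
    print(launch_shape(8192 * 4096, 4096, max_threads_per_block=512))   # (512, 2, 8192)  ROCm MI100 cap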
-#define SPECIALIZED_SOFTMAXGRAD_HELPER_IMPL_BFloat16(is_log_softmax) \ - template <> \ - Status SoftMaxGradComputeHelper(cudaStream_t stream, const BFloat16* dY, \ - const TensorShape& input_shape, const BFloat16* Y, \ - BFloat16* dX, cudnnHandle_t, int64_t axis) { \ - typedef typename ToCudaType::MappedType CudaT; \ - const int64_t normalized_axis = HandleNegativeAxis(axis, input_shape.NumDimensions()); \ - int64_t N = input_shape.SizeToDimension(normalized_axis); \ - int64_t D = input_shape.SizeFromDimension(normalized_axis); \ - auto dY_data = reinterpret_cast(dY); \ - auto Y_data = reinterpret_cast(Y); \ - auto dX_data = reinterpret_cast(dX); \ - dispatch_softmax_backward, is_log_softmax>( \ - stream, dX_data, dY_data, Y_data, gsl::narrow_cast(D), gsl::narrow_cast(D), \ - gsl::narrow_cast(N)); \ - return Status::OK(); \ - } - -SPECIALIZED_SOFTMAXGRAD_HELPER_IMPL_BFloat16(true) -SPECIALIZED_SOFTMAXGRAD_HELPER_IMPL_BFloat16(false) - #define REGISTER_GRADIENT_KERNEL_TYPED(T) \ ONNX_OPERATOR_TYPED_KERNEL_EX( \ SoftmaxGrad, \ @@ -121,8 +99,8 @@ SPECIALIZED_SOFTMAXGRAD_HELPER_IMPL_BFloat16(false) (*KernelDefBuilder::Create()).TypeConstraint("T", DataTypeImpl::GetTensorType()), \ SoftmaxGrad); - template - Status SoftmaxGrad::ComputeInternal(OpKernelContext* ctx) const { +template +Status SoftmaxGrad::ComputeInternal(OpKernelContext* ctx) const { const Tensor* dY = ctx->Input(0); const TensorShape& input_shape{dY->Shape()}; const Tensor* Y = ctx->Input(1); diff --git a/orttraining/orttraining/training_ops/cuda/nn/layer_norm.cc b/orttraining/orttraining/training_ops/cuda/nn/layer_norm.cc index 15bac8f7d7c6a..4813382211368 100644 --- a/orttraining/orttraining/training_ops/cuda/nn/layer_norm.cc +++ b/orttraining/orttraining/training_ops/cuda/nn/layer_norm.cc @@ -89,7 +89,12 @@ Status LayerNormGrad::ComputeInternal(OpKernelContext* p_op_ke bias_grad_data = reinterpret_cast(bias_grad->template MutableData()); } + #ifndef USE_ROCM const int part_size = 16; + #else + // Optimization for ROCm MI100 + const int part_size = 64; + #endif auto part_grad_gamma = GetScratchBuffer(part_size * n2); auto part_grad_beta = GetScratchBuffer(part_size * n2); @@ -138,7 +143,12 @@ Status InvertibleLayerNormGrad::ComputeInternal(OpKernelContext* p_op_kern auto scale_grad_data = reinterpret_cast(scale_grad->template MutableData()); auto bias_grad_data = reinterpret_cast(bias_grad->template MutableData()); + #ifndef USE_ROCM const int part_size = 16; + #else + // Optimization for ROCm MI100 + const int part_size = 64; + #endif auto part_grad_gamma = GetScratchBuffer(part_size * n2); auto part_grad_beta = GetScratchBuffer(part_size * n2); diff --git a/orttraining/orttraining/training_ops/rocm/math/softmax_grad.cc b/orttraining/orttraining/training_ops/rocm/math/softmax_grad.cc index c4a52cf9a865f..66e137676540c 100644 --- a/orttraining/orttraining/training_ops/rocm/math/softmax_grad.cc +++ b/orttraining/orttraining/training_ops/rocm/math/softmax_grad.cc @@ -181,6 +181,7 @@ Status SoftmaxGrad::ComputeInternal(OpKernelContext* ctx) const { SPECIALIZED_GRADIENT(float) // SPECIALIZED_GRADIENT(double) SPECIALIZED_GRADIENT(MLFloat16) +SPECIALIZED_GRADIENT(BFloat16) } // namespace rocm } // namespace onnxruntime diff --git a/orttraining/orttraining/training_ops/rocm/math/softmax_grad_impl.cu b/orttraining/orttraining/training_ops/rocm/math/softmax_grad_impl.cu index 73343cafc9cea..81a176fc73b77 100644 --- a/orttraining/orttraining/training_ops/rocm/math/softmax_grad_impl.cu +++ 
b/orttraining/orttraining/training_ops/rocm/math/softmax_grad_impl.cu @@ -17,6 +17,7 @@ /* Modifications Copyright (c) Microsoft. */ // The code below is mostly copied from Pytorch PersistentSoftmax.cuh +#include "hip/hip_runtime.h" #include "orttraining/training_ops/rocm/math/softmax_grad.h" @@ -190,6 +191,7 @@ template void dispatch_softmax_backward(hipStrea SPECIALIZED_SOFTMAX_GRAD_IMPL(float, float, float) SPECIALIZED_SOFTMAX_GRAD_IMPL(half, half, float) SPECIALIZED_SOFTMAX_GRAD_IMPL(double, double, double) +SPECIALIZED_SOFTMAX_GRAD_IMPL(BFloat16, BFloat16, float) } } diff --git a/orttraining/orttraining/training_ops/rocm/reduction/reduction_all.cc b/orttraining/orttraining/training_ops/rocm/reduction/reduction_all.cc index d1e7985504ea1..1577dafe9dc2a 100644 --- a/orttraining/orttraining/training_ops/rocm/reduction/reduction_all.cc +++ b/orttraining/orttraining/training_ops/rocm/reduction/reduction_all.cc @@ -104,6 +104,9 @@ REGISTER_REDUCE_ALL_KERNEL_TYPED(ReduceAllL2, float, float) REGISTER_REDUCE_ALL_KERNEL_TYPED(ReduceAllL2, MLFloat16, float) REGISTER_REDUCE_ALL_KERNEL_TYPED(ReduceAllL2, float, MLFloat16) REGISTER_REDUCE_ALL_KERNEL_TYPED(ReduceAllL2, MLFloat16, MLFloat16) +REGISTER_REDUCE_ALL_KERNEL_TYPED(ReduceAllL2, BFloat16, float) +REGISTER_REDUCE_ALL_KERNEL_TYPED(ReduceAllL2, float, BFloat16) +REGISTER_REDUCE_ALL_KERNEL_TYPED(ReduceAllL2, BFloat16, BFloat16) } // namespace rocm } // namespace onnxruntime diff --git a/orttraining/orttraining/training_ops/rocm/rocm_training_kernels.cc b/orttraining/orttraining/training_ops/rocm/rocm_training_kernels.cc index b594c7819690f..65b0bfb070f43 100644 --- a/orttraining/orttraining/training_ops/rocm/rocm_training_kernels.cc +++ b/orttraining/orttraining/training_ops/rocm/rocm_training_kernels.cc @@ -57,12 +57,16 @@ class ONNX_OPERATOR_VERSIONED_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider class ONNX_OPERATOR_VERSIONED_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kOnnxDomain, 12, 12, float, int64_t, SoftmaxCrossEntropyLoss); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kOnnxDomain, 13, MLFloat16, int64_t, SoftmaxCrossEntropyLoss); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kOnnxDomain, 13, float, int64_t, SoftmaxCrossEntropyLoss); +class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kOnnxDomain, 13, BFloat16, int64_t, SoftmaxCrossEntropyLoss); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, float, int64_t, SoftmaxCrossEntropyLossGrad); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, MLFloat16, int64_t, SoftmaxCrossEntropyLossGrad); +class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16, int64_t, SoftmaxCrossEntropyLossGrad); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, float, int64_t, SoftmaxCrossEntropyLossInternal); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, MLFloat16, int64_t, SoftmaxCrossEntropyLossInternal); +class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16, int64_t, SoftmaxCrossEntropyLossInternal); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, float, int64_t, SoftmaxCrossEntropyLossInternalGrad); class ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, MLFloat16, int64_t, SoftmaxCrossEntropyLossInternalGrad); +class 
ONNX_OPERATOR_TWO_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16, int64_t, SoftmaxCrossEntropyLossInternalGrad); class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, float, SoftmaxGrad); class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, double, SoftmaxGrad); class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, MLFloat16, SoftmaxGrad); @@ -167,6 +171,15 @@ class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1 class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, MLFloat16_MLFloat16_float, BatchNormInternal); class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, MLFloat16_float_float, BatchNormInternal); +class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16, SoftmaxGrad); +class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16, SoftmaxGrad_13); +class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16, MixedPrecisionScale); +class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16_float, LayerNormalizationGrad); + +class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16_float, ReduceAllL2); +class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, float_BFloat16, ReduceAllL2); +class ONNX_OPERATOR_TYPED_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, BFloat16_BFloat16, ReduceAllL2); + #if defined(ORT_USE_NCCL) || defined(USE_MPI) // P2P communication operators. class ONNX_OPERATOR_KERNEL_CLASS_NAME(kRocmExecutionProvider, kMSDomain, 1, Send); @@ -263,12 +276,16 @@ Status RegisterRocmTrainingKernels(KernelRegistry& kernel_registry) { BuildKernelCreateInfo, BuildKernelCreateInfo, BuildKernelCreateInfo, + BuildKernelCreateInfo, BuildKernelCreateInfo, BuildKernelCreateInfo, + BuildKernelCreateInfo, BuildKernelCreateInfo, BuildKernelCreateInfo, + BuildKernelCreateInfo, BuildKernelCreateInfo, BuildKernelCreateInfo, + BuildKernelCreateInfo, BuildKernelCreateInfo, // BuildKernelCreateInfo, BuildKernelCreateInfo, @@ -349,6 +366,15 @@ Status RegisterRocmTrainingKernels(KernelRegistry& kernel_registry) { BuildKernelCreateInfo, BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + + BuildKernelCreateInfo, + BuildKernelCreateInfo, + BuildKernelCreateInfo, + // P2P communication operators. 
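The LayerNormGrad / InvertibleLayerNormGrad hunks earlier in this change switch part_size from 16 to 64 on ROCm; the only direct cost is the size of the two partial-reduction scratch buffers, each holding part_size * n2 accumulator elements. A small sketch of the memory impact, with a hypothetical hidden size and assuming 4-byte accumulators:

    def layer_norm_grad_scratch_bytes(part_size, n2, bytes_per_elem=4):
        # part_grad_gamma and part_grad_beta each hold part_size * n2 elements.
        return 2 * part_size * n2 * bytes_per_elem

    n2 = 1024                                        # hidden size, illustrative only
    print(layer_norm_grad_scratch_bytes(16, n2))     # 131072 bytes with the CUDA default
    print(layer_norm_grad_scratch_bytes(64, n2))     # 524288 bytes with the ROCm MI100 setting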
#if defined(ORT_USE_NCCL) || defined(USE_MPI) BuildKernelCreateInfo, diff --git a/setup.py b/setup.py index 98a0f3df3dc49..8a7fc5f935825 100644 --- a/setup.py +++ b/setup.py @@ -397,7 +397,8 @@ def run(self): 'onnxruntime.training.ortmodule.torch_cpp_extensions.cpu.aten_op_executor', 'onnxruntime.training.ortmodule.torch_cpp_extensions.cpu.torch_interop_utils', 'onnxruntime.training.ortmodule.torch_cpp_extensions.cuda.torch_gpu_allocator', - 'onnxruntime.training.ortmodule.torch_cpp_extensions.cuda.fused_ops']) + 'onnxruntime.training.ortmodule.torch_cpp_extensions.cuda.fused_ops', + 'onnxruntime.training.utils.data']) package_data['onnxruntime.training.ortmodule.torch_cpp_extensions.cpu.aten_op_executor'] = ['*.cc'] package_data['onnxruntime.training.ortmodule.torch_cpp_extensions.cpu.torch_interop_utils'] = ['*.cc'] package_data['onnxruntime.training.ortmodule.torch_cpp_extensions.cuda.torch_gpu_allocator'] = ['*.cc'] diff --git a/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml index a51fe2d60b270..644e5b40bf7a2 100644 --- a/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/linux-cpu-minimal-build-ci-pipeline.yml @@ -171,7 +171,7 @@ jobs: --build_shared_lib \ --parallel \ --minimal_build extended \ - --cmake_extra_defines onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD=ON + --cmake_extra_defines onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD=ON workingDirectory: $(Build.SourcesDirectory) - task: CmdLine@2 @@ -259,7 +259,7 @@ jobs: --include_ops_by_config /home/onnxruntimedev/.test_data/include_no_operators.config \ --cmake_extra_defines onnxruntime_DISABLE_SPARSE_TENSORS=ON \ onnxruntime_DISABLE_OPTIONAL_TYPE=ON \ - onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_REPLAY_IN_MINIMAL_BUILD=OFF \ + onnxruntime_ENABLE_RUNTIME_OPTIMIZATION_IN_MINIMAL_BUILD=OFF \ onnxruntime_BUILD_UNIT_TESTS=OFF workingDirectory: $(Build.SourcesDirectory) diff --git a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-anubis-pipeline.yaml b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-anubis-pipeline.yaml new file mode 100644 index 0000000000000..2763f597b9d11 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-anubis-pipeline.yaml @@ -0,0 +1,6 @@ +jobs: +- template: linux-gpu-tensorrt-daily-perf-pipeline.yml + parameters: + PostToDashboard: 'false' + ModelGroups: [] + PublishWheel: 'true' \ No newline at end of file diff --git a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-daily-perf-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-daily-perf-pipeline.yml index 1282b6e669fe6..a7caf2b764cef 100644 --- a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-daily-perf-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-daily-perf-pipeline.yml @@ -172,7 +172,7 @@ jobs: scriptType: bash inlineScript: | short_hash=$(git rev-parse --short HEAD^) && - python3 $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/post.py -r $(Build.SourcesDirectory)/Artifact/result -c $short_hash -u "https://dev.azure.com/onnxruntime/onnxruntime/_build/results?buildId=$(Build.BuildId)" -t ${{ parameters.TrtVersion }} -b $(branch) + python3 $(Build.SourcesDirectory)/onnxruntime/python/tools/tensorrt/perf/post.py -r $(Build.SourcesDirectory)/Artifact/result -c $short_hash -u "$(reportUrl)?buildId=$(Build.BuildId)" -t ${{ 
parameters.TrtVersion }} -b $(branch) - template: templates/component-governance-component-detection-steps.yml diff --git a/tools/ci_build/github/azure-pipelines/mac-react-native-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/mac-react-native-ci-pipeline.yml new file mode 100644 index 0000000000000..c234809b9e899 --- /dev/null +++ b/tools/ci_build/github/azure-pipelines/mac-react-native-ci-pipeline.yml @@ -0,0 +1,297 @@ +parameters: +- name: NpmPublish + displayName: 'NPM packages publish configuration' + type: string + values: + - 'nightly (@dev)' + - 'release candidate (@rc)' + - 'production (@latest)' + - 'custom' + default: 'nightly (@dev)' + +variables: + build_config: Release + ${{ if eq(parameters.NpmPublish, 'nightly (@dev)') }}: + # 'dev' will skip generating onnxruntime-common package when it's not updated + # since react-native e2e always requires both onnxruntime-common and onnxruntime-react-native packages, + # use 'custom' type mode here. + npm_packaging_mode: '-dev.$(Get-Date -Format yyyyMMdd)-$(git rev-parse --short HEAD)' + ${{ if eq(parameters.NpmPublish, 'release candidate (@rc)') }}: + npm_packaging_mode: 'rc' + ${{ if eq(parameters.NpmPublish, 'production (@latest)') }}: + npm_packaging_mode: 'release' + ${{ if eq(parameters.NpmPublish, 'custom') }}: + npm_packaging_mode: '$(VersionSuffix)' + +jobs: +- template: templates/android-java-api-aar.yml + parameters: + buildConfig: '${{variables.build_config}}' + buildSettings: '$(Build.SourcesDirectory)/tools/ci_build/github/js/react_native_e2e_mobile_aar_build_settings.json' + includedOpsConfig: '$(Build.SourcesDirectory)/tools/ci_build/github/android/mobile_package.required_operators.config' + artifactName: 'onnxruntime-android-mobile-aar' + job_name_suffix: 'For_React_Native' + pool_name: 'Linux-CPU-2019' + +- job: ReactNative_CI + pool: + vmImage: 'macOS-11' + dependsOn: + - Android_Java_API_AAR_Packaging_For_React_Native + timeoutInMinutes: 120 + steps: + # Onnx has no 3.9 python package available yet, need to use python 3.8 to avoid build onnx package + # pythonVersion can be updated in Azure pipeline settings + # https://dev.azure.com/onnxruntime/onnxruntime/_build?definitionId=188 + - task: UsePythonVersion@0 + displayName: Use Python $(pythonVersion) + inputs: + versionSpec: $(pythonVersion) + + - task: NodeTool@0 + inputs: + versionSpec: '16.x' + + - script: + brew install coreutils ninja npm yarn + displayName: Install coreutils, ninja, npm, and yarn + + - script: + /bin/bash $(Build.SourcesDirectory)/tools/ci_build/github/android/setup_gradle_wrapper.sh $(pwd) + displayName: Setup gradle wrapper to use gradle 6.8.3 + + - script: | + python3 -m pip install -q flatbuffers + workingDirectory: '$(Build.BinariesDirectory)' + displayName: Install python modules + + - script: | + python3 $(Build.SourcesDirectory)/tools/ci_build/github/apple/build_ios_framework.py \ + --config ${{variables.build_config}} \ + --build_dir $(Build.BinariesDirectory)/ios_framework \ + --include_ops_by_config $(Build.SourcesDirectory)/tools/ci_build/github/android/mobile_package.required_operators.config \ + $(Build.SourcesDirectory)/tools/ci_build/github/js/react_native_e2e_mobile_ios_framework_build_settings.json + cd $(Build.BinariesDirectory)/ios_framework/framework_out + zip -r onnxruntime-mobile-c.zip . 
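The npm_packaging_mode variable defined at the top of this new pipeline, and the release/rc/dev modes documented in pack-npm-packages.ps1 further below, all derive the published package version from the repository's VERSION_NUMBER plus an optional date/commit suffix. A rough Python sketch of that mapping follows; it assumes the custom suffix is appended verbatim to the base version, and the base version shown is only an example.

```python
# Sketch only: mirrors the suffix patterns used by the pipeline variables above
# and by pack-npm-packages.ps1; function name and return format are assumptions.
import subprocess
from datetime import datetime, timezone

def npm_package_version(version_base: str, mode: str) -> str:
    stamp = datetime.now(timezone.utc).strftime("%Y%m%d")
    commit = subprocess.check_output(
        ["git", "rev-parse", "--short", "HEAD"], text=True).strip()
    if mode == "release":               # production (@latest): VERSION_NUMBER as-is
        return version_base
    if mode == "rc":                    # release candidate (@rc)
        return f"{version_base}-rc.{stamp}-{commit}"
    if mode == "dev":                   # nightly (@dev)
        return f"{version_base}-dev.{stamp}-{commit}"
    return f"{version_base}{mode}"      # custom: caller supplies the whole suffix

print(npm_package_version("1.12.0", "dev"))  # e.g. 1.12.0-dev.20220413-0123abc
```

This is also why the nightly react-native run passes a pre-built "-dev.<date>-<commit>" suffix through the custom path rather than using 'dev' mode directly: as the comment above notes, 'dev' mode may skip regenerating onnxruntime-common, while the e2e tests always need both packages.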
+ displayName: Build iOS package + + - task: DownloadPipelineArtifact@2 + inputs: + buildType: 'current' + artifactName: 'onnxruntime-android-mobile-aar' + targetPath: '$(Build.BinariesDirectory)/android-mobile-aar' + displayName: Download Android Aar artifacts + + - task: CopyFiles@2 + inputs: + sourceFolder: $(Build.BinariesDirectory)/android-mobile-aar + contents: onnxruntime-mobile-*.aar + targetFolder: $(Build.SourcesDirectory)/js/react_native/android/libs + displayName: Copy Android package to React Native directory + + - task: CopyFiles@2 + inputs: + sourceFolder: $(Build.BinariesDirectory)/ios_framework/framework_out + contents: onnxruntime-mobile-c.zip + targetFolder: $(Build.SourcesDirectory)/js/react_native/local_pods + displayName: Copy iOS package to React Native directory + + - script: | + npm ci + workingDirectory: '$(Build.SourcesDirectory)/js' + displayName: npm ci js + + - script: | + npm ci + workingDirectory: '$(Build.SourcesDirectory)/js/common' + displayName: npm ci js/common + + - script: | + yarn + workingDirectory: '$(Build.SourcesDirectory)/js/react_native' + displayName: yarn js/react_native + + - script: | + python3 tools/python/run_android_emulator.py \ + --android-sdk-root $(ANDROID_SDK_ROOT) \ + --create-avd --system-image "system-images;android-30;google_apis;x86_64" \ + --start --emulator-extra-args="-partition-size 4096" \ + --emulator-pid-file $(Build.BinariesDirectory)/emulator.pid + displayName: Start Android Emulator + + - script: | + xcrun simctl create iPhoneRNTest com.apple.CoreSimulator.SimDeviceType.iPhone-13 + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/e2e/ios' + displayName: Start iOS Simulator + + - task: Gradle@3 + inputs: + gradleWrapperFile: '$(Build.SourcesDirectory)/js/react_native/android/gradlew' + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/android' + options: '--stacktrace' + tasks: 'connectedDebugAndroidTest' + publishJUnitResults: true + testResultsFiles: '**/TEST-*.xml' + testRunTitle: 'React Native Android Instrumented Test results' + javaHomeOption: 'JDKVersion' + sonarQubeRunAnalysis: false + spotBugsAnalysis: false + displayName: Run React Native Android Instrumented Tests + continueOnError: false + + - script: | + pod install + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/ios' + displayName: Pod install for onnxruntime react native ios bridge library + + - task: Xcode@5 + inputs: + actions: 'test' + configuration: 'Debug' + sdk: 'iphonesimulator' + xcWorkspacePath: '$(Build.SourcesDirectory)/js/react_native/ios/OnnxruntimeModule.xcworkspace' + scheme: 'OnnxruntimeModuleTest' + packageApp: false + destinationPlatformOption: 'iOS' + destinationSimulators: 'iPhone 13,OS=latest' + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/ios' + xcprettyArgs: '--output build/reports/test-results.xml' + publishJUnitResults: true + testRunTitle: 'React Native iOS Instrumented Test Results' + displayName: Run React Native iOS Instrumented Tests + + - task: PublishTestResults@2 + inputs: + testResultsFiles: '$(Build.SourcesDirectory)/js/react_native/ios/build/reports/test-results.xml' + failTaskOnFailedTests: true + testRunTitle: 'React Native iOS Instrumented Test results' + condition: succeededOrFailed() + displayName: Publish React Native iOS Instrumented Test Results + + - script: | + yarn prepack-e2e + workingDirectory: '$(Build.SourcesDirectory)/js/react_native' + displayName: Prepare Android and iOS e2e tests + + - task: PowerShell@2 + inputs: + filePath: 
'$(Build.SourcesDirectory)/tools/ci_build/github/js/pack-npm-packages.ps1' + arguments: '"${{variables.npm_packaging_mode}}" $(Build.SourcesDirectory) react_native' + workingDirectory: '$(Build.SourcesDirectory)' + errorActionPreference: stop + displayName: Pack NPM packages + + - task: CopyFiles@2 + inputs: + sourceFolder: $(Build.SourcesDirectory)/js/common + contents: onnxruntime-common*.tgz + targetFolder: $(Build.SourcesDirectory)/js/react_native/e2e + displayName: Copy onnxruntime-common npm package to React Native e2e directory + + - task: CopyFiles@2 + inputs: + sourceFolder: $(Build.SourcesDirectory)/js/react_native + contents: onnxruntime-react-native*.tgz + targetFolder: $(Build.SourcesDirectory)/js/react_native/e2e + displayName: Copy onnxruntime-react-native npm package to React Native e2e directory + + - script: | + mv onnxruntime-common*.tgz onnxruntime-common.tgz + yarn add --no-lockfile file:./onnxruntime-common.tgz + mv onnxruntime-react-native*.tgz onnxruntime-react-native.tgz + yarn add --no-lockfile file:./onnxruntime-react-native.tgz + yarn + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/e2e' + displayName: Bootstrap Android and iOS e2e tests + + - script: | + pod install + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/e2e/ios' + displayName: Pod install for onnxruntime react native ios e2e tests + + - script: | + keytool -genkey -v -keystore debug.keystore -alias androiddebugkey -storepass android \ + -keypass android -keyalg RSA -keysize 2048 -validity 999999 -dname "CN=Android Debug,O=Android,C=US" + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/e2e/android' + displayName: Generate a debug keystore + + - task: CopyFiles@2 + inputs: + sourceFolder: $(Build.BinariesDirectory)/android-mobile-aar + contents: onnxruntime-mobile-*.aar + targetFolder: $(Build.SourcesDirectory)/js/react_native/e2e/node_modules/onnxruntime-react-native/android/libs + displayName: Copy Android package to React Native e2e directory + + - task: Gradle@3 + inputs: + gradleWrapperFile: '$(Build.SourcesDirectory)/js/react_native/e2e/android/gradlew' + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/e2e/android' + options: '--stacktrace' + tasks: ':app:connectedDebugAndroidTest' + publishJUnitResults: true + testResultsFiles: '**/TEST-*.xml' + testRunTitle: 'React Native Android e2e Test results' + javaHomeOption: 'JDKVersion' + sonarQubeRunAnalysis: false + spotBugsAnalysis: false + displayName: Run React Native Android e2e Tests + continueOnError: false + + - script: | + export FORCE_BUNDLING=1 + export RCT_NO_LAUNCH_PACKAGER=1 + export ENTRY_FILE=index.tsx + xcrun xcodebuild test -workspace $(Build.SourcesDirectory)/js/react_native/e2e/ios/OnnxruntimeModuleExample.xcworkspace \ + -scheme OnnxruntimeModuleExample -destination 'platform=iOS Simulator,OS=latest,name=iPhoneRNTest' \ + -derivedDataPath $(Build.BinariesDirectory)/react_native/ios_e2e_test/derived_data | xcpretty -r junit --no-color \ + --output $(Build.SourcesDirectory)/js/react_native/e2e/ios/build/reports/test-results.xml + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/e2e' + displayName: Run React Native iOS e2e tests + + - task: PublishTestResults@2 + inputs: + testResultsFiles: '$(Build.SourcesDirectory)/js/react_native/e2e/ios/build/reports/test-results.xml' + failTaskOnFailedTests: true + testRunTitle: 'React Native iOS e2e Test results' + condition: succeededOrFailed() + displayName: Publish React Native iOS e2e Test Results + + - script: | + python3 
tools/python/run_android_emulator.py \ + --android-sdk-root $(ANDROID_SDK_ROOT) \ + --stop \ + --emulator-pid-file $(Build.BinariesDirectory)/emulator.pid + displayName: Stop Android Emulator + condition: always() + + - script: | + xcrun simctl delete iPhoneRNTest + workingDirectory: '$(Build.SourcesDirectory)/js/react_native/e2e/ios' + displayName: Stop iOS Simulator + condition: always() + + - script: | + git restore . + workingDirectory: '$(Build.SourcesDirectory)/js/react_native' + displayName: Restore git changes for e2e tests + + - task: CopyFiles@2 + inputs: + sourceFolder: $(Build.SourcesDirectory)/js/react_native + contents: onnxruntime-react-native*.tgz + targetFolder: $(Build.ArtifactStagingDirectory) + displayName: Create Artifacts onnxruntime-react-native + + - task: PublishPipelineArtifact@0 + inputs: + artifactName: 'NPM_packages' + targetPath: '$(Build.ArtifactStagingDirectory)' + displayName: Publish Pipeline Artifact + + - template: templates/component-governance-component-detection-steps.yml + parameters : + condition : 'succeeded' + + - task: mspremier.PostBuildCleanup.PostBuildCleanup-task.PostBuildCleanup@3 + displayName: Clean Agent Directories + condition: always() diff --git a/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml index 19cf309505192..d3b8e10e2f2ff 100644 --- a/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml +++ b/tools/ci_build/github/azure-pipelines/orttraining-linux-ci-pipeline.yml @@ -55,6 +55,8 @@ jobs: # Test ORT with the latest ONNX release. export ONNX_VERSION=$(cat $(Build.SourcesDirectory)/cmake/external/onnx/VERSION_NUMBER) sed -i "s/git+http:\/\/github\.com\/onnx\/onnx.*/onnx==$ONNX_VERSION/" $(Build.BinariesDirectory)/requirements.txt + #Do not explicitly specify numpy version as this is not a packaging pipeline, any version should be ok + sed -i "/^numpy/d" $(Build.BinariesDirectory)/requirements.txt python3 -m pip install -r $(Build.BinariesDirectory)/requirements.txt cp $(Build.SourcesDirectory)/tools/ci_build/github/linux/docker/scripts/training/ortmodule/stage1/requirements_torch_cpu.txt $(Build.BinariesDirectory)/requirements_torch_cpu.txt python3 -m pip install -r $(Build.BinariesDirectory)/requirements_torch_cpu.txt diff --git a/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml b/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml index f6d9e8e2af57e..19397f844503d 100644 --- a/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml +++ b/tools/ci_build/github/azure-pipelines/templates/android-java-api-aar.yml @@ -20,7 +20,7 @@ parameters: default: '' - name: job_name_suffix - displayName: job name + displayName: Job name type: string default: '' @@ -30,12 +30,17 @@ parameters: type: string default: '0' +- name: pool_name + displayName: Pool name + type: string + default: 'Linux-CPU' + jobs: - job: Android_Java_API_AAR_Packaging_${{ parameters.job_name_suffix }} timeoutInMinutes: 120 workspace: clean: all - pool: Linux-CPU + pool: ${{parameters.pool_name}} variables: artifacts_directory: $(Build.BinariesDirectory)/.artifacts diff --git a/tools/ci_build/github/js/pack-npm-packages.ps1 b/tools/ci_build/github/js/pack-npm-packages.ps1 index ca1dd96bda6dc..fb14d456c36ee 100644 --- a/tools/ci_build/github/js/pack-npm-packages.ps1 +++ b/tools/ci_build/github/js/pack-npm-packages.ps1 @@ -1,15 +1,15 @@ # Copyright (c) Microsoft Corporation. All rights reserved. 
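The Android e2e steps above bracket the Gradle instrumented tests with a "Start Android Emulator" step and a "Stop Android Emulator" step that runs with condition: always(). Outside Azure Pipelines, the same lifecycle can be reproduced with a small wrapper around tools/python/run_android_emulator.py using the same flags as the pipeline; the SDK lookup and the test command here are placeholders.

```python
# Sketch: same start/stop flags as the pipeline steps above; the test command
# and pid-file location are illustrative placeholders, not part of this change.
import os
import subprocess

EMULATOR_SCRIPT = "tools/python/run_android_emulator.py"

def run_with_emulator(test_cmd, pid_file="emulator.pid"):
    sdk_root = os.environ["ANDROID_SDK_ROOT"]
    common = ["python3", EMULATOR_SCRIPT,
              "--android-sdk-root", sdk_root,
              "--emulator-pid-file", pid_file]
    subprocess.check_call(common + [
        "--create-avd", "--system-image", "system-images;android-30;google_apis;x86_64",
        "--start", "--emulator-extra-args=-partition-size 4096"])
    try:
        subprocess.check_call(test_cmd)             # e.g. the connectedDebugAndroidTest run
    finally:
        subprocess.check_call(common + ["--stop"])  # mirrors the always() stop step

# run_with_emulator(["./gradlew", "connectedDebugAndroidTest", "--stacktrace"])
```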
# Licensed under the MIT License. -# This script makes NPM packages for onnxruntime-common and onnxruntime-(node|web) +# This script makes NPM packages for onnxruntime-common and onnxruntime-(node|web|react-native) # # Release Mode (release): # Do not update version number. Version number should be matching $(ORT_ROOT)/VERSION_NUMBER -# Always generate packages for onnxruntime-common and onnxruntime-(node|web) +# Always generate packages for onnxruntime-common and onnxruntime-(node|web|react-native) # # Release Candidate Mode (rc): # Update version number to {VERSION_BASE}-rc.{YYYYMMDD}-{COMMIT} -# Always generate packages for onnxruntime-common and onnxruntime-(node|web) +# Always generate packages for onnxruntime-common and onnxruntime-(node|web|react-native) # # Dev Mode (dev): # Compare current content with latest @dev package for onnxruntime-common. If no change, we @@ -19,7 +19,7 @@ # # Custom Mode: # Use first commandline parameter as version number suffix. -# Always generate packages for onnxruntime-common and onnxruntime-(node|web) +# Always generate packages for onnxruntime-common and onnxruntime-(node|web|react-native) # if ($Args.Count -ne 3) { @@ -27,7 +27,7 @@ if ($Args.Count -ne 3) { } $MODE=$Args[0] # "dev" or "release" or "rc"; otherwise it is considered as a version number $ORT_ROOT=$Args[1] # eg. D:\source\onnxruntime -$TARGET=$Args[2] # "node" or "web" +$TARGET=$Args[2] # "node" or "web" or "react_native" Function Generate-Package-Version-Number { pushd $ORT_ROOT @@ -51,8 +51,8 @@ Function Generate-Package-Version-Number { return @{ version = $version_number; commit = $version_commit } } -$JS_COMMON_DIR="$ORT_ROOT\js\common" -$JS_TARGET_DIR="$ORT_ROOT\js\$TARGET" +$JS_COMMON_DIR=Join-Path -Path "$ORT_ROOT" -ChildPath "js\common" +$JS_TARGET_DIR=Join-Path -Path "$ORT_ROOT" -ChildPath "js\$TARGET" if ($MODE -eq "dev") { # For @dev builds, we compares the following 2 package versions for onnxruntime-common: @@ -60,8 +60,8 @@ if ($MODE -eq "dev") { # - 'latest': the latest @dev version from npm.js repository. # # If the contents of the 2 versions are identical, we don't publish a new version. Instead, - # we only publish onnxruntime-node/onnxruntime-web and set its dependency's version to the - # 'latest'. + # we only publish onnxruntime-node/onnxruntime-web/onnxruntime-react-native and + # set its dependency's version to the 'latest'. # check latest @dev version Write-Host "Start checking version for onnxruntime-common@dev" @@ -83,12 +83,12 @@ if ($MODE -eq "dev") { # make package for latest pushd $JS_COMMON_DIR - npm version $ort_common_latest_version + npm version --allow-same-version $ort_common_latest_version echo $($version_number.commit) | Out-File -Encoding ascii -NoNewline -FilePath ./__commit.txt npm pack popd - $current_tgz_compare_only="$JS_COMMON_DIR\onnxruntime-common-$ort_common_latest_version.tgz" + $current_tgz_compare_only=Join-Path -Path "$JS_COMMON_DIR" -ChildPath "onnxruntime-common-$ort_common_latest_version.tgz" if (!(Test-Path $current_tgz_compare_only)) { throw "File is not generated: $current_tgz_compare_only" } @@ -114,21 +114,27 @@ if ($MODE -eq "dev") { npm install "dir-compare-cli@1.0.1" "json-diff@0.5.4" Write-Host "Compare package.json" - npx json-diff ..\latest\package\package.json ..\current\package\package.json + $latest_package_json=Join-Path -Path ".." -ChildPath "latest\package\package.json" + $current_package_json=Join-Path -Path ".." 
-ChildPath "current\package\package.json" + npx json-diff $latest_package_json $current_package_json $use_latest=$? Write-Host "Result: $use_latest" if ($use_latest) { # package.json matches. now check package contents. # do not compare commit number - if (test-path ../latest/package/__commit.txt) { rm ../latest/package/__commit.txt } - if (test-path ../current/package/__commit.txt) { rm ../current/package/__commit.txt } + $latest_package_commit=Join-Path -Path ".." -ChildPath "latest\package\__commit.txt" + $current_package_commit=Join-Path -Path ".." -ChildPath "current\package\__commit.txt" + if (test-path $latest_package_commit) { rm $latest_package_commit } + if (test-path $current_package_commit) { rm $current_package_commit } # skip package.json, we already checked them - rm ../latest/package/package.json - rm ../current/package/package.json + rm $latest_package_json + rm $current_package_json Write-Host "Compare package contents" - npx dircompare -c ../latest/package/ ../current/package/ + $latest_package_dir=Join-Path -Path ".." -ChildPath "latest\package" + $current_package_dir=Join-Path -Path ".." -ChildPath "current\package" + npx dircompare -c $latest_package_dir $current_package_dir $use_latest=$? Write-Host "Result: $use_latest" } @@ -139,7 +145,7 @@ if ($MODE -eq "dev") { Write-Host "Need update to onnxruntime-common@dev" # need to publish a new version for onnxruntime-common pushd $JS_COMMON_DIR - npm version $($version_number.version) + npm version --allow-same-version $($version_number.version) # file __commit.txt is already generated npm pack popd @@ -147,7 +153,7 @@ if ($MODE -eq "dev") { # make package for target pushd $JS_TARGET_DIR - npm version $($version_number.version) + npm version --allow-same-version $($version_number.version) echo $($version_number.commit) | Out-File -Encoding ascii -NoNewline -FilePath ./__commit.txt npm pack popd @@ -164,12 +170,12 @@ if ($MODE -eq "dev") { $version_number=Generate-Package-Version-Number pushd $JS_COMMON_DIR - npm version $($version_number.version) + npm version --allow-same-version $($version_number.version) npm pack popd pushd $JS_TARGET_DIR - npm version $($version_number.version) + npm version --allow-same-version $($version_number.version) npm pack popd } diff --git a/tools/ci_build/github/js/react_native_e2e_mobile_aar_build_settings.json b/tools/ci_build/github/js/react_native_e2e_mobile_aar_build_settings.json new file mode 100644 index 0000000000000..b1e92f1446576 --- /dev/null +++ b/tools/ci_build/github/js/react_native_e2e_mobile_aar_build_settings.json @@ -0,0 +1,21 @@ +{ + "build_abis": [ + "x86_64" + ], + "android_min_sdk_version": 21, + "android_target_sdk_version": 24, + "build_params": [ + "--android", + "--parallel", + "--cmake_generator=Ninja", + "--build_java", + "--build_shared_lib", + "--minimal_build=extended", + "--disable_rtti", + "--disable_ml_ops", + "--disable_exceptions", + "--enable_reduced_operator_type_support", + "--use_nnapi", + "--skip_tests" + ] +} diff --git a/tools/ci_build/github/js/react_native_e2e_mobile_ios_framework_build_settings.json b/tools/ci_build/github/js/react_native_e2e_mobile_ios_framework_build_settings.json new file mode 100644 index 0000000000000..c2f1519ac0ffb --- /dev/null +++ b/tools/ci_build/github/js/react_native_e2e_mobile_ios_framework_build_settings.json @@ -0,0 +1,21 @@ +{ + "build_osx_archs": { + "iphonesimulator": [ + "x86_64" + ] + }, + "build_params": [ + "--ios", + "--parallel", + "--use_xcode", + "--build_apple_framework", + "--minimal_build=extended", + 
"--disable_rtti", + "--disable_ml_ops", + "--disable_exceptions", + "--enable_reduced_operator_type_support", + "--use_coreml", + "--skip_tests", + "--apple_deploy_target=11.0" + ] +}